Fix PatchApplier error handling.

1. For general errors, throw IOException instead of wrapping them in
PatchApplyException. The wrapping was moved (back) to ApplyCommand.
2. For file-specific errors, record the errors as part of
PatchApplier::Result instead of throwing.
3. Change applyPatch() to receive the parsed Patch object, so the caller
can decide how to handle parsing errors.

Background: this utility class was extracted from ApplyCommand in v6.4.0.
During the extraction the exception wrapping in PatchApplyException was
left intact, which made it harder for callers to distinguish the actual
error causes.

Change-Id: Ib0f2b5e97a13df2339d8b65f2fea1c819c161ac3
Nitzan Gur-Furman 2023-03-03 14:24:19 +01:00
parent 228e4de484
commit 3a913a8c34
8 changed files with 375 additions and 289 deletions
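
To illustrate the new contract described in the commit message, here is a minimal caller-side sketch (not part of the change; the repository and input-stream variables are assumptions): parse problems stay on the Patch object, general failures surface as IOException, and file-specific failures are reported on PatchApplier.Result.

    import java.io.IOException;
    import java.io.InputStream;
    import org.eclipse.jgit.api.errors.PatchFormatException;
    import org.eclipse.jgit.lib.Repository;
    import org.eclipse.jgit.patch.Patch;
    import org.eclipse.jgit.patch.PatchApplier;

    class PatchApplierUsageSketch {
        // Hypothetical helper showing the new flow: the caller parses the
        // patch itself and inspects Result for per-file errors.
        static PatchApplier.Result applyAndReport(Repository repo, InputStream in)
                throws IOException, PatchFormatException {
            Patch patch = new Patch();
            try (InputStream inStream = in) {
                patch.parse(inStream); // general I/O problems surface as IOException
            }
            if (!patch.getErrors().isEmpty()) {
                // item 3: the caller decides how to handle parsing errors
                throw new PatchFormatException(patch.getErrors());
            }
            // item 1: applyPatch now throws plain IOException for general errors
            PatchApplier.Result result = new PatchApplier(repo).applyPatch(patch);
            // item 2: file-specific errors are recorded instead of thrown
            for (PatchApplier.Result.Error error : result.getErrors()) {
                System.err.println(error);
            }
            return result;
        }
    }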


@@ -26,8 +26,6 @@
 import java.nio.file.Files;
 import org.eclipse.jgit.annotations.Nullable;
 import org.eclipse.jgit.api.Git;
-import org.eclipse.jgit.api.errors.PatchApplyException;
-import org.eclipse.jgit.api.errors.PatchFormatException;
 import org.eclipse.jgit.attributes.FilterCommand;
 import org.eclipse.jgit.attributes.FilterCommandFactory;
 import org.eclipse.jgit.attributes.FilterCommandRegistry;
@@ -112,15 +110,17 @@ protected String initPostImage(String aName) throws Exception {
 			return new String(postImage, StandardCharsets.UTF_8);
 		}

-		protected Result applyPatch()
-				throws PatchApplyException, PatchFormatException, IOException {
-			InputStream patchStream = getTestResource(name + ".patch");
-			if (inCore) {
-				try (ObjectInserter oi = db.newObjectInserter()) {
-					return new PatchApplier(db, baseTip, oi).applyPatch(patchStream);
-				}
-			}
-			return new PatchApplier(db).applyPatch(patchStream);
-		}
+		protected Result applyPatch() throws IOException {
+			try (InputStream patchStream = getTestResource(name + ".patch")) {
+				Patch patch = new Patch();
+				patch.parse(patchStream);
+				if (inCore) {
+					try (ObjectInserter oi = db.newObjectInserter()) {
+						return new PatchApplier(db, baseTip, oi).applyPatch(patch);
+					}
+				}
+				return new PatchApplier(db).applyPatch(patch);
+			}
+		}

 		protected static InputStream getTestResource(String patchFile) {
@@ -159,6 +159,7 @@ protected void verifyContent(Result result, String path,
 		void verifyChange(Result result, String aName, boolean exists)
 				throws Exception {
+			assertEquals(0, result.getErrors().size());
 			assertEquals(1, result.getPaths().size());
 			verifyContent(result, aName, exists);
 		}
@@ -181,6 +182,7 @@ protected byte[] readBlob(ObjectId treeish, String path)
 		protected void checkBinary(Result result, int numberOfFiles)
 				throws Exception {
+			assertEquals(0, result.getErrors().size());
 			assertEquals(numberOfFiles, result.getPaths().size());
 			if (inCore) {
 				assertArrayEquals(postImage,
@@ -380,6 +382,14 @@ public void testDoesNotAffectUnrelatedFiles() throws Exception {
 			verifyChange(result, "X");
 			verifyContent(result, "Unaffected", expectedUnaffectedText);
 		}
+
+		@Test
+		public void testConflictFails() throws Exception {
+			init("conflict");
+			Result result = applyPatch();
+			assertEquals(1, result.getErrors().size());
+		}
 	}

 	public static class InCore extends Base {


@@ -17,7 +17,11 @@ applyBinaryBaseOidWrong=Cannot apply binary patch; OID for file {0} does not mat
 applyBinaryForInCoreNotSupported=Applying binary patch for inCore repositories is not yet supported
 applyBinaryOidTooShort=Binary patch for file {0} does not have full IDs
 applyBinaryPatchTypeNotSupported=Couldn't apply binary patch of type {0}
-applyBinaryResultOidWrong=Result of binary patch for file {0} has wrong OID.
+applyBinaryResultOidWrong=Result of binary patch for file {0} has wrong OID
+applyTextPatchCannotApplyHunk=Hunk cannot be applied
+applyTextPatchSingleClearingHunk=Expected a single hunk for clearing all content
+applyTextPatchUnorderedHunkApplications=Current hunk must be applied after the last hunk
+applyTextPatchUnorderedHunks=Got unordered hunks
 applyingCommit=Applying {0}
 archiveFormatAlreadyAbsent=Archive format already absent: {0}
 archiveFormatAlreadyRegistered=Archive format already registered with different implementation: {0}
@@ -581,6 +585,8 @@ packWasDeleted=Pack file {0} was deleted, removing it from pack list
 packWriterStatistics=Total {0,number,#0} (delta {1,number,#0}), reused {2,number,#0} (delta {3,number,#0})
 panicCantRenameIndexFile=Panic: index file {0} must be renamed to replace {1}; until then repository is corrupt
 patchApplyException=Cannot apply: {0}
+patchApplyErrorWithHunk=Error applying patch in {0}, hunk {1}: {2}
+patchApplyErrorWithoutHunk=Error applying patch in {0}: {1}
 patchFormatException=Format error: {0}
 pathNotConfigured=Submodule path is not configured
 peeledLineBeforeRef=Peeled line before ref.


@@ -10,10 +10,16 @@
 package org.eclipse.jgit.api;

 import java.io.File;
+import java.io.IOException;
 import java.io.InputStream;
+import java.text.MessageFormat;

 import org.eclipse.jgit.api.errors.GitAPIException;
+import org.eclipse.jgit.api.errors.PatchApplyException;
+import org.eclipse.jgit.api.errors.PatchFormatException;
 import org.eclipse.jgit.internal.JGitText;
 import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.patch.Patch;
 import org.eclipse.jgit.patch.PatchApplier;
 import org.eclipse.jgit.patch.PatchApplier.Result;
@@ -67,11 +73,31 @@ public ApplyCommand setPatch(InputStream in) {
 	public ApplyResult call() throws GitAPIException {
 		checkCallable();
 		setCallable(false);
+		Patch patch = new Patch();
+		try (InputStream inStream = in) {
+			patch.parse(inStream);
+			if (!patch.getErrors().isEmpty()) {
+				throw new PatchFormatException(patch.getErrors());
+			}
+		} catch (IOException e) {
+			throw new PatchApplyException(MessageFormat.format(
+					JGitText.get().patchApplyException, e.getMessage()), e);
+		}
 		ApplyResult r = new ApplyResult();
-		PatchApplier patchApplier = new PatchApplier(repo);
-		Result applyResult = patchApplier.applyPatch(in);
-		for (String p : applyResult.getPaths()) {
-			r.addUpdatedFile(new File(repo.getWorkTree(), p));
+		try {
+			PatchApplier patchApplier = new PatchApplier(repo);
+			Result applyResult = patchApplier.applyPatch(patch);
+			if (!applyResult.getErrors().isEmpty()) {
+				throw new PatchApplyException(
+						MessageFormat.format(JGitText.get().patchApplyException,
								applyResult.getErrors()));
+			}
+			for (String p : applyResult.getPaths()) {
+				r.addUpdatedFile(new File(repo.getWorkTree(), p));
+			}
+		} catch (IOException e) {
+			throw new PatchApplyException(MessageFormat.format(JGitText.get().patchApplyException,
+					e.getMessage(), e));
 		}
 		return r;
 	}
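
By contrast, the porcelain ApplyCommand shown above keeps throwing for its callers. A rough usage sketch (hypothetical variable names, not taken from this change) of how the two exception types can now be told apart:

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import org.eclipse.jgit.api.ApplyResult;
    import org.eclipse.jgit.api.Git;
    import org.eclipse.jgit.api.errors.GitAPIException;
    import org.eclipse.jgit.api.errors.PatchApplyException;
    import org.eclipse.jgit.api.errors.PatchFormatException;

    class ApplyCommandUsageSketch {
        // Hypothetical helper: applies a patch file through the porcelain API.
        static void applyPatchFile(Git git, Path patchFile) throws IOException {
            try (InputStream in = Files.newInputStream(patchFile)) {
                ApplyResult result = git.apply().setPatch(in).call();
                System.out.println("Updated files: " + result.getUpdatedFiles());
            } catch (PatchFormatException e) {
                System.err.println("Patch could not be parsed: " + e.getMessage());
            } catch (PatchApplyException e) {
                // Wraps both I/O failures and the file-specific Result errors.
                System.err.println("Patch could not be applied: " + e.getMessage());
            } catch (GitAPIException e) {
                System.err.println("Unexpected Git error: " + e.getMessage());
            }
        }
    }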


@@ -46,6 +46,11 @@ public static JGitText get() {
 	/***/ public String applyBinaryOidTooShort;
 	/***/ public String applyBinaryPatchTypeNotSupported;
 	/***/ public String applyBinaryResultOidWrong;
+	/***/ public String applyTextPatchCannotApplyHunk;
+	/***/ public String applyTextPatchSingleClearingHunk;
+	/***/ public String applyTextPatchUnorderedHunkApplications;
+	/***/ public String applyTextPatchUnorderedHunks;
 	/***/ public String applyingCommit;
 	/***/ public String archiveFormatAlreadyAbsent;
 	/***/ public String archiveFormatAlreadyRegistered;
@@ -609,6 +614,8 @@ public static JGitText get() {
 	/***/ public String packWriterStatistics;
 	/***/ public String panicCantRenameIndexFile;
 	/***/ public String patchApplyException;
+	/***/ public String patchApplyErrorWithHunk;
+	/***/ public String patchApplyErrorWithoutHunk;
 	/***/ public String patchFormatException;
 	/***/ public String pathNotConfigured;
 	/***/ public String peeledLineBeforeRef;


@@ -33,7 +33,6 @@
 import java.util.zip.InflaterInputStream;
 import org.eclipse.jgit.annotations.Nullable;
 import org.eclipse.jgit.api.errors.FilterFailedException;
-import org.eclipse.jgit.api.errors.PatchApplyException;
 import org.eclipse.jgit.api.errors.PatchFormatException;
 import org.eclipse.jgit.attributes.Attribute;
 import org.eclipse.jgit.attributes.Attributes;
@@ -134,11 +133,8 @@ public PatchApplier(Repository repo) {
 	 *            ID of the tree to apply the patch in
 	 * @param oi
 	 *            to be used for modifying objects
-	 * @throws IOException
-	 *             in case of I/O errors
 	 */
-	public PatchApplier(Repository repo, RevTree beforeTree, ObjectInserter oi)
-			throws IOException {
+	public PatchApplier(Repository repo, RevTree beforeTree, ObjectInserter oi) {
 		this.repo = repo;
 		this.beforeTree = beforeTree;
 		inserter = oi;
@@ -147,16 +143,45 @@ public PatchApplier(Repository repo, RevTree beforeTree, ObjectInserter oi)
 	/**
 	 * A wrapper for returning both the applied tree ID and the applied files
-	 * list.
+	 * list, as well as file specific errors.
 	 *
 	 * @since 6.3
 	 */
 	public static class Result {
+
+		/**
+		 * A wrapper for a patch applying error that affects a given file.
+		 */
+		public static class Error {
+
+			private String msg;
+			private String oldFileName;
+			private @Nullable HunkHeader hh;
+
+			private Error(String msg, String oldFileName, @Nullable HunkHeader hh) {
+				this.msg = msg;
+				this.oldFileName = oldFileName;
+				this.hh = hh;
+			}
+
+			@Override
+			public String toString() {
+				if (hh != null) {
+					return MessageFormat.format(JGitText.get().patchApplyErrorWithHunk,
+							oldFileName, hh, msg);
+				}
+				return MessageFormat.format(JGitText.get().patchApplyErrorWithoutHunk,
+						oldFileName, msg);
+			}
+		}
+
 		private ObjectId treeId;

 		private List<String> paths;

+		private List<Error> errors = new ArrayList<>();
+
 		/**
 		 * @return List of modified paths.
 		 */
@@ -170,6 +195,17 @@ public List<String> getPaths() {
 		public ObjectId getTreeId() {
 			return treeId;
 		}
+
+		/**
+		 * @return Errors occurred while applying the patch.
+		 */
+		public List<Error> getErrors() {
+			return errors;
+		}
+
+		private void addError(String msg, String oldFileName, @Nullable HunkHeader hh) {
+			errors.add(new Error(msg, oldFileName, hh));
+		}
 	}

 	/**
@@ -180,12 +216,13 @@ public ObjectId getTreeId() {
 	 * @return the result of the patch
 	 * @throws PatchFormatException
 	 *             if the patch cannot be parsed
-	 * @throws PatchApplyException
-	 *             if the patch cannot be applied
+	 * @throws IOException
+	 *             if the patch read fails
+	 * @deprecated use {@link #applyPatch(Patch)} instead
 	 */
+	@Deprecated
 	public Result applyPatch(InputStream patchInput)
-			throws PatchFormatException, PatchApplyException {
-		Result result = new Result();
+			throws PatchFormatException, IOException {
 		Patch p = new Patch();
 		try (InputStream inStream = patchInput) {
 			p.parse(inStream);
@@ -193,105 +230,103 @@ public Result applyPatch(InputStream patchInput)
 			if (!p.getErrors().isEmpty()) {
 				throw new PatchFormatException(p.getErrors());
 			}
-			DirCache dirCache = inCore() ? DirCache.read(reader, beforeTree)
-					: repo.lockDirCache();
-			DirCacheBuilder dirCacheBuilder = dirCache.builder();
-			Set<String> modifiedPaths = new HashSet<>();
-			for (FileHeader fh : p.getFiles()) {
-				ChangeType type = fh.getChangeType();
-				switch (type) {
-				case ADD: {
-					File f = getFile(fh.getNewPath());
-					if (f != null) {
-						try {
-							FileUtils.mkdirs(f.getParentFile(), true);
-							FileUtils.createNewFile(f);
-						} catch (IOException e) {
-							throw new PatchApplyException(MessageFormat.format(
-									JGitText.get().createNewFileFailed, f), e);
-						}
-					}
-					apply(fh.getNewPath(), dirCache, dirCacheBuilder, f, fh);
-				}
-					break;
-				case MODIFY:
-					apply(fh.getOldPath(), dirCache, dirCacheBuilder,
-							getFile(fh.getOldPath()), fh);
-					break;
-				case DELETE:
-					if (!inCore()) {
-						File old = getFile(fh.getOldPath());
-						if (!old.delete())
-							throw new PatchApplyException(MessageFormat.format(
-									JGitText.get().cannotDeleteFile, old));
-					}
-					break;
-				case RENAME: {
-					File src = getFile(fh.getOldPath());
-					File dest = getFile(fh.getNewPath());
-					if (!inCore()) {
-						/*
-						 * this is odd: we rename the file on the FS, but
-						 * apply() will write a fresh stream anyway, which will
-						 * overwrite if there were hunks in the patch.
-						 */
-						try {
-							FileUtils.mkdirs(dest.getParentFile(), true);
-							FileUtils.rename(src, dest,
-									StandardCopyOption.ATOMIC_MOVE);
-						} catch (IOException e) {
-							throw new PatchApplyException(MessageFormat.format(
-									JGitText.get().renameFileFailed, src, dest),
-									e);
-						}
-					}
-					String pathWithOriginalContent = inCore() ?
-							fh.getOldPath() : fh.getNewPath();
-					apply(pathWithOriginalContent, dirCache, dirCacheBuilder, dest, fh);
-					break;
-				}
-				case COPY: {
-					File dest = getFile(fh.getNewPath());
-					if (!inCore()) {
-						File src = getFile(fh.getOldPath());
-						FileUtils.mkdirs(dest.getParentFile(), true);
-						Files.copy(src.toPath(), dest.toPath());
-					}
-					apply(fh.getOldPath(), dirCache, dirCacheBuilder, dest, fh);
-					break;
-				}
-				}
-				if (fh.getChangeType() != ChangeType.DELETE)
-					modifiedPaths.add(fh.getNewPath());
-				if (fh.getChangeType() != ChangeType.COPY
-						&& fh.getChangeType() != ChangeType.ADD)
-					modifiedPaths.add(fh.getOldPath());
-			}
-			// We processed the patch. Now add things that weren't changed.
-			for (int i = 0; i < dirCache.getEntryCount(); i++) {
-				DirCacheEntry dce = dirCache.getEntry(i);
-				if (!modifiedPaths.contains(dce.getPathString())
-						|| dce.getStage() != DirCacheEntry.STAGE_0)
-					dirCacheBuilder.add(dce);
-			}
-			if (inCore())
-				dirCacheBuilder.finish();
-			else if (!dirCacheBuilder.commit()) {
-				throw new IndexWriteException();
-			}
-			result.treeId = dirCache.writeTree(inserter);
-			result.paths = modifiedPaths.stream().sorted()
-					.collect(Collectors.toList());
-		} catch (IOException e) {
-			throw new PatchApplyException(MessageFormat.format(
-					JGitText.get().patchApplyException, e.getMessage()), e);
 		}
+		return applyPatch(p);
+	}
+
+	/**
+	 * Applies the given patch
+	 *
+	 * @param p
+	 *            the patch to apply.
+	 * @return the result of the patch
+	 * @throws IOException
+	 */
+	public Result applyPatch(Patch p) throws IOException {
+		Result result = new Result();
+		DirCache dirCache = inCore() ? DirCache.read(reader, beforeTree)
+				: repo.lockDirCache();
+		DirCacheBuilder dirCacheBuilder = dirCache.builder();
+		Set<String> modifiedPaths = new HashSet<>();
+		for (FileHeader fh : p.getFiles()) {
+			ChangeType type = fh.getChangeType();
+			switch (type) {
+			case ADD: {
+				File f = getFile(fh.getNewPath());
+				if (f != null) {
+					FileUtils.mkdirs(f.getParentFile(), true);
+					FileUtils.createNewFile(f);
+				}
+				apply(fh.getNewPath(), dirCache, dirCacheBuilder, f, fh, result);
+			}
+				break;
+			case MODIFY:
+				apply(fh.getOldPath(), dirCache, dirCacheBuilder,
+						getFile(fh.getOldPath()), fh, result);
+				break;
+			case DELETE:
+				if (!inCore()) {
+					File old = getFile(fh.getOldPath());
+					if (!old.delete())
+						throw new IOException(MessageFormat.format(
+								JGitText.get().cannotDeleteFile, old));
+				}
+				break;
+			case RENAME: {
+				File src = getFile(fh.getOldPath());
+				File dest = getFile(fh.getNewPath());
+				if (!inCore()) {
+					/*
+					 * this is odd: we rename the file on the FS, but
+					 * apply() will write a fresh stream anyway, which will
+					 * overwrite if there were hunks in the patch.
+					 */
+					FileUtils.mkdirs(dest.getParentFile(), true);
+					FileUtils.rename(src, dest,
+							StandardCopyOption.ATOMIC_MOVE);
+				}
+				String pathWithOriginalContent = inCore() ?
+						fh.getOldPath() : fh.getNewPath();
+				apply(pathWithOriginalContent, dirCache, dirCacheBuilder, dest, fh, result);
+				break;
+			}
+			case COPY: {
+				File dest = getFile(fh.getNewPath());
+				if (!inCore()) {
+					File src = getFile(fh.getOldPath());
+					FileUtils.mkdirs(dest.getParentFile(), true);
+					Files.copy(src.toPath(), dest.toPath());
+				}
+				apply(fh.getOldPath(), dirCache, dirCacheBuilder, dest, fh, result);
+				break;
+			}
+			}
+			if (fh.getChangeType() != ChangeType.DELETE)
+				modifiedPaths.add(fh.getNewPath());
+			if (fh.getChangeType() != ChangeType.COPY
+					&& fh.getChangeType() != ChangeType.ADD)
+				modifiedPaths.add(fh.getOldPath());
+		}
+		// We processed the patch. Now add things that weren't changed.
+		for (int i = 0; i < dirCache.getEntryCount(); i++) {
+			DirCacheEntry dce = dirCache.getEntry(i);
+			if (!modifiedPaths.contains(dce.getPathString())
+					|| dce.getStage() != DirCacheEntry.STAGE_0)
+				dirCacheBuilder.add(dce);
+		}
+		if (inCore())
+			dirCacheBuilder.finish();
+		else if (!dirCacheBuilder.commit()) {
+			throw new IndexWriteException();
+		}
+		result.treeId = dirCache.writeTree(inserter);
+		result.paths = modifiedPaths.stream().sorted()
+				.collect(Collectors.toList());
 		return result;
 	}
@@ -302,34 +337,29 @@ private File getFile(String path) {
 	/* returns null if the path is not found. */
 	@Nullable
 	private TreeWalk getTreeWalkForFile(String path, DirCache cache)
-			throws PatchApplyException {
-		try {
-			if (inCore()) {
-				// Only this branch may return null.
-				// TODO: it would be nice if we could return a TreeWalk at EOF
-				// iso. null.
-				return TreeWalk.forPath(repo, path, beforeTree);
-			}
-			TreeWalk walk = new TreeWalk(repo);
-			// Use a TreeWalk with a DirCacheIterator to pick up the correct
-			// clean/smudge filters.
-			int cacheTreeIdx = walk.addTree(new DirCacheIterator(cache));
-			FileTreeIterator files = new FileTreeIterator(repo);
-			if (FILE_TREE_INDEX != walk.addTree(files))
-				throw new IllegalStateException();
-			walk.setFilter(AndTreeFilter.create(
-					PathFilterGroup.createFromStrings(path),
-					new NotIgnoredFilter(FILE_TREE_INDEX)));
-			walk.setOperationType(OperationType.CHECKIN_OP);
-			walk.setRecursive(true);
-			files.setDirCacheIterator(walk, cacheTreeIdx);
-			return walk;
-		} catch (IOException e) {
-			throw new PatchApplyException(MessageFormat.format(
-					JGitText.get().patchApplyException, e.getMessage()), e);
+			throws IOException {
+		if (inCore()) {
+			// Only this branch may return null.
+			// TODO: it would be nice if we could return a TreeWalk at EOF
+			// iso. null.
+			return TreeWalk.forPath(repo, path, beforeTree);
 		}
+		TreeWalk walk = new TreeWalk(repo);
+		// Use a TreeWalk with a DirCacheIterator to pick up the correct
+		// clean/smudge filters.
+		int cacheTreeIdx = walk.addTree(new DirCacheIterator(cache));
+		FileTreeIterator files = new FileTreeIterator(repo);
+		if (FILE_TREE_INDEX != walk.addTree(files))
+			throw new IllegalStateException();
+		walk.setFilter(AndTreeFilter.create(
+				PathFilterGroup.createFromStrings(path),
+				new NotIgnoredFilter(FILE_TREE_INDEX)));
+		walk.setOperationType(OperationType.CHECKIN_OP);
+		walk.setRecursive(true);
+		files.setDirCacheIterator(walk, cacheTreeIdx);
+		return walk;
 	}

 	private static final int FILE_TREE_INDEX = 1;
@@ -348,135 +378,135 @@ private TreeWalk getTreeWalkForFile(String path, DirCache cache)
 	 *            The file to update with new contents. Null for inCore usage.
 	 * @param fh
 	 *            The patch header.
-	 * @throws PatchApplyException
+	 * @param result
+	 *            The patch application result.
+	 * @throws IOException
 	 */
 	private void apply(String pathWithOriginalContent, DirCache dirCache,
-			DirCacheBuilder dirCacheBuilder, @Nullable File f, FileHeader fh)
-			throws PatchApplyException {
+			DirCacheBuilder dirCacheBuilder, @Nullable File f, FileHeader fh, Result result)
+			throws IOException {
 		if (PatchType.BINARY.equals(fh.getPatchType())) {
 			// This patch type just says "something changed". We can't do
 			// anything with that.
 			// Maybe this should return an error code, though?
 			return;
 		}
-		try {
-			TreeWalk walk = getTreeWalkForFile(pathWithOriginalContent, dirCache);
-			boolean loadedFromTreeWalk = false;
-			// CR-LF handling is determined by whether the file or the patch
-			// have CR-LF line endings.
-			boolean convertCrLf = inCore() || needsCrLfConversion(f, fh);
-			EolStreamType streamType = convertCrLf ? EolStreamType.TEXT_CRLF
-					: EolStreamType.DIRECT;
-			String smudgeFilterCommand = null;
-			StreamSupplier fileStreamSupplier = null;
-			ObjectId fileId = ObjectId.zeroId();
-			if (walk == null) {
-				// For new files with inCore()==true, TreeWalk.forPath can be
-				// null. Stay with defaults.
-			} else if (inCore()) {
-				fileId = walk.getObjectId(0);
-				ObjectLoader loader = LfsFactory.getInstance()
-						.applySmudgeFilter(repo, reader.open(fileId, OBJ_BLOB),
-								null);
-				byte[] data = loader.getBytes();
-				convertCrLf = RawText.isCrLfText(data);
-				fileStreamSupplier = () -> new ByteArrayInputStream(data);
-				streamType = convertCrLf ? EolStreamType.TEXT_CRLF
-						: EolStreamType.DIRECT;
-				smudgeFilterCommand = walk
-						.getFilterCommand(Constants.ATTR_FILTER_TYPE_SMUDGE);
-				loadedFromTreeWalk = true;
-			} else if (walk.next()) {
-				// If the file on disk has no newline characters,
-				// convertCrLf will be false. In that case we want to honor the
-				// normal git settings.
-				streamType = convertCrLf ? EolStreamType.TEXT_CRLF
-						: walk.getEolStreamType(OperationType.CHECKOUT_OP);
-				smudgeFilterCommand = walk
-						.getFilterCommand(Constants.ATTR_FILTER_TYPE_SMUDGE);
-				FileTreeIterator file = walk.getTree(FILE_TREE_INDEX,
-						FileTreeIterator.class);
-				if (file != null) {
-					fileId = file.getEntryObjectId();
-					fileStreamSupplier = file::openEntryStream;
-					loadedFromTreeWalk = true;
-				} else {
-					throw new PatchApplyException(MessageFormat.format(
-							JGitText.get().cannotReadFile,
-							pathWithOriginalContent));
-				}
-			}
-			if (fileStreamSupplier == null)
-				fileStreamSupplier = inCore() ? InputStream::nullInputStream
-						: () -> new FileInputStream(f);
-			FileMode fileMode = fh.getNewMode() != null ? fh.getNewMode()
-					: FileMode.REGULAR_FILE;
-			ContentStreamLoader resultStreamLoader;
-			if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
-				// binary patches are processed in a streaming fashion. Some
-				// binary patches do random access on the input data, so we can't
-				// overwrite the file while we're streaming.
-				resultStreamLoader = applyBinary(pathWithOriginalContent, f, fh,
-						fileStreamSupplier, fileId);
-			} else {
-				String filterCommand = walk != null
-						? walk.getFilterCommand(
-								Constants.ATTR_FILTER_TYPE_CLEAN)
-						: null;
-				RawText raw = getRawText(f, fileStreamSupplier, fileId,
-						pathWithOriginalContent, loadedFromTreeWalk, filterCommand,
-						convertCrLf);
-				resultStreamLoader = applyText(raw, fh);
-			}
-			if (f != null) {
-				// Write to a buffer and copy to the file only if everything was
-				// fine.
-				TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
-				try {
-					CheckoutMetadata metadata = new CheckoutMetadata(streamType,
-							smudgeFilterCommand);
-					try (TemporaryBuffer buf = buffer) {
-						DirCacheCheckout.getContent(repo, pathWithOriginalContent,
-								metadata, resultStreamLoader.supplier, workingTreeOptions,
-								buf);
-					}
-					try (InputStream bufIn = buffer.openInputStream()) {
-						Files.copy(bufIn, f.toPath(),
-								StandardCopyOption.REPLACE_EXISTING);
-					}
-				} finally {
-					buffer.destroy();
-				}
-				repo.getFS().setExecute(f,
-						fileMode == FileMode.EXECUTABLE_FILE);
-			}
-			Instant lastModified = f == null ? null
-					: repo.getFS().lastModifiedInstant(f);
-			Attributes attributes = walk != null ? walk.getAttributes()
-					: new Attributes();
-			DirCacheEntry dce = insertToIndex(
-					resultStreamLoader.supplier.load(),
-					fh.getNewPath().getBytes(StandardCharsets.UTF_8), fileMode,
-					lastModified, resultStreamLoader.length,
-					attributes.get(Constants.ATTR_FILTER));
-			dirCacheBuilder.add(dce);
-			if (PatchType.GIT_BINARY.equals(fh.getPatchType())
-					&& fh.getNewId() != null && fh.getNewId().isComplete()
-					&& !fh.getNewId().toObjectId().equals(dce.getObjectId())) {
-				throw new PatchApplyException(MessageFormat.format(
-						JGitText.get().applyBinaryResultOidWrong,
-						pathWithOriginalContent));
-			}
-		} catch (IOException | UnsupportedOperationException e) {
-			throw new PatchApplyException(MessageFormat.format(
-					JGitText.get().patchApplyException, e.getMessage()), e);
-		}
+		TreeWalk walk = getTreeWalkForFile(pathWithOriginalContent, dirCache);
+		boolean loadedFromTreeWalk = false;
+		// CR-LF handling is determined by whether the file or the patch
+		// have CR-LF line endings.
+		boolean convertCrLf = inCore() || needsCrLfConversion(f, fh);
+		EolStreamType streamType = convertCrLf ? EolStreamType.TEXT_CRLF
+				: EolStreamType.DIRECT;
+		String smudgeFilterCommand = null;
+		StreamSupplier fileStreamSupplier = null;
+		ObjectId fileId = ObjectId.zeroId();
+		if (walk == null) {
+			// For new files with inCore()==true, TreeWalk.forPath can be
+			// null. Stay with defaults.
+		} else if (inCore()) {
+			fileId = walk.getObjectId(0);
+			ObjectLoader loader = LfsFactory.getInstance()
+					.applySmudgeFilter(repo, reader.open(fileId, OBJ_BLOB),
+							null);
+			byte[] data = loader.getBytes();
+			convertCrLf = RawText.isCrLfText(data);
+			fileStreamSupplier = () -> new ByteArrayInputStream(data);
+			streamType = convertCrLf ? EolStreamType.TEXT_CRLF
+					: EolStreamType.DIRECT;
+			smudgeFilterCommand = walk
+					.getFilterCommand(Constants.ATTR_FILTER_TYPE_SMUDGE);
+			loadedFromTreeWalk = true;
+		} else if (walk.next()) {
+			// If the file on disk has no newline characters,
+			// convertCrLf will be false. In that case we want to honor the
+			// normal git settings.
+			streamType = convertCrLf ? EolStreamType.TEXT_CRLF
+					: walk.getEolStreamType(OperationType.CHECKOUT_OP);
+			smudgeFilterCommand = walk
+					.getFilterCommand(Constants.ATTR_FILTER_TYPE_SMUDGE);
+			FileTreeIterator file = walk.getTree(FILE_TREE_INDEX,
+					FileTreeIterator.class);
+			if (file != null) {
+				fileId = file.getEntryObjectId();
+				fileStreamSupplier = file::openEntryStream;
+				loadedFromTreeWalk = true;
+			} else {
+				throw new IOException(MessageFormat.format(
+						JGitText.get().cannotReadFile,
+						pathWithOriginalContent));
+			}
+		}
+		if (fileStreamSupplier == null)
+			fileStreamSupplier = inCore() ? InputStream::nullInputStream
+					: () -> new FileInputStream(f);
+		FileMode fileMode = fh.getNewMode() != null ? fh.getNewMode()
+				: FileMode.REGULAR_FILE;
+		ContentStreamLoader resultStreamLoader;
+		if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
+			// binary patches are processed in a streaming fashion. Some
+			// binary patches do random access on the input data, so we can't
+			// overwrite the file while we're streaming.
+			resultStreamLoader = applyBinary(pathWithOriginalContent, f, fh,
+					fileStreamSupplier, fileId, result);
+		} else {
+			String filterCommand = walk != null
+					? walk.getFilterCommand(
+							Constants.ATTR_FILTER_TYPE_CLEAN)
+					: null;
+			RawText raw = getRawText(f, fileStreamSupplier, fileId,
+					pathWithOriginalContent, loadedFromTreeWalk, filterCommand,
+					convertCrLf);
+			resultStreamLoader = applyText(raw, fh, result);
+		}
+		if (resultStreamLoader == null || !result.getErrors().isEmpty()) {
+			return;
+		}
+		if (f != null) {
+			// Write to a buffer and copy to the file only if everything was
+			// fine.
+			TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
+			try {
+				CheckoutMetadata metadata = new CheckoutMetadata(streamType,
+						smudgeFilterCommand);
+				try (TemporaryBuffer buf = buffer) {
+					DirCacheCheckout.getContent(repo, pathWithOriginalContent,
+							metadata, resultStreamLoader.supplier, workingTreeOptions,
+							buf);
+				}
+				try (InputStream bufIn = buffer.openInputStream()) {
+					Files.copy(bufIn, f.toPath(),
+							StandardCopyOption.REPLACE_EXISTING);
+				}
+			} finally {
+				buffer.destroy();
+			}
+			repo.getFS().setExecute(f,
+					fileMode == FileMode.EXECUTABLE_FILE);
+		}
+		Instant lastModified = f == null ? null
+				: repo.getFS().lastModifiedInstant(f);
+		Attributes attributes = walk != null ? walk.getAttributes()
+				: new Attributes();
+		DirCacheEntry dce = insertToIndex(
+				resultStreamLoader.supplier.load(),
+				fh.getNewPath().getBytes(StandardCharsets.UTF_8), fileMode,
+				lastModified, resultStreamLoader.length,
+				attributes.get(Constants.ATTR_FILTER));
+		dirCacheBuilder.add(dce);
+		if (PatchType.GIT_BINARY.equals(fh.getPatchType())
+				&& fh.getNewId() != null && fh.getNewId().isComplete()
+				&& !fh.getNewId().toObjectId().equals(dce.getObjectId())) {
+			result.addError(MessageFormat.format(
+					JGitText.get().applyBinaryResultOidWrong,
+					pathWithOriginalContent), fh.getOldPath(), null);
+		}
 	}
@@ -640,8 +670,8 @@ private ObjectId hash(File f) throws IOException {
 		}
 	}

-	private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
-			String path) throws PatchApplyException, IOException {
+	private boolean checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
+			String path, Result result) throws IOException {
 		boolean hashOk = false;
 		if (id != null) {
 			hashOk = baseId.equals(id);
@@ -660,9 +690,10 @@ private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
 			}
 		}
 		if (!hashOk) {
-			throw new PatchApplyException(MessageFormat
-					.format(JGitText.get().applyBinaryBaseOidWrong, path));
+			result.addError(MessageFormat
+					.format(JGitText.get().applyBinaryBaseOidWrong, path), path, null);
 		}
+		return hashOk;
 	}

 	private boolean inCore() {
@@ -700,18 +731,19 @@ private static class ContentStreamLoader {
 	 *            a supplier for the contents of the old file
 	 * @param id
 	 *            SHA1 for the old content
-	 * @return a loader for the new content.
-	 * @throws PatchApplyException
+	 * @param result
+	 *            The patch application result
+	 * @return a loader for the new content, or null if invalid.
 	 * @throws IOException
 	 * @throws UnsupportedOperationException
 	 */
-	private ContentStreamLoader applyBinary(String path, File f, FileHeader fh,
-			StreamSupplier inputSupplier, ObjectId id)
-			throws PatchApplyException, IOException,
-			UnsupportedOperationException {
+	private @Nullable ContentStreamLoader applyBinary(String path, File f, FileHeader fh,
+			StreamSupplier inputSupplier, ObjectId id, Result result)
+			throws UnsupportedOperationException, IOException {
 		if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
-			throw new PatchApplyException(MessageFormat
-					.format(JGitText.get().applyBinaryOidTooShort, path));
+			result.addError(MessageFormat
+					.format(JGitText.get().applyBinaryOidTooShort, path), path, null);
+			return null;
 		}
 		BinaryHunk hunk = fh.getForwardBinaryHunk();
 		// A BinaryHunk has the start at the "literal" or "delta" token. Data
@@ -723,8 +755,10 @@ private ContentStreamLoader applyBinary(String path, File f, FileHeader fh,
 		case LITERAL_DEFLATED: {
 			// This just overwrites the file. We need to check the hash of
 			// the base.
-			checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f,
-					path);
+			if (!checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f,
+					path, result)) {
+				return null;
+			}
 			StreamSupplier supp = () -> new InflaterInputStream(
 					new BinaryHunkInputStream(new ByteArrayInputStream(
 							hunk.getBuffer(), start, length)));
@@ -756,8 +790,8 @@ private ContentStreamLoader applyBinary(String path, File f, FileHeader fh,
 		}
 	}

-	private ContentStreamLoader applyText(RawText rt, FileHeader fh)
-			throws IOException, PatchApplyException {
+	private @Nullable ContentStreamLoader applyText(RawText rt, FileHeader fh, Result result)
+			throws IOException {
 		List<ByteBuffer> oldLines = new ArrayList<>(rt.size());
 		for (int i = 0; i < rt.size(); i++) {
 			oldLines.add(rt.getRawString(i));
@@ -771,8 +805,8 @@ private ContentStreamLoader applyText(RawText rt, FileHeader fh)
 		for (HunkHeader hh : fh.getHunks()) {
 			// We assume hunks to be ordered
 			if (hh.getNewStartLine() <= lastHunkNewLine) {
-				throw new PatchApplyException(MessageFormat
-						.format(JGitText.get().patchApplyException, hh));
+				result.addError(JGitText.get().applyTextPatchUnorderedHunks, fh.getOldPath(), hh);
+				return null;
 			}
 			lastHunkNewLine = hh.getNewStartLine();
@@ -793,8 +827,9 @@ && canApplyAt(hunkLines, newLines, 0)) {
 					newLines.clear();
 					break;
 				}
-				throw new PatchApplyException(MessageFormat
-						.format(JGitText.get().patchApplyException, hh));
+				result.addError(JGitText.get().applyTextPatchSingleClearingHunk,
+						fh.getOldPath(), hh);
+				return null;
 			}
 			// Hunk lines as reported by the hunk may be off, so don't rely on
 			// them.
@@ -805,8 +840,9 @@ && canApplyAt(hunkLines, newLines, 0)) {
 				lineNumberShift = 0;
 			}
 			if (applyAt < afterLastHunk) {
-				throw new PatchApplyException(MessageFormat
-						.format(JGitText.get().patchApplyException, hh));
+				result.addError(JGitText.get().applyTextPatchUnorderedHunkApplications,
+						fh.getOldPath(), hh);
+				return null;
 			}
 			boolean applies = false;
 			int oldLinesInHunk = hh.getLinesContext()
@@ -844,8 +880,9 @@ && canApplyAt(hunkLines, newLines, 0)) {
 				}
 			}
 			if (!applies) {
-				throw new PatchApplyException(MessageFormat
-						.format(JGitText.get().patchApplyException, hh));
+				result.addError(JGitText.get().applyTextPatchCannotApplyHunk,
+						fh.getOldPath(), hh);
+				return null;
 			}
 			// Hunk applies at applyAt. Apply it, and update afterLastHunk and
 			// lineNumberShift