Split out ApplyCommand logic to PatchApplier class

PatchApplier now routes updates through the index. This has two
results:

* we can now execute patches in-memory.

* the JGit apply command will now always update the
index to match the working tree.

Change-Id: Id60a88232f05d0367787d038d2518c670cdb543f
Co-authored-by: Han-Wen Nienhuys <hanwen@google.com>
Co-authored-by: Nitzan Gur-Furman <nitzan@google.com>
This commit is contained in:
Nitzan Gur-Furman 2022-08-31 19:26:13 +02:00
parent 57087e2b92
commit acde6c8f5b
6 changed files with 1555 additions and 1051 deletions

View File

@@ -19,19 +19,13 @@
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import org.eclipse.jgit.api.errors.PatchApplyException;
import org.eclipse.jgit.api.errors.PatchFormatException;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandFactory;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.diff.RawText;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.IO;
import org.junit.Test;
@@ -68,189 +62,6 @@ private ApplyResult init(final String name, final boolean preExists,
}
}
/**
 * Applies the named patch with core.autocrlf set to {@code autoCrLf} and
 * verifies that exactly the named work-tree file was updated and matches
 * the expected post-image. The config key is always unset afterwards so
 * the setting cannot leak into other tests.
 *
 * @param name      test resource name ({@code name}.patch, _PreImage, _PostImage)
 * @param preExists whether a pre-image file exists before applying
 * @param autoCrLf  value for core.autocrlf during the apply
 */
private void checkCrLfApplication(String name, boolean preExists,
        boolean autoCrLf) throws Exception {
    try {
        db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
                ConfigConstants.CONFIG_KEY_AUTOCRLF, autoCrLf);
        ApplyResult result = init(name, preExists, true);
        assertEquals(1, result.getUpdatedFiles().size());
        assertEquals(new File(db.getWorkTree(), name),
                result.getUpdatedFiles().get(0));
        checkFile(new File(db.getWorkTree(), name),
                b.getString(0, b.size(), false));
    } finally {
        db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
                ConfigConstants.CONFIG_KEY_AUTOCRLF);
    }
}

@Test
public void testCrLf() throws Exception {
    // Pre-existing file, autocrlf=true.
    checkCrLfApplication("crlf", true, true);
}

@Test
public void testCrLfOff() throws Exception {
    // Same resource as testCrLf, but with autocrlf=false.
    checkCrLfApplication("crlf", true, false);
}

@Test
public void testCrLfEmptyCommitted() throws Exception {
    checkCrLfApplication("crlf3", true, true);
}

@Test
public void testCrLfNewFile() throws Exception {
    // The patch creates the file; no pre-image exists.
    checkCrLfApplication("crlf4", false, true);
}

@Test
public void testPatchWithCrLf() throws Exception {
    // The patch itself contains CRLF line endings; autocrlf=false.
    checkCrLfApplication("crlf2", true, false);
}
@Test
public void testPatchWithCrLf2() throws Exception {
String name = "crlf2";
try (Git git = new Git(db)) {
// Commit the pre-image with autocrlf=false so the blob keeps its
// original line endings.
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
a = new RawText(readFile(name + "_PreImage"));
write(new File(db.getWorkTree(), name),
a.getString(0, a.size(), false));
git.add().addFilepattern(name).call();
git.commit().setMessage("PreImage").call();
b = new RawText(readFile(name + "_PostImage"));
// Flip autocrlf on *after* committing: the apply must still succeed
// even though checkout/checkin conversion is now active.
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
ApplyResult result = git.apply()
.setPatch(getTestResource(name + ".patch")).call();
assertEquals(1, result.getUpdatedFiles().size());
assertEquals(new File(db.getWorkTree(), name),
result.getUpdatedFiles().get(0));
checkFile(new File(db.getWorkTree(), name),
b.getString(0, b.size(), false));
} finally {
// Restore the default so other tests are unaffected.
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF);
}
}
// Clean/smudge filter for testFiltering. The smudgetest test resources were
// produced with C git using the clean filter "sed -e s/A/E/g" and the smudge
// filter "sed -e s/E/A/g"; this built-in replacement keeps the test
// independent of sed being installed.
private static class ReplaceFilter extends FilterCommand {
    private final char toReplace;
    private final char replacement;

    ReplaceFilter(InputStream in, OutputStream out, char toReplace,
            char replacement) {
        super(in, out);
        this.toReplace = toReplace;
        this.replacement = replacement;
    }

    @Override
    public int run() throws IOException {
        int next = in.read();
        if (next < 0) {
            // End of input: close both streams and signal completion.
            in.close();
            out.close();
            return -1;
        }
        // Copy one byte, substituting the configured character.
        out.write((next & 0xFF) == toReplace ? replacement : next);
        return 1;
    }
}
/**
 * Applies a patch to a file routed through a clean/smudge filter pair.
 * The filters are registered under jgit://builtin/a2e/* and wired up via
 * the repository config and a .gitattributes entry.
 */
@Test
public void testFiltering() throws Exception {
    // Set up filter
    FilterCommandFactory clean = (repo, in, out) -> {
        return new ReplaceFilter(in, out, 'A', 'E');
    };
    FilterCommandFactory smudge = (repo, in, out) -> {
        return new ReplaceFilter(in, out, 'E', 'A');
    };
    FilterCommandRegistry.register("jgit://builtin/a2e/clean", clean);
    FilterCommandRegistry.register("jgit://builtin/a2e/smudge", smudge);
    // Hoisted out of the try block so the finally clause can undo the
    // config changes below.
    Config config = db.getConfig();
    try (Git git = new Git(db)) {
        config.setString(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
                "clean", "jgit://builtin/a2e/clean");
        config.setString(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
                "smudge", "jgit://builtin/a2e/smudge");
        write(new File(db.getWorkTree(), ".gitattributes"),
                "smudgetest filter=a2e");
        git.add().addFilepattern(".gitattributes").call();
        git.commit().setMessage("Attributes").call();
        ApplyResult result = init("smudgetest", true, true);
        assertEquals(1, result.getUpdatedFiles().size());
        assertEquals(new File(db.getWorkTree(), "smudgetest"),
                result.getUpdatedFiles().get(0));
        checkFile(new File(db.getWorkTree(), "smudgetest"),
                b.getString(0, b.size(), false));
    } finally {
        // Tear down filter. The original version only unregistered the
        // filter commands and left filter.a2e.clean/smudge set in the
        // repository config, leaking state into subsequent tests.
        config.unset(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
                "clean");
        config.unset(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
                "smudge");
        FilterCommandRegistry.unregister("jgit://builtin/a2e/clean");
        FilterCommandRegistry.unregister("jgit://builtin/a2e/smudge");
    }
}
private void checkBinary(String name, boolean hasPreImage)
throws Exception {
checkBinary(name, hasPreImage, 1);
@@ -279,21 +90,6 @@ private void checkBinary(String name, boolean hasPreImage,
}
}
@Test
public void testBinaryDelta() throws Exception {
// Binary patch encoded as a delta against the pre-image.
checkBinary("delta", true);
}
@Test
public void testBinaryLiteral() throws Exception {
// Binary patch carrying the full post-image literally.
checkBinary("literal", true);
}
@Test
public void testBinaryLiteralAdd() throws Exception {
// Literal binary patch creating a new file (no pre-image).
checkBinary("literal_add", false);
}
@Test
public void testEncodingChange() throws Exception {
// This is a text patch that changes a file containing ÄÖÜ in UTF-8 to
@@ -410,188 +206,6 @@ public void testAddM1() throws Exception {
b.getString(0, b.size(), false));
}
@Test
public void testModifyM2() throws Exception {
// M2's patch sets the executable file mode bit.
ApplyResult result = init("M2", true, true);
assertEquals(1, result.getUpdatedFiles().size());
// Mode-bit assertion only where the file system supports it (e.g. not
// on Windows).
if (FS.DETECTED.supportsExecute()) {
assertTrue(FS.DETECTED.canExecute(result.getUpdatedFiles().get(0)));
}
checkFile(new File(db.getWorkTree(), "M2"),
b.getString(0, b.size(), false));
}
@Test
public void testModifyM3() throws Exception {
// M3's patch clears the executable file mode bit.
ApplyResult result = init("M3", true, true);
assertEquals(1, result.getUpdatedFiles().size());
if (FS.DETECTED.supportsExecute()) {
assertFalse(
FS.DETECTED.canExecute(result.getUpdatedFiles().get(0)));
}
checkFile(new File(db.getWorkTree(), "M3"),
b.getString(0, b.size(), false));
}
/**
 * Asserts that {@code result} reports exactly one updated file, that it is
 * the work-tree file {@code name}, and that its content equals the
 * expected post-image.
 */
private void assertOneFileUpdated(ApplyResult result, String name)
        throws Exception {
    assertEquals(1, result.getUpdatedFiles().size());
    assertEquals(new File(db.getWorkTree(), name),
            result.getUpdatedFiles().get(0));
    checkFile(new File(db.getWorkTree(), name),
            b.getString(0, b.size(), false));
}

@Test
public void testModifyX() throws Exception {
    assertOneFileUpdated(init("X"), "X");
}

@Test
public void testModifyY() throws Exception {
    assertOneFileUpdated(init("Y"), "Y");
}

@Test
public void testModifyZ() throws Exception {
    assertOneFileUpdated(init("Z"), "Z");
}

@Test
public void testModifyNL1() throws Exception {
    // NL1: file without trailing newline.
    assertOneFileUpdated(init("NL1"), "NL1");
}

@Test
public void testNonASCII() throws Exception {
    assertOneFileUpdated(init("NonASCII"), "NonASCII");
}

@Test
public void testNonASCII2() throws Exception {
    assertOneFileUpdated(init("NonASCII2"), "NonASCII2");
}

@Test
public void testNonASCIIAdd() throws Exception {
    assertOneFileUpdated(init("NonASCIIAdd"), "NonASCIIAdd");
}

@Test
public void testNonASCIIAdd2() throws Exception {
    // The patch creates the file; no pre-image exists.
    assertOneFileUpdated(init("NonASCIIAdd2", false, true), "NonASCIIAdd2");
}

@Test
public void testNonASCIIDel() throws Exception {
    // Deletion: the path is reported as updated and must then be gone.
    ApplyResult result = init("NonASCIIDel", true, false);
    assertEquals(1, result.getUpdatedFiles().size());
    assertEquals(new File(db.getWorkTree(), "NonASCIIDel"),
            result.getUpdatedFiles().get(0));
    assertFalse(new File(db.getWorkTree(), "NonASCIIDel").exists());
}
@Test
public void testRenameNoHunks() throws Exception {
// A pure rename reports both the old and the new path as updated.
ApplyResult result = init("RenameNoHunks", true, true);
assertEquals(2, result.getUpdatedFiles().size());
assertTrue(result.getUpdatedFiles().contains(new File(db.getWorkTree(), "RenameNoHunks")));
assertTrue(result.getUpdatedFiles().contains(new File(db.getWorkTree(), "nested/subdir/Renamed")));
checkFile(new File(db.getWorkTree(), "nested/subdir/Renamed"),
b.getString(0, b.size(), false));
}
@Test
public void testRenameWithHunks() throws Exception {
// Rename plus content hunks: same two paths, but the target must also
// carry the patched content.
ApplyResult result = init("RenameWithHunks", true, true);
assertEquals(2, result.getUpdatedFiles().size());
assertTrue(result.getUpdatedFiles().contains(new File(db.getWorkTree(), "RenameWithHunks")));
assertTrue(result.getUpdatedFiles().contains(new File(db.getWorkTree(), "nested/subdir/Renamed")));
checkFile(new File(db.getWorkTree(), "nested/subdir/Renamed"),
b.getString(0, b.size(), false));
}
@Test
public void testCopyWithHunks() throws Exception {
    // A copy reports only the copy target, carrying the patched content.
    ApplyResult result = init("CopyWithHunks", true, true);
    assertEquals(1, result.getUpdatedFiles().size());
    File copied = new File(db.getWorkTree(), "CopyResult");
    assertEquals(copied, result.getUpdatedFiles().get(0));
    checkFile(copied, b.getString(0, b.size(), false));
}
/**
 * Applies the named patch whose hunks are offset relative to the current
 * file content and verifies the single resulting file matches the
 * post-image.
 */
private void assertShiftApplied(String name) throws Exception {
    ApplyResult result = init(name);
    assertEquals(1, result.getUpdatedFiles().size());
    assertEquals(new File(db.getWorkTree(), name),
            result.getUpdatedFiles().get(0));
    checkFile(new File(db.getWorkTree(), name),
            b.getString(0, b.size(), false));
}

@Test
public void testShiftUp() throws Exception {
    assertShiftApplied("ShiftUp");
}

@Test
public void testShiftUp2() throws Exception {
    assertShiftApplied("ShiftUp2");
}

@Test
public void testShiftDown() throws Exception {
    assertShiftApplied("ShiftDown");
}

@Test
public void testShiftDown2() throws Exception {
    assertShiftApplied("ShiftDown2");
}
private static byte[] readFile(String patchFile) throws IOException {
final InputStream in = getTestResource(patchFile);
if (in == null) {

View File

@@ -0,0 +1,534 @@
/*
* Copyright (C) 2022, Google Inc. and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.patch;
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.PatchApplyException;
import org.eclipse.jgit.api.errors.PatchFormatException;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandFactory;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.patch.PatchApplier.Result;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevTree;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.IO;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
// Run both suite members: the same Base tests against a work tree and
// fully in-memory (in-core).
@RunWith(Suite.class)
@Suite.SuiteClasses({
        PatchApplierTest.WithWorktree.class, //
        PatchApplierTest.InCore.class, //
})
public class PatchApplierTest {
public abstract static class Base extends RepositoryTestCase {
// Name of the test resource triple: {name}.patch, {name}_PreImage,
// {name}_PostImage.
protected String name;
/** data before patching. */
protected byte[] preImage;
/** expected data after patching. */
protected byte[] postImage;
// UTF-8 decoding of postImage; what verifyContent compares against.
protected String expectedText;
// Tree of the "PreImage" commit created by init(); serves as the base
// tree for in-core application.
protected RevTree baseTip;
// NOTE(review): only assigned in the constructor — could be final.
public boolean inCore;
Base(boolean inCore) {
this.inCore = inCore;
}
/**
 * Loads the test fixture: reads the post-image (if any), writes and
 * stages the pre-image (if any), and commits the "PreImage" state so
 * {@code baseTip} points at the base tree for the patch.
 */
protected void init(final String name, final boolean preExists,
final boolean postExists) throws Exception {
/* Patch and pre/post images are read from the test resources under
 * org.eclipse.jgit.test/tst-rsrc/org/eclipse/jgit/diff/ */
this.name = name;
if (postExists) {
postImage = IO
.readWholeStream(getTestResource(name + "_PostImage"), 0)
.array();
expectedText = new String(postImage, StandardCharsets.UTF_8);
}
File f = new File(db.getWorkTree(), name);
if (preExists) {
preImage = IO
.readWholeStream(getTestResource(name + "_PreImage"), 0)
.array();
try (Git git = new Git(db)) {
Files.write(f.toPath(), preImage);
git.add().addFilepattern(name).call();
}
}
// Commit even when nothing was staged, so baseTip is always set.
try (Git git = new Git(db)) {
RevCommit base = git.commit().setMessage("PreImage").call();
baseTip = base.getTree();
}
}
/** Convenience overload: pre- and post-image both exist. */
void init(final String name) throws Exception {
init(name, true, true);
}
/**
 * Parses {@code name}.patch and applies it — in-core against
 * {@code baseTip}, or to the work tree, depending on {@code inCore}.
 */
protected Result applyPatch()
        throws PatchApplyException, PatchFormatException, IOException {
    InputStream patchStream = getTestResource(name + ".patch");
    if (!inCore) {
        return new PatchApplier(db).applyPatch(patchStream);
    }
    try (ObjectInserter oi = db.newObjectInserter()) {
        return new PatchApplier(db, baseTip, oi).applyPatch(patchStream);
    }
}
/** Opens a test resource from org/eclipse/jgit/diff/ on the classpath. */
protected static InputStream getTestResource(String patchFile) {
return PatchApplierTest.class.getClassLoader()
.getResourceAsStream("org/eclipse/jgit/diff/" + patchFile);
}
/** Convenience overload: the patched path is expected to exist. */
void verifyChange(Result result, String name) throws Exception {
verifyChange(result, name, true);
}
/**
 * Checks the patched content of {@code path}: against the result tree for
 * in-core application, against the work tree otherwise. When
 * {@code exists} is false, asserts the path is absent instead.
 */
protected void verifyContent(Result result, String path, boolean exists)
        throws Exception {
    if (inCore) {
        byte[] output = readBlob(result.getTreeId(), path);
        if (!exists) {
            assertNull(output);
        } else {
            assertNotNull(output);
            // assertEquals takes (expected, actual); the original call
            // passed them swapped, which garbles failure messages.
            assertEquals(expectedText,
                    new String(output, StandardCharsets.UTF_8));
        }
    } else {
        File f = new File(db.getWorkTree(), path);
        if (!exists) {
            assertFalse(f.exists());
        } else {
            checkFile(f, expectedText);
        }
    }
}

/** Asserts exactly one path changed and verifies its content. */
void verifyChange(Result result, String name, boolean exists)
        throws Exception {
    assertEquals(1, result.getPaths().size());
    verifyContent(result, name, exists);
}
/**
 * Reads the blob at {@code path} from the given tree-ish, or returns
 * {@code null} when the path does not exist in that tree.
 */
protected byte[] readBlob(ObjectId treeish, String path) throws Exception {
try (TestRepository<?> tr = new TestRepository<>(db);
RevWalk rw = tr.getRevWalk()) {
// TestRepository.close() also closes db; bump the use count so the
// shared test repository stays open for the rest of the test.
db.incrementOpen();
RevTree tree = rw.parseTree(treeish);
try (TreeWalk tw = TreeWalk.forPath(db,path,tree)){
if (tw == null) {
// Path not present in the tree.
return null;
}
return tw.getObjectReader().open(tw.getObjectId(0), OBJ_BLOB).getBytes();
}
}
}
/**
 * Verifies a binary patch result: the expected number of changed paths,
 * and that the post-image bytes match exactly.
 */
protected void checkBinary(Result result, int numberOfFiles) throws Exception {
assertEquals(numberOfFiles, result.getPaths().size());
if (inCore) {
// NOTE(review): the in-core branch only verifies paths.get(0); with
// numberOfFiles > 1 the remaining paths go unchecked — confirm all
// callers pass 1.
assertArrayEquals(postImage, readBlob(result.getTreeId(), result.getPaths().get(0)));
} else {
File f = new File(db.getWorkTree(), name);
assertArrayEquals(postImage, Files.readAllBytes(f.toPath()));
}
}
/* tests */
@Test
public void testBinaryDelta() throws Exception {
// Binary patch encoded as a delta against the pre-image.
init("delta");
checkBinary(applyPatch(), 1);
}
@Test
public void testBinaryLiteral() throws Exception {
// Binary patch carrying the full post-image literally.
init("literal");
checkBinary(applyPatch(), 1);
}
@Test
public void testBinaryLiteralAdd() throws Exception {
// Literal binary patch creating a new file (no pre-image).
init("literal_add", false, true);
checkBinary(applyPatch(), 1);
}
@Test
public void testModifyM2() throws Exception {
// M2's patch sets the executable mode bit; only observable in the
// work tree on file systems that support the execute bit.
init("M2", true, true);
Result result = applyPatch();
if (!inCore && FS.DETECTED.supportsExecute()) {
assertEquals(1, result.getPaths().size());
File f = new File(db.getWorkTree(), result.getPaths().get(0));
assertTrue(FS.DETECTED.canExecute(f));
}
verifyChange(result, "M2");
}
@Test
public void testModifyM3() throws Exception {
// M3's patch clears the executable mode bit.
init("M3", true, true);
Result result = applyPatch();
verifyChange(result, "M3");
if (!inCore && FS.DETECTED.supportsExecute()) {
File f = new File(db.getWorkTree(), result.getPaths().get(0));
assertFalse(FS.DETECTED.canExecute(f));
}
}
/**
 * Applies {@code name}.patch to an existing pre-image and verifies the
 * single changed path.
 */
private void applyAndVerify(String name) throws Exception {
    init(name);
    Result result = applyPatch();
    verifyChange(result, name);
}

@Test
public void testModifyX() throws Exception {
    applyAndVerify("X");
}

@Test
public void testModifyY() throws Exception {
    applyAndVerify("Y");
}

@Test
public void testModifyZ() throws Exception {
    applyAndVerify("Z");
}

@Test
public void testNonASCII() throws Exception {
    applyAndVerify("NonASCII");
}

@Test
public void testNonASCII2() throws Exception {
    applyAndVerify("NonASCII2");
}

@Test
public void testNonASCIIAdd() throws Exception {
    applyAndVerify("NonASCIIAdd");
}

@Test
public void testNonASCIIAdd2() throws Exception {
    // The patch creates the file; no pre-image exists.
    init("NonASCIIAdd2", false, true);
    verifyChange(applyPatch(), "NonASCIIAdd2");
}

@Test
public void testNonASCIIDel() throws Exception {
    // Deletion: the path is reported changed and must then be absent.
    init("NonASCIIDel", true, false);
    Result result = applyPatch();
    verifyChange(result, "NonASCIIDel", false);
    assertEquals("NonASCIIDel", result.getPaths().get(0));
}
@Test
public void testRenameNoHunks() throws Exception {
// A pure rename reports both the old and the new path.
init("RenameNoHunks", true, true);
Result result = applyPatch();
assertEquals(2, result.getPaths().size());
assertTrue(result.getPaths().contains("RenameNoHunks"));
assertTrue(result.getPaths().contains("nested/subdir/Renamed"));
verifyContent(result,"nested/subdir/Renamed", true);
}
@Test
public void testRenameWithHunks() throws Exception {
// Rename plus content hunks: the target must carry patched content.
init("RenameWithHunks", true, true);
Result result = applyPatch();
assertEquals(2, result.getPaths().size());
assertTrue(result.getPaths().contains("RenameWithHunks"));
assertTrue(result.getPaths().contains("nested/subdir/Renamed"));
verifyContent(result,"nested/subdir/Renamed", true);
}
@Test
public void testCopyWithHunks() throws Exception {
// A copy reports only the copy target.
init("CopyWithHunks", true, true);
Result result = applyPatch();
verifyChange(result, "CopyResult", true);
}
// Shift tests: hunk offsets differ from the current file content, so the
// applier must locate the hunks by context.
@Test
public void testShiftUp() throws Exception {
init("ShiftUp");
Result result = applyPatch();
verifyChange(result, "ShiftUp");
}
@Test
public void testShiftUp2() throws Exception {
init("ShiftUp2");
Result result = applyPatch();
verifyChange(result, "ShiftUp2");
}
@Test
public void testShiftDown() throws Exception {
init("ShiftDown");
Result result = applyPatch();
verifyChange(result, "ShiftDown");
}
@Test
public void testShiftDown2() throws Exception {
init("ShiftDown2");
Result result = applyPatch();
verifyChange(result, "ShiftDown2");
}
}
/** Runs the shared Base tests with fully in-memory patch application. */
public static class InCore extends Base {
public InCore() {
super(true);
}
}
/** Runs the shared Base tests against the work tree, plus work-tree-only
 * cases (CRLF conversion, filters). */
public static class WithWorktree extends Base {
public WithWorktree() { super(false); }
@Test
public void testModifyNL1() throws Exception {
// NL1: file without trailing newline.
// NOTE(review): presumably work-tree-only for a reason not visible
// here — confirm why this is not in Base.
init("NL1");
Result result = applyPatch();
verifyChange(result, "NL1");
}
/**
 * Applies the named patch with core.autocrlf set to {@code autoCrLf} and
 * verifies the single changed file. Always unsets core.autocrlf
 * afterwards so the setting cannot leak into other tests.
 */
private void checkCrLfApplication(String name, boolean preExists,
        boolean autoCrLf) throws Exception {
    try {
        db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
                ConfigConstants.CONFIG_KEY_AUTOCRLF, autoCrLf);
        init(name, preExists, true);
        Result result = applyPatch();
        verifyChange(result, name);
    } finally {
        db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
                ConfigConstants.CONFIG_KEY_AUTOCRLF);
    }
}

@Test
public void testCrLf() throws Exception {
    // Pre-existing file, autocrlf=true.
    checkCrLfApplication("crlf", true, true);
}

@Test
public void testCrLfOff() throws Exception {
    // Same resource as testCrLf, but with autocrlf=false.
    checkCrLfApplication("crlf", true, false);
}

@Test
public void testCrLfEmptyCommitted() throws Exception {
    checkCrLfApplication("crlf3", true, true);
}

@Test
public void testCrLfNewFile() throws Exception {
    // The patch creates the file; no pre-image exists.
    checkCrLfApplication("crlf4", false, true);
}

@Test
public void testPatchWithCrLf() throws Exception {
    // The patch itself contains CRLF line endings; autocrlf=false.
    checkCrLfApplication("crlf2", true, false);
}
@Test
public void testPatchWithCrLf2() throws Exception {
String name = "crlf2";
try (Git git = new Git(db)) {
// Commit the pre-image with autocrlf=false, then flip it on before
// applying: the apply must succeed despite active EOL conversion.
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
init(name, true, true);
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
Result result = applyPatch();
verifyChange(result, name);
} finally {
// Restore the default so other tests are unaffected.
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF);
}
}
// Clean/smudge filter for testFiltering. The smudgetest test resources were
// created with C git using a clean filter sed -e "s/A/E/g" and the smudge
// filter sed -e "s/E/A/g". To keep the test independent of the presence of
// sed, implement this with a built-in filter.
private static class ReplaceFilter extends FilterCommand {
// Character to look for in the input stream.
private final char toReplace;
// Character written in its place.
private final char replacement;
ReplaceFilter(InputStream in, OutputStream out, char toReplace,
char replacement) {
super(in, out);
this.toReplace = toReplace;
this.replacement = replacement;
}
/** Copies one byte per call, substituting the configured character. */
@Override
public int run() throws IOException {
int b = in.read();
if (b < 0) {
// End of input: close both streams and signal completion.
in.close();
out.close();
return -1;
}
if ((b & 0xFF) == toReplace) {
b = replacement;
}
out.write(b);
return 1;
}
}
/**
 * Applies a patch to a file routed through a clean/smudge filter pair
 * registered under jgit://builtin/a2e/* and wired up via the repository
 * config and a .gitattributes entry.
 */
@Test
public void testFiltering() throws Exception {
// Set up filter
FilterCommandFactory clean = (repo, in, out) -> new ReplaceFilter(in, out, 'A', 'E');
FilterCommandFactory smudge = (repo, in, out) -> new ReplaceFilter(in, out, 'E', 'A');
FilterCommandRegistry.register("jgit://builtin/a2e/clean", clean);
FilterCommandRegistry.register("jgit://builtin/a2e/smudge", smudge);
// Declared outside the try block so the finally clause can undo the
// config changes below.
Config config = db.getConfig();
try (Git git = new Git(db)) {
config.setString(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
"clean", "jgit://builtin/a2e/clean");
config.setString(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
"smudge", "jgit://builtin/a2e/smudge");
write(new File(db.getWorkTree(), ".gitattributes"),
"smudgetest filter=a2e");
git.add().addFilepattern(".gitattributes").call();
git.commit().setMessage("Attributes").call();
init("smudgetest", true, true);
Result result = applyPatch();
verifyChange(result, name);
} finally {
// Undo both the config entries and the command registration so no
// filter state leaks into subsequent tests.
config.unset(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
"clean");
config.unset(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
"smudge");
// Tear down filter
FilterCommandRegistry.unregister("jgit://builtin/a2e/clean");
FilterCommandRegistry.unregister("jgit://builtin/a2e/smudge");
}
}
}
}

View File

@@ -14,7 +14,9 @@ aNewObjectIdIsRequired=A NewObjectId is required.
anExceptionOccurredWhileTryingToAddTheIdOfHEAD=An exception occurred while trying to add the Id of HEAD
anSSHSessionHasBeenAlreadyCreated=An SSH session has been already created
applyBinaryBaseOidWrong=Cannot apply binary patch; OID for file {0} does not match
applyBinaryForInCoreNotSupported=Applying binary patch for inCore repositories is not yet supported
applyBinaryOidTooShort=Binary patch for file {0} does not have full IDs
applyBinaryPatchTypeNotSupported=Couldn''t apply binary patch of type {0}
applyBinaryResultOidWrong=Result of binary patch for file {0} has wrong OID.
applyingCommit=Applying {0}
archiveFormatAlreadyAbsent=Archive format already absent: {0}
@@ -155,6 +157,7 @@ connectionFailed=connection failed
connectionTimeOut=Connection time out: {0}
contextMustBeNonNegative=context must be >= 0
cookieFilePathRelative=git config http.cookieFile contains a relative path, should be absolute: {0}
copyFileFailedNullFiles=Cannot copy file. Either origin or destination files are null
corruptionDetectedReReadingAt=Corruption detected re-reading at {0}
corruptObjectBadDate=bad date
corruptObjectBadEmail=bad email
@@ -627,6 +630,7 @@ renameBranchFailedUnknownReason=Rename failed with unknown reason
renameBranchUnexpectedResult=Unexpected rename result {0}
renameCancelled=Rename detection was cancelled
renameFileFailed=Could not rename file {0} to {1}
renameFileFailedNullFiles=Cannot rename file. Either origin or destination files are null
renamesAlreadyFound=Renames have already been found.
renamesBreakingModifies=Breaking apart modified file pairs
renamesFindingByContent=Finding renames by content similarity

View File

@@ -9,62 +9,13 @@
*/
package org.eclipse.jgit.api;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.zip.InflaterInputStream;
import org.eclipse.jgit.api.errors.FilterFailedException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.PatchApplyException;
import org.eclipse.jgit.api.errors.PatchFormatException;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.diff.DiffEntry.ChangeType;
import org.eclipse.jgit.diff.RawText;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheCheckout;
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.patch.BinaryHunk;
import org.eclipse.jgit.patch.FileHeader;
import org.eclipse.jgit.patch.FileHeader.PatchType;
import org.eclipse.jgit.patch.HunkHeader;
import org.eclipse.jgit.patch.Patch;
import org.eclipse.jgit.treewalk.FileTreeIterator;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
import org.eclipse.jgit.treewalk.filter.AndTreeFilter;
import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter;
import org.eclipse.jgit.treewalk.filter.PathFilterGroup;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FS.ExecutionResult;
import org.eclipse.jgit.util.FileUtils;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
import org.eclipse.jgit.util.StringUtils;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
import org.eclipse.jgit.util.io.BinaryDeltaInputStream;
import org.eclipse.jgit.util.io.BinaryHunkInputStream;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
import org.eclipse.jgit.util.sha1.SHA1;
import org.eclipse.jgit.patch.PatchApplier;
import org.eclipse.jgit.patch.PatchApplier.Result;
/**
* Apply a patch to files and/or to the index.
@@ -80,10 +31,13 @@ public class ApplyCommand extends GitCommand<ApplyResult> {
/**
 * Constructs the command.
 *
 * @param local
 *            the repository this command applies patches in; must not be
 *            null
 */
ApplyCommand(Repository local) {
    super(local);
    if (local == null) {
        throw new NullPointerException(JGitText.get().repositoryIsRequired);
    }
}
/**
@ -101,6 +55,7 @@ public ApplyCommand setPatch(InputStream in) {
/**
* {@inheritDoc}
*
* <p>
* Executes the {@code ApplyCommand} command with all the options and
* parameters collected by the setter methods (e.g.
@ -109,621 +64,15 @@ public ApplyCommand setPatch(InputStream in) {
* method twice on an instance.
*/
@Override
public ApplyResult call() throws GitAPIException, PatchFormatException,
PatchApplyException {
public ApplyResult call() throws GitAPIException {
checkCallable();
setCallable(false);
ApplyResult r = new ApplyResult();
try {
final Patch p = new Patch();
try {
p.parse(in);
} finally {
in.close();
}
if (!p.getErrors().isEmpty()) {
throw new PatchFormatException(p.getErrors());
}
Repository repository = getRepository();
DirCache cache = repository.readDirCache();
for (FileHeader fh : p.getFiles()) {
ChangeType type = fh.getChangeType();
File f = null;
switch (type) {
case ADD:
f = getFile(fh.getNewPath(), true);
apply(repository, fh.getNewPath(), cache, f, fh);
break;
case MODIFY:
f = getFile(fh.getOldPath(), false);
apply(repository, fh.getOldPath(), cache, f, fh);
break;
case DELETE:
f = getFile(fh.getOldPath(), false);
if (!f.delete())
throw new PatchApplyException(MessageFormat.format(
JGitText.get().cannotDeleteFile, f));
break;
case RENAME:
f = getFile(fh.getOldPath(), false);
File dest = getFile(fh.getNewPath(), false);
try {
FileUtils.mkdirs(dest.getParentFile(), true);
FileUtils.rename(f, dest,
StandardCopyOption.ATOMIC_MOVE);
} catch (IOException e) {
throw new PatchApplyException(MessageFormat.format(
JGitText.get().renameFileFailed, f, dest), e);
}
apply(repository, fh.getOldPath(), cache, dest, fh);
r.addUpdatedFile(dest);
break;
case COPY:
File src = getFile(fh.getOldPath(), false);
f = getFile(fh.getNewPath(), false);
FileUtils.mkdirs(f.getParentFile(), true);
Files.copy(src.toPath(), f.toPath());
apply(repository, fh.getOldPath(), cache, f, fh);
}
r.addUpdatedFile(f);
}
} catch (IOException e) {
throw new PatchApplyException(MessageFormat.format(
JGitText.get().patchApplyException, e.getMessage()), e);
PatchApplier patchApplier = new PatchApplier(repo);
Result applyResult = patchApplier.applyPatch(in);
for (String p : applyResult.getPaths()) {
r.addUpdatedFile(new File(repo.getWorkTree(), p));
}
return r;
}
/**
 * Resolves {@code path} against the repository's work tree.
 *
 * @param path
 *            path relative to the work tree root
 * @param create
 *            whether to create the file and its parent directories
 * @return the resolved file
 * @throws PatchApplyException
 *             if the file could not be created
 */
private File getFile(String path, boolean create)
		throws PatchApplyException {
	File result = new File(getRepository().getWorkTree(), path);
	if (!create) {
		return result;
	}
	try {
		FileUtils.mkdirs(result.getParentFile(), true);
		FileUtils.createNewFile(result);
	} catch (IOException e) {
		throw new PatchApplyException(MessageFormat.format(
				JGitText.get().createNewFileFailed, result), e);
	}
	return result;
}
/**
 * Applies the hunks of {@code fh} to the work tree file {@code f},
 * routing content through the repository's clean/smudge filters and
 * CR-LF handling as configured for {@code path}.
 *
 * @param repository
 *            repository containing the file
 * @param path
 *            repository-relative path used for attribute/filter lookup
 * @param cache
 *            index used to resolve filter attributes
 * @param f
 *            work tree file to rewrite
 * @param fh
 *            parsed patch header for this file
 * @throws IOException
 *             on I/O errors
 * @throws PatchApplyException
 *             if the patch does not apply
 */
private void apply(Repository repository, String path, DirCache cache,
		File f, FileHeader fh) throws IOException, PatchApplyException {
	if (PatchType.BINARY.equals(fh.getPatchType())) {
		// "Binary files differ" without any content: nothing to apply.
		return;
	}
	boolean convertCrLf = needsCrLfConversion(f, fh);
	// Use a TreeWalk with a DirCacheIterator to pick up the correct
	// clean/smudge filters. CR-LF handling is completely determined by
	// whether the file or the patch have CR-LF line endings.
	try (TreeWalk walk = new TreeWalk(repository)) {
		walk.setOperationType(OperationType.CHECKIN_OP);
		FileTreeIterator files = new FileTreeIterator(repository);
		int fileIdx = walk.addTree(files);
		int cacheIdx = walk.addTree(new DirCacheIterator(cache));
		files.setDirCacheIterator(walk, cacheIdx);
		walk.setFilter(AndTreeFilter.create(
				PathFilterGroup.createFromStrings(path),
				new NotIgnoredFilter(fileIdx)));
		walk.setRecursive(true);
		if (walk.next()) {
			// If the file on disk has no newline characters, convertCrLf
			// will be false. In that case we want to honor the normal git
			// settings.
			EolStreamType streamType = convertCrLf ? EolStreamType.TEXT_CRLF
					: walk.getEolStreamType(OperationType.CHECKOUT_OP);
			String command = walk.getFilterCommand(
					Constants.ATTR_FILTER_TYPE_SMUDGE);
			CheckoutMetadata checkOut = new CheckoutMetadata(streamType, command);
			FileTreeIterator file = walk.getTree(fileIdx,
					FileTreeIterator.class);
			if (file != null) {
				if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
					applyBinary(repository, path, f, fh,
							file::openEntryStream, file.getEntryObjectId(),
							checkOut);
				} else {
					command = walk.getFilterCommand(
							Constants.ATTR_FILTER_TYPE_CLEAN);
					RawText raw;
					// Can't use file.openEntryStream() as it would do CR-LF
					// conversion as usual, not as wanted by us.
					try (InputStream input = filterClean(repository, path,
							new FileInputStream(f), convertCrLf, command)) {
						raw = new RawText(
								IO.readWholeStream(input, 0).array());
					}
					applyText(repository, path, raw, f, fh, checkOut);
				}
				return;
			}
		}
	}
	// Not found by the walk: the file may be ignored, so fall back to
	// applying without any attribute-driven filtering.
	RawText raw;
	CheckoutMetadata checkOut;
	if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
		checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null);
		applyBinary(repository, path, f, fh, () -> new FileInputStream(f),
				null, checkOut);
	} else {
		if (convertCrLf) {
			try (InputStream input = EolStreamTypeUtil.wrapInputStream(
					new FileInputStream(f), EolStreamType.TEXT_LF)) {
				raw = new RawText(IO.readWholeStream(input, 0).array());
			}
			checkOut = new CheckoutMetadata(EolStreamType.TEXT_CRLF, null);
		} else {
			raw = new RawText(f);
			checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null);
		}
		applyText(repository, path, raw, f, fh, checkOut);
	}
}
/**
 * Determines whether the patched content must be converted back to CR-LF:
 * true only when the file on disk uses CR-LF but the (non-binary) patch
 * itself does not.
 *
 * @param f
 *            file the patch is applied to
 * @param fileHeader
 *            parsed patch header for the file
 * @return whether CR-LF conversion is needed
 * @throws IOException
 *             if the file cannot be read
 */
private boolean needsCrLfConversion(File f, FileHeader fileHeader)
		throws IOException {
	boolean binary = PatchType.GIT_BINARY.equals(fileHeader.getPatchType());
	if (binary || hasCrLf(fileHeader)) {
		return false;
	}
	try (InputStream content = new FileInputStream(f)) {
		return RawText.isCrLfText(content);
	}
}
/**
 * Checks whether any old-side line (context ' ' or deletion '-') in the
 * patch's hunks ends in CR-LF.
 *
 * @param fileHeader
 *            parsed patch header to inspect
 * @return {@code true} if an old-side line ends with CR-LF; {@code false}
 *         for binary patches or when no CR-LF was found
 */
private static boolean hasCrLf(FileHeader fileHeader) {
	if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
		return false;
	}
	for (HunkHeader header : fileHeader.getHunks()) {
		byte[] buf = header.getBuffer();
		int hunkEnd = header.getEndOffset();
		int lineStart = header.getStartOffset();
		// Walk the hunk line by line; nextLF returns the index just past
		// the next '\n'.
		while (lineStart < hunkEnd) {
			int nextLineStart = RawParseUtils.nextLF(buf, lineStart);
			if (nextLineStart > hunkEnd) {
				nextLineStart = hunkEnd;
			}
			if (nextLineStart <= lineStart) {
				// No forward progress: stop scanning this hunk.
				break;
			}
			// Need at least 2 bytes (prefix + content) to carry a CR
			// before the LF.
			if (nextLineStart - lineStart > 1) {
				char first = (char) (buf[lineStart] & 0xFF);
				if (first == ' ' || first == '-') {
					// It's an old line. Does it end in CR-LF?
					if (buf[nextLineStart - 2] == '\r') {
						return true;
					}
				}
			}
			lineStart = nextLineStart;
		}
	}
	return false;
}
/**
 * Runs the configured clean filter (and optional CR-LF to LF conversion)
 * over the file content.
 *
 * @param repository
 *            repository the filter is configured in
 * @param path
 *            path of the file, for error reporting
 * @param fromFile
 *            raw content of the work tree file
 * @param convertCrLf
 *            whether to convert CR-LF to LF before filtering
 * @param filterCommand
 *            clean filter command, may be {@code null} or empty
 * @return a stream delivering the cleaned content
 * @throws IOException
 *             if the filter cannot be run or fails
 */
private InputStream filterClean(Repository repository, String path,
		InputStream fromFile, boolean convertCrLf, String filterCommand)
		throws IOException {
	InputStream input = fromFile;
	if (convertCrLf) {
		input = EolStreamTypeUtil.wrapInputStream(input,
				EolStreamType.TEXT_LF);
	}
	if (StringUtils.isEmptyOrNull(filterCommand)) {
		return input;
	}
	if (FilterCommandRegistry.isRegistered(filterCommand)) {
		LocalFile buffer = new TemporaryBuffer.LocalFile(null);
		FilterCommand command = FilterCommandRegistry.createFilterCommand(
				filterCommand, repository, input, buffer);
		while (command.run() != -1) {
			// loop as long as command.run() tells there is work to do
		}
		return buffer.openInputStreamWithAutoDestroy();
	}
	FS fs = repository.getFS();
	ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand,
			new String[0]);
	filterProcessBuilder.directory(repository.getWorkTree());
	filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
			repository.getDirectory().getAbsolutePath());
	ExecutionResult result;
	try {
		// Feed the (possibly CR-LF converted) file content to the external
		// filter's stdin. Previously this passed the command's patch
		// stream 'in', which has already been consumed and closed by
		// call() at this point.
		result = fs.execute(filterProcessBuilder, input);
	} catch (IOException | InterruptedException e) {
		if (e instanceof InterruptedException) {
			// Preserve the interrupt status for callers further up.
			Thread.currentThread().interrupt();
		}
		throw new IOException(
				new FilterFailedException(e, filterCommand, path));
	}
	int rc = result.getRc();
	if (rc != 0) {
		throw new IOException(new FilterFailedException(rc, filterCommand,
				path, result.getStdout().toByteArray(4096), RawParseUtils
						.decode(result.getStderr().toByteArray(4096))));
	}
	return result.getStdout().openInputStreamWithAutoDestroy();
}
/**
 * Seeds {@code sha1} with the canonical git blob header
 * ({@code "blob <size>\0"}) so the content bytes hashed afterwards
 * produce a proper git object id.
 *
 * @param sha1
 *            hash instance to initialize
 * @param blobSize
 *            content length in bytes
 */
private void initHash(SHA1 sha1, long blobSize) {
	sha1.update(Constants.encodedTypeString(Constants.OBJ_BLOB));
	sha1.update((byte) ' ');
	sha1.update(Constants.encodeASCII(blobSize));
	sha1.update((byte) 0);
}
/**
 * Computes the git blob object id of the given file's current content.
 *
 * @param f
 *            file to hash
 * @return the blob id of the file content
 * @throws IOException
 *             if the file cannot be read
 */
private ObjectId hash(File f) throws IOException {
	SHA1 sha1 = SHA1.newInstance();
	initHash(sha1, f.length());
	byte[] chunk = new byte[8192];
	try (InputStream stream = new FileInputStream(f)) {
		for (int read = stream.read(chunk); read >= 0; read = stream
				.read(chunk)) {
			sha1.update(chunk, 0, read);
		}
	}
	return sha1.toObjectId();
}
/**
 * Verifies that the pre-image content matches the base object id recorded
 * in a binary patch.
 *
 * @param baseId
 *            base object id recorded in the patch
 * @param id
 *            known object id of the current file, or {@code null} to
 *            hash the file on disk instead
 * @param type
 *            change type from the file header
 * @param f
 *            file to hash/inspect when {@code id} is null
 * @param path
 *            path used in the error message
 * @throws PatchApplyException
 *             if the base content does not match
 * @throws IOException
 *             on errors hashing the file
 */
private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
		String path)
		throws PatchApplyException, IOException {
	boolean hashOk = false;
	if (id != null) {
		hashOk = baseId.equals(id);
		if (!hashOk && ChangeType.ADD.equals(type)
				&& ObjectId.zeroId().equals(baseId)) {
			// We create the file first. The OID of an empty file is not the
			// zero id!
			hashOk = Constants.EMPTY_BLOB_ID.equals(id);
		}
	} else {
		if (ObjectId.zeroId().equals(baseId)) {
			// File empty is OK.
			hashOk = !f.exists() || f.length() == 0;
		} else {
			hashOk = baseId.equals(hash(f));
		}
	}
	if (!hashOk) {
		throw new PatchApplyException(MessageFormat
				.format(JGitText.get().applyBinaryBaseOidWrong, path));
	}
}
/**
 * Applies a git binary patch (literal or delta) to {@code f}, verifying
 * both the base and the result object ids recorded in the patch.
 *
 * @param repository
 *            repository containing the file
 * @param path
 *            repository-relative path, for smudge filtering and errors
 * @param f
 *            work tree file to overwrite
 * @param fh
 *            parsed patch header with the binary hunks
 * @param loader
 *            supplies the base content (needed for delta application)
 * @param id
 *            known object id of the base, or {@code null}
 * @param checkOut
 *            EOL/smudge settings for writing the result
 * @throws PatchApplyException
 *             if ids are incomplete or content verification fails
 * @throws IOException
 *             on I/O errors
 */
private void applyBinary(Repository repository, String path, File f,
		FileHeader fh, DirCacheCheckout.StreamSupplier loader, ObjectId id,
		CheckoutMetadata checkOut)
		throws PatchApplyException, IOException {
	if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
		// Abbreviated ids cannot be verified; refuse to apply.
		throw new PatchApplyException(MessageFormat
				.format(JGitText.get().applyBinaryOidTooShort, path));
	}
	BinaryHunk hunk = fh.getForwardBinaryHunk();
	// A BinaryHunk has the start at the "literal" or "delta" token. Data
	// starts on the next line.
	int start = RawParseUtils.nextLF(hunk.getBuffer(),
			hunk.getStartOffset());
	int length = hunk.getEndOffset() - start;
	SHA1 hash = SHA1.newInstance();
	// Write to a buffer and copy to the file only if everything was fine
	TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
	try {
		switch (hunk.getType()) {
		case LITERAL_DEFLATED:
			// This just overwrites the file. We need to check the hash of
			// the base.
			checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f,
					path);
			initHash(hash, hunk.getSize());
			// base85-decode -> inflate -> hash while streaming into the
			// buffer via the checkout filters.
			try (OutputStream out = buffer;
					InputStream inflated = new SHA1InputStream(hash,
							new InflaterInputStream(
									new BinaryHunkInputStream(
											new ByteArrayInputStream(
													hunk.getBuffer(), start,
													length))))) {
				DirCacheCheckout.getContent(repository, path, checkOut,
						() -> inflated, null, out);
				if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) {
					throw new PatchApplyException(MessageFormat.format(
							JGitText.get().applyBinaryResultOidWrong,
							path));
				}
			}
			try (InputStream bufIn = buffer.openInputStream()) {
				Files.copy(bufIn, f.toPath(),
						StandardCopyOption.REPLACE_EXISTING);
			}
			break;
		case DELTA_DEFLATED:
			// Unfortunately delta application needs random access to the
			// base to construct the result.
			byte[] base;
			try (InputStream input = loader.load()) {
				base = IO.readWholeStream(input, 0).array();
			}
			// At least stream the result!
			try (BinaryDeltaInputStream input = new BinaryDeltaInputStream(
					base,
					new InflaterInputStream(new BinaryHunkInputStream(
							new ByteArrayInputStream(hunk.getBuffer(),
									start, length))))) {
				long finalSize = input.getExpectedResultSize();
				initHash(hash, finalSize);
				try (OutputStream out = buffer;
						SHA1InputStream hashed = new SHA1InputStream(hash,
								input)) {
					DirCacheCheckout.getContent(repository, path, checkOut,
							() -> hashed, null, out);
					if (!fh.getNewId().toObjectId()
							.equals(hash.toObjectId())) {
						throw new PatchApplyException(MessageFormat.format(
								JGitText.get().applyBinaryResultOidWrong,
								path));
					}
				}
			}
			try (InputStream bufIn = buffer.openInputStream()) {
				Files.copy(bufIn, f.toPath(),
						StandardCopyOption.REPLACE_EXISTING);
			}
			break;
		default:
			break;
		}
	} finally {
		buffer.destroy();
	}
}
/**
 * Applies the text hunks of {@code fh} to the content {@code rt} and
 * writes the result to {@code f}. Hunks may be shifted up or down if the
 * line numbers recorded in the patch are slightly off, similar to C git's
 * fuzzy matching.
 *
 * @param repository
 *            repository containing the file
 * @param path
 *            repository-relative path of the file
 * @param rt
 *            current (clean-filtered) content of the file
 * @param f
 *            work tree file to rewrite
 * @param fh
 *            parsed patch header with the hunks
 * @param checkOut
 *            EOL/smudge settings for writing the result
 * @throws IOException
 *             on I/O errors
 * @throws PatchApplyException
 *             if a hunk cannot be placed
 */
private void applyText(Repository repository, String path, RawText rt,
		File f, FileHeader fh, CheckoutMetadata checkOut)
		throws IOException, PatchApplyException {
	List<ByteBuffer> oldLines = new ArrayList<>(rt.size());
	for (int i = 0; i < rt.size(); i++) {
		oldLines.add(rt.getRawString(i));
	}
	// newLines is mutated as hunks are applied; oldLines is kept to detect
	// a no-op patch at the end.
	List<ByteBuffer> newLines = new ArrayList<>(oldLines);
	int afterLastHunk = 0;
	int lineNumberShift = 0;
	int lastHunkNewLine = -1;
	for (HunkHeader hh : fh.getHunks()) {
		// We assume hunks to be ordered
		if (hh.getNewStartLine() <= lastHunkNewLine) {
			throw new PatchApplyException(MessageFormat
					.format(JGitText.get().patchApplyException, hh));
		}
		lastHunkNewLine = hh.getNewStartLine();
		byte[] b = new byte[hh.getEndOffset() - hh.getStartOffset()];
		System.arraycopy(hh.getBuffer(), hh.getStartOffset(), b, 0,
				b.length);
		RawText hrt = new RawText(b);
		// hunkLines[0] is the "@@" header line; content starts at index 1.
		List<ByteBuffer> hunkLines = new ArrayList<>(hrt.size());
		for (int i = 0; i < hrt.size(); i++) {
			hunkLines.add(hrt.getRawString(i));
		}
		if (hh.getNewStartLine() == 0) {
			// Must be the single hunk for clearing all content
			if (fh.getHunks().size() == 1
					&& canApplyAt(hunkLines, newLines, 0)) {
				newLines.clear();
				break;
			}
			throw new PatchApplyException(MessageFormat
					.format(JGitText.get().patchApplyException, hh));
		}
		// Hunk lines as reported by the hunk may be off, so don't rely on
		// them.
		int applyAt = hh.getNewStartLine() - 1 + lineNumberShift;
		// But they definitely should not go backwards.
		if (applyAt < afterLastHunk && lineNumberShift < 0) {
			applyAt = hh.getNewStartLine() - 1;
			lineNumberShift = 0;
		}
		if (applyAt < afterLastHunk) {
			throw new PatchApplyException(MessageFormat
					.format(JGitText.get().patchApplyException, hh));
		}
		boolean applies = false;
		int oldLinesInHunk = hh.getLinesContext()
				+ hh.getOldImage().getLinesDeleted();
		if (oldLinesInHunk <= 1) {
			// Don't shift hunks without context lines. Just try the
			// position corrected by the current lineNumberShift, and if
			// that fails, the position recorded in the hunk header.
			applies = canApplyAt(hunkLines, newLines, applyAt);
			if (!applies && lineNumberShift != 0) {
				applyAt = hh.getNewStartLine() - 1;
				applies = applyAt >= afterLastHunk
						&& canApplyAt(hunkLines, newLines, applyAt);
			}
		} else {
			// Try shifting the hunk upwards, then downwards, until its
			// old-side lines match the current content.
			int maxShift = applyAt - afterLastHunk;
			for (int shift = 0; shift <= maxShift; shift++) {
				if (canApplyAt(hunkLines, newLines, applyAt - shift)) {
					applies = true;
					applyAt -= shift;
					break;
				}
			}
			if (!applies) {
				// Try shifting the hunk downwards
				applyAt = hh.getNewStartLine() - 1 + lineNumberShift;
				maxShift = newLines.size() - applyAt - oldLinesInHunk;
				for (int shift = 1; shift <= maxShift; shift++) {
					if (canApplyAt(hunkLines, newLines, applyAt + shift)) {
						applies = true;
						applyAt += shift;
						break;
					}
				}
			}
		}
		if (!applies) {
			throw new PatchApplyException(MessageFormat
					.format(JGitText.get().patchApplyException, hh));
		}
		// Hunk applies at applyAt. Apply it, and update afterLastHunk and
		// lineNumberShift
		lineNumberShift = applyAt - hh.getNewStartLine() + 1;
		int sz = hunkLines.size();
		for (int j = 1; j < sz; j++) {
			ByteBuffer hunkLine = hunkLines.get(j);
			if (!hunkLine.hasRemaining()) {
				// Completely empty line; accept as empty context line
				applyAt++;
				continue;
			}
			switch (hunkLine.array()[hunkLine.position()]) {
			case ' ':
				applyAt++;
				break;
			case '-':
				newLines.remove(applyAt);
				break;
			case '+':
				newLines.add(applyAt++, slice(hunkLine, 1));
				break;
			default:
				break;
			}
		}
		afterLastHunk = applyAt;
	}
	// A trailing null element marks "content ends with a newline"; used so
	// the equality check below also compares final-newline state.
	if (!isNoNewlineAtEndOfFile(fh)) {
		newLines.add(null);
	}
	if (!rt.isMissingNewlineAtEnd()) {
		oldLines.add(null);
	}
	if (oldLines.equals(newLines)) {
		return; // Unchanged; don't touch the file
	}
	// Write to a buffer first and only replace the file when everything
	// succeeded.
	TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
	try {
		try (OutputStream out = buffer) {
			for (Iterator<ByteBuffer> l = newLines.iterator(); l
					.hasNext();) {
				ByteBuffer line = l.next();
				if (line == null) {
					// Must be the marker for the final newline
					break;
				}
				out.write(line.array(), line.position(), line.remaining());
				if (l.hasNext()) {
					out.write('\n');
				}
			}
		}
		try (OutputStream output = new FileOutputStream(f)) {
			DirCacheCheckout.getContent(repository, path, checkOut,
					buffer::openInputStream, null, output);
		}
	} finally {
		buffer.destroy();
	}
	repository.getFS().setExecute(f,
			fh.getNewMode() == FileMode.EXECUTABLE_FILE);
}
/**
 * Checks whether the old-side lines of a hunk (context ' ' and deletion
 * '-') match {@code newLines} starting at index {@code line}.
 *
 * @param hunkLines
 *            lines of the hunk; element 0 is the hunk header and skipped
 * @param newLines
 *            current content of the file being patched
 * @param line
 *            0-based index in {@code newLines} to try the hunk at
 * @return {@code true} if every context/deleted line matches there
 */
private boolean canApplyAt(List<ByteBuffer> hunkLines,
		List<ByteBuffer> newLines, int line) {
	int sz = hunkLines.size();
	int limit = newLines.size();
	int pos = line;
	for (int j = 1; j < sz; j++) {
		ByteBuffer hunkLine = hunkLines.get(j);
		if (!hunkLine.hasRemaining()) {
			// Empty line. Accept as empty context line.
			if (pos >= limit || newLines.get(pos).hasRemaining()) {
				return false;
			}
			pos++;
			continue;
		}
		switch (hunkLine.array()[hunkLine.position()]) {
		case ' ':
		case '-':
			// Old-side line: content (without the prefix byte) must match.
			if (pos >= limit
					|| !newLines.get(pos).equals(slice(hunkLine, 1))) {
				return false;
			}
			pos++;
			break;
		default:
			// Added lines ('+') are not present in the old content.
			break;
		}
	}
	return true;
}
/**
 * Returns a view of {@code b}'s backing array beginning {@code off} bytes
 * past the buffer's current position and ending at its limit.
 *
 * @param b
 *            buffer to take the view of
 * @param off
 *            offset from the buffer's position
 * @return a wrapping buffer over the requested range
 */
private ByteBuffer slice(ByteBuffer b, int off) {
	int start = b.position() + off;
	int length = b.limit() - start;
	return ByteBuffer.wrap(b.array(), start, length);
}
/**
 * Tells whether the patch declares the result has no newline at the end,
 * i.e. the last line of the last hunk is the
 * "\ No newline at end of file" marker.
 *
 * @param fh
 *            parsed patch header to inspect
 * @return {@code true} if the marker is present
 */
private boolean isNoNewlineAtEndOfFile(FileHeader fh) {
	List<? extends HunkHeader> hunks = fh.getHunks();
	if (hunks == null || hunks.isEmpty()) {
		return false;
	}
	HunkHeader tail = hunks.get(hunks.size() - 1);
	int length = tail.getEndOffset() - tail.getStartOffset();
	byte[] raw = new byte[length];
	System.arraycopy(tail.getBuffer(), tail.getStartOffset(), raw, 0,
			length);
	RawText text = new RawText(raw);
	return text.getString(text.size() - 1)
			.equals("\\ No newline at end of file"); //$NON-NLS-1$
}
/**
 * An {@link InputStream} wrapper that feeds every byte it delivers into a
 * {@link SHA1}. The hash is supposed to have been initialized before
 * reading starts.
 */
private static class SHA1InputStream extends InputStream {

	private final SHA1 hash;

	private final InputStream in;

	SHA1InputStream(SHA1 hash, InputStream in) {
		this.hash = hash;
		this.in = in;
	}

	@Override
	public int read() throws IOException {
		int next = in.read();
		if (next >= 0) {
			hash.update((byte) next);
		}
		return next;
	}

	@Override
	public int read(byte[] b, int off, int len) throws IOException {
		int count = in.read(b, off, len);
		if (count > 0) {
			hash.update(b, off, count);
		}
		return count;
	}

	@Override
	public void close() throws IOException {
		in.close();
	}
}
}

View File

@ -42,7 +42,9 @@ public static JGitText get() {
/***/ public String anExceptionOccurredWhileTryingToAddTheIdOfHEAD;
/***/ public String anSSHSessionHasBeenAlreadyCreated;
/***/ public String applyBinaryBaseOidWrong;
/***/ public String applyBinaryForInCoreNotSupported;
/***/ public String applyBinaryOidTooShort;
/***/ public String applyBinaryPatchTypeNotSupported;
/***/ public String applyBinaryResultOidWrong;
/***/ public String applyingCommit;
/***/ public String archiveFormatAlreadyAbsent;
@ -183,6 +185,7 @@ public static JGitText get() {
/***/ public String connectionTimeOut;
/***/ public String contextMustBeNonNegative;
/***/ public String cookieFilePathRelative;
/***/ public String copyFileFailedNullFiles;
/***/ public String corruptionDetectedReReadingAt;
/***/ public String corruptObjectBadDate;
/***/ public String corruptObjectBadEmail;
@ -655,6 +658,7 @@ public static JGitText get() {
/***/ public String renameBranchUnexpectedResult;
/***/ public String renameCancelled;
/***/ public String renameFileFailed;
/***/ public String renameFileFailedNullFiles;
/***/ public String renamesAlreadyFound;
/***/ public String renamesBreakingModifies;
/***/ public String renamesFindingByContent;

View File

@ -0,0 +1,999 @@
/*
* Copyright (C) 2022, Google Inc. and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.patch;
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.text.MessageFormat;
import java.time.Instant;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.zip.InflaterInputStream;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.api.errors.FilterFailedException;
import org.eclipse.jgit.api.errors.PatchApplyException;
import org.eclipse.jgit.api.errors.PatchFormatException;
import org.eclipse.jgit.attributes.Attribute;
import org.eclipse.jgit.attributes.Attributes;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.diff.DiffEntry.ChangeType;
import org.eclipse.jgit.diff.RawText;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheBuilder;
import org.eclipse.jgit.dircache.DirCacheCheckout;
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
import org.eclipse.jgit.dircache.DirCacheCheckout.StreamSupplier;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.IndexWriteException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.patch.FileHeader.PatchType;
import org.eclipse.jgit.revwalk.RevTree;
import org.eclipse.jgit.treewalk.FileTreeIterator;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
import org.eclipse.jgit.treewalk.WorkingTreeOptions;
import org.eclipse.jgit.treewalk.filter.AndTreeFilter;
import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter;
import org.eclipse.jgit.treewalk.filter.PathFilterGroup;
import org.eclipse.jgit.util.FS.ExecutionResult;
import org.eclipse.jgit.util.FileUtils;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.LfsFactory;
import org.eclipse.jgit.util.LfsFactory.LfsInputStream;
import org.eclipse.jgit.util.RawParseUtils;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
import org.eclipse.jgit.util.io.BinaryDeltaInputStream;
import org.eclipse.jgit.util.io.BinaryHunkInputStream;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
import org.eclipse.jgit.util.sha1.SHA1;
/**
* Applies a patch to files and the index.
* <p>
* After instantiating, applyPatch() should be called once.
* </p>
*
* @since 6.3
*/
public class PatchApplier {
/** The tree before applying the patch. Only non-null for inCore operation. */
@Nullable
private final RevTree beforeTree;

// Repository being patched; supplies config, FS and work tree access.
private final Repository repo;

// Writes result blobs/trees; reader is derived from it so freshly
// inserted objects are visible while applying.
private final ObjectInserter inserter;

private final ObjectReader reader;

// Set only by the work-tree constructor; left null by the inCore
// constructor.
private WorkingTreeOptions workingTreeOptions;

// merge.inCoreLimit in bytes (default 10 MiB); set only by the work-tree
// constructor.
private int inCoreSizeLimit;
/**
 * Creates a PatchApplier that applies patches to the given repository's
 * work tree and index.
 *
 * @param repo
 *            repository to apply the patch in
 */
public PatchApplier(Repository repo) {
	this.repo = repo;
	beforeTree = null;
	inserter = repo.newObjectInserter();
	reader = inserter.newReader();
	Config cfg = repo.getConfig();
	workingTreeOptions = cfg.get(WorkingTreeOptions.KEY);
	inCoreSizeLimit = cfg.getInt(ConfigConstants.CONFIG_MERGE_SECTION,
			ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 10 << 20);
}
/**
 * Creates a PatchApplier that works purely against {@code beforeTree};
 * the work tree and index are not touched.
 *
 * @param repo
 *            repository to apply the patch in
 * @param beforeTree
 *            ID of the tree to apply the patch in
 * @param oi
 *            to be used for modifying objects
 * @throws IOException
 *             in case of I/O errors
 */
public PatchApplier(Repository repo, RevTree beforeTree, ObjectInserter oi)
		throws IOException {
	this.repo = repo;
	this.beforeTree = beforeTree;
	inserter = oi;
	reader = oi.newReader();
	// NOTE(review): workingTreeOptions and inCoreSizeLimit stay at their
	// defaults here; inCore code paths must not depend on them.
}
/**
 * A wrapper for returning both the applied tree ID and the applied files
 * list.
 *
 * @since 6.3
 */
public static class Result {

	// Both fields are populated by PatchApplier.applyPatch() once the
	// patch has been fully applied.
	private ObjectId treeId;

	private List<String> paths;

	/**
	 * @return List of modified paths.
	 */
	public List<String> getPaths() {
		return paths;
	}

	/**
	 * @return The applied tree ID.
	 */
	public ObjectId getTreeId() {
		return treeId;
	}
}
/**
 * Applies the given patch
 *
 * @param patchInput
 *            the patch to apply; closed by this method.
 * @return the result of the patch
 * @throws PatchFormatException
 *             if the patch cannot be parsed
 * @throws PatchApplyException
 *             if the patch cannot be applied
 */
public Result applyPatch(InputStream patchInput)
		throws PatchFormatException, PatchApplyException {
	Result result = new Result();
	org.eclipse.jgit.patch.Patch p = new org.eclipse.jgit.patch.Patch();
	try (InputStream inStream = patchInput) {
		p.parse(inStream);
		if (!p.getErrors().isEmpty()) {
			throw new PatchFormatException(p.getErrors());
		}
		// In-core operation builds a throwaway index; otherwise the real
		// index is locked and later committed.
		// NOTE(review): no explicit unlock of the locked DirCache on the
		// exception paths below is visible here — confirm the lock is
		// released on failure.
		DirCache dirCache = (inCore()) ? DirCache.newInCore()
				: repo.lockDirCache();
		DirCacheBuilder dirCacheBuilder = dirCache.builder();
		Set<String> modifiedPaths = new HashSet<>();
		for (org.eclipse.jgit.patch.FileHeader fh : p.getFiles()) {
			ChangeType type = fh.getChangeType();
			switch (type) {
			case ADD: {
				File f = getFile(fh.getNewPath());
				if (f != null) {
					try {
						FileUtils.mkdirs(f.getParentFile(), true);
						FileUtils.createNewFile(f);
					} catch (IOException e) {
						throw new PatchApplyException(MessageFormat.format(
								JGitText.get().createNewFileFailed, f), e);
					}
				}
				apply(fh.getNewPath(), dirCache, dirCacheBuilder, f, fh);
			}
				break;
			case MODIFY:
				apply(fh.getOldPath(), dirCache, dirCacheBuilder,
						getFile(fh.getOldPath()), fh);
				break;
			case DELETE:
				if (!inCore()) {
					File old = getFile(fh.getOldPath());
					if (!old.delete())
						throw new PatchApplyException(MessageFormat.format(
								JGitText.get().cannotDeleteFile, old));
				}
				break;
			case RENAME: {
				File src = getFile(fh.getOldPath());
				File dest = getFile(fh.getNewPath());
				if (!inCore()) {
					/*
					 * this is odd: we rename the file on the FS, but
					 * apply() will write a fresh stream anyway, which will
					 * overwrite if there were hunks in the patch.
					 */
					try {
						FileUtils.mkdirs(dest.getParentFile(), true);
						FileUtils.rename(src, dest,
								StandardCopyOption.ATOMIC_MOVE);
					} catch (IOException e) {
						throw new PatchApplyException(MessageFormat.format(
								JGitText.get().renameFileFailed, src, dest),
								e);
					}
				}
				// In core the pre-image lives at the old path; on disk the
				// file has just been moved to the new path.
				String pathWithOriginalContent = inCore() ?
						fh.getOldPath() : fh.getNewPath();
				apply(pathWithOriginalContent, dirCache, dirCacheBuilder, dest, fh);
				break;
			}
			case COPY: {
				File dest = getFile(fh.getNewPath());
				if (!inCore()) {
					File src = getFile(fh.getOldPath());
					FileUtils.mkdirs(dest.getParentFile(), true);
					Files.copy(src.toPath(), dest.toPath());
				}
				apply(fh.getOldPath(), dirCache, dirCacheBuilder, dest, fh);
				break;
			}
			}
			// Track which paths got new index entries via apply(); stale
			// entries for them are filtered out below.
			if (fh.getChangeType() != ChangeType.DELETE)
				modifiedPaths.add(fh.getNewPath());
			if (fh.getChangeType() != ChangeType.COPY
					&& fh.getChangeType() != ChangeType.ADD)
				modifiedPaths.add(fh.getOldPath());
		}
		// We processed the patch. Now add things that weren't changed.
		for (int i = 0; i < dirCache.getEntryCount(); i++) {
			DirCacheEntry dce = dirCache.getEntry(i);
			if (!modifiedPaths.contains(dce.getPathString())
					|| dce.getStage() != DirCacheEntry.STAGE_0)
				dirCacheBuilder.add(dce);
		}
		if (inCore())
			dirCacheBuilder.finish();
		else if (!dirCacheBuilder.commit()) {
			throw new IndexWriteException();
		}
		result.treeId = dirCache.writeTree(inserter);
		result.paths = modifiedPaths.stream().sorted()
				.collect(Collectors.toList());
	} catch (IOException e) {
		throw new PatchApplyException(MessageFormat.format(
				JGitText.get().patchApplyException, e.getMessage()), e);
	}
	return result;
}
/**
 * Resolves {@code path} against the work tree, or returns {@code null}
 * when operating in core (no work tree involved).
 *
 * @param path
 *            path relative to the work tree root
 * @return the work tree file, or {@code null} in inCore mode
 */
private File getFile(String path) {
	if (inCore()) {
		return null;
	}
	return new File(repo.getWorkTree(), path);
}
/* returns null if the path is not found. */
@Nullable
private TreeWalk getTreeWalkForFile(String path, DirCache cache)
		throws PatchApplyException {
	// NOTE(review): the returned TreeWalk is not closed in this method;
	// confirm the caller is responsible for closing it.
	try {
		if (inCore()) {
			// Only this branch may return null.
			// TODO: it would be nice if we could return a TreeWalk at EOF
			// iso. null.
			return TreeWalk.forPath(repo, path, beforeTree);
		}
		TreeWalk walk = new TreeWalk(repo);
		// Use a TreeWalk with a DirCacheIterator to pick up the correct
		// clean/smudge filters.
		int cacheTreeIdx = walk.addTree(new DirCacheIterator(cache));
		FileTreeIterator files = new FileTreeIterator(repo);
		// The file tree must end up at FILE_TREE_INDEX; assert that.
		if (FILE_TREE_INDEX != walk.addTree(files))
			throw new IllegalStateException();
		walk.setFilter(AndTreeFilter.create(
				PathFilterGroup.createFromStrings(path),
				new NotIgnoredFilter(FILE_TREE_INDEX)));
		walk.setOperationType(OperationType.CHECKIN_OP);
		walk.setRecursive(true);
		files.setDirCacheIterator(walk, cacheTreeIdx);
		return walk;
	} catch (IOException e) {
		throw new PatchApplyException(MessageFormat.format(
				JGitText.get().patchApplyException, e.getMessage()), e);
	}
}

// Tree index of the FileTreeIterator in the walk built above (tree 0 is
// the DirCacheIterator).
private static final int FILE_TREE_INDEX = 1;
/**
 * Applies patch to a single file.
 *
 * @param pathWithOriginalContent
 *            The path to use for the pre-image. Also determines CRLF and
 *            smudge settings.
 * @param dirCache
 *            Dircache to read existing data from.
 * @param dirCacheBuilder
 *            Builder for Dircache to write new data to.
 * @param f
 *            The file to update with new contents. Null for inCore usage.
 * @param fh
 *            The patch header.
 * @throws PatchApplyException
 *             if the patch cannot be applied to this file
 */
private void apply(String pathWithOriginalContent, DirCache dirCache,
		DirCacheBuilder dirCacheBuilder, @Nullable File f,
		org.eclipse.jgit.patch.FileHeader fh) throws PatchApplyException {
	if (PatchType.BINARY.equals(fh.getPatchType())) {
		// This patch type just says "something changed". We can't do
		// anything with that.
		// Maybe this should return an error code, though?
		return;
	}
	try {
		// NOTE(review): walk is never closed in this method — confirm
		// whether this leaks the TreeWalk's resources.
		TreeWalk walk = getTreeWalkForFile(pathWithOriginalContent, dirCache);
		boolean loadedFromTreeWalk = false;
		// CR-LF handling is determined by whether the file or the patch
		// have CR-LF line endings.
		boolean convertCrLf = inCore() || needsCrLfConversion(f, fh);
		EolStreamType streamType = convertCrLf ? EolStreamType.TEXT_CRLF
				: EolStreamType.DIRECT;
		String smudgeFilterCommand = null;
		StreamSupplier fileStreamSupplier = null;
		ObjectId fileId = ObjectId.zeroId();
		if (walk == null) {
			// For new files with inCore()==true, TreeWalk.forPath can be
			// null. Stay with defaults.
		} else if (inCore()) {
			// Pre-image comes from the beforeTree blob, after the LFS
			// smudge filter.
			fileId = walk.getObjectId(0);
			ObjectLoader loader = LfsFactory.getInstance()
					.applySmudgeFilter(repo, reader.open(fileId, OBJ_BLOB),
							null);
			byte[] data = loader.getBytes();
			convertCrLf = RawText.isCrLfText(data);
			fileStreamSupplier = () -> new ByteArrayInputStream(data);
			streamType = convertCrLf ? EolStreamType.TEXT_CRLF
					: EolStreamType.DIRECT;
			smudgeFilterCommand = walk
					.getFilterCommand(Constants.ATTR_FILTER_TYPE_SMUDGE);
			loadedFromTreeWalk = true;
		} else if (walk.next()) {
			// If the file on disk has no newline characters,
			// convertCrLf will be false. In that case we want to honor the
			// normal git settings.
			streamType = convertCrLf ? EolStreamType.TEXT_CRLF
					: walk.getEolStreamType(OperationType.CHECKOUT_OP);
			smudgeFilterCommand = walk
					.getFilterCommand(Constants.ATTR_FILTER_TYPE_SMUDGE);
			FileTreeIterator file = walk.getTree(FILE_TREE_INDEX,
					FileTreeIterator.class);
			if (file != null) {
				fileId = file.getEntryObjectId();
				fileStreamSupplier = file::openEntryStream;
				loadedFromTreeWalk = true;
			} else {
				throw new PatchApplyException(MessageFormat.format(
						JGitText.get().cannotReadFile,
						pathWithOriginalContent));
			}
		}
		if (fileStreamSupplier == null)
			fileStreamSupplier = inCore() ? InputStream::nullInputStream
					: () -> new FileInputStream(f);
		FileMode fileMode = fh.getNewMode() != null ? fh.getNewMode()
				: FileMode.REGULAR_FILE;
		ContentStreamLoader resultStreamLoader;
		if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
			// binary patches are processed in a streaming fashion. Some
			// binary patches do random access on the input data, so we
			// can't overwrite the file while we're streaming.
			resultStreamLoader = applyBinary(pathWithOriginalContent, f, fh,
					fileStreamSupplier, fileId);
		} else {
			String filterCommand = walk != null
					? walk.getFilterCommand(
							Constants.ATTR_FILTER_TYPE_CLEAN)
					: null;
			RawText raw = getRawText(f, fileStreamSupplier, fileId,
					pathWithOriginalContent, loadedFromTreeWalk, filterCommand,
					convertCrLf);
			resultStreamLoader = applyText(raw, fh);
		}
		if (f != null) {
			// Write to a buffer and copy to the file only if everything was
			// fine.
			TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
			try {
				CheckoutMetadata metadata = new CheckoutMetadata(streamType,
						smudgeFilterCommand);
				try (TemporaryBuffer buf = buffer) {
					DirCacheCheckout.getContent(repo, pathWithOriginalContent,
							metadata, resultStreamLoader.supplier, workingTreeOptions,
							buf);
				}
				try (InputStream bufIn = buffer.openInputStream()) {
					Files.copy(bufIn, f.toPath(),
							StandardCopyOption.REPLACE_EXISTING);
				}
			} finally {
				buffer.destroy();
			}
			repo.getFS().setExecute(f,
					fileMode == FileMode.EXECUTABLE_FILE);
		}
		// Always record the result in the index, both for work-tree and
		// in-core operation.
		Instant lastModified = f == null ? null
				: repo.getFS().lastModifiedInstant(f);
		Attributes attributes = walk != null ? walk.getAttributes()
				: new Attributes();
		DirCacheEntry dce = insertToIndex(
				resultStreamLoader.supplier.load(),
				fh.getNewPath().getBytes(StandardCharsets.UTF_8), fileMode,
				lastModified, resultStreamLoader.length,
				attributes.get(Constants.ATTR_FILTER));
		dirCacheBuilder.add(dce);
		// For binary patches, verify the resulting object id against the
		// one recorded in the patch when available.
		if (PatchType.GIT_BINARY.equals(fh.getPatchType())
				&& fh.getNewId() != null && fh.getNewId().isComplete()
				&& !fh.getNewId().toObjectId().equals(dce.getObjectId())) {
			throw new PatchApplyException(MessageFormat.format(
					JGitText.get().applyBinaryResultOidWrong,
					pathWithOriginalContent));
		}
	} catch (IOException | UnsupportedOperationException e) {
		throw new PatchApplyException(MessageFormat.format(
				JGitText.get().patchApplyException, e.getMessage()), e);
	}
}
private DirCacheEntry insertToIndex(InputStream input, byte[] path,
FileMode fileMode, Instant lastModified, long length,
Attribute lfsAttribute) throws IOException {
DirCacheEntry dce = new DirCacheEntry(path, DirCacheEntry.STAGE_0);
dce.setFileMode(fileMode);
if (lastModified != null) {
dce.setLastModified(lastModified);
}
dce.setLength(length);
try (LfsInputStream is = org.eclipse.jgit.util.LfsFactory.getInstance()
.applyCleanFilter(repo, input, length, lfsAttribute)) {
dce.setObjectId(inserter.insert(OBJ_BLOB, is.getLength(), is));
}
return dce;
}
/**
* Gets the raw text of the given file.
*
* @param file
* to read from
* @param fileStreamSupplier
* if fromTreewalk, the stream of the file content
* @param fileId
* of the file
* @param path
* of the file
* @param fromTreeWalk
* whether the file was loaded by a {@link TreeWalk}
* @param filterCommand
* for reading the file content
* @param convertCrLf
* whether a CR-LF conversion is needed
* @return the result raw text
* @throws IOException
* in case of filtering issues
*/
private RawText getRawText(@Nullable File file,
StreamSupplier fileStreamSupplier, ObjectId fileId, String path,
boolean fromTreeWalk, String filterCommand, boolean convertCrLf)
throws IOException {
if (fromTreeWalk) {
// Can't use file.openEntryStream() as we cannot control its CR-LF
// conversion.
try (InputStream input = filterClean(repo, path,
fileStreamSupplier.load(), convertCrLf, filterCommand)) {
return new RawText(org.eclipse.jgit.util.IO
.readWholeStream(input, 0).array());
}
}
if (convertCrLf) {
try (InputStream input = EolStreamTypeUtil.wrapInputStream(
fileStreamSupplier.load(), EolStreamType.TEXT_LF)) {
return new RawText(org.eclipse.jgit.util.IO
.readWholeStream(input, 0).array());
}
}
if (inCore() && fileId.equals(ObjectId.zeroId())) {
return new RawText(new byte[] {});
}
return new RawText(file);
}
private InputStream filterClean(Repository repository, String path,
InputStream fromFile, boolean convertCrLf, String filterCommand)
throws IOException {
InputStream input = fromFile;
if (convertCrLf) {
input = EolStreamTypeUtil.wrapInputStream(input,
EolStreamType.TEXT_LF);
}
if (org.eclipse.jgit.util.StringUtils.isEmptyOrNull(filterCommand)) {
return input;
}
if (FilterCommandRegistry.isRegistered(filterCommand)) {
LocalFile buffer = new org.eclipse.jgit.util.TemporaryBuffer.LocalFile(
null, inCoreSizeLimit);
FilterCommand command = FilterCommandRegistry.createFilterCommand(
filterCommand, repository, input, buffer);
while (command.run() != -1) {
// loop as long as command.run() tells there is work to do
}
return buffer.openInputStreamWithAutoDestroy();
}
org.eclipse.jgit.util.FS fs = repository.getFS();
ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand,
new String[0]);
filterProcessBuilder.directory(repository.getWorkTree());
filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
repository.getDirectory().getAbsolutePath());
ExecutionResult result;
try {
result = fs.execute(filterProcessBuilder, input);
} catch (IOException | InterruptedException e) {
throw new IOException(
new FilterFailedException(e, filterCommand, path));
}
int rc = result.getRc();
if (rc != 0) {
throw new IOException(new FilterFailedException(rc, filterCommand,
path, result.getStdout().toByteArray(4096),
org.eclipse.jgit.util.RawParseUtils
.decode(result.getStderr().toByteArray(4096))));
}
return result.getStdout().openInputStreamWithAutoDestroy();
}
private boolean needsCrLfConversion(File f,
org.eclipse.jgit.patch.FileHeader fileHeader) throws IOException {
if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
return false;
}
if (!hasCrLf(fileHeader)) {
try (InputStream input = new FileInputStream(f)) {
return RawText.isCrLfText(input);
}
}
return false;
}
private static boolean hasCrLf(
org.eclipse.jgit.patch.FileHeader fileHeader) {
if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
return false;
}
for (org.eclipse.jgit.patch.HunkHeader header : fileHeader.getHunks()) {
byte[] buf = header.getBuffer();
int hunkEnd = header.getEndOffset();
int lineStart = header.getStartOffset();
while (lineStart < hunkEnd) {
int nextLineStart = RawParseUtils.nextLF(buf, lineStart);
if (nextLineStart > hunkEnd) {
nextLineStart = hunkEnd;
}
if (nextLineStart <= lineStart) {
break;
}
if (nextLineStart - lineStart > 1) {
char first = (char) (buf[lineStart] & 0xFF);
if (first == ' ' || first == '-') {
// It's an old line. Does it end in CR-LF?
if (buf[nextLineStart - 2] == '\r') {
return true;
}
}
}
lineStart = nextLineStart;
}
}
return false;
}
private ObjectId hash(File f) throws IOException {
try (FileInputStream fis = new FileInputStream(f);
SHA1InputStream shaStream = new SHA1InputStream(fis,
f.length())) {
shaStream.transferTo(OutputStream.nullOutputStream());
return shaStream.getHash().toObjectId();
}
}
private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
String path) throws PatchApplyException, IOException {
boolean hashOk = false;
if (id != null) {
hashOk = baseId.equals(id);
if (!hashOk && ChangeType.ADD.equals(type)
&& ObjectId.zeroId().equals(baseId)) {
// We create a new file. The OID of an empty file is not the
// zero id!
hashOk = Constants.EMPTY_BLOB_ID.equals(id);
}
} else if (!inCore()) {
if (ObjectId.zeroId().equals(baseId)) {
// File empty is OK.
hashOk = !f.exists() || f.length() == 0;
} else {
hashOk = baseId.equals(hash(f));
}
}
if (!hashOk) {
throw new PatchApplyException(MessageFormat
.format(JGitText.get().applyBinaryBaseOidWrong, path));
}
}
	/**
	 * Tells whether this patch application operates entirely in memory
	 * (against {@code beforeTree}) instead of the working tree.
	 *
	 * @return {@code true} if operating in-core
	 */
	private boolean inCore() {
		return beforeTree != null;
	}
/**
* Provide stream, along with the length of the object. We use this once to
* patch to the working tree, once to write the index. For on-disk
* operation, presumably we could stream to the destination file, and then
* read back the stream from disk. We don't because it is more complex.
*/
private static class ContentStreamLoader {
StreamSupplier supplier;
long length;
ContentStreamLoader(StreamSupplier supplier, long length) {
this.supplier = supplier;
this.length = length;
}
}
	/**
	 * Applies a binary patch: either a "literal" full replacement of the
	 * content, or a "delta" computed against the old content.
	 *
	 * @param path
	 *            pathname of the file to write, used for error reporting
	 * @param f
	 *            destination file; may be {@code null} for in-core operation
	 * @param fh
	 *            the patch to apply
	 * @param inputSupplier
	 *            a supplier for the contents of the old file
	 * @param id
	 *            SHA1 for the old content; may be {@code null} if unknown
	 * @return a loader for the new content
	 * @throws PatchApplyException
	 *             on incomplete OIDs or a base OID mismatch
	 * @throws IOException
	 *             if the old content cannot be read
	 * @throws UnsupportedOperationException
	 *             on an unknown binary hunk type
	 */
	private ContentStreamLoader applyBinary(String path, File f,
			org.eclipse.jgit.patch.FileHeader fh, StreamSupplier inputSupplier,
			ObjectId id) throws PatchApplyException, IOException,
			UnsupportedOperationException {
		// Binary patches can only be verified against full OIDs.
		if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
			throw new PatchApplyException(MessageFormat
					.format(JGitText.get().applyBinaryOidTooShort, path));
		}
		org.eclipse.jgit.patch.BinaryHunk hunk = fh.getForwardBinaryHunk();
		// A BinaryHunk has the start at the "literal" or "delta" token. Data
		// starts on the next line.
		int start = RawParseUtils.nextLF(hunk.getBuffer(),
				hunk.getStartOffset());
		int length = hunk.getEndOffset() - start;
		switch (hunk.getType()) {
		case LITERAL_DEFLATED: {
			// This just overwrites the file. We need to check the hash of
			// the base.
			checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f,
					path);
			// The supplier re-creates the decoding stream chain each time it
			// is asked, so the content can be read more than once.
			StreamSupplier supp = () -> new InflaterInputStream(
					new BinaryHunkInputStream(new ByteArrayInputStream(
							hunk.getBuffer(), start, length)));
			return new ContentStreamLoader(supp, hunk.getSize());
		}
		case DELTA_DEFLATED: {
			// Unfortunately delta application needs random access to the
			// base to construct the result.
			byte[] base;
			try (InputStream in = inputSupplier.load()) {
				base = IO.readWholeStream(in, 0).array();
			}
			// At least stream the result! We don't have to close these streams,
			// as they don't hold resources.
			StreamSupplier supp = () -> new BinaryDeltaInputStream(base,
					new InflaterInputStream(
							new BinaryHunkInputStream(new ByteArrayInputStream(
									hunk.getBuffer(), start, length))));
			// This just reads the first bits of the stream.
			long finalSize = ((BinaryDeltaInputStream) supp.load()).getExpectedResultSize();
			return new ContentStreamLoader(supp, finalSize);
		}
		default:
			throw new UnsupportedOperationException(MessageFormat.format(
					JGitText.get().applyBinaryPatchTypeNotSupported,
					hunk.getType().name()));
		}
	}
	/**
	 * Applies all hunks of a text patch to the given pre-image and returns a
	 * loader for the resulting content.
	 * <p>
	 * Line numbers recorded in the hunk headers may be stale, so each hunk
	 * with context is shifted up or down within the window between the
	 * previous hunk and the end of the file until its old lines match;
	 * hunks without context are not shifted.
	 *
	 * @param rt
	 *            pre-image content
	 * @param fh
	 *            file header whose hunks to apply; hunks must be ordered by
	 *            ascending new start line
	 * @return a loader for the patched content
	 * @throws IOException
	 *             if buffering the result fails
	 * @throws PatchApplyException
	 *             if a hunk is out of order or does not apply anywhere
	 */
	private ContentStreamLoader applyText(RawText rt,
			org.eclipse.jgit.patch.FileHeader fh)
			throws IOException, PatchApplyException {
		// Work line-wise; newLines is mutated as hunks are applied.
		List<ByteBuffer> oldLines = new ArrayList<>(rt.size());
		for (int i = 0; i < rt.size(); i++) {
			oldLines.add(rt.getRawString(i));
		}
		List<ByteBuffer> newLines = new ArrayList<>(oldLines);
		// First line in newLines that the next hunk may touch.
		int afterLastHunk = 0;
		// Accumulated difference between where hunks claimed to apply and
		// where they actually did.
		int lineNumberShift = 0;
		int lastHunkNewLine = -1;
		for (org.eclipse.jgit.patch.HunkHeader hh : fh.getHunks()) {
			// We assume hunks to be ordered
			if (hh.getNewStartLine() <= lastHunkNewLine) {
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			lastHunkNewLine = hh.getNewStartLine();
			byte[] b = new byte[hh.getEndOffset() - hh.getStartOffset()];
			System.arraycopy(hh.getBuffer(), hh.getStartOffset(), b, 0,
					b.length);
			RawText hrt = new RawText(b);
			// hunkLines.get(0) is the "@@ ... @@" header line; the actual
			// patch lines start at index 1.
			List<ByteBuffer> hunkLines = new ArrayList<>(hrt.size());
			for (int i = 0; i < hrt.size(); i++) {
				hunkLines.add(hrt.getRawString(i));
			}
			if (hh.getNewStartLine() == 0) {
				// Must be the single hunk for clearing all content
				if (fh.getHunks().size() == 1
						&& canApplyAt(hunkLines, newLines, 0)) {
					newLines.clear();
					break;
				}
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			// Hunk lines as reported by the hunk may be off, so don't rely on
			// them.
			int applyAt = hh.getNewStartLine() - 1 + lineNumberShift;
			// But they definitely should not go backwards.
			if (applyAt < afterLastHunk && lineNumberShift < 0) {
				applyAt = hh.getNewStartLine() - 1;
				lineNumberShift = 0;
			}
			if (applyAt < afterLastHunk) {
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			boolean applies = false;
			int oldLinesInHunk = hh.getLinesContext()
					+ hh.getOldImage().getLinesDeleted();
			if (oldLinesInHunk <= 1) {
				// Don't shift hunks without context lines. Just try the
				// position corrected by the current lineNumberShift, and if
				// that fails, the position recorded in the hunk header.
				applies = canApplyAt(hunkLines, newLines, applyAt);
				if (!applies && lineNumberShift != 0) {
					applyAt = hh.getNewStartLine() - 1;
					applies = applyAt >= afterLastHunk
							&& canApplyAt(hunkLines, newLines, applyAt);
				}
			} else {
				// Shift upwards first (towards the previous hunk) ...
				int maxShift = applyAt - afterLastHunk;
				for (int shift = 0; shift <= maxShift; shift++) {
					if (canApplyAt(hunkLines, newLines, applyAt - shift)) {
						applies = true;
						applyAt -= shift;
						break;
					}
				}
				if (!applies) {
					// Try shifting the hunk downwards
					applyAt = hh.getNewStartLine() - 1 + lineNumberShift;
					maxShift = newLines.size() - applyAt - oldLinesInHunk;
					for (int shift = 1; shift <= maxShift; shift++) {
						if (canApplyAt(hunkLines, newLines, applyAt + shift)) {
							applies = true;
							applyAt += shift;
							break;
						}
					}
				}
			}
			if (!applies) {
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			// Hunk applies at applyAt. Apply it, and update afterLastHunk and
			// lineNumberShift
			lineNumberShift = applyAt - hh.getNewStartLine() + 1;
			int sz = hunkLines.size();
			for (int j = 1; j < sz; j++) {
				ByteBuffer hunkLine = hunkLines.get(j);
				if (!hunkLine.hasRemaining()) {
					// Completely empty line; accept as empty context line
					applyAt++;
					continue;
				}
				switch (hunkLine.array()[hunkLine.position()]) {
				case ' ':
					// Context line: keep, advance.
					applyAt++;
					break;
				case '-':
					newLines.remove(applyAt);
					break;
				case '+':
					// Insert the line content without its '+' marker.
					newLines.add(applyAt++, slice(hunkLine, 1));
					break;
				default:
					break;
				}
			}
			afterLastHunk = applyAt;
		}
		// A trailing null element marks "content ends with a newline".
		if (!isNoNewlineAtEndOfFile(fh)) {
			newLines.add(null);
		}
		if (!rt.isMissingNewlineAtEnd()) {
			oldLines.add(null);
		}
		// We could check if old == new, but the short-circuiting complicates
		// logic for inCore patching, so just write the new thing regardless.
		TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
		try (OutputStream out = buffer) {
			for (Iterator<ByteBuffer> l = newLines.iterator(); l.hasNext();) {
				ByteBuffer line = l.next();
				if (line == null) {
					// Must be the marker for the final newline
					break;
				}
				out.write(line.array(), line.position(), line.remaining());
				if (l.hasNext()) {
					out.write('\n');
				}
			}
			return new ContentStreamLoader(buffer::openInputStream,
					buffer.length());
		}
	}
private boolean canApplyAt(List<ByteBuffer> hunkLines,
List<ByteBuffer> newLines, int line) {
int sz = hunkLines.size();
int limit = newLines.size();
int pos = line;
for (int j = 1; j < sz; j++) {
ByteBuffer hunkLine = hunkLines.get(j);
if (!hunkLine.hasRemaining()) {
// Empty line. Accept as empty context line.
if (pos >= limit || newLines.get(pos).hasRemaining()) {
return false;
}
pos++;
continue;
}
switch (hunkLine.array()[hunkLine.position()]) {
case ' ':
case '-':
if (pos >= limit
|| !newLines.get(pos).equals(slice(hunkLine, 1))) {
return false;
}
pos++;
break;
default:
break;
}
}
return true;
}
private ByteBuffer slice(ByteBuffer b, int off) {
int newOffset = b.position() + off;
return ByteBuffer.wrap(b.array(), newOffset, b.limit() - newOffset);
}
private boolean isNoNewlineAtEndOfFile(
org.eclipse.jgit.patch.FileHeader fh) {
List<? extends org.eclipse.jgit.patch.HunkHeader> hunks = fh.getHunks();
if (hunks == null || hunks.isEmpty()) {
return false;
}
org.eclipse.jgit.patch.HunkHeader lastHunk = hunks
.get(hunks.size() - 1);
byte[] buf = new byte[lastHunk.getEndOffset()
- lastHunk.getStartOffset()];
System.arraycopy(lastHunk.getBuffer(), lastHunk.getStartOffset(), buf,
0, buf.length);
RawText lhrt = new RawText(buf);
return lhrt.getString(lhrt.size() - 1)
.equals("\\ No newline at end of file"); // $NON-NLS-1$,
// $NON-NLS-2$
}
/**
* An {@link InputStream} that updates a {@link SHA1} on every byte read.
*/
private static class SHA1InputStream extends InputStream {
private final SHA1 hash;
private final InputStream in;
SHA1InputStream(InputStream in, long size) {
hash = SHA1.newInstance();
hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB));
hash.update((byte) ' ');
hash.update(Constants.encodeASCII(size));
hash.update((byte) 0);
this.in = in;
}
public SHA1 getHash() {
return hash;
}
@Override
public int read() throws IOException {
int b = in.read();
if (b >= 0) {
hash.update((byte) b);
}
return b;
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
int n = in.read(b, off, len);
if (n > 0) {
hash.update(b, off, n);
}
return n;
}
@Override
public void close() throws IOException {
in.close();
}
}
}