Merge branch 'master' into stable-5.12

* master:
  RepoCommand: Do not set 'branch' if the revision is a tag
  pgm: rewrite parents when --parents flag is passed
  ApplyCommand: fix "no newline at end" detection
  ApplyCommand: handle completely empty context lines in text patches
  ApplyCommand: use byte arrays for text patches, not strings
  ApplyCommand: support binary patches
  ApplyCommand: add a stream to apply a delta patch
  ApplyCommand: add streams to read/write binary patch hunks
  ApplyCommand: add a base-85 codec
  ApplyCommand: convert to git internal format before applying patch
  SSH config: fix whitespace handling
  SSH config: fix negated patterns
  Fix @since tag for introduction of PUBKEY_ACCEPTED_ALGORITHMS
  Prepare 5.11.2-SNAPSHOT builds
  JGit v5.11.1.202105131744-r
  Add a cgit interoperability test for LockFile
  Add TemporaryBuffer.toString(int limit)
  LockFile: create OutputStream only when needed
  Add git config for conflict style merge/diff3

Change-Id: If7751ff99079eaea31ed1fce811d141ecf209727
Signed-off-by: Matthias Sohn <matthias.sohn@sap.com>
Matthias Sohn 2021-05-26 17:21:02 +02:00
commit 2f2f6e51b9
61 changed files with 2167 additions and 126 deletions
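
Several of the merged ApplyCommand changes (binary patch support, CR-LF handling, filters) are exercised by the new tests below. As a minimal sketch, assuming only the API calls that appear in those tests (Git.apply(), setPatch(InputStream), ApplyResult.getUpdatedFiles()), applying a patch file to a working tree could look like this; the class name and patch path are illustrative, not part of the change:

import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import org.eclipse.jgit.api.ApplyResult;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.lib.Repository;

public class ApplyPatchExample {

	/** Applies a patch (text or, as of this merge, binary) to the work tree of repo. */
	public static void applyPatch(Repository repo, Path patchFile) throws Exception {
		try (Git git = new Git(repo);
				InputStream patch = Files.newInputStream(patchFile)) {
			ApplyResult result = git.apply().setPatch(patch).call();
			for (File updated : result.getUpdatedFiles()) {
				System.out.println("patched: " + updated); // files touched by the patch
			}
		}
	}
}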

View File

@ -0,0 +1,75 @@
/*
* Copyright (C) 2021, kylezhao <kylezhao@tencent.com> and others.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.pgm;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.lib.CLIRepositoryTestCase;
import org.eclipse.jgit.revwalk.RevCommit;
import org.junit.Before;
import org.junit.Test;
public class RevListTest extends CLIRepositoryTestCase {
private Git git;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
git = new Git(db);
}
@Test
public void testWithParentsFlag() throws Exception {
List<RevCommit> commits = createCommitsForParentsFlag(git);
String result = toString(
execute("git rev-list HEAD --parents -- Test.txt"));
String expect = toString(
commits.get(3).name() + ' ' + commits.get(1).name(),
commits.get(1).name());
assertEquals(expect, result);
}
@Test
public void testWithoutParentsFlag() throws Exception {
List<RevCommit> commits = createCommitsForParentsFlag(git);
String result = toString(execute("git rev-list HEAD -- Test.txt"));
String expect = toString(commits.get(3).name(), commits.get(1).name());
assertEquals(expect, result);
}
private List<RevCommit> createCommitsForParentsFlag(Git git)
throws Exception {
List<RevCommit> commits = new ArrayList<>();
writeTrashFile("Test1.txt", "Hello world");
git.add().addFilepattern("Test1.txt").call();
commits.add(git.commit().setMessage("commit#0").call());
writeTrashFile("Test.txt", "Hello world!");
git.add().addFilepattern("Test.txt").call();
commits.add(git.commit().setMessage("commit#1").call());
writeTrashFile("Test1.txt", "Hello world!!");
git.add().addFilepattern("Test1.txt").call();
commits.add(git.commit().setMessage("commit#2").call());
writeTrashFile("Test.txt", "Hello world!!!");
git.add().addFilepattern("Test.txt").call();
commits.add(git.commit().setMessage("commit#3").call());
return commits;
}
}

View File

@ -129,6 +129,9 @@ protected void run() throws Exception {
walk.setTreeFilter(AndTreeFilter.create(pathFilter,
TreeFilter.ANY_DIFF));
}
if (parents) {
walk.setRewriteParents(true);
}
if (revLimiter.size() == 1)
walk.setRevFilter(revLimiter.get(0));

View File

@ -1,5 +1,5 @@
/*
* Copyright (C) 2008, 2017 Google Inc. and others
* Copyright (C) 2008, 2021 Google Inc. and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@ -517,4 +517,76 @@ public void testEnVarSubstitution() throws Exception {
assertEquals("/tmp/${TST_VAR/bar",
c.getValue(SshConstants.IDENTITY_AGENT));
}
@Test
public void testNegativeMatch() throws Exception {
config("Host foo.bar !foobar.baz *.baz\n" + "Port 29418\n");
Host h = osc.lookup("foo.bar");
assertNotNull(h);
assertEquals(29418, h.getPort());
h = osc.lookup("foobar.baz");
assertNotNull(h);
assertEquals(22, h.getPort());
h = osc.lookup("foo.baz");
assertNotNull(h);
assertEquals(29418, h.getPort());
}
@Test
public void testNegativeMatch2() throws Exception {
// Negative match after the positive match.
config("Host foo.bar *.baz !foobar.baz\n" + "Port 29418\n");
Host h = osc.lookup("foo.bar");
assertNotNull(h);
assertEquals(29418, h.getPort());
h = osc.lookup("foobar.baz");
assertNotNull(h);
assertEquals(22, h.getPort());
h = osc.lookup("foo.baz");
assertNotNull(h);
assertEquals(29418, h.getPort());
}
@Test
public void testNoMatch() throws Exception {
config("Host !host1 !host2\n" + "Port 29418\n");
Host h = osc.lookup("host1");
assertNotNull(h);
assertEquals(22, h.getPort());
h = osc.lookup("host2");
assertNotNull(h);
assertEquals(22, h.getPort());
h = osc.lookup("host3");
assertNotNull(h);
assertEquals(22, h.getPort());
}
@Test
public void testMultipleMatch() throws Exception {
config("Host foo.bar\nPort 29418\nIdentityFile /foo\n\n"
+ "Host *.bar\nPort 22\nIdentityFile /bar\n"
+ "Host foo.bar\nPort 47\nIdentityFile /baz\n");
Host h = osc.lookup("foo.bar");
assertNotNull(h);
assertEquals(29418, h.getPort());
assertArrayEquals(new Object[] { "/foo", "/bar", "/baz" },
h.getConfig().getValues("IdentityFile"));
}
@Test
public void testWhitespace() throws Exception {
config("Host foo \tbar baz\nPort 29418\n");
Host h = osc.lookup("foo");
assertNotNull(h);
assertEquals(29418, h.getPort());
h = osc.lookup("bar");
assertNotNull(h);
assertEquals(29418, h.getPort());
h = osc.lookup("baz");
assertNotNull(h);
assertEquals(29418, h.getPort());
h = osc.lookup("\tbar");
assertNotNull(h);
assertEquals(22, h.getPort());
}
}

View File

@ -1,5 +1,6 @@
#Sat Dec 20 21:21:24 CET 2008
eclipse.preferences.version=1
encoding//tst-rsrc/org/eclipse/jgit/diff/umlaut.patch=ISO-8859-1
encoding//tst-rsrc/org/eclipse/jgit/diff/umlaut_PostImage=ISO-8859-1
encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_BothISO88591.patch=ISO-8859-1
encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_Convert.patch=ISO-8859-1
encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_DiffCc.patch=ISO-8859-1

View File

@ -0,0 +1,57 @@
/*
* Copyright (C) 2021 SAP SE and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.internal.storage.file;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FS.ExecutionResult;
import org.junit.Test;
/**
* Unit tests of {@link LockFile} testing interoperability with C git
*/
public class CGitLockFileTest extends RepositoryTestCase {
@Test
public void testLockedTwiceFails() throws Exception {
try (Git git = new Git(db)) {
writeTrashFile("file.txt", "content");
git.add().addFilepattern("file.txt").call();
RevCommit commit1 = git.commit().setMessage("create file").call();
assertNotNull(commit1);
writeTrashFile("file.txt", "content2");
git.add().addFilepattern("file.txt").call();
assertNotNull(git.commit().setMessage("edit file").call());
LockFile lf = new LockFile(db.getIndexFile());
assertTrue(lf.lock());
try {
String[] command = new String[] { "git", "checkout",
commit1.name() };
ProcessBuilder pb = new ProcessBuilder(command);
pb.directory(db.getWorkTree());
ExecutionResult result = FS.DETECTED.execute(pb, null);
assertNotEquals(0, result.getRc());
String err = result.getStderr().toString().split("\\R")[0];
assertTrue(err.matches(
"fatal: Unable to create .*/\\.git/index\\.lock': File exists\\."));
} finally {
lf.unlock();
}
}
}
}

Three new binary test resources (not rendered): 5.3 KiB, 1.6 KiB and 1.6 KiB.

View File

@ -1,5 +1,5 @@
/*
* Copyright (C) 2011, 2020 IBM Corporation and others
* Copyright (C) 2011, 2021 IBM Corporation and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@ -9,6 +9,7 @@
*/
package org.eclipse.jgit.api;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@ -18,11 +19,20 @@
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import org.eclipse.jgit.api.errors.PatchApplyException;
import org.eclipse.jgit.api.errors.PatchFormatException;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandFactory;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.diff.RawText;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.util.IO;
import org.junit.Test;
public class ApplyCommandTest extends RepositoryTestCase {
@ -57,6 +67,260 @@ private ApplyResult init(final String name, final boolean preExists,
}
}
@Test
public void testCrLf() throws Exception {
try {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
ApplyResult result = init("crlf", true, true);
assertEquals(1, result.getUpdatedFiles().size());
assertEquals(new File(db.getWorkTree(), "crlf"),
result.getUpdatedFiles().get(0));
checkFile(new File(db.getWorkTree(), "crlf"),
b.getString(0, b.size(), false));
} finally {
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF);
}
}
@Test
public void testCrLfOff() throws Exception {
try {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
ApplyResult result = init("crlf", true, true);
assertEquals(1, result.getUpdatedFiles().size());
assertEquals(new File(db.getWorkTree(), "crlf"),
result.getUpdatedFiles().get(0));
checkFile(new File(db.getWorkTree(), "crlf"),
b.getString(0, b.size(), false));
} finally {
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF);
}
}
@Test
public void testCrLfEmptyCommitted() throws Exception {
try {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
ApplyResult result = init("crlf3", true, true);
assertEquals(1, result.getUpdatedFiles().size());
assertEquals(new File(db.getWorkTree(), "crlf3"),
result.getUpdatedFiles().get(0));
checkFile(new File(db.getWorkTree(), "crlf3"),
b.getString(0, b.size(), false));
} finally {
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF);
}
}
@Test
public void testCrLfNewFile() throws Exception {
try {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
ApplyResult result = init("crlf4", false, true);
assertEquals(1, result.getUpdatedFiles().size());
assertEquals(new File(db.getWorkTree(), "crlf4"),
result.getUpdatedFiles().get(0));
checkFile(new File(db.getWorkTree(), "crlf4"),
b.getString(0, b.size(), false));
} finally {
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF);
}
}
@Test
public void testPatchWithCrLf() throws Exception {
try {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
ApplyResult result = init("crlf2", true, true);
assertEquals(1, result.getUpdatedFiles().size());
assertEquals(new File(db.getWorkTree(), "crlf2"),
result.getUpdatedFiles().get(0));
checkFile(new File(db.getWorkTree(), "crlf2"),
b.getString(0, b.size(), false));
} finally {
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF);
}
}
@Test
public void testPatchWithCrLf2() throws Exception {
String name = "crlf2";
try (Git git = new Git(db)) {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
a = new RawText(readFile(name + "_PreImage"));
write(new File(db.getWorkTree(), name),
a.getString(0, a.size(), false));
git.add().addFilepattern(name).call();
git.commit().setMessage("PreImage").call();
b = new RawText(readFile(name + "_PostImage"));
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
ApplyResult result = git.apply()
.setPatch(getTestResource(name + ".patch")).call();
assertEquals(1, result.getUpdatedFiles().size());
assertEquals(new File(db.getWorkTree(), name),
result.getUpdatedFiles().get(0));
checkFile(new File(db.getWorkTree(), name),
b.getString(0, b.size(), false));
} finally {
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF);
}
}
// Clean/smudge filter for testFiltering. The smudgetest test resources were
// created with C git using a clean filter sed -e "s/A/E/g" and the smudge
// filter sed -e "s/E/A/g". To keep the test independent of the presence of
// sed, implement this with a built-in filter.
private static class ReplaceFilter extends FilterCommand {
private final char toReplace;
private final char replacement;
ReplaceFilter(InputStream in, OutputStream out, char toReplace,
char replacement) {
super(in, out);
this.toReplace = toReplace;
this.replacement = replacement;
}
@Override
public int run() throws IOException {
int b = in.read();
if (b < 0) {
in.close();
out.close();
return -1;
}
if ((b & 0xFF) == toReplace) {
b = replacement;
}
out.write(b);
return 1;
}
}
@Test
public void testFiltering() throws Exception {
// Set up filter
FilterCommandFactory clean = (repo, in, out) -> {
return new ReplaceFilter(in, out, 'A', 'E');
};
FilterCommandFactory smudge = (repo, in, out) -> {
return new ReplaceFilter(in, out, 'E', 'A');
};
FilterCommandRegistry.register("jgit://builtin/a2e/clean", clean);
FilterCommandRegistry.register("jgit://builtin/a2e/smudge", smudge);
try (Git git = new Git(db)) {
Config config = db.getConfig();
config.setString(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
"clean", "jgit://builtin/a2e/clean");
config.setString(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
"smudge", "jgit://builtin/a2e/smudge");
write(new File(db.getWorkTree(), ".gitattributes"),
"smudgetest filter=a2e");
git.add().addFilepattern(".gitattributes").call();
git.commit().setMessage("Attributes").call();
ApplyResult result = init("smudgetest", true, true);
assertEquals(1, result.getUpdatedFiles().size());
assertEquals(new File(db.getWorkTree(), "smudgetest"),
result.getUpdatedFiles().get(0));
checkFile(new File(db.getWorkTree(), "smudgetest"),
b.getString(0, b.size(), false));
} finally {
// Tear down filter
FilterCommandRegistry.unregister("jgit://builtin/a2e/clean");
FilterCommandRegistry.unregister("jgit://builtin/a2e/smudge");
}
}
private void checkBinary(String name, boolean hasPreImage)
throws Exception {
checkBinary(name, hasPreImage, 1);
}
private void checkBinary(String name, boolean hasPreImage,
int numberOfFiles) throws Exception {
try (Git git = new Git(db)) {
byte[] post = IO
.readWholeStream(getTestResource(name + "_PostImage"), 0)
.array();
File f = new File(db.getWorkTree(), name);
if (hasPreImage) {
byte[] pre = IO
.readWholeStream(getTestResource(name + "_PreImage"), 0)
.array();
Files.write(f.toPath(), pre);
git.add().addFilepattern(name).call();
git.commit().setMessage("PreImage").call();
}
ApplyResult result = git.apply()
.setPatch(getTestResource(name + ".patch")).call();
assertEquals(numberOfFiles, result.getUpdatedFiles().size());
assertEquals(f, result.getUpdatedFiles().get(0));
assertArrayEquals(post, Files.readAllBytes(f.toPath()));
}
}
@Test
public void testBinaryDelta() throws Exception {
checkBinary("delta", true);
}
@Test
public void testBinaryLiteral() throws Exception {
checkBinary("literal", true);
}
@Test
public void testBinaryLiteralAdd() throws Exception {
checkBinary("literal_add", false);
}
@Test
public void testEncodingChange() throws Exception {
// This is a text patch that changes a file containing ÄÖÜ in UTF-8 to
// the same characters in ISO-8859-1. The patch file itself uses mixed
// encoding. Since checkFile() works with strings use the binary check.
checkBinary("umlaut", true);
}
@Test
public void testEmptyLine() throws Exception {
// C git accepts completely empty lines as empty context lines.
// According to comments in the C git sources (apply.c), newer GNU diff
// may produce such diffs.
checkBinary("emptyLine", true);
}
@Test
public void testMultiFileNoNewline() throws Exception {
// This test needs two files. One is in the test resources.
try (Git git = new Git(db)) {
Files.write(db.getWorkTree().toPath().resolve("yello"),
"yello".getBytes(StandardCharsets.US_ASCII));
git.add().addFilepattern("yello").call();
git.commit().setMessage("yello").call();
}
checkBinary("hello", true, 2);
}
@Test
public void testAddA1() throws Exception {
ApplyResult result = init("A1", false, true);

View File

@ -46,6 +46,8 @@
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
import org.junit.Test;
public class RepoCommandTest extends RepositoryTestCase {
@ -749,9 +751,55 @@ public void testRevisionBare() throws Exception {
String gitlink = localDb.resolve(Constants.HEAD + ":foo").name();
assertEquals("The gitlink is same as remote head",
oldCommitId.name(), gitlink);
File dotmodules = new File(localDb.getWorkTree(),
Constants.DOT_GIT_MODULES);
assertTrue(dotmodules.exists());
// The .gitmodules file should have "branch" lines
String gitModulesContents = RawParseUtils
.decode(IO.readFully(dotmodules));
assertTrue(gitModulesContents.contains("branch = branch"));
}
}
@Test
public void testRevisionBare_ignoreTags() throws Exception {
Repository remoteDb = createBareRepository();
Repository tempDb = createWorkRepository();
StringBuilder xmlContent = new StringBuilder();
xmlContent.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
.append("<manifest>")
.append("<remote name=\"remote1\" fetch=\".\" />")
.append("<default revision=\"").append("refs/tags/" + TAG)
.append("\" remote=\"remote1\" />")
.append("<project path=\"foo\" name=\"")
.append(defaultUri)
.append("\" />").append("</manifest>");
JGitTestUtil.writeTrashFile(tempDb, "manifest.xml",
xmlContent.toString());
RepoCommand command = new RepoCommand(remoteDb);
command.setPath(
tempDb.getWorkTree().getAbsolutePath() + "/manifest.xml")
.setURI(rootUri).call();
// Clone it
File directory = createTempDirectory("testReplaceManifestBare");
File dotmodules;
try (Repository localDb = Git.cloneRepository().setDirectory(directory)
.setURI(remoteDb.getDirectory().toURI().toString()).call()
.getRepository()) {
dotmodules = new File(localDb.getWorkTree(),
Constants.DOT_GIT_MODULES);
assertTrue(dotmodules.exists());
}
// The .gitmodules file should not have "branch" lines
String gitModulesContents = RawParseUtils
.decode(IO.readFully(dotmodules));
assertFalse(gitModulesContents.contains("branch"));
assertTrue(gitModulesContents.contains("ref = refs/tags/" + TAG));
}
@Test
public void testCopyFileBare() throws Exception {
Repository remoteDb = createBareRepository();

View File

@ -0,0 +1,87 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import java.nio.charset.StandardCharsets;
import org.junit.Test;
/**
* Tests for {@link Base85}.
*/
public class Base85Test {
private static final String VALID_CHARS = "0123456789"
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+ "!#$%&()*+-;<=>?@^_`{|}~";
@Test
public void testChars() {
for (int i = 0; i < 256; i++) {
byte[] testData = { '1', '2', '3', '4', (byte) i };
if (VALID_CHARS.indexOf(i) >= 0) {
byte[] decoded = Base85.decode(testData, 4);
assertNotNull(decoded);
} else {
assertThrows(IllegalArgumentException.class,
() -> Base85.decode(testData, 4));
}
}
}
private void roundtrip(byte[] data, int expectedLength) {
byte[] encoded = Base85.encode(data);
assertEquals(expectedLength, encoded.length);
assertArrayEquals(data, Base85.decode(encoded, data.length));
}
private void roundtrip(String data, int expectedLength) {
roundtrip(data.getBytes(StandardCharsets.US_ASCII), expectedLength);
}
@Test
public void testPadding() {
roundtrip("", 0);
roundtrip("a", 5);
roundtrip("ab", 5);
roundtrip("abc", 5);
roundtrip("abcd", 5);
roundtrip("abcde", 10);
roundtrip("abcdef", 10);
roundtrip("abcdefg", 10);
roundtrip("abcdefgh", 10);
roundtrip("abcdefghi", 15);
}
@Test
public void testBinary() {
roundtrip(new byte[] { 1 }, 5);
roundtrip(new byte[] { 1, 2 }, 5);
roundtrip(new byte[] { 1, 2, 3 }, 5);
roundtrip(new byte[] { 1, 2, 3, 4 }, 5);
roundtrip(new byte[] { 1, 2, 3, 4, 5 }, 10);
roundtrip(new byte[] { 1, 2, 3, 4, 5, 0, 0, 0 }, 10);
roundtrip(new byte[] { 1, 2, 3, 4, 0, 0, 0, 5 }, 10);
}
@Test
public void testOverflow() {
IllegalArgumentException e = assertThrows(
IllegalArgumentException.class,
() -> Base85.decode(new byte[] { '~', '~', '~', '~', '~' }, 4));
assertTrue(e.getMessage().contains("overflow"));
}
}
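
For orientation, a round-trip sketch of the codec exercised above. It assumes only the two static methods visible in this test, Base85.encode(byte[]) and Base85.decode(byte[], int); everything else is plain JDK:

import java.nio.charset.StandardCharsets;

import org.eclipse.jgit.util.Base85;

public class Base85Example {
	public static void main(String[] args) {
		byte[] raw = "hello".getBytes(StandardCharsets.US_ASCII);
		// Encoded output is padded to a multiple of 5 bytes (see testPadding above).
		byte[] encoded = Base85.encode(raw);
		// The decoder needs the expected output length; base-85 does not store it.
		byte[] decoded = Base85.decode(encoded, raw.length);
		System.out.println(new String(decoded, StandardCharsets.US_ASCII)); // prints "hello"
	}
}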

View File

@ -0,0 +1,103 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.io;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.zip.InflaterInputStream;
import org.junit.Test;
/**
* Crude tests for the {@link BinaryDeltaInputStream} using delta diffs
* generated by C git.
*/
public class BinaryDeltaInputStreamTest {
private InputStream getBinaryHunk(String name) {
return this.getClass().getResourceAsStream(name);
}
@Test
public void testBinaryDelta() throws Exception {
// Prepare our test data
byte[] data = new byte[8192];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) (255 - (i % 256));
}
// Same, but with five 'x' inserted in the middle.
int middle = data.length / 2;
byte[] newData = new byte[data.length + 5];
System.arraycopy(data, 0, newData, 0, middle);
for (int i = 0; i < 5; i++) {
newData[middle + i] = 'x';
}
System.arraycopy(data, middle, newData, middle + 5, middle);
// delta1.forward has the instructions
// @formatter:off
// COPY 0 4096
// INSERT 5 xxxxx
// COPY 0 4096
// @formatter:on
// Note that the way we built newData could be expressed as
// @formatter:off
// COPY 0 4096
// INSERT 5 xxxxx
// COPY 4096 4096
// @formatter:on
try (ByteArrayOutputStream out = new ByteArrayOutputStream();
BinaryDeltaInputStream input = new BinaryDeltaInputStream(data,
new InflaterInputStream(new BinaryHunkInputStream(
getBinaryHunk("delta1.forward"))))) {
byte[] buf = new byte[1024];
int n;
while ((n = input.read(buf)) >= 0) {
out.write(buf, 0, n);
}
assertArrayEquals(newData, out.toByteArray());
assertTrue(input.isFullyConsumed());
}
// delta1.reverse has the instructions
// @formatter:off
// COPY 0 4096
// COPY 256 3840
// COPY 256 256
// @formatter:on
// Note that there are alternatives, for instance
// @formatter:off
// COPY 0 4096
// COPY 4101 4096
// @formatter:on
// or
// @formatter:off
// COPY 0 4096
// COPY 0 4096
// @formatter:on
try (ByteArrayOutputStream out = new ByteArrayOutputStream();
BinaryDeltaInputStream input = new BinaryDeltaInputStream(
newData,
new InflaterInputStream(new BinaryHunkInputStream(
getBinaryHunk("delta1.reverse"))))) {
long expectedSize = input.getExpectedResultSize();
assertEquals(data.length, expectedSize);
byte[] buf = new byte[1024];
int n;
while ((n = input.read(buf)) >= 0) {
out.write(buf, 0, n);
}
assertArrayEquals(data, out.toByteArray());
assertTrue(input.isFullyConsumed());
}
}
}

View File

@ -0,0 +1,146 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.io;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import org.junit.Test;
/**
* Tests for {@link BinaryHunkInputStream} and {@link BinaryHunkOutputStream}.
*/
public class BinaryHunkStreamTest {
@Test
public void testRoundtripWholeBuffer() throws IOException {
for (int length = 1; length < 520 + 52; length++) {
byte[] data = new byte[length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) (255 - (i % 256));
}
try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
BinaryHunkOutputStream out = new BinaryHunkOutputStream(
bos)) {
out.write(data);
out.flush();
byte[] encoded = bos.toByteArray();
assertFalse(Arrays.equals(data, encoded));
try (BinaryHunkInputStream in = new BinaryHunkInputStream(
new ByteArrayInputStream(encoded))) {
byte[] decoded = new byte[data.length];
int newLength = in.read(decoded);
assertEquals(newLength, decoded.length);
assertEquals(-1, in.read());
assertArrayEquals(data, decoded);
}
}
}
}
@Test
public void testRoundtripChunks() throws IOException {
for (int length = 1; length < 520 + 52; length++) {
byte[] data = new byte[length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) (255 - (i % 256));
}
try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
BinaryHunkOutputStream out = new BinaryHunkOutputStream(
bos)) {
out.write(data, 0, data.length / 2);
out.write(data, data.length / 2, data.length - data.length / 2);
out.flush();
byte[] encoded = bos.toByteArray();
assertFalse(Arrays.equals(data, encoded));
try (BinaryHunkInputStream in = new BinaryHunkInputStream(
new ByteArrayInputStream(encoded))) {
byte[] decoded = new byte[data.length];
int p = 0;
int n;
while ((n = in.read(decoded, p,
Math.min(decoded.length - p, 57))) >= 0) {
p += n;
if (p == decoded.length) {
break;
}
}
assertEquals(p, decoded.length);
assertEquals(-1, in.read());
assertArrayEquals(data, decoded);
}
}
}
}
@Test
public void testRoundtripBytes() throws IOException {
for (int length = 1; length < 520 + 52; length++) {
byte[] data = new byte[length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) (255 - (i % 256));
}
try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
BinaryHunkOutputStream out = new BinaryHunkOutputStream(
bos)) {
for (int i = 0; i < data.length; i++) {
out.write(data[i]);
}
out.flush();
byte[] encoded = bos.toByteArray();
assertFalse(Arrays.equals(data, encoded));
try (BinaryHunkInputStream in = new BinaryHunkInputStream(
new ByteArrayInputStream(encoded))) {
byte[] decoded = new byte[data.length];
for (int i = 0; i < decoded.length; i++) {
int val = in.read();
assertTrue(0 <= val && val <= 255);
decoded[i] = (byte) val;
}
assertEquals(-1, in.read());
assertArrayEquals(data, decoded);
}
}
}
}
@Test
public void testRoundtripWithClose() throws IOException {
for (int length = 1; length < 520 + 52; length++) {
byte[] data = new byte[length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) (255 - (i % 256));
}
try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
try (BinaryHunkOutputStream out = new BinaryHunkOutputStream(
bos)) {
out.write(data);
}
byte[] encoded = bos.toByteArray();
assertFalse(Arrays.equals(data, encoded));
try (BinaryHunkInputStream in = new BinaryHunkInputStream(
new ByteArrayInputStream(encoded))) {
byte[] decoded = new byte[data.length];
int newLength = in.read(decoded);
assertEquals(newLength, decoded.length);
assertEquals(-1, in.read());
assertArrayEquals(data, decoded);
}
}
}
}
}
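
A compact sketch of the stream pair tested above, assuming only the constructors and read/write calls used in these tests (BinaryHunkOutputStream wrapping an OutputStream, BinaryHunkInputStream wrapping an InputStream):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.eclipse.jgit.util.io.BinaryHunkInputStream;
import org.eclipse.jgit.util.io.BinaryHunkOutputStream;

public class BinaryHunkExample {
	public static void main(String[] args) throws IOException {
		byte[] data = "binary payload".getBytes(StandardCharsets.US_ASCII);
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		try (BinaryHunkOutputStream out = new BinaryHunkOutputStream(bos)) {
			out.write(data); // closing the stream writes the final hunk line
		}
		byte[] encoded = bos.toByteArray(); // base-85 encoded "binary hunk" lines
		byte[] decoded = new byte[data.length];
		try (BinaryHunkInputStream in = new BinaryHunkInputStream(
				new ByteArrayInputStream(encoded))) {
			int n = in.read(decoded); // the tests above also decode in a single read
			System.out.println(n == data.length); // true for a buffer this small
		}
		// decoded now holds the original bytes
	}
}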

View File

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<component id="org.eclipse.jgit" version="2">
<resource path="src/org/eclipse/jgit/transport/SshConstants.java" type="org.eclipse.jgit.transport.SshConstants">
<filter id="1142947843">
<message_arguments>
<message_argument value="5.11.1"/>
<message_argument value="PUBKEY_ACCEPTED_ALGORITHMS"/>
</message_arguments>
</filter>
</resource>
</component>

View File

@ -13,6 +13,9 @@ ambiguousObjectAbbreviation=Object abbreviation {0} is ambiguous
aNewObjectIdIsRequired=A NewObjectId is required.
anExceptionOccurredWhileTryingToAddTheIdOfHEAD=An exception occurred while trying to add the Id of HEAD
anSSHSessionHasBeenAlreadyCreated=An SSH session has been already created
applyBinaryBaseOidWrong=Cannot apply binary patch; OID for file {0} does not match
applyBinaryOidTooShort=Binary patch for file {0} does not have full IDs
applyBinaryResultOidWrong=Result of binary patch for file {0} has wrong OID.
applyingCommit=Applying {0}
archiveFormatAlreadyAbsent=Archive format already absent: {0}
archiveFormatAlreadyRegistered=Archive format already registered with different implementation: {0}
@ -37,7 +40,19 @@ badRef=Bad ref: {0}: {1}
badSectionEntry=Bad section entry: {0}
badShallowLine=Bad shallow line: {0}
bareRepositoryNoWorkdirAndIndex=Bare Repository has neither a working tree, nor an index
base85invalidChar=Invalid base-85 character: 0x{0}
base85length=Base-85 encoded data must have a length that is a multiple of 5
base85overflow=Base-85 value overflow, does not fit into 32 bits: 0x{0}
base85tooLong=Extra base-85 encoded data for output size of {0} bytes
base85tooShort=Base-85 data decoded into less than {0} bytes
baseLengthIncorrect=base length incorrect
binaryDeltaBaseLengthMismatch=Binary delta base length does not match, expected {0}, got {1}
binaryDeltaInvalidOffset=Binary delta offset + length too large: {0} + {1}
binaryDeltaInvalidResultLength=Binary delta expected result length is negative
binaryHunkDecodeError=Binary hunk, line {0}: invalid input
binaryHunkInvalidLength=Binary hunk, line {0}: input corrupt; expected length byte, got 0x{1}
binaryHunkLineTooShort=Binary hunk, line {0}: input ended prematurely
binaryHunkMissingNewline=Binary hunk, line {0}: input line not terminated by newline
bitmapMissingObject=Bitmap at {0} is missing {1}.
bitmapsMustBePrepared=Bitmaps must be prepared before they may be written.
blameNotCommittedYet=Not Committed Yet

View File

@ -1,5 +1,5 @@
/*
* Copyright (C) 2011, 2020 IBM Corporation and others
* Copyright (C) 2011, 2021 IBM Corporation and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@ -9,29 +9,68 @@
*/
package org.eclipse.jgit.api;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.zip.InflaterInputStream;
import org.eclipse.jgit.api.errors.FilterFailedException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.PatchApplyException;
import org.eclipse.jgit.api.errors.PatchFormatException;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.diff.DiffEntry.ChangeType;
import org.eclipse.jgit.diff.RawText;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheCheckout;
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.patch.BinaryHunk;
import org.eclipse.jgit.patch.FileHeader;
import org.eclipse.jgit.patch.FileHeader.PatchType;
import org.eclipse.jgit.patch.HunkHeader;
import org.eclipse.jgit.patch.Patch;
import org.eclipse.jgit.treewalk.FileTreeIterator;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
import org.eclipse.jgit.treewalk.filter.AndTreeFilter;
import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter;
import org.eclipse.jgit.treewalk.filter.PathFilterGroup;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FS.ExecutionResult;
import org.eclipse.jgit.util.FileUtils;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
import org.eclipse.jgit.util.StringUtils;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
import org.eclipse.jgit.util.io.BinaryDeltaInputStream;
import org.eclipse.jgit.util.io.BinaryHunkInputStream;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
import org.eclipse.jgit.util.sha1.SHA1;
/**
* Apply a patch to files and/or to the index.
@ -45,7 +84,7 @@ public class ApplyCommand extends GitCommand<ApplyResult> {
private InputStream in;
/**
* Constructs the command if the patch is to be applied to the index.
* Constructs the command.
*
* @param repo
*/
@ -79,6 +118,7 @@ public ApplyCommand setPatch(InputStream in) {
public ApplyResult call() throws GitAPIException, PatchFormatException,
PatchApplyException {
checkCallable();
setCallable(false);
ApplyResult r = new ApplyResult();
try {
final Patch p = new Patch();
@ -87,19 +127,22 @@ public ApplyResult call() throws GitAPIException, PatchFormatException,
} finally {
in.close();
}
if (!p.getErrors().isEmpty())
if (!p.getErrors().isEmpty()) {
throw new PatchFormatException(p.getErrors());
}
Repository repository = getRepository();
DirCache cache = repository.readDirCache();
for (FileHeader fh : p.getFiles()) {
ChangeType type = fh.getChangeType();
File f = null;
switch (type) {
case ADD:
f = getFile(fh.getNewPath(), true);
apply(f, fh);
apply(repository, fh.getNewPath(), cache, f, fh);
break;
case MODIFY:
f = getFile(fh.getOldPath(), false);
apply(f, fh);
apply(repository, fh.getOldPath(), cache, f, fh);
break;
case DELETE:
f = getFile(fh.getOldPath(), false);
@ -118,14 +161,14 @@ public ApplyResult call() throws GitAPIException, PatchFormatException,
throw new PatchApplyException(MessageFormat.format(
JGitText.get().renameFileFailed, f, dest), e);
}
apply(dest, fh);
apply(repository, fh.getOldPath(), cache, dest, fh);
break;
case COPY:
f = getFile(fh.getOldPath(), false);
File target = getFile(fh.getNewPath(), false);
FileUtils.mkdirs(target.getParentFile(), true);
Files.copy(f.toPath(), target.toPath());
apply(target, fh);
apply(repository, fh.getOldPath(), cache, target, fh);
}
r.addUpdatedFile(f);
}
@ -133,14 +176,13 @@ public ApplyResult call() throws GitAPIException, PatchFormatException,
throw new PatchApplyException(MessageFormat.format(
JGitText.get().patchApplyException, e.getMessage()), e);
}
setCallable(false);
return r;
}
private File getFile(String path, boolean create)
throws PatchApplyException {
File f = new File(getRepository().getWorkTree(), path);
if (create)
if (create) {
try {
File parent = f.getParentFile();
FileUtils.mkdirs(parent, true);
@ -149,22 +191,366 @@ private File getFile(String path, boolean create)
throw new PatchApplyException(MessageFormat.format(
JGitText.get().createNewFileFailed, f), e);
}
}
return f;
}
private void apply(Repository repository, String path, DirCache cache,
File f, FileHeader fh) throws IOException, PatchApplyException {
if (PatchType.BINARY.equals(fh.getPatchType())) {
return;
}
boolean convertCrLf = needsCrLfConversion(f, fh);
// Use a TreeWalk with a DirCacheIterator to pick up the correct
// clean/smudge filters. CR-LF handling is completely determined by
// whether the file or the patch have CR-LF line endings.
try (TreeWalk walk = new TreeWalk(repository)) {
walk.setOperationType(OperationType.CHECKIN_OP);
FileTreeIterator files = new FileTreeIterator(repository);
int fileIdx = walk.addTree(files);
int cacheIdx = walk.addTree(new DirCacheIterator(cache));
files.setDirCacheIterator(walk, cacheIdx);
walk.setFilter(AndTreeFilter.create(
PathFilterGroup.createFromStrings(path),
new NotIgnoredFilter(fileIdx)));
walk.setRecursive(true);
if (walk.next()) {
// If the file on disk has no newline characters, convertCrLf
// will be false. In that case we want to honor the normal git
// settings.
EolStreamType streamType = convertCrLf ? EolStreamType.TEXT_CRLF
: walk.getEolStreamType(OperationType.CHECKOUT_OP);
String command = walk.getFilterCommand(
Constants.ATTR_FILTER_TYPE_SMUDGE);
CheckoutMetadata checkOut = new CheckoutMetadata(streamType, command);
FileTreeIterator file = walk.getTree(fileIdx,
FileTreeIterator.class);
if (file != null) {
if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
applyBinary(repository, path, f, fh,
file::openEntryStream, file.getEntryObjectId(),
checkOut);
} else {
command = walk.getFilterCommand(
Constants.ATTR_FILTER_TYPE_CLEAN);
RawText raw;
// Can't use file.openEntryStream() as it would do CR-LF
// conversion as usual, not as wanted by us.
try (InputStream input = filterClean(repository, path,
new FileInputStream(f), convertCrLf, command)) {
raw = new RawText(
IO.readWholeStream(input, 0).array());
}
applyText(repository, path, raw, f, fh, checkOut);
}
return;
}
}
}
// File ignored?
RawText raw;
CheckoutMetadata checkOut;
if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null);
applyBinary(repository, path, f, fh, () -> new FileInputStream(f),
null, checkOut);
} else {
if (convertCrLf) {
try (InputStream input = EolStreamTypeUtil.wrapInputStream(
new FileInputStream(f), EolStreamType.TEXT_LF)) {
raw = new RawText(IO.readWholeStream(input, 0).array());
}
checkOut = new CheckoutMetadata(EolStreamType.TEXT_CRLF, null);
} else {
raw = new RawText(f);
checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null);
}
applyText(repository, path, raw, f, fh, checkOut);
}
}
private boolean needsCrLfConversion(File f, FileHeader fileHeader)
throws IOException {
if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
return false;
}
if (!hasCrLf(fileHeader)) {
try (InputStream input = new FileInputStream(f)) {
return RawText.isCrLfText(input);
}
}
return false;
}
private static boolean hasCrLf(FileHeader fileHeader) {
if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
return false;
}
for (HunkHeader header : fileHeader.getHunks()) {
byte[] buf = header.getBuffer();
int hunkEnd = header.getEndOffset();
int lineStart = header.getStartOffset();
while (lineStart < hunkEnd) {
int nextLineStart = RawParseUtils.nextLF(buf, lineStart);
if (nextLineStart > hunkEnd) {
nextLineStart = hunkEnd;
}
if (nextLineStart <= lineStart) {
break;
}
if (nextLineStart - lineStart > 1) {
char first = (char) (buf[lineStart] & 0xFF);
if (first == ' ' || first == '-') {
// It's an old line. Does it end in CR-LF?
if (buf[nextLineStart - 2] == '\r') {
return true;
}
}
}
lineStart = nextLineStart;
}
}
return false;
}
private InputStream filterClean(Repository repository, String path,
InputStream fromFile, boolean convertCrLf, String filterCommand)
throws IOException {
InputStream input = fromFile;
if (convertCrLf) {
input = EolStreamTypeUtil.wrapInputStream(input,
EolStreamType.TEXT_LF);
}
if (StringUtils.isEmptyOrNull(filterCommand)) {
return input;
}
if (FilterCommandRegistry.isRegistered(filterCommand)) {
LocalFile buffer = new TemporaryBuffer.LocalFile(null);
FilterCommand command = FilterCommandRegistry.createFilterCommand(
filterCommand, repository, input, buffer);
while (command.run() != -1) {
// loop as long as command.run() tells there is work to do
}
return buffer.openInputStreamWithAutoDestroy();
}
FS fs = repository.getFS();
ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand,
new String[0]);
filterProcessBuilder.directory(repository.getWorkTree());
filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
repository.getDirectory().getAbsolutePath());
ExecutionResult result;
try {
result = fs.execute(filterProcessBuilder, in);
} catch (IOException | InterruptedException e) {
throw new IOException(
new FilterFailedException(e, filterCommand, path));
}
int rc = result.getRc();
if (rc != 0) {
throw new IOException(new FilterFailedException(rc, filterCommand,
path, result.getStdout().toByteArray(4096), RawParseUtils
.decode(result.getStderr().toByteArray(4096))));
}
return result.getStdout().openInputStreamWithAutoDestroy();
}
/**
* @param f
* @param fh
* @throws IOException
* @throws PatchApplyException
* Something that can supply an {@link InputStream}.
*/
private void apply(File f, FileHeader fh)
private interface StreamSupplier {
InputStream load() throws IOException;
}
/**
* We write the patch result to a {@link TemporaryBuffer} and then use
* {@link DirCacheCheckout}.getContent() to run the result through the CR-LF
* and smudge filters. DirCacheCheckout needs an ObjectLoader, not a
* TemporaryBuffer, so this class bridges between the two, making any Stream
* provided by a {@link StreamSupplier} look like an ordinary git blob to
* DirCacheCheckout.
*/
private static class StreamLoader extends ObjectLoader {
private StreamSupplier data;
private long size;
StreamLoader(StreamSupplier data, long length) {
this.data = data;
this.size = length;
}
@Override
public int getType() {
return Constants.OBJ_BLOB;
}
@Override
public long getSize() {
return size;
}
@Override
public boolean isLarge() {
return true;
}
@Override
public byte[] getCachedBytes() throws LargeObjectException {
throw new LargeObjectException();
}
@Override
public ObjectStream openStream()
throws MissingObjectException, IOException {
return new ObjectStream.Filter(getType(), getSize(),
new BufferedInputStream(data.load()));
}
}
private void initHash(SHA1 hash, long size) {
hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB));
hash.update((byte) ' ');
hash.update(Constants.encodeASCII(size));
hash.update((byte) 0);
}
private ObjectId hash(File f) throws IOException {
SHA1 hash = SHA1.newInstance();
initHash(hash, f.length());
try (InputStream input = new FileInputStream(f)) {
byte[] buf = new byte[8192];
int n;
while ((n = input.read(buf)) >= 0) {
hash.update(buf, 0, n);
}
}
return hash.toObjectId();
}
private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
String path)
throws PatchApplyException, IOException {
boolean hashOk = false;
if (id != null) {
hashOk = baseId.equals(id);
if (!hashOk && ChangeType.ADD.equals(type)
&& ObjectId.zeroId().equals(baseId)) {
// We create the file first. The OID of an empty file is not the
// zero id!
hashOk = Constants.EMPTY_BLOB_ID.equals(id);
}
} else {
if (ObjectId.zeroId().equals(baseId)) {
// File empty is OK.
hashOk = !f.exists() || f.length() == 0;
} else {
hashOk = baseId.equals(hash(f));
}
}
if (!hashOk) {
throw new PatchApplyException(MessageFormat
.format(JGitText.get().applyBinaryBaseOidWrong, path));
}
}
private void applyBinary(Repository repository, String path, File f,
FileHeader fh, StreamSupplier loader, ObjectId id,
CheckoutMetadata checkOut)
throws PatchApplyException, IOException {
if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
throw new PatchApplyException(MessageFormat
.format(JGitText.get().applyBinaryOidTooShort, path));
}
BinaryHunk hunk = fh.getForwardBinaryHunk();
// A BinaryHunk has the start at the "literal" or "delta" token. Data
// starts on the next line.
int start = RawParseUtils.nextLF(hunk.getBuffer(),
hunk.getStartOffset());
int length = hunk.getEndOffset() - start;
SHA1 hash = SHA1.newInstance();
// Write to a buffer and copy to the file only if everything was fine
TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
try {
switch (hunk.getType()) {
case LITERAL_DEFLATED:
// This just overwrites the file. We need to check the hash of
// the base.
checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f,
path);
initHash(hash, hunk.getSize());
try (OutputStream out = buffer;
InputStream inflated = new SHA1InputStream(hash,
new InflaterInputStream(
new BinaryHunkInputStream(
new ByteArrayInputStream(
hunk.getBuffer(), start,
length))))) {
DirCacheCheckout.getContent(repository, path, checkOut,
new StreamLoader(() -> inflated, hunk.getSize()),
null, out);
if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) {
throw new PatchApplyException(MessageFormat.format(
JGitText.get().applyBinaryResultOidWrong,
path));
}
}
try (InputStream bufIn = buffer.openInputStream()) {
Files.copy(bufIn, f.toPath(),
StandardCopyOption.REPLACE_EXISTING);
}
break;
case DELTA_DEFLATED:
// Unfortunately delta application needs random access to the
// base to construct the result.
byte[] base;
try (InputStream input = loader.load()) {
base = IO.readWholeStream(input, 0).array();
}
// At least stream the result!
try (BinaryDeltaInputStream input = new BinaryDeltaInputStream(
base,
new InflaterInputStream(new BinaryHunkInputStream(
new ByteArrayInputStream(hunk.getBuffer(),
start, length))))) {
long finalSize = input.getExpectedResultSize();
initHash(hash, finalSize);
try (OutputStream out = buffer;
SHA1InputStream hashed = new SHA1InputStream(hash,
input)) {
DirCacheCheckout.getContent(repository, path, checkOut,
new StreamLoader(() -> hashed, finalSize), null,
out);
if (!fh.getNewId().toObjectId()
.equals(hash.toObjectId())) {
throw new PatchApplyException(MessageFormat.format(
JGitText.get().applyBinaryResultOidWrong,
path));
}
}
}
try (InputStream bufIn = buffer.openInputStream()) {
Files.copy(bufIn, f.toPath(),
StandardCopyOption.REPLACE_EXISTING);
}
break;
default:
break;
}
} finally {
buffer.destroy();
}
}
private void applyText(Repository repository, String path, RawText rt,
File f, FileHeader fh, CheckoutMetadata checkOut)
throws IOException, PatchApplyException {
RawText rt = new RawText(f);
List<String> oldLines = new ArrayList<>(rt.size());
for (int i = 0; i < rt.size(); i++)
oldLines.add(rt.getString(i));
List<String> newLines = new ArrayList<>(oldLines);
List<ByteBuffer> oldLines = new ArrayList<>(rt.size());
for (int i = 0; i < rt.size(); i++) {
oldLines.add(rt.getRawString(i));
}
List<ByteBuffer> newLines = new ArrayList<>(oldLines);
int afterLastHunk = 0;
int lineNumberShift = 0;
int lastHunkNewLine = -1;
@ -182,9 +568,9 @@ private void apply(File f, FileHeader fh)
b.length);
RawText hrt = new RawText(b);
List<String> hunkLines = new ArrayList<>(hrt.size());
List<ByteBuffer> hunkLines = new ArrayList<>(hrt.size());
for (int i = 0; i < hrt.size(); i++) {
hunkLines.add(hrt.getString(i));
hunkLines.add(hrt.getRawString(i));
}
if (hh.getNewStartLine() == 0) {
@ -253,8 +639,13 @@ && canApplyAt(hunkLines, newLines, 0)) {
lineNumberShift = applyAt - hh.getNewStartLine() + 1;
int sz = hunkLines.size();
for (int j = 1; j < sz; j++) {
String hunkLine = hunkLines.get(j);
switch (hunkLine.charAt(0)) {
ByteBuffer hunkLine = hunkLines.get(j);
if (!hunkLine.hasRemaining()) {
// Completely empty line; accept as empty context line
applyAt++;
continue;
}
switch (hunkLine.array()[hunkLine.position()]) {
case ' ':
applyAt++;
break;
@ -262,7 +653,7 @@ && canApplyAt(hunkLines, newLines, 0)) {
newLines.remove(applyAt);
break;
case '+':
newLines.add(applyAt++, hunkLine.substring(1));
newLines.add(applyAt++, slice(hunkLine, 1));
break;
default:
break;
@ -271,39 +662,64 @@ && canApplyAt(hunkLines, newLines, 0)) {
afterLastHunk = applyAt;
}
if (!isNoNewlineAtEndOfFile(fh)) {
newLines.add(""); //$NON-NLS-1$
newLines.add(null);
}
if (!rt.isMissingNewlineAtEnd()) {
oldLines.add(""); //$NON-NLS-1$
oldLines.add(null);
}
if (!isChanged(oldLines, newLines)) {
return; // Don't touch the file
if (oldLines.equals(newLines)) {
return; // Unchanged; don't touch the file
}
try (Writer fw = Files.newBufferedWriter(f.toPath())) {
for (Iterator<String> l = newLines.iterator(); l.hasNext();) {
fw.write(l.next());
if (l.hasNext()) {
// Don't bother handling line endings - if it was Windows,
// the \r is still there!
fw.write('\n');
TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
try {
try (OutputStream out = buffer) {
for (Iterator<ByteBuffer> l = newLines.iterator(); l
.hasNext();) {
ByteBuffer line = l.next();
if (line == null) {
// Must be the marker for the final newline
break;
}
out.write(line.array(), line.position(), line.remaining());
if (l.hasNext()) {
out.write('\n');
}
}
}
try (OutputStream output = new FileOutputStream(f)) {
DirCacheCheckout.getContent(repository, path, checkOut,
new StreamLoader(buffer::openInputStream,
buffer.length()),
null, output);
}
} finally {
buffer.destroy();
}
getRepository().getFS().setExecute(f, fh.getNewMode() == FileMode.EXECUTABLE_FILE);
repository.getFS().setExecute(f,
fh.getNewMode() == FileMode.EXECUTABLE_FILE);
}
private boolean canApplyAt(List<String> hunkLines, List<String> newLines,
int line) {
private boolean canApplyAt(List<ByteBuffer> hunkLines,
List<ByteBuffer> newLines, int line) {
int sz = hunkLines.size();
int limit = newLines.size();
int pos = line;
for (int j = 1; j < sz; j++) {
String hunkLine = hunkLines.get(j);
switch (hunkLine.charAt(0)) {
ByteBuffer hunkLine = hunkLines.get(j);
if (!hunkLine.hasRemaining()) {
// Empty line. Accept as empty context line.
if (pos >= limit || newLines.get(pos).hasRemaining()) {
return false;
}
pos++;
continue;
}
switch (hunkLine.array()[hunkLine.position()]) {
case ' ':
case '-':
if (pos >= limit
|| !newLines.get(pos).equals(hunkLine.substring(1))) {
|| !newLines.get(pos).equals(slice(hunkLine, 1))) {
return false;
}
pos++;
@ -315,13 +731,9 @@ private boolean canApplyAt(List<String> hunkLines, List<String> newLines,
return true;
}
private static boolean isChanged(List<String> ol, List<String> nl) {
if (ol.size() != nl.size())
return true;
for (int i = 0; i < ol.size(); i++)
if (!ol.get(i).equals(nl.get(i)))
return true;
return false;
private ByteBuffer slice(ByteBuffer b, int off) {
int newOffset = b.position() + off;
return ByteBuffer.wrap(b.array(), newOffset, b.limit() - newOffset);
}
private boolean isNoNewlineAtEndOfFile(FileHeader fh) {
@ -330,8 +742,51 @@ private boolean isNoNewlineAtEndOfFile(FileHeader fh) {
return false;
}
HunkHeader lastHunk = hunks.get(hunks.size() - 1);
RawText lhrt = new RawText(lastHunk.getBuffer());
byte[] buf = new byte[lastHunk.getEndOffset()
- lastHunk.getStartOffset()];
System.arraycopy(lastHunk.getBuffer(), lastHunk.getStartOffset(), buf,
0, buf.length);
RawText lhrt = new RawText(buf);
return lhrt.getString(lhrt.size() - 1)
.equals("\\ No newline at end of file"); //$NON-NLS-1$
}
/**
* An {@link InputStream} that updates a {@link SHA1} on every byte read.
* The hash is supposed to have been initialized before reading starts.
*/
private static class SHA1InputStream extends InputStream {
private final SHA1 hash;
private final InputStream in;
SHA1InputStream(SHA1 hash, InputStream in) {
this.hash = hash;
this.in = in;
}
@Override
public int read() throws IOException {
int b = in.read();
if (b >= 0) {
hash.update((byte) b);
}
return b;
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
int n = in.read(b, off, len);
if (n > 0) {
hash.update(b, off, n);
}
return n;
}
@Override
public void close() throws IOException {
in.close();
}
}
}

View File

@ -86,6 +86,20 @@ public class MergeCommand extends GitCommand<MergeResult> {
private ProgressMonitor monitor = NullProgressMonitor.INSTANCE;
/**
* Values for the "merge.conflictStyle" git config.
*
* @since 5.12
*/
public enum ConflictStyle {
/** "merge" style: only ours/theirs. This is the default. */
MERGE,
/** "diff3" style: ours/base/theirs. */
DIFF3
}
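// Illustrative only, not part of this change: a hedged sketch of how a client
// might read the new "merge.conflictStyle" setting and map it onto this enum.
// It uses only Config.getString(section, subsection, name), which appears
// elsewhere in this commit; the key spelling follows the Javadoc above, and
// ConflictStyle.valueOf() is the standard enum lookup.
//
//   String raw = repository.getConfig().getString("merge", null, "conflictStyle");
//   ConflictStyle style = (raw == null) ? ConflictStyle.MERGE
//           : ConflictStyle.valueOf(raw.toUpperCase(java.util.Locale.ROOT));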
/**
* The modes available for fast forward merges corresponding to the
* <code>--ff</code>, <code>--no-ff</code> and <code>--ff-only</code>

View File

@ -1,6 +1,6 @@
/*
* Copyright (C) 2009, Google Inc.
* Copyright (C) 2008-2009, Johannes E. Schindelin <johannes.schindelin@gmx.de> and others
* Copyright (C) 2008-2021, Johannes E. Schindelin <johannes.schindelin@gmx.de> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@ -16,6 +16,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.eclipse.jgit.errors.BinaryBlobException;
import org.eclipse.jgit.errors.LargeObjectException;
@ -164,6 +165,27 @@ public String getString(int i) {
return getString(i, i + 1, true);
}
/**
* Get the raw text for a single line.
*
* @param i
* index of the line to extract. Note this is 0-based, so line
* number 1 is actually index 0.
* @return the text for the line, without a trailing LF, as a
* {@link ByteBuffer} that is backed by a slice of the
* {@link #getRawContent() raw content}, with the buffer's position
* on the start of the line and the limit at the end.
* @since 5.12
*/
public ByteBuffer getRawString(int i) {
int s = getStart(i);
int e = getEnd(i);
if (e > 0 && content[e - 1] == '\n') {
e--;
}
return ByteBuffer.wrap(content, s, e - s);
}
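// Illustrative only, not part of this change: the returned buffer can be written
// out without copying, exactly as ApplyCommand does elsewhere in this commit:
//
//   RawText text = new RawText(bytes);
//   ByteBuffer line = text.getRawString(0);
//   out.write(line.array(), line.position(), line.remaining());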
/**
* Get the text for a region of lines.
*

View File

@ -1610,11 +1610,9 @@ private static void runExternalFilterCommand(Repository repo, String path,
}
if (rc != 0) {
throw new IOException(new FilterFailedException(rc,
checkoutMetadata.smudgeFilterCommand,
path,
checkoutMetadata.smudgeFilterCommand, path,
result.getStdout().toByteArray(MAX_EXCEPTION_TEXT_SIZE),
RawParseUtils.decode(result.getStderr()
.toByteArray(MAX_EXCEPTION_TEXT_SIZE))));
result.getStderr().toString(MAX_EXCEPTION_TEXT_SIZE)));
}
}

View File

@ -12,6 +12,7 @@
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.eclipse.jgit.lib.Constants.DEFAULT_REMOTE_NAME;
import static org.eclipse.jgit.lib.Constants.R_REMOTES;
import static org.eclipse.jgit.lib.Constants.R_TAGS;
import java.io.File;
import java.io.FileInputStream;
@ -587,8 +588,11 @@ public RevCommit call() throws GitAPIException {
throw new RemoteUnavailableException(url);
}
if (recordRemoteBranch) {
// can be branch or tag
cfg.setString("submodule", name, "branch", //$NON-NLS-1$ //$NON-NLS-2$
// "branch" field is only for non-tag references.
// Keep tags in "ref" field as hint for other tools.
String field = proj.getRevision().startsWith(
R_TAGS) ? "ref" : "branch"; //$NON-NLS-1$ //$NON-NLS-2$
cfg.setString("submodule", name, field, //$NON-NLS-1$
proj.getRevision());
}

View File

@ -41,6 +41,9 @@ public static JGitText get() {
/***/ public String aNewObjectIdIsRequired;
/***/ public String anExceptionOccurredWhileTryingToAddTheIdOfHEAD;
/***/ public String anSSHSessionHasBeenAlreadyCreated;
/***/ public String applyBinaryBaseOidWrong;
/***/ public String applyBinaryOidTooShort;
/***/ public String applyBinaryResultOidWrong;
/***/ public String applyingCommit;
/***/ public String archiveFormatAlreadyAbsent;
/***/ public String archiveFormatAlreadyRegistered;
@ -65,7 +68,19 @@ public static JGitText get() {
/***/ public String badSectionEntry;
/***/ public String badShallowLine;
/***/ public String bareRepositoryNoWorkdirAndIndex;
/***/ public String base85invalidChar;
/***/ public String base85length;
/***/ public String base85overflow;
/***/ public String base85tooLong;
/***/ public String base85tooShort;
/***/ public String baseLengthIncorrect;
/***/ public String binaryDeltaBaseLengthMismatch;
/***/ public String binaryDeltaInvalidOffset;
/***/ public String binaryDeltaInvalidResultLength;
/***/ public String binaryHunkDecodeError;
/***/ public String binaryHunkInvalidLength;
/***/ public String binaryHunkLineTooShort;
/***/ public String binaryHunkMissingNewline;
/***/ public String bitmapMissingObject;
/***/ public String bitmapsMustBePrepared;
/***/ public String blameNotCommittedYet;

View File

@ -1,6 +1,6 @@
/*
* Copyright (C) 2008, 2017, Google Inc.
* Copyright (C) 2017, 2018, Thomas Wolf <thomas.wolf@paranor.ch> and others
* Copyright (C) 2017, 2021, Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@ -21,7 +21,8 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -82,12 +83,6 @@
*/
public class OpenSshConfigFile implements SshConfigStore {
/**
* "Host" name of the HostEntry for the default options before the first
* host block in a config file.
*/
private static final String DEFAULT_NAME = ""; //$NON-NLS-1$
/** The user's home directory, as key files may be relative to here. */
private final File home;
@ -105,11 +100,9 @@ public class OpenSshConfigFile implements SshConfigStore {
* fully resolved entries created from that.
*/
private static class State {
// Keyed by pattern; if a "Host" line has multiple patterns, we generate
// duplicate HostEntry objects
Map<String, HostEntry> entries = new LinkedHashMap<>();
List<HostEntry> entries = new LinkedList<>();
// Keyed by user@hostname:port
// Previous lookups, keyed by user@hostname:port
Map<String, HostEntry> hosts = new HashMap<>();
@Override
@ -165,14 +158,16 @@ public HostEntry lookup(@NonNull String hostName, int port,
return h;
}
HostEntry fullConfig = new HostEntry();
// Initialize with default entries at the top of the file, before the
// first Host block.
fullConfig.merge(cache.entries.get(DEFAULT_NAME));
for (Map.Entry<String, HostEntry> e : cache.entries.entrySet()) {
String pattern = e.getKey();
if (isHostMatch(pattern, hostName)) {
fullConfig.merge(e.getValue());
}
Iterator<HostEntry> entries = cache.entries.iterator();
if (entries.hasNext()) {
// Should always have at least the first top entry containing
// key-value pairs before the first Host block
fullConfig.merge(entries.next());
entries.forEachRemaining(entry -> {
if (entry.matches(hostName)) {
fullConfig.merge(entry);
}
});
}
fullConfig.substitute(hostName, port, userName, localUserName, home);
cache.hosts.put(cacheKey, fullConfig);
@ -208,20 +203,19 @@ private synchronized State refresh() {
return state;
}
private Map<String, HostEntry> parse(BufferedReader reader)
private List<HostEntry> parse(BufferedReader reader)
throws IOException {
final Map<String, HostEntry> entries = new LinkedHashMap<>();
final List<HostEntry> current = new ArrayList<>(4);
String line;
final List<HostEntry> entries = new LinkedList<>();
// The man page doesn't say so, but the openssh parser (readconf.c)
// starts out in active mode and thus always applies any lines that
// occur before the first host block. We gather those options in a
// HostEntry for DEFAULT_NAME.
HostEntry defaults = new HostEntry();
current.add(defaults);
entries.put(DEFAULT_NAME, defaults);
HostEntry current = defaults;
entries.add(defaults);
String line;
while ((line = reader.readLine()) != null) {
// OpenSsh ignores trailing comments on a line. Anything after the
// first # on a line is trimmed away (yes, even if the hash is
@ -246,38 +240,17 @@ private Map<String, HostEntry> parse(BufferedReader reader)
String argValue = parts.length > 1 ? parts[1].trim() : ""; //$NON-NLS-1$
if (StringUtils.equalsIgnoreCase(SshConstants.HOST, keyword)) {
current.clear();
for (String name : parseList(argValue)) {
if (name == null || name.isEmpty()) {
// null should not occur, but better be safe than sorry.
continue;
}
HostEntry c = entries.get(name);
if (c == null) {
c = new HostEntry();
entries.put(name, c);
}
current.add(c);
}
continue;
}
if (current.isEmpty()) {
// We received an option outside of a Host block. We
// don't know who this should match against, so skip.
current = new HostEntry(parseList(argValue));
entries.add(current);
continue;
}
if (HostEntry.isListKey(keyword)) {
List<String> args = validate(keyword, parseList(argValue));
for (HostEntry entry : current) {
entry.setValue(keyword, args);
}
current.setValue(keyword, args);
} else if (!argValue.isEmpty()) {
argValue = validate(keyword, dequote(argValue));
for (HostEntry entry : current) {
entry.setValue(keyword, argValue);
}
current.setValue(keyword, argValue);
}
}
@ -300,7 +273,7 @@ private List<String> parseList(String argument) {
int length = argument.length();
while (start < length) {
// Skip whitespace
if (Character.isSpaceChar(argument.charAt(start))) {
if (Character.isWhitespace(argument.charAt(start))) {
start++;
continue;
}
@ -315,7 +288,7 @@ private List<String> parseList(String argument) {
} else {
int stop = start + 1;
while (stop < length
&& !Character.isSpaceChar(argument.charAt(stop))) {
&& !Character.isWhitespace(argument.charAt(stop))) {
stop++;
}
result.add(argument.substring(start, stop));
@ -358,13 +331,6 @@ protected List<String> validate(String key, List<String> value) {
return value;
}
private static boolean isHostMatch(String pattern, String name) {
if (pattern.startsWith("!")) { //$NON-NLS-1$
return !patternMatchesHost(pattern.substring(1), name);
}
return patternMatchesHost(pattern, name);
}
private static boolean patternMatchesHost(String pattern, String name) {
if (pattern.indexOf('*') >= 0 || pattern.indexOf('?') >= 0) {
final FileNameMatcher fn;
@ -389,9 +355,12 @@ private static String dequote(String value) {
private static String stripWhitespace(String value) {
final StringBuilder b = new StringBuilder();
for (int i = 0; i < value.length(); i++) {
if (!Character.isSpaceChar(value.charAt(i)))
b.append(value.charAt(i));
int length = value.length();
for (int i = 0; i < length; i++) {
char ch = value.charAt(i);
if (!Character.isWhitespace(ch)) {
b.append(ch);
}
}
return b.toString();
}
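The switch from Character.isSpaceChar to Character.isWhitespace matters because isSpaceChar only covers Unicode space separators and does not treat a tab as whitespace, so tab-indented config lines were handled incorrectly. A quick standalone check:

class WhitespaceCheck {
	public static void main(String[] args) {
		System.out.println(Character.isSpaceChar('\t'));  // false: the old check missed tabs
		System.out.println(Character.isWhitespace('\t')); // true: the new check skips them
	}
}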
@ -511,6 +480,32 @@ public static class HostEntry implements SshConfigStore.HostConfig {
private Map<String, List<String>> listOptions;
private final List<String> patterns;
// Constructor used to build the merged entry; never matches anything
HostEntry() {
this.patterns = Collections.emptyList();
}
HostEntry(List<String> patterns) {
this.patterns = patterns;
}
boolean matches(String hostName) {
boolean doesMatch = false;
for (String pattern : patterns) {
if (pattern.startsWith("!")) { //$NON-NLS-1$
if (patternMatchesHost(pattern.substring(1), hostName)) {
return false;
}
} else if (!doesMatch
&& patternMatchesHost(pattern, hostName)) {
doesMatch = true;
}
}
return doesMatch;
}
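A conceptual sketch of the negation rule implemented above: a matching negated pattern vetoes the whole entry, while at least one positive pattern must match. The demo below mirrors that logic with a deliberately simplified matcher (exact name or a lone "*"); the real code uses FileNameMatcher globs.

import java.util.Arrays;
import java.util.List;

class NegatedPatternDemo {
	static boolean matches(List<String> patterns, String host) {
		boolean doesMatch = false;
		for (String pattern : patterns) {
			if (pattern.startsWith("!")) {
				if (simpleMatch(pattern.substring(1), host)) {
					return false; // a negated pattern always wins
				}
			} else if (!doesMatch && simpleMatch(pattern, host)) {
				doesMatch = true;
			}
		}
		return doesMatch;
	}

	private static boolean simpleMatch(String pattern, String host) {
		return "*".equals(pattern) || pattern.equals(host);
	}

	public static void main(String[] args) {
		List<String> patterns = Arrays.asList("*", "!bad.example.com");
		System.out.println(matches(patterns, "git.example.com")); // true
		System.out.println(matches(patterns, "bad.example.com")); // false
	}
}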
private static String toKey(String key) {
String k = ALIASES.get(key);
return k != null ? k : key;

View File

@ -397,8 +397,16 @@ public final class ConfigConstants {
/** The "ff" key */
public static final String CONFIG_KEY_FF = "ff";
/**
* The "conflictStyle" key.
*
* @since 5.12
*/
public static final String CONFIG_KEY_CONFLICTSTYLE = "conflictStyle";
/**
* The "checkstat" key
*
* @since 3.0
*/
public static final String CONFIG_KEY_CHECKSTAT = "checkstat";

View File

@ -118,7 +118,7 @@ private SshConstants() {
* Key in an ssh config file; defines signature algorithms for public key
* authentication as a comma-separated list.
*
* @since 5.11
* @since 5.11.1
*/
public static final String PUBKEY_ACCEPTED_ALGORITHMS = "PubkeyAcceptedAlgorithms";

View File

@ -502,8 +502,7 @@ private InputStream filterClean(InputStream in, OperationType opType)
throw new IOException(new FilterFailedException(rc,
filterCommand, getEntryPathString(),
result.getStdout().toByteArray(MAX_EXCEPTION_TEXT_SIZE),
RawParseUtils.decode(result.getStderr()
.toByteArray(MAX_EXCEPTION_TEXT_SIZE))));
result.getStderr().toString(MAX_EXCEPTION_TEXT_SIZE)));
}
return result.getStdout().openInputStreamWithAutoDestroy();
}

View File

@ -0,0 +1,195 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.util.Arrays;
import org.eclipse.jgit.internal.JGitText;
/**
* Base-85 encoder/decoder.
*
* @since 5.12
*/
public final class Base85 {
private static final byte[] ENCODE = ("0123456789" //$NON-NLS-1$
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ" //$NON-NLS-1$
+ "abcdefghijklmnopqrstuvwxyz" //$NON-NLS-1$
+ "!#$%&()*+-;<=>?@^_`{|}~") //$NON-NLS-1$
.getBytes(StandardCharsets.US_ASCII);
private static final int[] DECODE = new int[256];
static {
Arrays.fill(DECODE, -1);
for (int i = 0; i < ENCODE.length; i++) {
DECODE[ENCODE[i]] = i;
}
}
private Base85() {
// No instantiation
}
/**
* Determines the length of the base-85 encoding for {@code rawLength}
* bytes.
*
* @param rawLength
* number of bytes to encode
* @return number of bytes needed for the base-85 encoding of
* {@code rawLength} bytes
*/
public static int encodedLength(int rawLength) {
return (rawLength + 3) / 4 * 5;
}
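Since input is processed in 4-byte groups that each encode to 5 characters, the result is always a multiple of 5. A small illustrative check:

import org.eclipse.jgit.util.Base85;

class EncodedLengthDemo {
	public static void main(String[] args) {
		// (rawLength + 3) / 4 * 5: a partial final group still takes 5 characters.
		System.out.println(Base85.encodedLength(1));  // 5
		System.out.println(Base85.encodedLength(4));  // 5
		System.out.println(Base85.encodedLength(5));  // 10
		System.out.println(Base85.encodedLength(52)); // 65
	}
}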
/**
* Encodes the given {@code data} in Base-85.
*
* @param data
* to encode
* @return encoded data
*/
public static byte[] encode(byte[] data) {
return encode(data, 0, data.length);
}
/**
* Encodes {@code length} bytes of {@code data} in Base-85, beginning at the
* {@code start} index.
*
* @param data
* to encode
* @param start
* index of the first byte to encode
* @param length
* number of bytes to encode
* @return encoded data
*/
public static byte[] encode(byte[] data, int start, int length) {
byte[] result = new byte[encodedLength(length)];
int end = start + length;
int in = start;
int out = 0;
while (in < end) {
// Accumulate remaining bytes MSB first as a 32bit value
long accumulator = ((long) (data[in++] & 0xFF)) << 24;
if (in < end) {
accumulator |= (data[in++] & 0xFF) << 16;
if (in < end) {
accumulator |= (data[in++] & 0xFF) << 8;
if (in < end) {
accumulator |= (data[in++] & 0xFF);
}
}
}
// Write the 32bit value in base-85 encoding, also MSB first
for (int i = 4; i >= 0; i--) {
result[out + i] = ENCODE[(int) (accumulator % 85)];
accumulator /= 85;
}
out += 5;
}
return result;
}
/**
* Decodes the Base-85 {@code encoded} data into a byte array of
* {@code expectedSize} bytes.
*
* @param encoded
* Base-85 encoded data
* @param expectedSize
* of the result
* @return the decoded bytes
* @throws IllegalArgumentException
* if expectedSize doesn't match, the encoded data has a length
* that is not a multiple of 5, or there are invalid characters
* in the encoded data
*/
public static byte[] decode(byte[] encoded, int expectedSize) {
return decode(encoded, 0, encoded.length, expectedSize);
}
/**
* Decodes {@code length} bytes of Base-85 {@code encoded} data, beginning
* at the {@code start} index, into a byte array of {@code expectedSize}
* bytes.
*
* @param encoded
* Base-85 encoded data
* @param start
* index at which the data to decode starts in {@code encoded}
* @param length
* of the Base-85 encoded data
* @param expectedSize
* of the result
* @return the decoded bytes
* @throws IllegalArgumentException
* if expectedSize doesn't match, {@code length} is not a
* multiple of 5, or there are invalid characters in the encoded
* data
*/
public static byte[] decode(byte[] encoded, int start, int length,
int expectedSize) {
if (length % 5 != 0) {
throw new IllegalArgumentException(JGitText.get().base85length);
}
byte[] result = new byte[expectedSize];
int end = start + length;
int in = start;
int out = 0;
while (in < end && out < expectedSize) {
// Accumulate 5 bytes, "MSB" first
long accumulator = 0;
for (int i = 4; i >= 0; i--) {
int val = DECODE[encoded[in++] & 0xFF];
if (val < 0) {
throw new IllegalArgumentException(MessageFormat.format(
JGitText.get().base85invalidChar,
Integer.toHexString(encoded[in - 1] & 0xFF)));
}
accumulator = accumulator * 85 + val;
}
if (accumulator > 0xFFFF_FFFFL) {
throw new IllegalArgumentException(
MessageFormat.format(JGitText.get().base85overflow,
Long.toHexString(accumulator)));
}
// Write remaining bytes, MSB first
result[out++] = (byte) (accumulator >>> 24);
if (out < expectedSize) {
result[out++] = (byte) (accumulator >>> 16);
if (out < expectedSize) {
result[out++] = (byte) (accumulator >>> 8);
if (out < expectedSize) {
result[out++] = (byte) accumulator;
}
}
}
}
// Should have exhausted 'in' and filled 'out' completely
if (in < end) {
throw new IllegalArgumentException(
MessageFormat.format(JGitText.get().base85tooLong,
Integer.valueOf(expectedSize)));
}
if (out < expectedSize) {
throw new IllegalArgumentException(
MessageFormat.format(JGitText.get().base85tooShort,
Integer.valueOf(expectedSize)));
}
return result;
}
}
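A round-trip sketch of the codec; the payload bytes are arbitrary:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import org.eclipse.jgit.util.Base85;

class Base85RoundTrip {
	public static void main(String[] args) {
		byte[] raw = "some binary data!".getBytes(StandardCharsets.ISO_8859_1);
		byte[] encoded = Base85.encode(raw);
		// The decoder needs the expected raw length, because the final group
		// is padded up to a full 5 encoded characters.
		byte[] decoded = Base85.decode(encoded, raw.length);
		System.out.println(Arrays.equals(raw, decoded)); // true
	}
}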

View File

@ -18,6 +18,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import org.eclipse.jgit.internal.JGitText;
@ -212,6 +213,24 @@ public byte[] toByteArray() throws IOException {
return out;
}
/**
 * Convert the first {@code limit} bytes of the buffer content to a String.
 *
 * @param limit
 *            the maximum number of bytes to convert to String
 * @return the first {@code limit} bytes of the buffer content, decoded as
 *         a String.
* @since 5.12
*/
public String toString(int limit) {
try {
return RawParseUtils.decode(toByteArray(limit));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
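A minimal usage sketch, mirroring how the checkout/filter code above now truncates captured stderr; the helper class and the 1024-byte limit are arbitrary:

import java.io.IOException;
import org.eclipse.jgit.util.TemporaryBuffer;

class StderrSnippet {
	static String firstKilobyte(byte[] stderr) throws IOException {
		TemporaryBuffer b = new TemporaryBuffer.Heap(1024 * 1024);
		b.write(stderr);
		// Decodes at most the first 1024 bytes (via RawParseUtils.decode).
		return b.toString(1024);
	}
}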
/**
* Convert this buffer's contents into a contiguous byte array. If this size
* of the buffer exceeds the limit only return the first {@code limit} bytes

View File

@ -0,0 +1,206 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.io;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.StreamCorruptedException;
import java.text.MessageFormat;
import org.eclipse.jgit.internal.JGitText;
/**
* An {@link InputStream} that applies a binary delta to a base on the fly.
* <p>
* Delta application to a base needs random access to the base data. The delta
* is expressed as a sequence of copy and insert instructions. A copy
* instruction has the form "COPY fromOffset length" and says "copy length bytes
* from the base, starting at offset fromOffset, to the result". An insert
* instruction has the form "INSERT length" followed by length bytes and says
* "copy the next length bytes from the delta to the result".
* </p>
* <p>
* These instructions are generated using a content-defined chunking algorithm
* (currently C git uses the standard Rabin variant; but there are others that
* could be used) that identifies equal chunks. It is entirely possible that a
* later copy instruction has a fromOffset that is before the fromOffset of an
* earlier copy instruction.
* </p>
* <p>
* This makes it impossible to stream the base.
* </p>
* <p>
* JGit is limited to 2GB maximum size for the base since array indices are
 * signed 32bit values.
 * </p>
*
* @since 5.12
*/
public class BinaryDeltaInputStream extends InputStream {
private final byte[] base;
private final InputStream delta;
private long resultLength;
private long toDeliver = -1;
private int fromBase;
private int fromDelta;
private int baseOffset = -1;
/**
* Creates a new {@link BinaryDeltaInputStream} that applies {@code delta}
* to {@code base}.
*
* @param base
* data to apply the delta to
* @param delta
* {@link InputStream} delivering the delta to apply
*/
public BinaryDeltaInputStream(byte[] base, InputStream delta) {
this.base = base;
this.delta = delta;
}
@Override
public int read() throws IOException {
int b = readNext();
if (b >= 0) {
toDeliver--;
}
return b;
}
private void initialize() throws IOException {
long baseSize = readVarInt(delta);
if (baseSize > Integer.MAX_VALUE || baseSize < 0
|| (int) baseSize != base.length) {
throw new IOException(MessageFormat.format(
JGitText.get().binaryDeltaBaseLengthMismatch,
Integer.valueOf(base.length), Long.valueOf(baseSize)));
}
resultLength = readVarInt(delta);
if (resultLength < 0) {
throw new StreamCorruptedException(
JGitText.get().binaryDeltaInvalidResultLength);
}
toDeliver = resultLength;
baseOffset = 0;
}
private int readNext() throws IOException {
if (baseOffset < 0) {
initialize();
}
if (fromBase > 0) {
fromBase--;
return base[baseOffset++] & 0xFF;
} else if (fromDelta > 0) {
fromDelta--;
return delta.read();
}
int command = delta.read();
if (command < 0) {
return -1;
}
if ((command & 0x80) != 0) {
// Decode offset and length to read from base
long copyOffset = 0;
for (int i = 1, shift = 0; i < 0x10; i *= 2, shift += 8) {
if ((command & i) != 0) {
copyOffset |= ((long) next(delta)) << shift;
}
}
int copySize = 0;
for (int i = 0x10, shift = 0; i < 0x80; i *= 2, shift += 8) {
if ((command & i) != 0) {
copySize |= next(delta) << shift;
}
}
if (copySize == 0) {
copySize = 0x10000;
}
if (copyOffset > base.length - copySize) {
throw new StreamCorruptedException(MessageFormat.format(
JGitText.get().binaryDeltaInvalidOffset,
Long.valueOf(copyOffset), Integer.valueOf(copySize)));
}
baseOffset = (int) copyOffset;
fromBase = copySize;
return readNext();
} else if (command != 0) {
// The next 'command' bytes come from the delta
fromDelta = command - 1;
return delta.read();
} else {
// Zero is reserved
throw new StreamCorruptedException(
JGitText.get().unsupportedCommand0);
}
}
private int next(InputStream in) throws IOException {
int b = in.read();
if (b < 0) {
throw new EOFException();
}
return b;
}
private long readVarInt(InputStream in) throws IOException {
long val = 0;
int shift = 0;
int b;
do {
b = next(in);
val |= ((long) (b & 0x7f)) << shift;
shift += 7;
} while ((b & 0x80) != 0);
return val;
}
/**
* Tells the expected size of the final result.
*
* @return the size
* @throws IOException
* if the size cannot be determined from {@code delta}
*/
public long getExpectedResultSize() throws IOException {
if (baseOffset < 0) {
initialize();
}
return resultLength;
}
/**
* Tells whether the delta has been fully consumed, and the expected number
* of bytes for the combined result have been read from this
* {@link BinaryDeltaInputStream}.
*
* @return whether delta application was successful
*/
public boolean isFullyConsumed() {
try {
return toDeliver == 0 && delta.read() < 0;
} catch (IOException e) {
return toDeliver == 0;
}
}
@Override
public void close() throws IOException {
delta.close();
}
}
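To make the instruction format above concrete, a sketch that applies a tiny hand-built delta; the byte values are constructed for this example, following the varint/COPY/INSERT encoding described in the class comment:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.eclipse.jgit.util.io.BinaryDeltaInputStream;

class DeltaDemo {
	public static void main(String[] args) throws IOException {
		byte[] base = "Hello, world!".getBytes(StandardCharsets.US_ASCII); // 13 bytes
		byte[] delta = {
				0x0D,              // varint: base size 13
				0x0A,              // varint: result size 10
				(byte) 0x90, 0x05, // COPY fromOffset 0, length 5 -> "Hello"
				0x05, ' ', 'J', 'G', 'i', 't' // INSERT 5 literal bytes
		};
		ByteArrayOutputStream result = new ByteArrayOutputStream();
		try (BinaryDeltaInputStream in = new BinaryDeltaInputStream(base,
				new ByteArrayInputStream(delta))) {
			System.out.println(in.getExpectedResultSize()); // 10
			int b;
			while ((b = in.read()) >= 0) {
				result.write(b);
			}
			System.out.println(in.isFullyConsumed()); // true
		}
		System.out.println(result.toString("US-ASCII")); // Hello JGit
	}
}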

View File

@ -0,0 +1,113 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.io;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.StreamCorruptedException;
import java.text.MessageFormat;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.util.Base85;
/**
* A stream that decodes git binary patch data on the fly.
*
* @since 5.12
*/
public class BinaryHunkInputStream extends InputStream {
private final InputStream in;
private int lineNumber;
private byte[] buffer;
private int pos = 0;
/**
* Creates a new {@link BinaryHunkInputStream}.
*
* @param in
* {@link InputStream} to read the base-85 encoded patch data
* from
*/
public BinaryHunkInputStream(InputStream in) {
this.in = in;
}
@Override
public int read() throws IOException {
if (pos < 0) {
return -1;
}
if (buffer == null || pos == buffer.length) {
fillBuffer();
}
if (pos >= 0) {
return buffer[pos++] & 0xFF;
}
return -1;
}
@Override
public void close() throws IOException {
in.close();
buffer = null;
}
private void fillBuffer() throws IOException {
int length = in.read();
if (length < 0) {
pos = length;
buffer = null;
return;
}
lineNumber++;
// Length is encoded with characters, A..Z for 1..26 and a..z for 27..52
if ('A' <= length && length <= 'Z') {
length = length - 'A' + 1;
} else if ('a' <= length && length <= 'z') {
length = length - 'a' + 27;
} else {
throw new StreamCorruptedException(MessageFormat.format(
JGitText.get().binaryHunkInvalidLength,
Integer.valueOf(lineNumber), Integer.toHexString(length)));
}
byte[] encoded = new byte[Base85.encodedLength(length)];
for (int i = 0; i < encoded.length; i++) {
int b = in.read();
if (b < 0 || b == '\n') {
throw new EOFException(MessageFormat.format(
JGitText.get().binaryHunkLineTooShort,
Integer.valueOf(lineNumber)));
}
encoded[i] = (byte) b;
}
// Must be followed by a newline; tolerate EOF.
int b = in.read();
if (b >= 0 && b != '\n') {
throw new StreamCorruptedException(MessageFormat.format(
JGitText.get().binaryHunkMissingNewline,
Integer.valueOf(lineNumber)));
}
try {
buffer = Base85.decode(encoded, length);
} catch (IllegalArgumentException e) {
StreamCorruptedException ex = new StreamCorruptedException(
MessageFormat.format(JGitText.get().binaryHunkDecodeError,
Integer.valueOf(lineNumber)));
ex.initCause(e);
throw ex;
}
pos = 0;
}
}

View File

@ -0,0 +1,116 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.io;
import java.io.IOException;
import java.io.OutputStream;
import org.eclipse.jgit.util.Base85;
/**
* An {@link OutputStream} that encodes data for a git binary patch.
*
* @since 5.12
*/
public class BinaryHunkOutputStream extends OutputStream {
private static final int MAX_BYTES = 52;
private final OutputStream out;
private final byte[] buffer = new byte[MAX_BYTES];
private int pos;
/**
* Creates a new {@link BinaryHunkOutputStream}.
*
* @param out
* {@link OutputStream} to write the encoded data to
*/
public BinaryHunkOutputStream(OutputStream out) {
this.out = out;
}
/**
* Flushes and closes this stream, and closes the underlying
* {@link OutputStream}.
*/
@Override
public void close() throws IOException {
flush();
out.close();
}
/**
* Writes any buffered output as a binary patch line to the underlying
* {@link OutputStream} and flushes that stream, too.
*/
@Override
public void flush() throws IOException {
if (pos > 0) {
encode(buffer, 0, pos);
pos = 0;
}
out.flush();
}
@Override
public void write(int b) throws IOException {
buffer[pos++] = (byte) b;
if (pos == buffer.length) {
encode(buffer, 0, pos);
pos = 0;
}
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
if (len == 0) {
return;
}
int toCopy = len;
int in = off;
if (pos > 0) {
// Fill the buffer
int chunk = Math.min(toCopy, buffer.length - pos);
System.arraycopy(b, in, buffer, pos, chunk);
in += chunk;
pos += chunk;
toCopy -= chunk;
if (pos == buffer.length) {
encode(buffer, 0, pos);
pos = 0;
}
if (toCopy == 0) {
return;
}
}
while (toCopy >= MAX_BYTES) {
encode(b, in, MAX_BYTES);
toCopy -= MAX_BYTES;
in += MAX_BYTES;
}
if (toCopy > 0) {
System.arraycopy(b, in, buffer, 0, toCopy);
pos = toCopy;
}
}
private void encode(byte[] data, int off, int length) throws IOException {
if (length <= 26) {
out.write('A' + length - 1);
} else {
out.write('a' + length - 27);
}
out.write(Base85.encode(data, off, length));
out.write('\n');
}
}
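Finally, a hedged round-trip sketch combining the two hunk streams; the 100-byte payload is arbitrary:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import org.eclipse.jgit.util.io.BinaryHunkInputStream;
import org.eclipse.jgit.util.io.BinaryHunkOutputStream;

class BinaryHunkRoundTrip {
	public static void main(String[] args) throws IOException {
		byte[] data = new byte[100]; // spans two encoded lines (52 + 48 bytes)
		for (int i = 0; i < data.length; i++) {
			data[i] = (byte) i;
		}
		ByteArrayOutputStream encoded = new ByteArrayOutputStream();
		try (BinaryHunkOutputStream out = new BinaryHunkOutputStream(encoded)) {
			out.write(data);
		}
		// Each encoded line: a length character ('A'..'Z' for 1..26, 'a'..'z'
		// for 27..52), the base-85 payload, and a trailing '\n'.
		ByteArrayOutputStream decoded = new ByteArrayOutputStream();
		try (BinaryHunkInputStream in = new BinaryHunkInputStream(
				new ByteArrayInputStream(encoded.toByteArray()))) {
			int b;
			while ((b = in.read()) >= 0) {
				decoded.write(b);
			}
		}
		System.out.println(Arrays.equals(data, decoded.toByteArray())); // true
	}
}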