Merge changes from topic "apply"

* changes:
  ApplyCommand: fix "no newline at end" detection
  ApplyCommand: handle completely empty context lines in text patches
  ApplyCommand: use byte arrays for text patches, not strings
  ApplyCommand: support binary patches
  ApplyCommand: add a stream to apply a delta patch
  ApplyCommand: add streams to read/write binary patch hunks
  ApplyCommand: add a base-85 codec
  ApplyCommand: convert to git internal format before applying patch
This commit is contained in:
Matthias Sohn 2021-05-26 08:12:17 -04:00 committed by Gerrit Code Review @ Eclipse.org
commit 7a925205db
47 changed files with 1791 additions and 53 deletions

View File

@ -1,5 +1,6 @@
#Sat Dec 20 21:21:24 CET 2008
eclipse.preferences.version=1
encoding//tst-rsrc/org/eclipse/jgit/diff/umlaut.patch=ISO-8859-1
encoding//tst-rsrc/org/eclipse/jgit/diff/umlaut_PostImage=ISO-8859-1
encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_BothISO88591.patch=ISO-8859-1
encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_Convert.patch=ISO-8859-1
encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_DiffCc.patch=ISO-8859-1

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

@ -1,5 +1,5 @@
/*
* Copyright (C) 2011, 2020 IBM Corporation and others
* Copyright (C) 2011, 2021 IBM Corporation and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@ -9,6 +9,7 @@
*/
package org.eclipse.jgit.api;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@ -18,11 +19,20 @@
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import org.eclipse.jgit.api.errors.PatchApplyException;
import org.eclipse.jgit.api.errors.PatchFormatException;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandFactory;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.diff.RawText;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.util.IO;
import org.junit.Test;
public class ApplyCommandTest extends RepositoryTestCase {
@ -57,6 +67,260 @@ private ApplyResult init(final String name, final boolean preExists,
}
}
/**
 * Applies the patch for the given test resource with core.autocrlf set as
 * given and verifies that exactly one file was updated and that its content
 * matches the post-image. Always resets core.autocrlf afterwards.
 *
 * @param name
 *            test resource name, also the path of the file in the work tree
 * @param preExists
 *            whether the file exists before the patch is applied
 * @param autoCrLf
 *            value for core.autocrlf while the patch is applied
 * @throws Exception
 *             if applying fails or the result does not match the post-image
 */
private void checkCrLfApply(String name, boolean preExists, boolean autoCrLf)
		throws Exception {
	try {
		db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
				ConfigConstants.CONFIG_KEY_AUTOCRLF, autoCrLf);
		ApplyResult result = init(name, preExists, true);
		assertEquals(1, result.getUpdatedFiles().size());
		assertEquals(new File(db.getWorkTree(), name),
				result.getUpdatedFiles().get(0));
		checkFile(new File(db.getWorkTree(), name),
				b.getString(0, b.size(), false));
	} finally {
		db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
				ConfigConstants.CONFIG_KEY_AUTOCRLF);
	}
}

@Test
public void testCrLf() throws Exception {
	// Existing file, applied with core.autocrlf = true.
	checkCrLfApply("crlf", true, true);
}

@Test
public void testCrLfOff() throws Exception {
	// Same resource as testCrLf, but with core.autocrlf = false.
	checkCrLfApply("crlf", true, false);
}

@Test
public void testCrLfEmptyCommitted() throws Exception {
	checkCrLfApply("crlf3", true, true);
}

@Test
public void testCrLfNewFile() throws Exception {
	// File does not exist beforehand; the patch creates it.
	checkCrLfApply("crlf4", false, true);
}

@Test
public void testPatchWithCrLf() throws Exception {
	checkCrLfApply("crlf2", true, false);
}
@Test
public void testPatchWithCrLf2() throws Exception {
	String name = "crlf2";
	try (Git git = new Git(db)) {
		// Commit the pre-image with core.autocrlf = false so the file
		// content is stored exactly as read from the test resource.
		db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
				ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
		a = new RawText(readFile(name + "_PreImage"));
		write(new File(db.getWorkTree(), name),
				a.getString(0, a.size(), false));
		git.add().addFilepattern(name).call();
		git.commit().setMessage("PreImage").call();
		b = new RawText(readFile(name + "_PostImage"));
		// Switch core.autocrlf to true before applying the patch.
		db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
				ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
		ApplyResult result = git.apply()
				.setPatch(getTestResource(name + ".patch")).call();
		// Exactly one file must have been updated, and its content must
		// match the post-image resource.
		assertEquals(1, result.getUpdatedFiles().size());
		assertEquals(new File(db.getWorkTree(), name),
				result.getUpdatedFiles().get(0));
		checkFile(new File(db.getWorkTree(), name),
				b.getString(0, b.size(), false));
	} finally {
		// Restore the default core.autocrlf setting.
		db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null,
				ConfigConstants.CONFIG_KEY_AUTOCRLF);
	}
}
// Clean/smudge filter used by testFiltering. The smudgetest test resources
// were created with C git using a clean filter sed -e "s/A/E/g" and the
// smudge filter sed -e "s/E/A/g". To keep the test independent of the
// presence of sed, implement this with a built-in filter.
private static class ReplaceFilter extends FilterCommand {

	private final char toReplace;

	private final char replacement;

	ReplaceFilter(InputStream in, OutputStream out, char toReplace,
			char replacement) {
		super(in, out);
		this.toReplace = toReplace;
		this.replacement = replacement;
	}

	@Override
	public int run() throws IOException {
		int next = in.read();
		if (next < 0) {
			// End of input: close both streams and report completion.
			in.close();
			out.close();
			return -1;
		}
		// Substitute the configured character, copy everything else.
		if ((next & 0xFF) == toReplace) {
			out.write(replacement);
		} else {
			out.write(next);
		}
		return 1;
	}
}
@Test
public void testFiltering() throws Exception {
	// Set up filter: register built-in clean (A -> E) and smudge (E -> A)
	// filters under jgit://builtin URLs.
	FilterCommandFactory clean = (repo, in, out) -> {
		return new ReplaceFilter(in, out, 'A', 'E');
	};
	FilterCommandFactory smudge = (repo, in, out) -> {
		return new ReplaceFilter(in, out, 'E', 'A');
	};
	FilterCommandRegistry.register("jgit://builtin/a2e/clean", clean);
	FilterCommandRegistry.register("jgit://builtin/a2e/smudge", smudge);
	try (Git git = new Git(db)) {
		// Wire the registered filters into the repo config and attach them
		// to the "smudgetest" file via .gitattributes.
		Config config = db.getConfig();
		config.setString(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
				"clean", "jgit://builtin/a2e/clean");
		config.setString(ConfigConstants.CONFIG_FILTER_SECTION, "a2e",
				"smudge", "jgit://builtin/a2e/smudge");
		write(new File(db.getWorkTree(), ".gitattributes"),
				"smudgetest filter=a2e");
		git.add().addFilepattern(".gitattributes").call();
		git.commit().setMessage("Attributes").call();
		ApplyResult result = init("smudgetest", true, true);
		// The patch must apply and produce the expected post-image despite
		// the clean/smudge conversion.
		assertEquals(1, result.getUpdatedFiles().size());
		assertEquals(new File(db.getWorkTree(), "smudgetest"),
				result.getUpdatedFiles().get(0));
		checkFile(new File(db.getWorkTree(), "smudgetest"),
				b.getString(0, b.size(), false));
	} finally {
		// Tear down filter
		FilterCommandRegistry.unregister("jgit://builtin/a2e/clean");
		FilterCommandRegistry.unregister("jgit://builtin/a2e/smudge");
	}
}
/**
 * Like {@link #checkBinary(String, boolean, int)} with an expected update
 * count of one file.
 *
 * @param name
 *            test resource name
 * @param hasPreImage
 *            whether a pre-image is committed before applying the patch
 * @throws Exception
 *             on any failure
 */
private void checkBinary(String name, boolean hasPreImage)
		throws Exception {
	checkBinary(name, hasPreImage, 1);
}
/**
 * Applies the patch {@code name + ".patch"} and verifies the resulting file
 * content byte-for-byte against the post-image resource.
 */
private void checkBinary(String name, boolean hasPreImage,
		int numberOfFiles) throws Exception {
	try (Git git = new Git(db)) {
		File target = new File(db.getWorkTree(), name);
		byte[] expected = IO
				.readWholeStream(getTestResource(name + "_PostImage"), 0)
				.array();
		if (hasPreImage) {
			// Commit the pre-image so the patch has something to modify.
			byte[] initial = IO
					.readWholeStream(getTestResource(name + "_PreImage"), 0)
					.array();
			Files.write(target.toPath(), initial);
			git.add().addFilepattern(name).call();
			git.commit().setMessage("PreImage").call();
		}
		ApplyResult result = git.apply()
				.setPatch(getTestResource(name + ".patch")).call();
		assertEquals(numberOfFiles, result.getUpdatedFiles().size());
		assertEquals(target, result.getUpdatedFiles().get(0));
		// Compare raw bytes; checkFile() would go through strings.
		assertArrayEquals(expected, Files.readAllBytes(target.toPath()));
	}
}
@Test
public void testBinaryDelta() throws Exception {
	// Applies the "delta" binary patch against its committed pre-image.
	checkBinary("delta", true);
}

@Test
public void testBinaryLiteral() throws Exception {
	// Applies the "literal" binary patch against its committed pre-image.
	checkBinary("literal", true);
}

@Test
public void testBinaryLiteralAdd() throws Exception {
	// Applies the "literal_add" binary patch; no pre-image is committed.
	checkBinary("literal_add", false);
}

@Test
public void testEncodingChange() throws Exception {
	// This is a text patch that changes a file containing ÄÖÜ in UTF-8 to
	// the same characters in ISO-8859-1. The patch file itself uses mixed
	// encoding. Since checkFile() works with strings use the binary check.
	checkBinary("umlaut", true);
}

@Test
public void testEmptyLine() throws Exception {
	// C git accepts completely empty lines as empty context lines.
	// According to comments in the C git sources (apply.c), newer GNU diff
	// may produce such diffs.
	checkBinary("emptyLine", true);
}

@Test
public void testMultiFileNoNewline() throws Exception {
	// This test needs two files. One is in the test resources.
	try (Git git = new Git(db)) {
		Files.write(db.getWorkTree().toPath().resolve("yello"),
				"yello".getBytes(StandardCharsets.US_ASCII));
		git.add().addFilepattern("yello").call();
		git.commit().setMessage("yello").call();
	}
	// The patch is expected to update two files, hence numberOfFiles == 2.
	checkBinary("hello", true, 2);
}
@Test
public void testAddA1() throws Exception {
ApplyResult result = init("A1", false, true);

View File

@ -0,0 +1,87 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import java.nio.charset.StandardCharsets;
import org.junit.Test;
/**
* Tests for {@link Base85}.
*/
/**
 * Tests for {@link Base85}.
 */
public class Base85Test {

	private static final String VALID_CHARS = "0123456789"
			+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
			+ "!#$%&()*+-;<=>?@^_`{|}~";

	@Test
	public void testChars() {
		// Try every possible byte value as the final character of a
		// five-character group: only the 85 valid characters may decode.
		for (int c = 0; c < 256; c++) {
			byte[] group = { '1', '2', '3', '4', (byte) c };
			if (VALID_CHARS.indexOf(c) < 0) {
				assertThrows(IllegalArgumentException.class,
						() -> Base85.decode(group, 4));
			} else {
				assertNotNull(Base85.decode(group, 4));
			}
		}
	}

	// Encodes, checks the encoded length, then decodes and checks the
	// round-trip reproduces the input exactly.
	private void roundtrip(byte[] data, int expectedLength) {
		byte[] encoded = Base85.encode(data);
		assertEquals(expectedLength, encoded.length);
		assertArrayEquals(data, Base85.decode(encoded, data.length));
	}

	private void roundtrip(String data, int expectedLength) {
		roundtrip(data.getBytes(StandardCharsets.US_ASCII), expectedLength);
	}

	@Test
	public void testPadding() {
		// Each started group of four input bytes yields five output
		// characters: ceil(n / 4) * 5.
		String[] inputs = { "", "a", "ab", "abc", "abcd", "abcde", "abcdef",
				"abcdefg", "abcdefgh", "abcdefghi" };
		for (String input : inputs) {
			roundtrip(input, (input.length() + 3) / 4 * 5);
		}
	}

	@Test
	public void testBinary() {
		// Non-ASCII bytes and embedded zeros must round-trip, too.
		roundtrip(new byte[] { 1 }, 5);
		roundtrip(new byte[] { 1, 2 }, 5);
		roundtrip(new byte[] { 1, 2, 3 }, 5);
		roundtrip(new byte[] { 1, 2, 3, 4 }, 5);
		roundtrip(new byte[] { 1, 2, 3, 4, 5 }, 10);
		roundtrip(new byte[] { 1, 2, 3, 4, 5, 0, 0, 0 }, 10);
		roundtrip(new byte[] { 1, 2, 3, 4, 0, 0, 0, 5 }, 10);
	}

	@Test
	public void testOverflow() {
		// "~~~~~" encodes a value larger than 32 bits; decoding must fail.
		byte[] allTildes = { '~', '~', '~', '~', '~' };
		IllegalArgumentException e = assertThrows(
				IllegalArgumentException.class,
				() -> Base85.decode(allTildes, 4));
		assertTrue(e.getMessage().contains("overflow"));
	}
}

View File

@ -0,0 +1,103 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.io;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.zip.InflaterInputStream;
import org.junit.Test;
/**
* Crude tests for the {@link BinaryDeltaInputStream} using delta diffs
* generated by C git.
*/
public class BinaryDeltaInputStreamTest {

	// Loads a test resource (a base-85 encoded, deflated binary hunk) from
	// the class path.
	private InputStream getBinaryHunk(String name) {
		return this.getClass().getResourceAsStream(name);
	}

	@Test
	public void testBinaryDelta() throws Exception {
		// Prepare our test data
		byte[] data = new byte[8192];
		for (int i = 0; i < data.length; i++) {
			data[i] = (byte) (255 - (i % 256));
		}
		// Same, but with five 'x' inserted in the middle.
		int middle = data.length / 2;
		byte[] newData = new byte[data.length + 5];
		System.arraycopy(data, 0, newData, 0, middle);
		for (int i = 0; i < 5; i++) {
			newData[middle + i] = 'x';
		}
		System.arraycopy(data, middle, newData, middle + 5, middle);
		// delta1.forward has the instructions
		// @formatter:off
		// COPY 0 4096
		// INSERT 5 xxxxx
		// COPY 0 4096
		// @formatter:on
		// Note that the way we built newData could be expressed as
		// @formatter:off
		// COPY 0 4096
		// INSERT 5 xxxxx
		// COPY 4096 4096
		// @formatter:on
		// Apply the forward delta to data: the stream stack un-base-85s the
		// hunk, inflates it, and applies the delta instructions to the base.
		try (ByteArrayOutputStream out = new ByteArrayOutputStream();
				BinaryDeltaInputStream input = new BinaryDeltaInputStream(data,
						new InflaterInputStream(new BinaryHunkInputStream(
								getBinaryHunk("delta1.forward"))))) {
			byte[] buf = new byte[1024];
			int n;
			while ((n = input.read(buf)) >= 0) {
				out.write(buf, 0, n);
			}
			assertArrayEquals(newData, out.toByteArray());
			// The whole delta must have been consumed.
			assertTrue(input.isFullyConsumed());
		}
		// delta1.reverse has the instructions
		// @formatter:off
		// COPY 0 4096
		// COPY 256 3840
		// COPY 256 256
		// @formatter:on
		// Note that there are alternatives, for instance
		// @formatter:off
		// COPY 0 4096
		// COPY 4101 4096
		// @formatter:on
		// or
		// @formatter:off
		// COPY 0 4096
		// COPY 0 4096
		// @formatter:on
		// Apply the reverse delta to newData; the result must be the
		// original data again.
		try (ByteArrayOutputStream out = new ByteArrayOutputStream();
				BinaryDeltaInputStream input = new BinaryDeltaInputStream(
						newData,
						new InflaterInputStream(new BinaryHunkInputStream(
								getBinaryHunk("delta1.reverse"))))) {
			// The delta header advertises the size of the result.
			long expectedSize = input.getExpectedResultSize();
			assertEquals(data.length, expectedSize);
			byte[] buf = new byte[1024];
			int n;
			while ((n = input.read(buf)) >= 0) {
				out.write(buf, 0, n);
			}
			assertArrayEquals(data, out.toByteArray());
			assertTrue(input.isFullyConsumed());
		}
	}
}

View File

@ -0,0 +1,146 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.io;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import org.junit.Test;
/**
* Tests for {@link BinaryHunkInputStream} and {@link BinaryHunkOutputStream}.
*/
/**
 * Tests for {@link BinaryHunkInputStream} and {@link BinaryHunkOutputStream}.
 */
public class BinaryHunkStreamTest {

	/**
	 * Builds deterministic test content: a descending byte pattern of the
	 * given length. Previously this loop was duplicated in every test.
	 *
	 * @param length
	 *            number of bytes to generate
	 * @return the generated content
	 */
	private static byte[] testData(int length) {
		byte[] data = new byte[length];
		for (int i = 0; i < data.length; i++) {
			data[i] = (byte) (255 - (i % 256));
		}
		return data;
	}

	@Test
	public void testRoundtripWholeBuffer() throws IOException {
		// Lengths up to 520 + 52 cover all padding cases and multi-line
		// hunks (the encoding works on lines of up to 52 input bytes).
		for (int length = 1; length < 520 + 52; length++) {
			byte[] data = testData(length);
			try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
					BinaryHunkOutputStream out = new BinaryHunkOutputStream(
							bos)) {
				out.write(data);
				out.flush();
				byte[] encoded = bos.toByteArray();
				// Encoding must actually change the representation.
				assertFalse(Arrays.equals(data, encoded));
				try (BinaryHunkInputStream in = new BinaryHunkInputStream(
						new ByteArrayInputStream(encoded))) {
					byte[] decoded = new byte[data.length];
					int newLength = in.read(decoded);
					// Fixed expected/actual order (expected comes first).
					assertEquals(decoded.length, newLength);
					assertEquals(-1, in.read());
					assertArrayEquals(data, decoded);
				}
			}
		}
	}

	@Test
	public void testRoundtripChunks() throws IOException {
		for (int length = 1; length < 520 + 52; length++) {
			byte[] data = testData(length);
			try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
					BinaryHunkOutputStream out = new BinaryHunkOutputStream(
							bos)) {
				// Write in two chunks to exercise partial writes.
				out.write(data, 0, data.length / 2);
				out.write(data, data.length / 2, data.length - data.length / 2);
				out.flush();
				byte[] encoded = bos.toByteArray();
				assertFalse(Arrays.equals(data, encoded));
				try (BinaryHunkInputStream in = new BinaryHunkInputStream(
						new ByteArrayInputStream(encoded))) {
					byte[] decoded = new byte[data.length];
					int p = 0;
					int n;
					// Read in small, odd-sized chunks.
					while ((n = in.read(decoded, p,
							Math.min(decoded.length - p, 57))) >= 0) {
						p += n;
						if (p == decoded.length) {
							break;
						}
					}
					assertEquals(decoded.length, p);
					assertEquals(-1, in.read());
					assertArrayEquals(data, decoded);
				}
			}
		}
	}

	@Test
	public void testRoundtripBytes() throws IOException {
		for (int length = 1; length < 520 + 52; length++) {
			byte[] data = testData(length);
			try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
					BinaryHunkOutputStream out = new BinaryHunkOutputStream(
							bos)) {
				// Exercise the single-byte write path.
				for (int i = 0; i < data.length; i++) {
					out.write(data[i]);
				}
				out.flush();
				byte[] encoded = bos.toByteArray();
				assertFalse(Arrays.equals(data, encoded));
				try (BinaryHunkInputStream in = new BinaryHunkInputStream(
						new ByteArrayInputStream(encoded))) {
					byte[] decoded = new byte[data.length];
					// Exercise the single-byte read path.
					for (int i = 0; i < decoded.length; i++) {
						int val = in.read();
						assertTrue(0 <= val && val <= 255);
						decoded[i] = (byte) val;
					}
					assertEquals(-1, in.read());
					assertArrayEquals(data, decoded);
				}
			}
		}
	}

	@Test
	public void testRoundtripWithClose() throws IOException {
		for (int length = 1; length < 520 + 52; length++) {
			byte[] data = testData(length);
			try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
				// Rely on close() instead of flush() to complete the hunk.
				try (BinaryHunkOutputStream out = new BinaryHunkOutputStream(
						bos)) {
					out.write(data);
				}
				byte[] encoded = bos.toByteArray();
				assertFalse(Arrays.equals(data, encoded));
				try (BinaryHunkInputStream in = new BinaryHunkInputStream(
						new ByteArrayInputStream(encoded))) {
					byte[] decoded = new byte[data.length];
					int newLength = in.read(decoded);
					assertEquals(decoded.length, newLength);
					assertEquals(-1, in.read());
					assertArrayEquals(data, decoded);
				}
			}
		}
	}
}

View File

@ -13,6 +13,9 @@ ambiguousObjectAbbreviation=Object abbreviation {0} is ambiguous
aNewObjectIdIsRequired=A NewObjectId is required.
anExceptionOccurredWhileTryingToAddTheIdOfHEAD=An exception occurred while trying to add the Id of HEAD
anSSHSessionHasBeenAlreadyCreated=An SSH session has been already created
applyBinaryBaseOidWrong=Cannot apply binary patch; OID for file {0} does not match
applyBinaryOidTooShort=Binary patch for file {0} does not have full IDs
applyBinaryResultOidWrong=Result of binary patch for file {0} has wrong OID.
applyingCommit=Applying {0}
archiveFormatAlreadyAbsent=Archive format already absent: {0}
archiveFormatAlreadyRegistered=Archive format already registered with different implementation: {0}
@ -37,7 +40,19 @@ badRef=Bad ref: {0}: {1}
badSectionEntry=Bad section entry: {0}
badShallowLine=Bad shallow line: {0}
bareRepositoryNoWorkdirAndIndex=Bare Repository has neither a working tree, nor an index
base85invalidChar=Invalid base-85 character: 0x{0}
base85length=Base-85 encoded data must have a length that is a multiple of 5
base85overflow=Base-85 value overflow, does not fit into 32 bits: 0x{0}
base85tooLong=Extra base-85 encoded data for output size of {0} bytes
base85tooShort=Base-85 data decoded into less than {0} bytes
baseLengthIncorrect=base length incorrect
binaryDeltaBaseLengthMismatch=Binary delta base length does not match, expected {0}, got {1}
binaryDeltaInvalidOffset=Binary delta offset + length too large: {0} + {1}
binaryDeltaInvalidResultLength=Binary delta expected result length is negative
binaryHunkDecodeError=Binary hunk, line {0}: invalid input
binaryHunkInvalidLength=Binary hunk, line {0}: input corrupt; expected length byte, got 0x{1}
binaryHunkLineTooShort=Binary hunk, line {0}: input ended prematurely
binaryHunkMissingNewline=Binary hunk, line {0}: input line not terminated by newline
bitmapMissingObject=Bitmap at {0} is missing {1}.
bitmapsMustBePrepared=Bitmaps must be prepared before they may be written.
blameNotCommittedYet=Not Committed Yet

View File

@ -1,5 +1,5 @@
/*
* Copyright (C) 2011, 2020 IBM Corporation and others
* Copyright (C) 2011, 2021 IBM Corporation and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@ -9,29 +9,68 @@
*/
package org.eclipse.jgit.api;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.zip.InflaterInputStream;
import org.eclipse.jgit.api.errors.FilterFailedException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.PatchApplyException;
import org.eclipse.jgit.api.errors.PatchFormatException;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.diff.DiffEntry.ChangeType;
import org.eclipse.jgit.diff.RawText;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheCheckout;
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.patch.BinaryHunk;
import org.eclipse.jgit.patch.FileHeader;
import org.eclipse.jgit.patch.FileHeader.PatchType;
import org.eclipse.jgit.patch.HunkHeader;
import org.eclipse.jgit.patch.Patch;
import org.eclipse.jgit.treewalk.FileTreeIterator;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
import org.eclipse.jgit.treewalk.filter.AndTreeFilter;
import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter;
import org.eclipse.jgit.treewalk.filter.PathFilterGroup;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FS.ExecutionResult;
import org.eclipse.jgit.util.FileUtils;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
import org.eclipse.jgit.util.StringUtils;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
import org.eclipse.jgit.util.io.BinaryDeltaInputStream;
import org.eclipse.jgit.util.io.BinaryHunkInputStream;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
import org.eclipse.jgit.util.sha1.SHA1;
/**
* Apply a patch to files and/or to the index.
@ -45,7 +84,7 @@ public class ApplyCommand extends GitCommand<ApplyResult> {
private InputStream in;
/**
* Constructs the command if the patch is to be applied to the index.
* Constructs the command.
*
* @param repo
*/
@ -79,6 +118,7 @@ public ApplyCommand setPatch(InputStream in) {
public ApplyResult call() throws GitAPIException, PatchFormatException,
PatchApplyException {
checkCallable();
setCallable(false);
ApplyResult r = new ApplyResult();
try {
final Patch p = new Patch();
@ -87,19 +127,22 @@ public ApplyResult call() throws GitAPIException, PatchFormatException,
} finally {
in.close();
}
if (!p.getErrors().isEmpty())
if (!p.getErrors().isEmpty()) {
throw new PatchFormatException(p.getErrors());
}
Repository repository = getRepository();
DirCache cache = repository.readDirCache();
for (FileHeader fh : p.getFiles()) {
ChangeType type = fh.getChangeType();
File f = null;
switch (type) {
case ADD:
f = getFile(fh.getNewPath(), true);
apply(f, fh);
apply(repository, fh.getNewPath(), cache, f, fh);
break;
case MODIFY:
f = getFile(fh.getOldPath(), false);
apply(f, fh);
apply(repository, fh.getOldPath(), cache, f, fh);
break;
case DELETE:
f = getFile(fh.getOldPath(), false);
@ -118,14 +161,14 @@ public ApplyResult call() throws GitAPIException, PatchFormatException,
throw new PatchApplyException(MessageFormat.format(
JGitText.get().renameFileFailed, f, dest), e);
}
apply(dest, fh);
apply(repository, fh.getOldPath(), cache, dest, fh);
break;
case COPY:
f = getFile(fh.getOldPath(), false);
File target = getFile(fh.getNewPath(), false);
FileUtils.mkdirs(target.getParentFile(), true);
Files.copy(f.toPath(), target.toPath());
apply(target, fh);
apply(repository, fh.getOldPath(), cache, target, fh);
}
r.addUpdatedFile(f);
}
@ -133,14 +176,13 @@ public ApplyResult call() throws GitAPIException, PatchFormatException,
throw new PatchApplyException(MessageFormat.format(
JGitText.get().patchApplyException, e.getMessage()), e);
}
setCallable(false);
return r;
}
private File getFile(String path, boolean create)
throws PatchApplyException {
File f = new File(getRepository().getWorkTree(), path);
if (create)
if (create) {
try {
File parent = f.getParentFile();
FileUtils.mkdirs(parent, true);
@ -149,22 +191,366 @@ private File getFile(String path, boolean create)
throw new PatchApplyException(MessageFormat.format(
JGitText.get().createNewFileFailed, f), e);
}
}
return f;
}
/**
 * Applies one {@link FileHeader} to the work-tree file {@code f} at
 * {@code path}, determining CR-LF handling and clean/smudge filters via a
 * {@link TreeWalk} over the work tree and the index.
 *
 * @param repository
 *            repository the patch is applied in
 * @param path
 *            repository-relative path of the file
 * @param cache
 *            index, used by the walk to resolve filters and attributes
 * @param f
 *            file in the work tree to update
 * @param fh
 *            patch data for this file
 * @throws IOException
 * @throws PatchApplyException
 */
private void apply(Repository repository, String path, DirCache cache,
		File f, FileHeader fh) throws IOException, PatchApplyException {
	// Nothing to apply for a PatchType.BINARY patch.
	if (PatchType.BINARY.equals(fh.getPatchType())) {
		return;
	}
	boolean convertCrLf = needsCrLfConversion(f, fh);
	// Use a TreeWalk with a DirCacheIterator to pick up the correct
	// clean/smudge filters. CR-LF handling is completely determined by
	// whether the file or the patch have CR-LF line endings.
	try (TreeWalk walk = new TreeWalk(repository)) {
		walk.setOperationType(OperationType.CHECKIN_OP);
		FileTreeIterator files = new FileTreeIterator(repository);
		int fileIdx = walk.addTree(files);
		int cacheIdx = walk.addTree(new DirCacheIterator(cache));
		files.setDirCacheIterator(walk, cacheIdx);
		// Restrict the walk to exactly this (non-ignored) path.
		walk.setFilter(AndTreeFilter.create(
				PathFilterGroup.createFromStrings(path),
				new NotIgnoredFilter(fileIdx)));
		walk.setRecursive(true);
		if (walk.next()) {
			// If the file on disk has no newline characters, convertCrLf
			// will be false. In that case we want to honor the normal git
			// settings.
			EolStreamType streamType = convertCrLf ? EolStreamType.TEXT_CRLF
					: walk.getEolStreamType(OperationType.CHECKOUT_OP);
			String command = walk.getFilterCommand(
					Constants.ATTR_FILTER_TYPE_SMUDGE);
			CheckoutMetadata checkOut = new CheckoutMetadata(streamType, command);
			FileTreeIterator file = walk.getTree(fileIdx,
					FileTreeIterator.class);
			if (file != null) {
				if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
					applyBinary(repository, path, f, fh,
							file::openEntryStream, file.getEntryObjectId(),
							checkOut);
				} else {
					command = walk.getFilterCommand(
							Constants.ATTR_FILTER_TYPE_CLEAN);
					RawText raw;
					// Can't use file.openEntryStream() as it would do CR-LF
					// conversion as usual, not as wanted by us.
					try (InputStream input = filterClean(repository, path,
							new FileInputStream(f), convertCrLf, command)) {
						raw = new RawText(
								IO.readWholeStream(input, 0).array());
					}
					applyText(repository, path, raw, f, fh, checkOut);
				}
				return;
			}
		}
	}
	// File ignored? Fall back: apply without attribute-derived filters,
	// honoring only the CR-LF conversion determined above.
	RawText raw;
	CheckoutMetadata checkOut;
	if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
		checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null);
		applyBinary(repository, path, f, fh, () -> new FileInputStream(f),
				null, checkOut);
	} else {
		if (convertCrLf) {
			// Normalize CR-LF to LF for applying; write back as CR-LF.
			try (InputStream input = EolStreamTypeUtil.wrapInputStream(
					new FileInputStream(f), EolStreamType.TEXT_LF)) {
				raw = new RawText(IO.readWholeStream(input, 0).array());
			}
			checkOut = new CheckoutMetadata(EolStreamType.TEXT_CRLF, null);
		} else {
			raw = new RawText(f);
			checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null);
		}
		applyText(repository, path, raw, f, fh, checkOut);
	}
}
/**
 * Determines whether a text patch with LF-only line endings is being applied
 * to a file that uses CR-LF, in which case line endings must be converted.
 */
private boolean needsCrLfConversion(File f, FileHeader fileHeader)
		throws IOException {
	// Binary patches never get line-ending conversion.
	if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
		return false;
	}
	if (hasCrLf(fileHeader)) {
		// The patch itself already uses CR-LF: apply as-is.
		return false;
	}
	// LF-only patch: convert if the file on disk is CR-LF text.
	try (InputStream input = new FileInputStream(f)) {
		return RawText.isCrLfText(input);
	}
}
/**
 * Scans all hunks of the given file header for an old line (context or
 * removed, i.e., starting with ' ' or '-') that ends in CR-LF.
 *
 * @param fileHeader
 *            patch data for one file
 * @return {@code true} if any old line ends in CR-LF
 */
private static boolean hasCrLf(FileHeader fileHeader) {
	if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
		return false;
	}
	for (HunkHeader header : fileHeader.getHunks()) {
		byte[] buf = header.getBuffer();
		int hunkEnd = header.getEndOffset();
		int lineStart = header.getStartOffset();
		// Walk the raw hunk buffer line by line.
		while (lineStart < hunkEnd) {
			int nextLineStart = RawParseUtils.nextLF(buf, lineStart);
			if (nextLineStart > hunkEnd) {
				// Clamp to the hunk's end.
				nextLineStart = hunkEnd;
			}
			if (nextLineStart <= lineStart) {
				// No progress possible; stop scanning this hunk.
				break;
			}
			// Only lines with at least a marker character and a terminator
			// can carry a CR-LF ending.
			if (nextLineStart - lineStart > 1) {
				char first = (char) (buf[lineStart] & 0xFF);
				if (first == ' ' || first == '-') {
					// It's an old line. Does it end in CR-LF?
					if (buf[nextLineStart - 2] == '\r') {
						return true;
					}
				}
			}
			lineStart = nextLineStart;
		}
	}
	return false;
}
/**
 * Runs file content through CR-LF-to-LF conversion (if requested) and
 * through the configured clean filter, if any.
 *
 * @param repository
 *            repository defining the filter
 * @param path
 *            repository-relative path of the file, for error reporting
 * @param fromFile
 *            content of the file on disk
 * @param convertCrLf
 *            whether to convert CR-LF to LF before filtering
 * @param filterCommand
 *            clean filter command; may be {@code null} or empty
 * @return a stream delivering the filtered content
 * @throws IOException
 *             if the filter cannot be run or exits with a non-zero code
 */
private InputStream filterClean(Repository repository, String path,
		InputStream fromFile, boolean convertCrLf, String filterCommand)
		throws IOException {
	InputStream input = fromFile;
	if (convertCrLf) {
		input = EolStreamTypeUtil.wrapInputStream(input,
				EolStreamType.TEXT_LF);
	}
	if (StringUtils.isEmptyOrNull(filterCommand)) {
		// No clean filter configured.
		return input;
	}
	if (FilterCommandRegistry.isRegistered(filterCommand)) {
		// Built-in filter: run it to completion into a temporary buffer.
		LocalFile buffer = new TemporaryBuffer.LocalFile(null);
		FilterCommand command = FilterCommandRegistry.createFilterCommand(
				filterCommand, repository, input, buffer);
		while (command.run() != -1) {
			// loop as long as command.run() tells there is work to do
		}
		return buffer.openInputStreamWithAutoDestroy();
	}
	// External filter: run the command in a shell with the file content as
	// its standard input.
	FS fs = repository.getFS();
	ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand,
			new String[0]);
	filterProcessBuilder.directory(repository.getWorkTree());
	filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
			repository.getDirectory().getAbsolutePath());
	ExecutionResult result;
	try {
		// Fix: feed the (possibly converted) file content 'input' to the
		// filter, not the command's patch stream field 'in', which has
		// already been closed after parsing the patch.
		result = fs.execute(filterProcessBuilder, input);
	} catch (IOException | InterruptedException e) {
		throw new IOException(
				new FilterFailedException(e, filterCommand, path));
	}
	int rc = result.getRc();
	if (rc != 0) {
		throw new IOException(new FilterFailedException(rc, filterCommand,
				path, result.getStdout().toByteArray(4096), RawParseUtils
						.decode(result.getStderr().toByteArray(4096))));
	}
	return result.getStdout().openInputStreamWithAutoDestroy();
}
/**
* @param f
* @param fh
* @throws IOException
* @throws PatchApplyException
* Something that can supply an {@link InputStream}.
*/
private void apply(File f, FileHeader fh)
	private interface StreamSupplier {

		/**
		 * Opens the stream.
		 *
		 * @return the {@link InputStream}; the caller is responsible for
		 *         closing it
		 * @throws IOException
		 *             if the stream cannot be opened
		 */
		InputStream load() throws IOException;
	}
/**
* We write the patch result to a {@link TemporaryBuffer} and then use
* {@link DirCacheCheckout}.getContent() to run the result through the CR-LF
* and smudge filters. DirCacheCheckout needs an ObjectLoader, not a
* TemporaryBuffer, so this class bridges between the two, making any Stream
* provided by a {@link StreamSupplier} look like an ordinary git blob to
* DirCacheCheckout.
*/
private static class StreamLoader extends ObjectLoader {
private StreamSupplier data;
private long size;
StreamLoader(StreamSupplier data, long length) {
this.data = data;
this.size = length;
}
@Override
public int getType() {
return Constants.OBJ_BLOB;
}
@Override
public long getSize() {
return size;
}
@Override
public boolean isLarge() {
return true;
}
@Override
public byte[] getCachedBytes() throws LargeObjectException {
throw new LargeObjectException();
}
@Override
public ObjectStream openStream()
throws MissingObjectException, IOException {
return new ObjectStream.Filter(getType(), getSize(),
new BufferedInputStream(data.load()));
}
}
	/**
	 * Primes the given hash with the canonical git object header for a blob of
	 * the given size: "blob" SP decimal-size NUL. Hashing the content
	 * afterwards thus yields the git blob id of that content.
	 *
	 * @param hash
	 *            {@link SHA1} to update
	 * @param size
	 *            of the blob content in bytes
	 */
	private void initHash(SHA1 hash, long size) {
		hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB));
		hash.update((byte) ' ');
		hash.update(Constants.encodeASCII(size));
		hash.update((byte) 0);
	}
private ObjectId hash(File f) throws IOException {
SHA1 hash = SHA1.newInstance();
initHash(hash, f.length());
try (InputStream input = new FileInputStream(f)) {
byte[] buf = new byte[8192];
int n;
while ((n = input.read(buf)) >= 0) {
hash.update(buf, 0, n);
}
}
return hash.toObjectId();
}
private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
String path)
throws PatchApplyException, IOException {
boolean hashOk = false;
if (id != null) {
hashOk = baseId.equals(id);
if (!hashOk && ChangeType.ADD.equals(type)
&& ObjectId.zeroId().equals(baseId)) {
// We create the file first. The OID of an empty file is not the
// zero id!
hashOk = Constants.EMPTY_BLOB_ID.equals(id);
}
} else {
if (ObjectId.zeroId().equals(baseId)) {
// File empty is OK.
hashOk = !f.exists() || f.length() == 0;
} else {
hashOk = baseId.equals(hash(f));
}
}
if (!hashOk) {
throw new PatchApplyException(MessageFormat
.format(JGitText.get().applyBinaryBaseOidWrong, path));
}
}
	/**
	 * Applies a binary patch (literal replacement or delta) to the given file.
	 * The result is streamed through {@link DirCacheCheckout#getContent} (for
	 * CR-LF and smudge filtering) into a temporary buffer, hashed on the fly,
	 * and copied over the file only if the result hash matches the patch.
	 *
	 * @param repository
	 *            current {@link Repository}
	 * @param path
	 *            workspace path of the file; used for filtering and messages
	 * @param f
	 *            the {@link File} to write the result to
	 * @param fh
	 *            {@link FileHeader} describing the binary patch
	 * @param loader
	 *            supplies the current (base) content of the file
	 * @param id
	 *            blob id of the current content, or {@code null} if unknown
	 * @param checkOut
	 *            {@link CheckoutMetadata} governing CR-LF/smudge conversion
	 * @throws PatchApplyException
	 *             if ids are incomplete or a base/result hash check fails
	 * @throws IOException
	 *             on I/O errors
	 */
	private void applyBinary(Repository repository, String path, File f,
			FileHeader fh, StreamSupplier loader, ObjectId id,
			CheckoutMetadata checkOut)
			throws PatchApplyException, IOException {
		if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
			throw new PatchApplyException(MessageFormat
					.format(JGitText.get().applyBinaryOidTooShort, path));
		}
		BinaryHunk hunk = fh.getForwardBinaryHunk();
		// A BinaryHunk has the start at the "literal" or "delta" token. Data
		// starts on the next line.
		int start = RawParseUtils.nextLF(hunk.getBuffer(),
				hunk.getStartOffset());
		int length = hunk.getEndOffset() - start;
		SHA1 hash = SHA1.newInstance();
		// Write to a buffer and copy to the file only if everything was fine
		TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
		try {
			switch (hunk.getType()) {
			case LITERAL_DEFLATED:
				// This just overwrites the file. We need to check the hash of
				// the base.
				checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f,
						path);
				initHash(hash, hunk.getSize());
				// Base-85 decode, inflate, and hash the content on the fly
				// while DirCacheCheckout runs it through the output filters.
				try (OutputStream out = buffer;
						InputStream inflated = new SHA1InputStream(hash,
								new InflaterInputStream(
										new BinaryHunkInputStream(
												new ByteArrayInputStream(
														hunk.getBuffer(), start,
														length))))) {
					DirCacheCheckout.getContent(repository, path, checkOut,
							new StreamLoader(() -> inflated, hunk.getSize()),
							null, out);
					if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) {
						throw new PatchApplyException(MessageFormat.format(
								JGitText.get().applyBinaryResultOidWrong,
								path));
					}
				}
				try (InputStream bufIn = buffer.openInputStream()) {
					Files.copy(bufIn, f.toPath(),
							StandardCopyOption.REPLACE_EXISTING);
				}
				break;
			case DELTA_DEFLATED:
				// Unfortunately delta application needs random access to the
				// base to construct the result.
				byte[] base;
				try (InputStream input = loader.load()) {
					base = IO.readWholeStream(input, 0).array();
				}
				// At least stream the result!
				try (BinaryDeltaInputStream input = new BinaryDeltaInputStream(
						base,
						new InflaterInputStream(new BinaryHunkInputStream(
								new ByteArrayInputStream(hunk.getBuffer(),
										start, length))))) {
					long finalSize = input.getExpectedResultSize();
					initHash(hash, finalSize);
					try (OutputStream out = buffer;
							SHA1InputStream hashed = new SHA1InputStream(hash,
									input)) {
						DirCacheCheckout.getContent(repository, path, checkOut,
								new StreamLoader(() -> hashed, finalSize), null,
								out);
						if (!fh.getNewId().toObjectId()
								.equals(hash.toObjectId())) {
							throw new PatchApplyException(MessageFormat.format(
									JGitText.get().applyBinaryResultOidWrong,
									path));
						}
					}
				}
				try (InputStream bufIn = buffer.openInputStream()) {
					Files.copy(bufIn, f.toPath(),
							StandardCopyOption.REPLACE_EXISTING);
				}
				break;
			default:
				break;
			}
		} finally {
			// Always discard the temporary buffer, even on failure.
			buffer.destroy();
		}
	}
private void applyText(Repository repository, String path, RawText rt,
File f, FileHeader fh, CheckoutMetadata checkOut)
throws IOException, PatchApplyException {
RawText rt = new RawText(f);
List<String> oldLines = new ArrayList<>(rt.size());
for (int i = 0; i < rt.size(); i++)
oldLines.add(rt.getString(i));
List<String> newLines = new ArrayList<>(oldLines);
List<ByteBuffer> oldLines = new ArrayList<>(rt.size());
for (int i = 0; i < rt.size(); i++) {
oldLines.add(rt.getRawString(i));
}
List<ByteBuffer> newLines = new ArrayList<>(oldLines);
int afterLastHunk = 0;
int lineNumberShift = 0;
int lastHunkNewLine = -1;
@ -182,9 +568,9 @@ private void apply(File f, FileHeader fh)
b.length);
RawText hrt = new RawText(b);
List<String> hunkLines = new ArrayList<>(hrt.size());
List<ByteBuffer> hunkLines = new ArrayList<>(hrt.size());
for (int i = 0; i < hrt.size(); i++) {
hunkLines.add(hrt.getString(i));
hunkLines.add(hrt.getRawString(i));
}
if (hh.getNewStartLine() == 0) {
@ -253,8 +639,13 @@ && canApplyAt(hunkLines, newLines, 0)) {
lineNumberShift = applyAt - hh.getNewStartLine() + 1;
int sz = hunkLines.size();
for (int j = 1; j < sz; j++) {
String hunkLine = hunkLines.get(j);
switch (hunkLine.charAt(0)) {
ByteBuffer hunkLine = hunkLines.get(j);
if (!hunkLine.hasRemaining()) {
// Completely empty line; accept as empty context line
applyAt++;
continue;
}
switch (hunkLine.array()[hunkLine.position()]) {
case ' ':
applyAt++;
break;
@ -262,7 +653,7 @@ && canApplyAt(hunkLines, newLines, 0)) {
newLines.remove(applyAt);
break;
case '+':
newLines.add(applyAt++, hunkLine.substring(1));
newLines.add(applyAt++, slice(hunkLine, 1));
break;
default:
break;
@ -271,39 +662,64 @@ && canApplyAt(hunkLines, newLines, 0)) {
afterLastHunk = applyAt;
}
if (!isNoNewlineAtEndOfFile(fh)) {
newLines.add(""); //$NON-NLS-1$
newLines.add(null);
}
if (!rt.isMissingNewlineAtEnd()) {
oldLines.add(""); //$NON-NLS-1$
oldLines.add(null);
}
if (!isChanged(oldLines, newLines)) {
return; // Don't touch the file
if (oldLines.equals(newLines)) {
return; // Unchanged; don't touch the file
}
try (Writer fw = Files.newBufferedWriter(f.toPath())) {
for (Iterator<String> l = newLines.iterator(); l.hasNext();) {
fw.write(l.next());
if (l.hasNext()) {
// Don't bother handling line endings - if it was Windows,
// the \r is still there!
fw.write('\n');
TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
try {
try (OutputStream out = buffer) {
for (Iterator<ByteBuffer> l = newLines.iterator(); l
.hasNext();) {
ByteBuffer line = l.next();
if (line == null) {
// Must be the marker for the final newline
break;
}
out.write(line.array(), line.position(), line.remaining());
if (l.hasNext()) {
out.write('\n');
}
}
}
try (OutputStream output = new FileOutputStream(f)) {
DirCacheCheckout.getContent(repository, path, checkOut,
new StreamLoader(buffer::openInputStream,
buffer.length()),
null, output);
}
} finally {
buffer.destroy();
}
getRepository().getFS().setExecute(f, fh.getNewMode() == FileMode.EXECUTABLE_FILE);
repository.getFS().setExecute(f,
fh.getNewMode() == FileMode.EXECUTABLE_FILE);
}
private boolean canApplyAt(List<String> hunkLines, List<String> newLines,
int line) {
private boolean canApplyAt(List<ByteBuffer> hunkLines,
List<ByteBuffer> newLines, int line) {
int sz = hunkLines.size();
int limit = newLines.size();
int pos = line;
for (int j = 1; j < sz; j++) {
String hunkLine = hunkLines.get(j);
switch (hunkLine.charAt(0)) {
ByteBuffer hunkLine = hunkLines.get(j);
if (!hunkLine.hasRemaining()) {
// Empty line. Accept as empty context line.
if (pos >= limit || newLines.get(pos).hasRemaining()) {
return false;
}
pos++;
continue;
}
switch (hunkLine.array()[hunkLine.position()]) {
case ' ':
case '-':
if (pos >= limit
|| !newLines.get(pos).equals(hunkLine.substring(1))) {
|| !newLines.get(pos).equals(slice(hunkLine, 1))) {
return false;
}
pos++;
@ -315,13 +731,9 @@ private boolean canApplyAt(List<String> hunkLines, List<String> newLines,
return true;
}
private static boolean isChanged(List<String> ol, List<String> nl) {
if (ol.size() != nl.size())
return true;
for (int i = 0; i < ol.size(); i++)
if (!ol.get(i).equals(nl.get(i)))
return true;
return false;
private ByteBuffer slice(ByteBuffer b, int off) {
int newOffset = b.position() + off;
return ByteBuffer.wrap(b.array(), newOffset, b.limit() - newOffset);
}
private boolean isNoNewlineAtEndOfFile(FileHeader fh) {
@ -330,8 +742,51 @@ private boolean isNoNewlineAtEndOfFile(FileHeader fh) {
return false;
}
HunkHeader lastHunk = hunks.get(hunks.size() - 1);
RawText lhrt = new RawText(lastHunk.getBuffer());
byte[] buf = new byte[lastHunk.getEndOffset()
- lastHunk.getStartOffset()];
System.arraycopy(lastHunk.getBuffer(), lastHunk.getStartOffset(), buf,
0, buf.length);
RawText lhrt = new RawText(buf);
return lhrt.getString(lhrt.size() - 1)
.equals("\\ No newline at end of file"); //$NON-NLS-1$
}
/**
* An {@link InputStream} that updates a {@link SHA1} on every byte read.
* The hash is supposed to have been initialized before reading starts.
*/
private static class SHA1InputStream extends InputStream {
private final SHA1 hash;
private final InputStream in;
SHA1InputStream(SHA1 hash, InputStream in) {
this.hash = hash;
this.in = in;
}
@Override
public int read() throws IOException {
int b = in.read();
if (b >= 0) {
hash.update((byte) b);
}
return b;
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
int n = in.read(b, off, len);
if (n > 0) {
hash.update(b, off, n);
}
return n;
}
@Override
public void close() throws IOException {
in.close();
}
}
}

View File

@ -1,6 +1,6 @@
/*
* Copyright (C) 2009, Google Inc.
* Copyright (C) 2008-2009, Johannes E. Schindelin <johannes.schindelin@gmx.de> and others
* Copyright (C) 2008-2021, Johannes E. Schindelin <johannes.schindelin@gmx.de> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@ -16,6 +16,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.eclipse.jgit.errors.BinaryBlobException;
import org.eclipse.jgit.errors.LargeObjectException;
@ -164,6 +165,27 @@ public String getString(int i) {
return getString(i, i + 1, true);
}
/**
* Get the raw text for a single line.
*
* @param i
* index of the line to extract. Note this is 0-based, so line
* number 1 is actually index 0.
* @return the text for the line, without a trailing LF, as a
* {@link ByteBuffer} that is backed by a slice of the
* {@link #getRawContent() raw content}, with the buffer's position
* on the start of the line and the limit at the end.
* @since 5.12
*/
public ByteBuffer getRawString(int i) {
int s = getStart(i);
int e = getEnd(i);
if (e > 0 && content[e - 1] == '\n') {
e--;
}
return ByteBuffer.wrap(content, s, e - s);
}
/**
* Get the text for a region of lines.
*

View File

@ -41,6 +41,9 @@ public static JGitText get() {
/***/ public String aNewObjectIdIsRequired;
/***/ public String anExceptionOccurredWhileTryingToAddTheIdOfHEAD;
/***/ public String anSSHSessionHasBeenAlreadyCreated;
/***/ public String applyBinaryBaseOidWrong;
/***/ public String applyBinaryOidTooShort;
/***/ public String applyBinaryResultOidWrong;
/***/ public String applyingCommit;
/***/ public String archiveFormatAlreadyAbsent;
/***/ public String archiveFormatAlreadyRegistered;
@ -65,7 +68,19 @@ public static JGitText get() {
/***/ public String badSectionEntry;
/***/ public String badShallowLine;
/***/ public String bareRepositoryNoWorkdirAndIndex;
/***/ public String base85invalidChar;
/***/ public String base85length;
/***/ public String base85overflow;
/***/ public String base85tooLong;
/***/ public String base85tooShort;
/***/ public String baseLengthIncorrect;
/***/ public String binaryDeltaBaseLengthMismatch;
/***/ public String binaryDeltaInvalidOffset;
/***/ public String binaryDeltaInvalidResultLength;
/***/ public String binaryHunkDecodeError;
/***/ public String binaryHunkInvalidLength;
/***/ public String binaryHunkLineTooShort;
/***/ public String binaryHunkMissingNewline;
/***/ public String bitmapMissingObject;
/***/ public String bitmapsMustBePrepared;
/***/ public String blameNotCommittedYet;

View File

@ -0,0 +1,195 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.util.Arrays;
import org.eclipse.jgit.internal.JGitText;
/**
 * Base-85 encoder/decoder.
 *
 * @since 5.12
 */
public final class Base85 {

	// The git base-85 alphabet: digits, upper- and lowercase letters, then
	// selected punctuation; index in this table is the encoded value 0..84.
	private static final byte[] ENCODE = ("0123456789" //$NON-NLS-1$
			+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ" //$NON-NLS-1$
			+ "abcdefghijklmnopqrstuvwxyz" //$NON-NLS-1$
			+ "!#$%&()*+-;<=>?@^_`{|}~") //$NON-NLS-1$
					.getBytes(StandardCharsets.US_ASCII);

	// Reverse lookup: byte value -> 0..84, or -1 for characters not in the
	// alphabet.
	private static final int[] DECODE = new int[256];

	static {
		Arrays.fill(DECODE, -1);
		for (int i = 0; i < ENCODE.length; i++) {
			DECODE[ENCODE[i]] = i;
		}
	}

	private Base85() {
		// No instantiation
	}

	/**
	 * Determines the length of the base-85 encoding for {@code rawLength}
	 * bytes.
	 *
	 * @param rawLength
	 *            number of bytes to encode
	 * @return number of bytes needed for the base-85 encoding of
	 *         {@code rawLength} bytes
	 */
	public static int encodedLength(int rawLength) {
		// Every group of up to 4 raw bytes becomes 5 encoded characters.
		return (rawLength + 3) / 4 * 5;
	}

	/**
	 * Encodes the given {@code data} in Base-85.
	 *
	 * @param data
	 *            to encode
	 * @return encoded data
	 */
	public static byte[] encode(byte[] data) {
		return encode(data, 0, data.length);
	}

	/**
	 * Encodes {@code length} bytes of {@code data} in Base-85, beginning at the
	 * {@code start} index.
	 *
	 * @param data
	 *            to encode
	 * @param start
	 *            index of the first byte to encode
	 * @param length
	 *            number of bytes to encode
	 * @return encoded data
	 */
	public static byte[] encode(byte[] data, int start, int length) {
		byte[] result = new byte[encodedLength(length)];
		int end = start + length;
		int in = start;
		int out = 0;
		while (in < end) {
			// Accumulate remaining bytes MSB first as a 32bit value
			long accumulator = ((long) (data[in++] & 0xFF)) << 24;
			if (in < end) {
				accumulator |= (data[in++] & 0xFF) << 16;
				if (in < end) {
					accumulator |= (data[in++] & 0xFF) << 8;
					if (in < end) {
						accumulator |= (data[in++] & 0xFF);
					}
				}
			}
			// Write the 32bit value in base-85 encoding, also MSB first
			for (int i = 4; i >= 0; i--) {
				result[out + i] = ENCODE[(int) (accumulator % 85)];
				accumulator /= 85;
			}
			out += 5;
		}
		return result;
	}

	/**
	 * Decodes the Base-85 {@code encoded} data into a byte array of
	 * {@code expectedSize} bytes.
	 *
	 * @param encoded
	 *            Base-85 encoded data
	 * @param expectedSize
	 *            of the result
	 * @return the decoded bytes
	 * @throws IllegalArgumentException
	 *             if expectedSize doesn't match, the encoded data has a length
	 *             that is not a multiple of 5, or there are invalid characters
	 *             in the encoded data
	 */
	public static byte[] decode(byte[] encoded, int expectedSize) {
		return decode(encoded, 0, encoded.length, expectedSize);
	}

	/**
	 * Decodes {@code length} bytes of Base-85 {@code encoded} data, beginning
	 * at the {@code start} index, into a byte array of {@code expectedSize}
	 * bytes.
	 *
	 * @param encoded
	 *            Base-85 encoded data
	 * @param start
	 *            index at which the data to decode starts in {@code encoded}
	 * @param length
	 *            of the Base-85 encoded data
	 * @param expectedSize
	 *            of the result
	 * @return the decoded bytes
	 * @throws IllegalArgumentException
	 *             if expectedSize doesn't match, {@code length} is not a
	 *             multiple of 5, or there are invalid characters in the encoded
	 *             data
	 */
	public static byte[] decode(byte[] encoded, int start, int length,
			int expectedSize) {
		if (length % 5 != 0) {
			throw new IllegalArgumentException(JGitText.get().base85length);
		}
		byte[] result = new byte[expectedSize];
		int end = start + length;
		int in = start;
		int out = 0;
		while (in < end && out < expectedSize) {
			// Accumulate 5 bytes, "MSB" first
			long accumulator = 0;
			for (int i = 4; i >= 0; i--) {
				int val = DECODE[encoded[in++] & 0xFF];
				if (val < 0) {
					throw new IllegalArgumentException(MessageFormat.format(
							JGitText.get().base85invalidChar,
							Integer.toHexString(encoded[in - 1] & 0xFF)));
				}
				accumulator = accumulator * 85 + val;
			}
			// A 5-character group may encode at most 0xFFFFFFFF; anything
			// larger indicates corrupt input.
			if (accumulator > 0xFFFF_FFFFL) {
				throw new IllegalArgumentException(
						MessageFormat.format(JGitText.get().base85overflow,
								Long.toHexString(accumulator)));
			}
			// Write remaining bytes, MSB first
			result[out++] = (byte) (accumulator >>> 24);
			if (out < expectedSize) {
				result[out++] = (byte) (accumulator >>> 16);
				if (out < expectedSize) {
					result[out++] = (byte) (accumulator >>> 8);
					if (out < expectedSize) {
						result[out++] = (byte) accumulator;
					}
				}
			}
		}
		// Should have exhausted 'in' and filled 'out' completely
		if (in < end) {
			throw new IllegalArgumentException(
					MessageFormat.format(JGitText.get().base85tooLong,
							Integer.valueOf(expectedSize)));
		}
		if (out < expectedSize) {
			throw new IllegalArgumentException(
					MessageFormat.format(JGitText.get().base85tooShort,
							Integer.valueOf(expectedSize)));
		}
		return result;
	}
}

View File

@ -0,0 +1,206 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.io;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.StreamCorruptedException;
import java.text.MessageFormat;
import org.eclipse.jgit.internal.JGitText;
/**
 * An {@link InputStream} that applies a binary delta to a base on the fly.
 * <p>
 * Delta application to a base needs random access to the base data. The delta
 * is expressed as a sequence of copy and insert instructions. A copy
 * instruction has the form "COPY fromOffset length" and says "copy length bytes
 * from the base, starting at offset fromOffset, to the result". An insert
 * instruction has the form "INSERT length" followed by length bytes and says
 * "copy the next length bytes from the delta to the result".
 * </p>
 * <p>
 * These instructions are generated using a content-defined chunking algorithm
 * (currently C git uses the standard Rabin variant; but there are others that
 * could be used) that identifies equal chunks. It is entirely possible that a
 * later copy instruction has a fromOffset that is before the fromOffset of an
 * earlier copy instruction.
 * </p>
 * <p>
 * This makes it impossible to stream the base.
 * </p>
 * <p>
 * JGit is limited to 2GB maximum size for the base since array indices are
 * signed 32bit values.
 * </p>
 *
 * @since 5.12
 */
public class BinaryDeltaInputStream extends InputStream {

	private final byte[] base;

	private final InputStream delta;

	// Total result size as announced in the delta header.
	private long resultLength;

	// Number of result bytes still to be delivered; -1 until initialized.
	private long toDeliver = -1;

	// Remaining bytes of a pending COPY instruction, served from 'base'.
	private int fromBase;

	// Remaining bytes of a pending INSERT instruction, served from 'delta'.
	private int fromDelta;

	// Next read position in 'base'; -1 until the header has been parsed.
	private int baseOffset = -1;

	/**
	 * Creates a new {@link BinaryDeltaInputStream} that applies {@code delta}
	 * to {@code base}.
	 *
	 * @param base
	 *            data to apply the delta to
	 * @param delta
	 *            {@link InputStream} delivering the delta to apply
	 */
	public BinaryDeltaInputStream(byte[] base, InputStream delta) {
		this.base = base;
		this.delta = delta;
	}

	@Override
	public int read() throws IOException {
		int b = readNext();
		if (b >= 0) {
			toDeliver--;
		}
		return b;
	}

	// Parses the delta header: base size and result size as varints. Also
	// validates that the base size matches the actual base.
	private void initialize() throws IOException {
		long baseSize = readVarInt(delta);
		if (baseSize > Integer.MAX_VALUE || baseSize < 0
				|| (int) baseSize != base.length) {
			throw new IOException(MessageFormat.format(
					JGitText.get().binaryDeltaBaseLengthMismatch,
					Integer.valueOf(base.length), Long.valueOf(baseSize)));
		}
		resultLength = readVarInt(delta);
		if (resultLength < 0) {
			throw new StreamCorruptedException(
					JGitText.get().binaryDeltaInvalidResultLength);
		}
		toDeliver = resultLength;
		baseOffset = 0;
	}

	// Delivers the next result byte: first drains any pending COPY or INSERT,
	// otherwise decodes the next instruction from the delta.
	private int readNext() throws IOException {
		if (baseOffset < 0) {
			initialize();
		}
		if (fromBase > 0) {
			fromBase--;
			return base[baseOffset++] & 0xFF;
		} else if (fromDelta > 0) {
			fromDelta--;
			return delta.read();
		}
		int command = delta.read();
		if (command < 0) {
			return -1;
		}
		if ((command & 0x80) != 0) {
			// COPY: bits 0-3 select which offset bytes follow, bits 4-6 which
			// size bytes; both little-endian.
			long copyOffset = 0;
			for (int i = 1, shift = 0; i < 0x10; i *= 2, shift += 8) {
				if ((command & i) != 0) {
					copyOffset |= ((long) next(delta)) << shift;
				}
			}
			int copySize = 0;
			for (int i = 0x10, shift = 0; i < 0x80; i *= 2, shift += 8) {
				if ((command & i) != 0) {
					copySize |= next(delta) << shift;
				}
			}
			if (copySize == 0) {
				// A zero size encodes the maximum of 0x10000 bytes.
				copySize = 0x10000;
			}
			if (copyOffset > base.length - copySize) {
				throw new StreamCorruptedException(MessageFormat.format(
						JGitText.get().binaryDeltaInvalidOffset,
						Long.valueOf(copyOffset), Integer.valueOf(copySize)));
			}
			baseOffset = (int) copyOffset;
			fromBase = copySize;
			return readNext();
		} else if (command != 0) {
			// INSERT: the next 'command' bytes come from the delta
			fromDelta = command - 1;
			return delta.read();
		} else {
			// Zero is reserved
			throw new StreamCorruptedException(
					JGitText.get().unsupportedCommand0);
		}
	}

	// Reads one byte, throwing on premature end of the delta.
	private int next(InputStream in) throws IOException {
		int b = in.read();
		if (b < 0) {
			throw new EOFException();
		}
		return b;
	}

	// Reads a little-endian base-128 varint (7 data bits per byte; high bit
	// set means more bytes follow).
	private long readVarInt(InputStream in) throws IOException {
		long val = 0;
		int shift = 0;
		int b;
		do {
			b = next(in);
			val |= ((long) (b & 0x7f)) << shift;
			shift += 7;
		} while ((b & 0x80) != 0);
		return val;
	}

	/**
	 * Tells the expected size of the final result.
	 *
	 * @return the size
	 * @throws IOException
	 *             if the size cannot be determined from {@code delta}
	 */
	public long getExpectedResultSize() throws IOException {
		if (baseOffset < 0) {
			initialize();
		}
		return resultLength;
	}

	/**
	 * Tells whether the delta has been fully consumed, and the expected number
	 * of bytes for the combined result have been read from this
	 * {@link BinaryDeltaInputStream}.
	 *
	 * @return whether delta application was successful
	 */
	public boolean isFullyConsumed() {
		try {
			return toDeliver == 0 && delta.read() < 0;
		} catch (IOException e) {
			// If the delta cannot be read further, fall back to the byte
			// count check alone.
			return toDeliver == 0;
		}
	}

	@Override
	public void close() throws IOException {
		delta.close();
	}
}

View File

@ -0,0 +1,113 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.io;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.StreamCorruptedException;
import java.text.MessageFormat;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.util.Base85;
/**
 * A stream that decodes git binary patch data on the fly.
 *
 * @since 5.12
 */
public class BinaryHunkInputStream extends InputStream {

	private final InputStream in;

	// Current input line number; used only for error messages.
	private int lineNumber;

	// Decoded payload of the current input line; null when nothing buffered.
	private byte[] buffer;

	// Next read position in 'buffer'; negative once EOF has been seen.
	private int pos = 0;

	/**
	 * Creates a new {@link BinaryHunkInputStream}.
	 *
	 * @param in
	 *            {@link InputStream} to read the base-85 encoded patch data
	 *            from
	 */
	public BinaryHunkInputStream(InputStream in) {
		this.in = in;
	}

	@Override
	public int read() throws IOException {
		if (pos < 0) {
			// EOF was reached earlier.
			return -1;
		}
		if (buffer == null || pos == buffer.length) {
			fillBuffer();
		}
		if (pos >= 0) {
			return buffer[pos++] & 0xFF;
		}
		return -1;
	}

	@Override
	public void close() throws IOException {
		in.close();
		buffer = null;
	}

	// Reads and decodes the next patch line: a one-character length prefix
	// followed by the base-85 payload and a newline.
	private void fillBuffer() throws IOException {
		int length = in.read();
		if (length < 0) {
			// End of input: record EOF in 'pos'.
			pos = length;
			buffer = null;
			return;
		}
		lineNumber++;
		// Length is encoded with characters, A..Z for 1..26 and a..z for 27..52
		if ('A' <= length && length <= 'Z') {
			length = length - 'A' + 1;
		} else if ('a' <= length && length <= 'z') {
			length = length - 'a' + 27;
		} else {
			throw new StreamCorruptedException(MessageFormat.format(
					JGitText.get().binaryHunkInvalidLength,
					Integer.valueOf(lineNumber), Integer.toHexString(length)));
		}
		byte[] encoded = new byte[Base85.encodedLength(length)];
		for (int i = 0; i < encoded.length; i++) {
			int b = in.read();
			if (b < 0 || b == '\n') {
				throw new EOFException(MessageFormat.format(
						JGitText.get().binaryHunkInvalidLength,
						Integer.valueOf(lineNumber)));
			}
			encoded[i] = (byte) b;
		}
		// Must be followed by a newline; tolerate EOF.
		int b = in.read();
		if (b >= 0 && b != '\n') {
			throw new StreamCorruptedException(MessageFormat.format(
					JGitText.get().binaryHunkMissingNewline,
					Integer.valueOf(lineNumber)));
		}
		try {
			buffer = Base85.decode(encoded, length);
		} catch (IllegalArgumentException e) {
			StreamCorruptedException ex = new StreamCorruptedException(
					MessageFormat.format(JGitText.get().binaryHunkDecodeError,
							Integer.valueOf(lineNumber)));
			ex.initCause(e);
			throw ex;
		}
		pos = 0;
	}
}

View File

@ -0,0 +1,116 @@
/*
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.io;
import java.io.IOException;
import java.io.OutputStream;
import org.eclipse.jgit.util.Base85;
/**
 * An {@link OutputStream} that encodes data for a git binary patch.
 *
 * @since 5.12
 */
public class BinaryHunkOutputStream extends OutputStream {

	// Maximum number of raw bytes per encoded patch line.
	private static final int MAX_BYTES = 52;

	private final OutputStream sink;

	// Accumulates raw bytes until a full line can be emitted.
	private final byte[] line = new byte[MAX_BYTES];

	// Number of bytes currently buffered in 'line'.
	private int filled;

	/**
	 * Creates a new {@link BinaryHunkOutputStream}.
	 *
	 * @param out
	 *            {@link OutputStream} to write the encoded data to
	 */
	public BinaryHunkOutputStream(OutputStream out) {
		this.sink = out;
	}

	/**
	 * Flushes and closes this stream, and closes the underlying
	 * {@link OutputStream}.
	 */
	@Override
	public void close() throws IOException {
		flush();
		sink.close();
	}

	/**
	 * Writes any buffered output as a binary patch line to the underlying
	 * {@link OutputStream} and flushes that stream, too.
	 */
	@Override
	public void flush() throws IOException {
		if (filled > 0) {
			emit(line, 0, filled);
			filled = 0;
		}
		sink.flush();
	}

	@Override
	public void write(int b) throws IOException {
		line[filled++] = (byte) b;
		if (filled == line.length) {
			emit(line, 0, filled);
			filled = 0;
		}
	}

	@Override
	public void write(byte[] b, int off, int len) throws IOException {
		if (len == 0) {
			return;
		}
		int offset = off;
		int remaining = len;
		if (filled > 0) {
			// Top up the partially filled line first.
			int chunk = Math.min(remaining, line.length - filled);
			System.arraycopy(b, offset, line, filled, chunk);
			offset += chunk;
			filled += chunk;
			remaining -= chunk;
			if (filled == line.length) {
				emit(line, 0, filled);
				filled = 0;
			}
			if (remaining == 0) {
				return;
			}
		}
		// Emit full lines directly from the caller's array.
		while (remaining >= MAX_BYTES) {
			emit(b, offset, MAX_BYTES);
			offset += MAX_BYTES;
			remaining -= MAX_BYTES;
		}
		// Keep any tail for the next write or flush.
		if (remaining > 0) {
			System.arraycopy(b, offset, line, 0, remaining);
			filled = remaining;
		}
	}

	// Writes one encoded patch line: length character ('A'..'Z' for 1..26,
	// 'a'..'z' for 27..52), base-85 payload, newline.
	private void emit(byte[] data, int off, int length) throws IOException {
		if (length <= 26) {
			sink.write('A' + length - 1);
		} else {
			sink.write('a' + length - 27);
		}
		sink.write(Base85.encode(data, off, length));
		sink.write('\n');
	}
}