Reformat PatchApplier and PatchApplierTest

Some lines were too long, some class names were unnecessarily fully
qualified, and one assertEquals() call had its arguments swapped as
assertEquals(actual, expected) when it should have been
assertEquals(expected, actual).

Change-Id: I3b3c46c963afe2fb82a79c1e93970e73778877e5
Signed-off-by: Thomas Wolf <twolf@apache.org>
Thomas Wolf 2022-12-16 23:40:42 +01:00
parent 6722f25d56
commit aeb74f63d4
2 changed files with 65 additions and 59 deletions
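As an aside to the assertEquals remark in the message: JUnit's assertEquals
takes the expected value first, so swapping the arguments never changes
whether a test passes, only how a failure is reported. A minimal sketch of
the pitfall (illustrative only; the class, method, and variable names below
are invented and not part of this commit):

import static org.junit.Assert.assertEquals;

import java.nio.charset.StandardCharsets;

import org.junit.Test;

public class AssertOrderExample {

    @Test
    public void expectedValueGoesFirst() {
        String expectedText = "post-image\n";
        byte[] output = "post-image\n".getBytes(StandardCharsets.UTF_8);
        // assertEquals(expected, actual): a failure message labels the first
        // argument as "expected". With the arguments swapped the report reads
        // "expected:<actual value> but was:<expected value>", which is misleading.
        assertEquals(expectedText, new String(output, StandardCharsets.UTF_8));
    }
}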

PatchApplierTest.java

@@ -73,7 +73,8 @@ public abstract static class Base extends RepositoryTestCase {
protected void init(String aName, boolean preExists, boolean postExists)
throws Exception {
/* Patch and pre/postimage are read from data org.eclipse.jgit.test/tst-rsrc/org/eclipse/jgit/diff/ */
// Patch and pre/postimage are read from data
// org.eclipse.jgit.test/tst-rsrc/org/eclipse/jgit/diff/
this.name = aName;
if (postExists) {
postImage = IO
@@ -121,14 +122,16 @@ void verifyChange(Result result, String aName) throws Exception {
verifyChange(result, aName, true);
}
protected void verifyContent(Result result, String path, boolean exists) throws Exception {
protected void verifyContent(Result result, String path, boolean exists)
throws Exception {
if (inCore) {
byte[] output = readBlob(result.getTreeId(), path);
if (!exists)
assertNull(output);
else {
assertNotNull(output);
assertEquals(new String(output, StandardCharsets.UTF_8), expectedText);
assertEquals(expectedText,
new String(output, StandardCharsets.UTF_8));
}
} else {
File f = new File(db.getWorkTree(), path);
@@ -139,12 +142,14 @@ protected void verifyContent(Result result, String path, boolean exists) throws
}
}
void verifyChange(Result result, String aName, boolean exists) throws Exception {
void verifyChange(Result result, String aName, boolean exists)
throws Exception {
assertEquals(1, result.getPaths().size());
verifyContent(result, aName, exists);
}
protected byte[] readBlob(ObjectId treeish, String path) throws Exception {
protected byte[] readBlob(ObjectId treeish, String path)
throws Exception {
try (TestRepository<?> tr = new TestRepository<>(db);
RevWalk rw = tr.getRevWalk()) {
db.incrementOpen();
@@ -153,15 +158,18 @@ protected byte[] readBlob(ObjectId treeish, String path) throws Exception {
if (tw == null) {
return null;
}
return tw.getObjectReader().open(tw.getObjectId(0), OBJ_BLOB).getBytes();
return tw.getObjectReader()
.open(tw.getObjectId(0), OBJ_BLOB).getBytes();
}
}
}
protected void checkBinary(Result result, int numberOfFiles) throws Exception {
protected void checkBinary(Result result, int numberOfFiles)
throws Exception {
assertEquals(numberOfFiles, result.getPaths().size());
if (inCore) {
assertArrayEquals(postImage, readBlob(result.getTreeId(), result.getPaths().get(0)));
assertArrayEquals(postImage,
readBlob(result.getTreeId(), result.getPaths().get(0)));
} else {
File f = new File(db.getWorkTree(), name);
assertArrayEquals(postImage, Files.readAllBytes(f.toPath()));
@@ -369,8 +377,8 @@ public void testModifyNL1() throws Exception {
@Test
public void testCrLf() throws Exception {
try {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION,
null, ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
init("crlf", true, true);
Result result = applyPatch();
@@ -385,8 +393,8 @@ public void testCrLf() throws Exception {
@Test
public void testCrLfOff() throws Exception {
try {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION,
null, ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
init("crlf", true, true);
Result result = applyPatch();
@@ -401,8 +409,8 @@ public void testCrLfOff() throws Exception {
@Test
public void testCrLfEmptyCommitted() throws Exception {
try {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION,
null, ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
init("crlf3", true, true);
Result result = applyPatch();
@@ -417,8 +425,8 @@ public void testCrLfEmptyCommitted() throws Exception {
@Test
public void testCrLfNewFile() throws Exception {
try {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION,
null, ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
init("crlf4", false, true);
Result result = applyPatch();
@@ -433,8 +441,8 @@ public void testCrLfNewFile() throws Exception {
@Test
public void testPatchWithCrLf() throws Exception {
try {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION,
null, ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
init("crlf2", true, true);
Result result = applyPatch();
@@ -450,11 +458,11 @@ public void testPatchWithCrLf() throws Exception {
public void testPatchWithCrLf2() throws Exception {
String aName = "crlf2";
try (Git git = new Git(db)) {
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION,
null, ConfigConstants.CONFIG_KEY_AUTOCRLF, false);
init(aName, true, true);
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION,
null, ConfigConstants.CONFIG_KEY_AUTOCRLF, true);
Result result = applyPatch();
@@ -465,10 +473,10 @@ public void testPatchWithCrLf2() throws Exception {
}
}
// Clean/smudge filter for testFiltering. The smudgetest test resources were
// created with C git using a clean filter sed -e "s/A/E/g" and the smudge
// filter sed -e "s/E/A/g". To keep the test independent of the presence of
// sed, implement this with a built-in filter.
// Clean/smudge filter for testFiltering. The smudgetest test resources
// were created with C git using a clean filter sed -e "s/A/E/g" and the
// smudge filter sed -e "s/E/A/g". To keep the test independent of the
// presence of sed, implement this with a built-in filter.
private static class ReplaceFilter extends FilterCommand {
private final char toReplace;
@@ -501,8 +509,10 @@ public int run() throws IOException {
@Test
public void testFiltering() throws Exception {
// Set up filter
FilterCommandFactory clean = (repo, in, out) -> new ReplaceFilter(in, out, 'A', 'E');
FilterCommandFactory smudge = (repo, in, out) -> new ReplaceFilter(in, out, 'E', 'A');
FilterCommandFactory clean =
(repo, in, out) -> new ReplaceFilter(in, out, 'A', 'E');
FilterCommandFactory smudge =
(repo, in, out) -> new ReplaceFilter(in, out, 'E', 'A');
FilterCommandRegistry.register("jgit://builtin/a2e/clean", clean);
FilterCommandRegistry.register("jgit://builtin/a2e/smudge", smudge);
Config config = db.getConfig();
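For orientation (not part of this change): the built-in ReplaceFilter that
testFiltering registers above is only partially visible in these hunks. A
self-contained sketch of a FilterCommand of that shape is given below; the
class name and exact byte handling are assumptions, while the constructor
and the run() convention (number of bytes processed, or -1 at end of input)
match the JGit API used in the diff.

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.eclipse.jgit.attributes.FilterCommand;

// Sketch of a single-character-replacing filter in the spirit of the test's
// ReplaceFilter; the real test class may differ in details not shown here.
class ReplaceFilterSketch extends FilterCommand {

    private final char toReplace;

    private final char replacement;

    ReplaceFilterSketch(InputStream in, OutputStream out, char toReplace,
            char replacement) {
        super(in, out);
        this.toReplace = toReplace;
        this.replacement = replacement;
    }

    @Override
    public int run() throws IOException {
        int b = in.read();
        if (b < 0) {
            in.close();
            out.close();
            return -1; // Signals end of input to the caller's run() loop.
        }
        out.write(b == toReplace ? replacement : b);
        return 1; // One byte consumed and written.
    }
}

A FilterCommandFactory lambda such as the ones in testFiltering then hands a
filter like this to FilterCommandRegistry.register under a jgit://builtin/...
name.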

PatchApplier.java

@@ -68,12 +68,14 @@
import org.eclipse.jgit.treewalk.filter.AndTreeFilter;
import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter;
import org.eclipse.jgit.treewalk.filter.PathFilterGroup;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FS.ExecutionResult;
import org.eclipse.jgit.util.FileUtils;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.LfsFactory;
import org.eclipse.jgit.util.LfsFactory.LfsInputStream;
import org.eclipse.jgit.util.RawParseUtils;
import org.eclipse.jgit.util.StringUtils;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
import org.eclipse.jgit.util.io.BinaryDeltaInputStream;
@@ -180,7 +182,7 @@ public ObjectId getTreeId() {
public Result applyPatch(InputStream patchInput)
throws PatchFormatException, PatchApplyException {
Result result = new Result();
org.eclipse.jgit.patch.Patch p = new org.eclipse.jgit.patch.Patch();
Patch p = new Patch();
try (InputStream inStream = patchInput) {
p.parse(inStream);
@@ -193,7 +195,7 @@ public Result applyPatch(InputStream patchInput)
DirCacheBuilder dirCacheBuilder = dirCache.builder();
Set<String> modifiedPaths = new HashSet<>();
for (org.eclipse.jgit.patch.FileHeader fh : p.getFiles()) {
for (FileHeader fh : p.getFiles()) {
ChangeType type = fh.getChangeType();
switch (type) {
case ADD: {
@@ -345,8 +347,8 @@ private TreeWalk getTreeWalkForFile(String path, DirCache cache)
* @throws PatchApplyException
*/
private void apply(String pathWithOriginalContent, DirCache dirCache,
DirCacheBuilder dirCacheBuilder, @Nullable File f,
org.eclipse.jgit.patch.FileHeader fh) throws PatchApplyException {
DirCacheBuilder dirCacheBuilder, @Nullable File f, FileHeader fh)
throws PatchApplyException {
if (PatchType.BINARY.equals(fh.getPatchType())) {
// This patch type just says "something changed". We can't do
// anything with that.
@@ -484,7 +486,7 @@ private DirCacheEntry insertToIndex(InputStream input, byte[] path,
}
dce.setLength(length);
try (LfsInputStream is = org.eclipse.jgit.util.LfsFactory.getInstance()
try (LfsInputStream is = LfsFactory.getInstance()
.applyCleanFilter(repo, input, length, lfsAttribute)) {
dce.setObjectId(inserter.insert(OBJ_BLOB, is.getLength(), is));
}
@@ -522,15 +524,13 @@ private RawText getRawText(@Nullable File file,
// conversion.
try (InputStream input = filterClean(repo, path,
fileStreamSupplier.load(), convertCrLf, filterCommand)) {
return new RawText(org.eclipse.jgit.util.IO
.readWholeStream(input, 0).array());
return new RawText(IO.readWholeStream(input, 0).array());
}
}
if (convertCrLf) {
try (InputStream input = EolStreamTypeUtil.wrapInputStream(
fileStreamSupplier.load(), EolStreamType.TEXT_LF)) {
return new RawText(org.eclipse.jgit.util.IO
.readWholeStream(input, 0).array());
return new RawText(IO.readWholeStream(input, 0).array());
}
}
if (inCore() && fileId.equals(ObjectId.zeroId())) {
@@ -547,12 +547,12 @@ private InputStream filterClean(Repository repository, String path,
input = EolStreamTypeUtil.wrapInputStream(input,
EolStreamType.TEXT_LF);
}
if (org.eclipse.jgit.util.StringUtils.isEmptyOrNull(filterCommand)) {
if (StringUtils.isEmptyOrNull(filterCommand)) {
return input;
}
if (FilterCommandRegistry.isRegistered(filterCommand)) {
LocalFile buffer = new org.eclipse.jgit.util.TemporaryBuffer.LocalFile(
null, inCoreSizeLimit);
LocalFile buffer = new TemporaryBuffer.LocalFile(null,
inCoreSizeLimit);
FilterCommand command = FilterCommandRegistry.createFilterCommand(
filterCommand, repository, input, buffer);
while (command.run() != -1) {
@@ -560,7 +560,7 @@ private InputStream filterClean(Repository repository, String path,
}
return buffer.openInputStreamWithAutoDestroy();
}
org.eclipse.jgit.util.FS fs = repository.getFS();
FS fs = repository.getFS();
ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand,
new String[0]);
filterProcessBuilder.directory(repository.getWorkTree());
@@ -577,14 +577,14 @@ private InputStream filterClean(Repository repository, String path,
if (rc != 0) {
throw new IOException(new FilterFailedException(rc, filterCommand,
path, result.getStdout().toByteArray(4096),
org.eclipse.jgit.util.RawParseUtils
RawParseUtils
.decode(result.getStderr().toByteArray(4096))));
}
return result.getStdout().openInputStreamWithAutoDestroy();
}
private boolean needsCrLfConversion(File f,
org.eclipse.jgit.patch.FileHeader fileHeader) throws IOException {
private boolean needsCrLfConversion(File f, FileHeader fileHeader)
throws IOException {
if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
return false;
}
@@ -596,12 +596,11 @@ private boolean needsCrLfConversion(File f,
return false;
}
private static boolean hasCrLf(
org.eclipse.jgit.patch.FileHeader fileHeader) {
private static boolean hasCrLf(FileHeader fileHeader) {
if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
return false;
}
for (org.eclipse.jgit.patch.HunkHeader header : fileHeader.getHunks()) {
for (HunkHeader header : fileHeader.getHunks()) {
byte[] buf = header.getBuffer();
int hunkEnd = header.getEndOffset();
int lineStart = header.getStartOffset();
@@ -702,15 +701,15 @@ private static class ContentStreamLoader {
* @throws IOException
* @throws UnsupportedOperationException
*/
private ContentStreamLoader applyBinary(String path, File f,
org.eclipse.jgit.patch.FileHeader fh, StreamSupplier inputSupplier,
ObjectId id) throws PatchApplyException, IOException,
private ContentStreamLoader applyBinary(String path, File f, FileHeader fh,
StreamSupplier inputSupplier, ObjectId id)
throws PatchApplyException, IOException,
UnsupportedOperationException {
if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
throw new PatchApplyException(MessageFormat
.format(JGitText.get().applyBinaryOidTooShort, path));
}
org.eclipse.jgit.patch.BinaryHunk hunk = fh.getForwardBinaryHunk();
BinaryHunk hunk = fh.getForwardBinaryHunk();
// A BinaryHunk has the start at the "literal" or "delta" token. Data
// starts on the next line.
int start = RawParseUtils.nextLF(hunk.getBuffer(),
@@ -753,8 +752,7 @@ private ContentStreamLoader applyBinary(String path, File f,
}
}
private ContentStreamLoader applyText(RawText rt,
org.eclipse.jgit.patch.FileHeader fh)
private ContentStreamLoader applyText(RawText rt, FileHeader fh)
throws IOException, PatchApplyException {
List<ByteBuffer> oldLines = new ArrayList<>(rt.size());
for (int i = 0; i < rt.size(); i++) {
@@ -764,7 +762,7 @@ private ContentStreamLoader applyText(RawText rt,
int afterLastHunk = 0;
int lineNumberShift = 0;
int lastHunkNewLine = -1;
for (org.eclipse.jgit.patch.HunkHeader hh : fh.getHunks()) {
for (HunkHeader hh : fh.getHunks()) {
// We assume hunks to be ordered
if (hh.getNewStartLine() <= lastHunkNewLine) {
throw new PatchApplyException(MessageFormat
@@ -933,14 +931,12 @@ private ByteBuffer slice(ByteBuffer b, int off) {
return ByteBuffer.wrap(b.array(), newOffset, b.limit() - newOffset);
}
private boolean isNoNewlineAtEndOfFile(
org.eclipse.jgit.patch.FileHeader fh) {
List<? extends org.eclipse.jgit.patch.HunkHeader> hunks = fh.getHunks();
private boolean isNoNewlineAtEndOfFile(FileHeader fh) {
List<? extends HunkHeader> hunks = fh.getHunks();
if (hunks == null || hunks.isEmpty()) {
return false;
}
org.eclipse.jgit.patch.HunkHeader lastHunk = hunks
.get(hunks.size() - 1);
HunkHeader lastHunk = hunks.get(hunks.size() - 1);
byte[] buf = new byte[lastHunk.getEndOffset()
- lastHunk.getStartOffset()];
System.arraycopy(lastHunk.getBuffer(), lastHunk.getStartOffset(), buf,