Merge changes Ideaefd51,I6c347393
* changes: Reapply "Create util class for work tree updating in both filesystem and index." ResolveMerger: add coverage for inCore file => directory transition
This commit is contained in:
commit
4d6c6df108
|
@ -1002,6 +1002,39 @@ public void checkContentMergeConflict_noTree(MergeStrategy strategy)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Theory
|
||||||
|
public void fileBecomesDir_noTree(MergeStrategy strategy)
|
||||||
|
throws Exception {
|
||||||
|
Git git = Git.wrap(db);
|
||||||
|
|
||||||
|
writeTrashFile("file", "1\n2\n3");
|
||||||
|
writeTrashFile("side", "1\n2\n3");
|
||||||
|
git.add().addFilepattern("file").addFilepattern("side").call();
|
||||||
|
RevCommit first = git.commit().setMessage("base").call();
|
||||||
|
|
||||||
|
writeTrashFile("side", "our changed");
|
||||||
|
RevCommit ours = git.commit().setAll(true)
|
||||||
|
.setMessage("ours").call();
|
||||||
|
|
||||||
|
git.checkout().setCreateBranch(true).setStartPoint(first)
|
||||||
|
.setName("theirs").call();
|
||||||
|
deleteTrashFile("file");
|
||||||
|
writeTrashFile("file/file", "in subdir");
|
||||||
|
git.add().addFilepattern("file/file").call();
|
||||||
|
|
||||||
|
RevCommit theirs = git.commit().setAll(true)
|
||||||
|
.setMessage("theirs").call();
|
||||||
|
|
||||||
|
// Exercise inCore flavor of the merge.
|
||||||
|
try (ObjectInserter ins = db.newObjectInserter()) {
|
||||||
|
ResolveMerger merger =
|
||||||
|
(ResolveMerger) strategy.newMerger(ins, db.getConfig());
|
||||||
|
boolean success = merger.merge(ours, theirs);
|
||||||
|
assertTrue(success);
|
||||||
|
assertTrue(merger.getModifiedFiles().isEmpty());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Merging after criss-cross merges. In this case we merge together two
|
* Merging after criss-cross merges. In this case we merge together two
|
||||||
* commits which have two equally good common ancestors
|
* commits which have two equally good common ancestors
|
||||||
|
|
|
@ -9,7 +9,6 @@
|
||||||
*/
|
*/
|
||||||
package org.eclipse.jgit.api;
|
package org.eclipse.jgit.api;
|
||||||
|
|
||||||
import java.io.BufferedInputStream;
|
|
||||||
import java.io.ByteArrayInputStream;
|
import java.io.ByteArrayInputStream;
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.FileInputStream;
|
import java.io.FileInputStream;
|
||||||
|
@ -25,7 +24,6 @@
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.zip.InflaterInputStream;
|
import java.util.zip.InflaterInputStream;
|
||||||
|
|
||||||
import org.eclipse.jgit.api.errors.FilterFailedException;
|
import org.eclipse.jgit.api.errors.FilterFailedException;
|
||||||
import org.eclipse.jgit.api.errors.GitAPIException;
|
import org.eclipse.jgit.api.errors.GitAPIException;
|
||||||
import org.eclipse.jgit.api.errors.PatchApplyException;
|
import org.eclipse.jgit.api.errors.PatchApplyException;
|
||||||
|
@ -38,15 +36,11 @@
|
||||||
import org.eclipse.jgit.dircache.DirCacheCheckout;
|
import org.eclipse.jgit.dircache.DirCacheCheckout;
|
||||||
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
|
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
|
||||||
import org.eclipse.jgit.dircache.DirCacheIterator;
|
import org.eclipse.jgit.dircache.DirCacheIterator;
|
||||||
import org.eclipse.jgit.errors.LargeObjectException;
|
|
||||||
import org.eclipse.jgit.errors.MissingObjectException;
|
|
||||||
import org.eclipse.jgit.internal.JGitText;
|
import org.eclipse.jgit.internal.JGitText;
|
||||||
import org.eclipse.jgit.lib.Constants;
|
import org.eclipse.jgit.lib.Constants;
|
||||||
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
|
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
|
||||||
import org.eclipse.jgit.lib.FileMode;
|
import org.eclipse.jgit.lib.FileMode;
|
||||||
import org.eclipse.jgit.lib.ObjectId;
|
import org.eclipse.jgit.lib.ObjectId;
|
||||||
import org.eclipse.jgit.lib.ObjectLoader;
|
|
||||||
import org.eclipse.jgit.lib.ObjectStream;
|
|
||||||
import org.eclipse.jgit.lib.Repository;
|
import org.eclipse.jgit.lib.Repository;
|
||||||
import org.eclipse.jgit.patch.BinaryHunk;
|
import org.eclipse.jgit.patch.BinaryHunk;
|
||||||
import org.eclipse.jgit.patch.FileHeader;
|
import org.eclipse.jgit.patch.FileHeader;
|
||||||
|
@ -64,6 +58,7 @@
|
||||||
import org.eclipse.jgit.util.FileUtils;
|
import org.eclipse.jgit.util.FileUtils;
|
||||||
import org.eclipse.jgit.util.IO;
|
import org.eclipse.jgit.util.IO;
|
||||||
import org.eclipse.jgit.util.RawParseUtils;
|
import org.eclipse.jgit.util.RawParseUtils;
|
||||||
|
import org.eclipse.jgit.util.WorkTreeUpdater;
|
||||||
import org.eclipse.jgit.util.StringUtils;
|
import org.eclipse.jgit.util.StringUtils;
|
||||||
import org.eclipse.jgit.util.TemporaryBuffer;
|
import org.eclipse.jgit.util.TemporaryBuffer;
|
||||||
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
|
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
|
||||||
|
@ -355,60 +350,6 @@ private InputStream filterClean(Repository repository, String path,
|
||||||
return result.getStdout().openInputStreamWithAutoDestroy();
|
return result.getStdout().openInputStreamWithAutoDestroy();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Something that can supply an {@link InputStream}.
|
|
||||||
*/
|
|
||||||
private interface StreamSupplier {
|
|
||||||
InputStream load() throws IOException;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* We write the patch result to a {@link TemporaryBuffer} and then use
|
|
||||||
* {@link DirCacheCheckout}.getContent() to run the result through the CR-LF
|
|
||||||
* and smudge filters. DirCacheCheckout needs an ObjectLoader, not a
|
|
||||||
* TemporaryBuffer, so this class bridges between the two, making any Stream
|
|
||||||
* provided by a {@link StreamSupplier} look like an ordinary git blob to
|
|
||||||
* DirCacheCheckout.
|
|
||||||
*/
|
|
||||||
private static class StreamLoader extends ObjectLoader {
|
|
||||||
|
|
||||||
private StreamSupplier data;
|
|
||||||
|
|
||||||
private long size;
|
|
||||||
|
|
||||||
StreamLoader(StreamSupplier data, long length) {
|
|
||||||
this.data = data;
|
|
||||||
this.size = length;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int getType() {
|
|
||||||
return Constants.OBJ_BLOB;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public long getSize() {
|
|
||||||
return size;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean isLarge() {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public byte[] getCachedBytes() throws LargeObjectException {
|
|
||||||
throw new LargeObjectException();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ObjectStream openStream()
|
|
||||||
throws MissingObjectException, IOException {
|
|
||||||
return new ObjectStream.Filter(getType(), getSize(),
|
|
||||||
new BufferedInputStream(data.load()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void initHash(SHA1 hash, long size) {
|
private void initHash(SHA1 hash, long size) {
|
||||||
hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB));
|
hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB));
|
||||||
hash.update((byte) ' ');
|
hash.update((byte) ' ');
|
||||||
|
@ -456,7 +397,7 @@ private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
|
||||||
}
|
}
|
||||||
|
|
||||||
private void applyBinary(Repository repository, String path, File f,
|
private void applyBinary(Repository repository, String path, File f,
|
||||||
FileHeader fh, StreamSupplier loader, ObjectId id,
|
FileHeader fh, WorkTreeUpdater.StreamSupplier loader, ObjectId id,
|
||||||
CheckoutMetadata checkOut)
|
CheckoutMetadata checkOut)
|
||||||
throws PatchApplyException, IOException {
|
throws PatchApplyException, IOException {
|
||||||
if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
|
if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
|
||||||
|
@ -488,7 +429,8 @@ private void applyBinary(Repository repository, String path, File f,
|
||||||
hunk.getBuffer(), start,
|
hunk.getBuffer(), start,
|
||||||
length))))) {
|
length))))) {
|
||||||
DirCacheCheckout.getContent(repository, path, checkOut,
|
DirCacheCheckout.getContent(repository, path, checkOut,
|
||||||
new StreamLoader(() -> inflated, hunk.getSize()),
|
WorkTreeUpdater.createStreamLoader(() -> inflated,
|
||||||
|
hunk.getSize()),
|
||||||
null, out);
|
null, out);
|
||||||
if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) {
|
if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) {
|
||||||
throw new PatchApplyException(MessageFormat.format(
|
throw new PatchApplyException(MessageFormat.format(
|
||||||
|
@ -520,8 +462,8 @@ private void applyBinary(Repository repository, String path, File f,
|
||||||
SHA1InputStream hashed = new SHA1InputStream(hash,
|
SHA1InputStream hashed = new SHA1InputStream(hash,
|
||||||
input)) {
|
input)) {
|
||||||
DirCacheCheckout.getContent(repository, path, checkOut,
|
DirCacheCheckout.getContent(repository, path, checkOut,
|
||||||
new StreamLoader(() -> hashed, finalSize), null,
|
WorkTreeUpdater.createStreamLoader(() -> hashed, finalSize),
|
||||||
out);
|
null, out);
|
||||||
if (!fh.getNewId().toObjectId()
|
if (!fh.getNewId().toObjectId()
|
||||||
.equals(hash.toObjectId())) {
|
.equals(hash.toObjectId())) {
|
||||||
throw new PatchApplyException(MessageFormat.format(
|
throw new PatchApplyException(MessageFormat.format(
|
||||||
|
@ -689,7 +631,7 @@ && canApplyAt(hunkLines, newLines, 0)) {
|
||||||
}
|
}
|
||||||
try (OutputStream output = new FileOutputStream(f)) {
|
try (OutputStream output = new FileOutputStream(f)) {
|
||||||
DirCacheCheckout.getContent(repository, path, checkOut,
|
DirCacheCheckout.getContent(repository, path, checkOut,
|
||||||
new StreamLoader(buffer::openInputStream,
|
WorkTreeUpdater.createStreamLoader(buffer::openInputStream,
|
||||||
buffer.length()),
|
buffer.length()),
|
||||||
null, output);
|
null, output);
|
||||||
}
|
}
|
||||||
|
|
|
@ -195,9 +195,6 @@ protected RevCommit getBaseCommit(RevCommit a, RevCommit b, int callDepth)
|
||||||
inCore = oldIncore;
|
inCore = oldIncore;
|
||||||
dircache = oldDircache;
|
dircache = oldDircache;
|
||||||
workingTreeIterator = oldWTreeIt;
|
workingTreeIterator = oldWTreeIt;
|
||||||
toBeCheckedOut.clear();
|
|
||||||
toBeDeleted.clear();
|
|
||||||
modifiedFiles.clear();
|
|
||||||
unmergedPaths.clear();
|
unmergedPaths.clear();
|
||||||
mergeResults.clear();
|
mergeResults.clear();
|
||||||
failingPaths.clear();
|
failingPaths.clear();
|
||||||
|
|
|
@ -20,23 +20,15 @@
|
||||||
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_ALGORITHM;
|
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_ALGORITHM;
|
||||||
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
|
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
|
||||||
|
|
||||||
import java.io.BufferedOutputStream;
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.FileNotFoundException;
|
|
||||||
import java.io.FileOutputStream;
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.InputStream;
|
|
||||||
import java.io.OutputStream;
|
|
||||||
import java.time.Instant;
|
import java.time.Instant;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.Iterator;
|
|
||||||
import java.util.LinkedList;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
import org.eclipse.jgit.annotations.NonNull;
|
import org.eclipse.jgit.annotations.NonNull;
|
||||||
import org.eclipse.jgit.attributes.Attributes;
|
import org.eclipse.jgit.attributes.Attributes;
|
||||||
import org.eclipse.jgit.diff.DiffAlgorithm;
|
import org.eclipse.jgit.diff.DiffAlgorithm;
|
||||||
|
@ -46,18 +38,10 @@
|
||||||
import org.eclipse.jgit.diff.Sequence;
|
import org.eclipse.jgit.diff.Sequence;
|
||||||
import org.eclipse.jgit.dircache.DirCache;
|
import org.eclipse.jgit.dircache.DirCache;
|
||||||
import org.eclipse.jgit.dircache.DirCacheBuildIterator;
|
import org.eclipse.jgit.dircache.DirCacheBuildIterator;
|
||||||
import org.eclipse.jgit.dircache.DirCacheBuilder;
|
|
||||||
import org.eclipse.jgit.dircache.DirCacheCheckout;
|
|
||||||
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
|
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
|
||||||
import org.eclipse.jgit.dircache.DirCacheEntry;
|
import org.eclipse.jgit.dircache.DirCacheEntry;
|
||||||
import org.eclipse.jgit.errors.BinaryBlobException;
|
import org.eclipse.jgit.errors.BinaryBlobException;
|
||||||
import org.eclipse.jgit.errors.CorruptObjectException;
|
|
||||||
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
|
|
||||||
import org.eclipse.jgit.errors.IndexWriteException;
|
|
||||||
import org.eclipse.jgit.errors.MissingObjectException;
|
|
||||||
import org.eclipse.jgit.errors.NoWorkTreeException;
|
|
||||||
import org.eclipse.jgit.lib.Config;
|
import org.eclipse.jgit.lib.Config;
|
||||||
import org.eclipse.jgit.lib.ConfigConstants;
|
|
||||||
import org.eclipse.jgit.lib.Constants;
|
import org.eclipse.jgit.lib.Constants;
|
||||||
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
|
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
|
||||||
import org.eclipse.jgit.lib.FileMode;
|
import org.eclipse.jgit.lib.FileMode;
|
||||||
|
@ -72,20 +56,19 @@
|
||||||
import org.eclipse.jgit.treewalk.CanonicalTreeParser;
|
import org.eclipse.jgit.treewalk.CanonicalTreeParser;
|
||||||
import org.eclipse.jgit.treewalk.NameConflictTreeWalk;
|
import org.eclipse.jgit.treewalk.NameConflictTreeWalk;
|
||||||
import org.eclipse.jgit.treewalk.TreeWalk;
|
import org.eclipse.jgit.treewalk.TreeWalk;
|
||||||
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
|
|
||||||
import org.eclipse.jgit.treewalk.WorkingTreeIterator;
|
import org.eclipse.jgit.treewalk.WorkingTreeIterator;
|
||||||
import org.eclipse.jgit.treewalk.WorkingTreeOptions;
|
|
||||||
import org.eclipse.jgit.treewalk.filter.TreeFilter;
|
import org.eclipse.jgit.treewalk.filter.TreeFilter;
|
||||||
import org.eclipse.jgit.util.FS;
|
import org.eclipse.jgit.util.FS;
|
||||||
import org.eclipse.jgit.util.LfsFactory;
|
import org.eclipse.jgit.util.LfsFactory;
|
||||||
import org.eclipse.jgit.util.LfsFactory.LfsInputStream;
|
import org.eclipse.jgit.util.WorkTreeUpdater;
|
||||||
|
import org.eclipse.jgit.util.WorkTreeUpdater.StreamLoader;
|
||||||
import org.eclipse.jgit.util.TemporaryBuffer;
|
import org.eclipse.jgit.util.TemporaryBuffer;
|
||||||
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A three-way merger performing a content-merge if necessary
|
* A three-way merger performing a content-merge if necessary
|
||||||
*/
|
*/
|
||||||
public class ResolveMerger extends ThreeWayMerger {
|
public class ResolveMerger extends ThreeWayMerger {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* If the merge fails (means: not stopped because of unresolved conflicts)
|
* If the merge fails (means: not stopped because of unresolved conflicts)
|
||||||
* this enum is used to explain why it failed
|
* this enum is used to explain why it failed
|
||||||
|
@ -149,11 +132,9 @@ public enum MergeFailureReason {
|
||||||
protected static final int T_FILE = 4;
|
protected static final int T_FILE = 4;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Builder to update the cache during this merge.
|
* Handler for repository I/O actions.
|
||||||
*
|
|
||||||
* @since 3.4
|
|
||||||
*/
|
*/
|
||||||
protected DirCacheBuilder builder;
|
protected WorkTreeUpdater workTreeUpdater;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* merge result as tree
|
* merge result as tree
|
||||||
|
@ -162,6 +143,11 @@ public enum MergeFailureReason {
|
||||||
*/
|
*/
|
||||||
protected ObjectId resultTree;
|
protected ObjectId resultTree;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Files modified during this operation. Note this list is only updated after a successful write.
|
||||||
|
*/
|
||||||
|
protected List<String> modifiedFiles = new ArrayList<>();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Paths that could not be merged by this merger because of an unsolvable
|
* Paths that could not be merged by this merger because of an unsolvable
|
||||||
* conflict.
|
* conflict.
|
||||||
|
@ -170,29 +156,6 @@ public enum MergeFailureReason {
|
||||||
*/
|
*/
|
||||||
protected List<String> unmergedPaths = new ArrayList<>();
|
protected List<String> unmergedPaths = new ArrayList<>();
|
||||||
|
|
||||||
/**
|
|
||||||
* Files modified during this merge operation.
|
|
||||||
*
|
|
||||||
* @since 3.4
|
|
||||||
*/
|
|
||||||
protected List<String> modifiedFiles = new LinkedList<>();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* If the merger has nothing to do for a file but check it out at the end of
|
|
||||||
* the operation, it can be added here.
|
|
||||||
*
|
|
||||||
* @since 3.4
|
|
||||||
*/
|
|
||||||
protected Map<String, DirCacheEntry> toBeCheckedOut = new HashMap<>();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Paths in this list will be deleted from the local copy at the end of the
|
|
||||||
* operation.
|
|
||||||
*
|
|
||||||
* @since 3.4
|
|
||||||
*/
|
|
||||||
protected List<String> toBeDeleted = new ArrayList<>();
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Low-level textual merge results. Will be passed on to the callers in case
|
* Low-level textual merge results. Will be passed on to the callers in case
|
||||||
* of conflicts.
|
* of conflicts.
|
||||||
|
@ -226,15 +189,6 @@ public enum MergeFailureReason {
|
||||||
*/
|
*/
|
||||||
protected boolean inCore;
|
protected boolean inCore;
|
||||||
|
|
||||||
/**
|
|
||||||
* Set to true if this merger should use the default dircache of the
|
|
||||||
* repository and should handle locking and unlocking of the dircache. If
|
|
||||||
* this merger should work in-core or if an explicit dircache was specified
|
|
||||||
* during construction then this field is set to false.
|
|
||||||
* @since 3.0
|
|
||||||
*/
|
|
||||||
protected boolean implicitDirCache;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Directory cache
|
* Directory cache
|
||||||
* @since 3.0
|
* @since 3.0
|
||||||
|
@ -254,20 +208,6 @@ public enum MergeFailureReason {
|
||||||
*/
|
*/
|
||||||
protected MergeAlgorithm mergeAlgorithm;
|
protected MergeAlgorithm mergeAlgorithm;
|
||||||
|
|
||||||
/**
|
|
||||||
* The {@link WorkingTreeOptions} are needed to determine line endings for
|
|
||||||
* merged files.
|
|
||||||
*
|
|
||||||
* @since 4.11
|
|
||||||
*/
|
|
||||||
protected WorkingTreeOptions workingTreeOptions;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The size limit (bytes) which controls a file to be stored in {@code Heap}
|
|
||||||
* or {@code LocalFile} during the merge.
|
|
||||||
*/
|
|
||||||
private int inCoreLimit;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The {@link ContentMergeStrategy} to use for "resolve" and "recursive"
|
* The {@link ContentMergeStrategy} to use for "resolve" and "recursive"
|
||||||
* merges.
|
* merges.
|
||||||
|
@ -275,16 +215,6 @@ public enum MergeFailureReason {
|
||||||
@NonNull
|
@NonNull
|
||||||
private ContentMergeStrategy contentStrategy = ContentMergeStrategy.CONFLICT;
|
private ContentMergeStrategy contentStrategy = ContentMergeStrategy.CONFLICT;
|
||||||
|
|
||||||
/**
|
|
||||||
* Keeps {@link CheckoutMetadata} for {@link #checkout()}.
|
|
||||||
*/
|
|
||||||
private Map<String, CheckoutMetadata> checkoutMetadata;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Keeps {@link CheckoutMetadata} for {@link #cleanUp()}.
|
|
||||||
*/
|
|
||||||
private Map<String, CheckoutMetadata> cleanupMetadata;
|
|
||||||
|
|
||||||
private static MergeAlgorithm getMergeAlgorithm(Config config) {
|
private static MergeAlgorithm getMergeAlgorithm(Config config) {
|
||||||
SupportedAlgorithm diffAlg = config.getEnum(
|
SupportedAlgorithm diffAlg = config.getEnum(
|
||||||
CONFIG_DIFF_SECTION, null, CONFIG_KEY_ALGORITHM,
|
CONFIG_DIFF_SECTION, null, CONFIG_KEY_ALGORITHM,
|
||||||
|
@ -292,13 +222,8 @@ private static MergeAlgorithm getMergeAlgorithm(Config config) {
|
||||||
return new MergeAlgorithm(DiffAlgorithm.getAlgorithm(diffAlg));
|
return new MergeAlgorithm(DiffAlgorithm.getAlgorithm(diffAlg));
|
||||||
}
|
}
|
||||||
|
|
||||||
private static int getInCoreLimit(Config config) {
|
|
||||||
return config.getInt(
|
|
||||||
ConfigConstants.CONFIG_MERGE_SECTION, ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 10 << 20);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static String[] defaultCommitNames() {
|
private static String[] defaultCommitNames() {
|
||||||
return new String[] { "BASE", "OURS", "THEIRS" }; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
|
return new String[]{"BASE", "OURS", "THEIRS"}; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
|
||||||
}
|
}
|
||||||
|
|
||||||
private static final Attributes NO_ATTRIBUTES = new Attributes();
|
private static final Attributes NO_ATTRIBUTES = new Attributes();
|
||||||
|
@ -315,17 +240,8 @@ protected ResolveMerger(Repository local, boolean inCore) {
|
||||||
super(local);
|
super(local);
|
||||||
Config config = local.getConfig();
|
Config config = local.getConfig();
|
||||||
mergeAlgorithm = getMergeAlgorithm(config);
|
mergeAlgorithm = getMergeAlgorithm(config);
|
||||||
inCoreLimit = getInCoreLimit(config);
|
|
||||||
commitNames = defaultCommitNames();
|
commitNames = defaultCommitNames();
|
||||||
this.inCore = inCore;
|
this.inCore = inCore;
|
||||||
|
|
||||||
if (inCore) {
|
|
||||||
implicitDirCache = false;
|
|
||||||
dircache = DirCache.newInCore();
|
|
||||||
} else {
|
|
||||||
implicitDirCache = true;
|
|
||||||
workingTreeOptions = local.getConfig().get(WorkingTreeOptions.KEY);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -352,8 +268,6 @@ protected ResolveMerger(ObjectInserter inserter, Config config) {
|
||||||
mergeAlgorithm = getMergeAlgorithm(config);
|
mergeAlgorithm = getMergeAlgorithm(config);
|
||||||
commitNames = defaultCommitNames();
|
commitNames = defaultCommitNames();
|
||||||
inCore = true;
|
inCore = true;
|
||||||
implicitDirCache = false;
|
|
||||||
dircache = DirCache.newInCore();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -382,81 +296,8 @@ public void setContentMergeStrategy(ContentMergeStrategy strategy) {
|
||||||
/** {@inheritDoc} */
|
/** {@inheritDoc} */
|
||||||
@Override
|
@Override
|
||||||
protected boolean mergeImpl() throws IOException {
|
protected boolean mergeImpl() throws IOException {
|
||||||
if (implicitDirCache) {
|
return mergeTrees(mergeBase(), sourceTrees[0], sourceTrees[1],
|
||||||
dircache = nonNullRepo().lockDirCache();
|
false);
|
||||||
}
|
|
||||||
if (!inCore) {
|
|
||||||
checkoutMetadata = new HashMap<>();
|
|
||||||
cleanupMetadata = new HashMap<>();
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
return mergeTrees(mergeBase(), sourceTrees[0], sourceTrees[1],
|
|
||||||
false);
|
|
||||||
} finally {
|
|
||||||
checkoutMetadata = null;
|
|
||||||
cleanupMetadata = null;
|
|
||||||
if (implicitDirCache) {
|
|
||||||
dircache.unlock();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void checkout() throws NoWorkTreeException, IOException {
|
|
||||||
// Iterate in reverse so that "folder/file" is deleted before
|
|
||||||
// "folder". Otherwise this could result in a failing path because
|
|
||||||
// of a non-empty directory, for which delete() would fail.
|
|
||||||
for (int i = toBeDeleted.size() - 1; i >= 0; i--) {
|
|
||||||
String fileName = toBeDeleted.get(i);
|
|
||||||
File f = new File(nonNullRepo().getWorkTree(), fileName);
|
|
||||||
if (!f.delete())
|
|
||||||
if (!f.isDirectory())
|
|
||||||
failingPaths.put(fileName,
|
|
||||||
MergeFailureReason.COULD_NOT_DELETE);
|
|
||||||
modifiedFiles.add(fileName);
|
|
||||||
}
|
|
||||||
for (Map.Entry<String, DirCacheEntry> entry : toBeCheckedOut
|
|
||||||
.entrySet()) {
|
|
||||||
DirCacheEntry cacheEntry = entry.getValue();
|
|
||||||
if (cacheEntry.getFileMode() == FileMode.GITLINK) {
|
|
||||||
new File(nonNullRepo().getWorkTree(), entry.getKey()).mkdirs();
|
|
||||||
} else {
|
|
||||||
DirCacheCheckout.checkoutEntry(db, cacheEntry, reader, false,
|
|
||||||
checkoutMetadata.get(entry.getKey()));
|
|
||||||
modifiedFiles.add(entry.getKey());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Reverts the worktree after an unsuccessful merge. We know that for all
|
|
||||||
* modified files the old content was in the old index and the index
|
|
||||||
* contained only stage 0. In case if inCore operation just clear the
|
|
||||||
* history of modified files.
|
|
||||||
*
|
|
||||||
* @throws java.io.IOException
|
|
||||||
* @throws org.eclipse.jgit.errors.CorruptObjectException
|
|
||||||
* @throws org.eclipse.jgit.errors.NoWorkTreeException
|
|
||||||
* @since 3.4
|
|
||||||
*/
|
|
||||||
protected void cleanUp() throws NoWorkTreeException,
|
|
||||||
CorruptObjectException,
|
|
||||||
IOException {
|
|
||||||
if (inCore) {
|
|
||||||
modifiedFiles.clear();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
DirCache dc = nonNullRepo().readDirCache();
|
|
||||||
Iterator<String> mpathsIt=modifiedFiles.iterator();
|
|
||||||
while(mpathsIt.hasNext()) {
|
|
||||||
String mpath = mpathsIt.next();
|
|
||||||
DirCacheEntry entry = dc.getEntry(mpath);
|
|
||||||
if (entry != null) {
|
|
||||||
DirCacheCheckout.checkoutEntry(db, entry, reader, false,
|
|
||||||
cleanupMetadata.get(mpath));
|
|
||||||
}
|
|
||||||
mpathsIt.remove();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -472,13 +313,9 @@ protected void cleanUp() throws NoWorkTreeException,
|
||||||
private DirCacheEntry add(byte[] path, CanonicalTreeParser p, int stage,
|
private DirCacheEntry add(byte[] path, CanonicalTreeParser p, int stage,
|
||||||
Instant lastMod, long len) {
|
Instant lastMod, long len) {
|
||||||
if (p != null && !p.getEntryFileMode().equals(FileMode.TREE)) {
|
if (p != null && !p.getEntryFileMode().equals(FileMode.TREE)) {
|
||||||
DirCacheEntry e = new DirCacheEntry(path, stage);
|
return workTreeUpdater.addExistingToIndex(p.getEntryObjectId(), path,
|
||||||
e.setFileMode(p.getEntryFileMode());
|
p.getEntryFileMode(), stage,
|
||||||
e.setObjectId(p.getEntryObjectId());
|
lastMod, (int) len);
|
||||||
e.setLastModified(lastMod);
|
|
||||||
e.setLength(len);
|
|
||||||
builder.add(e);
|
|
||||||
return e;
|
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
@ -493,41 +330,8 @@ private DirCacheEntry add(byte[] path, CanonicalTreeParser p, int stage,
|
||||||
* @return the entry which was added to the index
|
* @return the entry which was added to the index
|
||||||
*/
|
*/
|
||||||
private DirCacheEntry keep(DirCacheEntry e) {
|
private DirCacheEntry keep(DirCacheEntry e) {
|
||||||
DirCacheEntry newEntry = new DirCacheEntry(e.getRawPath(),
|
return workTreeUpdater.addExistingToIndex(e.getObjectId(), e.getRawPath(), e.getFileMode(),
|
||||||
e.getStage());
|
e.getStage(), e.getLastModifiedInstant(), e.getLength());
|
||||||
newEntry.setFileMode(e.getFileMode());
|
|
||||||
newEntry.setObjectId(e.getObjectId());
|
|
||||||
newEntry.setLastModified(e.getLastModifiedInstant());
|
|
||||||
newEntry.setLength(e.getLength());
|
|
||||||
builder.add(newEntry);
|
|
||||||
return newEntry;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Remembers the {@link CheckoutMetadata} for the given path; it may be
|
|
||||||
* needed in {@link #checkout()} or in {@link #cleanUp()}.
|
|
||||||
*
|
|
||||||
* @param map
|
|
||||||
* to add the metadata to
|
|
||||||
* @param path
|
|
||||||
* of the current node
|
|
||||||
* @param attributes
|
|
||||||
* to use for determining the metadata
|
|
||||||
* @throws IOException
|
|
||||||
* if the smudge filter cannot be determined
|
|
||||||
* @since 6.1
|
|
||||||
*/
|
|
||||||
protected void addCheckoutMetadata(Map<String, CheckoutMetadata> map,
|
|
||||||
String path, Attributes attributes)
|
|
||||||
throws IOException {
|
|
||||||
if (map != null) {
|
|
||||||
EolStreamType eol = EolStreamTypeUtil.detectStreamType(
|
|
||||||
OperationType.CHECKOUT_OP, workingTreeOptions,
|
|
||||||
attributes);
|
|
||||||
CheckoutMetadata data = new CheckoutMetadata(eol,
|
|
||||||
tw.getSmudgeCommand(attributes));
|
|
||||||
map.put(path, data);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -547,14 +351,17 @@ protected void addCheckoutMetadata(Map<String, CheckoutMetadata> map,
|
||||||
protected void addToCheckout(String path, DirCacheEntry entry,
|
protected void addToCheckout(String path, DirCacheEntry entry,
|
||||||
Attributes[] attributes)
|
Attributes[] attributes)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
toBeCheckedOut.put(path, entry);
|
EolStreamType cleanupStreamType = workTreeUpdater.detectCheckoutStreamType(attributes[T_OURS]);
|
||||||
addCheckoutMetadata(cleanupMetadata, path, attributes[T_OURS]);
|
String cleanupSmudgeCommand = tw.getSmudgeCommand(attributes[T_OURS]);
|
||||||
addCheckoutMetadata(checkoutMetadata, path, attributes[T_THEIRS]);
|
EolStreamType checkoutStreamType = workTreeUpdater.detectCheckoutStreamType(attributes[T_THEIRS]);
|
||||||
|
String checkoutSmudgeCommand = tw.getSmudgeCommand(attributes[T_THEIRS]);
|
||||||
|
workTreeUpdater.addToCheckout(path, entry, cleanupStreamType, cleanupSmudgeCommand,
|
||||||
|
checkoutStreamType, checkoutSmudgeCommand);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Remember a path for deletion, and remember its {@link CheckoutMetadata}
|
* Remember a path for deletion, and remember its {@link CheckoutMetadata}
|
||||||
* in case it has to be restored in {@link #cleanUp()}.
|
* in case it has to be restored in the cleanUp.
|
||||||
*
|
*
|
||||||
* @param path
|
* @param path
|
||||||
* of the entry
|
* of the entry
|
||||||
|
@ -568,10 +375,13 @@ protected void addToCheckout(String path, DirCacheEntry entry,
|
||||||
*/
|
*/
|
||||||
protected void addDeletion(String path, boolean isFile,
|
protected void addDeletion(String path, boolean isFile,
|
||||||
Attributes attributes) throws IOException {
|
Attributes attributes) throws IOException {
|
||||||
toBeDeleted.add(path);
|
if (db == null || nonNullRepo().isBare() || !isFile)
|
||||||
if (isFile) {
|
return;
|
||||||
addCheckoutMetadata(cleanupMetadata, path, attributes);
|
|
||||||
}
|
File file = new File(nonNullRepo().getWorkTree(), path);
|
||||||
|
EolStreamType streamType = workTreeUpdater.detectCheckoutStreamType(attributes);
|
||||||
|
String smudgeCommand = tw.getSmudgeCommand(attributes);
|
||||||
|
workTreeUpdater.deleteFile(path, file, streamType, smudgeCommand);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -615,9 +425,6 @@ protected void addDeletion(String path, boolean isFile,
|
||||||
* @return <code>false</code> if the merge will fail because the index entry
|
* @return <code>false</code> if the merge will fail because the index entry
|
||||||
* didn't match ours or the working-dir file was dirty and a
|
* didn't match ours or the working-dir file was dirty and a
|
||||||
* conflict occurred
|
* conflict occurred
|
||||||
* @throws org.eclipse.jgit.errors.MissingObjectException
|
|
||||||
* @throws org.eclipse.jgit.errors.IncorrectObjectTypeException
|
|
||||||
* @throws org.eclipse.jgit.errors.CorruptObjectException
|
|
||||||
* @throws java.io.IOException
|
* @throws java.io.IOException
|
||||||
* @since 6.1
|
* @since 6.1
|
||||||
*/
|
*/
|
||||||
|
@ -625,20 +432,21 @@ protected boolean processEntry(CanonicalTreeParser base,
|
||||||
CanonicalTreeParser ours, CanonicalTreeParser theirs,
|
CanonicalTreeParser ours, CanonicalTreeParser theirs,
|
||||||
DirCacheBuildIterator index, WorkingTreeIterator work,
|
DirCacheBuildIterator index, WorkingTreeIterator work,
|
||||||
boolean ignoreConflicts, Attributes[] attributes)
|
boolean ignoreConflicts, Attributes[] attributes)
|
||||||
throws MissingObjectException, IncorrectObjectTypeException,
|
throws IOException {
|
||||||
CorruptObjectException, IOException {
|
|
||||||
enterSubtree = true;
|
enterSubtree = true;
|
||||||
final int modeO = tw.getRawMode(T_OURS);
|
final int modeO = tw.getRawMode(T_OURS);
|
||||||
final int modeT = tw.getRawMode(T_THEIRS);
|
final int modeT = tw.getRawMode(T_THEIRS);
|
||||||
final int modeB = tw.getRawMode(T_BASE);
|
final int modeB = tw.getRawMode(T_BASE);
|
||||||
boolean gitLinkMerging = isGitLink(modeO) || isGitLink(modeT)
|
boolean gitLinkMerging = isGitLink(modeO) || isGitLink(modeT)
|
||||||
|| isGitLink(modeB);
|
|| isGitLink(modeB);
|
||||||
if (modeO == 0 && modeT == 0 && modeB == 0)
|
if (modeO == 0 && modeT == 0 && modeB == 0) {
|
||||||
// File is either untracked or new, staged but uncommitted
|
// File is either untracked or new, staged but uncommitted
|
||||||
return true;
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
if (isIndexDirty())
|
if (isIndexDirty()) {
|
||||||
return false;
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
DirCacheEntry ourDce = null;
|
DirCacheEntry ourDce = null;
|
||||||
|
|
||||||
|
@ -706,8 +514,9 @@ protected boolean processEntry(CanonicalTreeParser base,
|
||||||
if (modeB == modeT && tw.idEqual(T_BASE, T_THEIRS)) {
|
if (modeB == modeT && tw.idEqual(T_BASE, T_THEIRS)) {
|
||||||
// THEIRS was not changed compared to BASE. All changes must be in
|
// THEIRS was not changed compared to BASE. All changes must be in
|
||||||
// OURS. OURS is chosen. We can keep the existing entry.
|
// OURS. OURS is chosen. We can keep the existing entry.
|
||||||
if (ourDce != null)
|
if (ourDce != null) {
|
||||||
keep(ourDce);
|
keep(ourDce);
|
||||||
|
}
|
||||||
// no checkout needed!
|
// no checkout needed!
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
@ -717,8 +526,9 @@ protected boolean processEntry(CanonicalTreeParser base,
|
||||||
// THEIRS. THEIRS is chosen.
|
// THEIRS. THEIRS is chosen.
|
||||||
|
|
||||||
// Check worktree before checking out THEIRS
|
// Check worktree before checking out THEIRS
|
||||||
if (isWorktreeDirty(work, ourDce))
|
if (isWorktreeDirty(work, ourDce)) {
|
||||||
return false;
|
return false;
|
||||||
|
}
|
||||||
if (nonTree(modeT)) {
|
if (nonTree(modeT)) {
|
||||||
// we know about length and lastMod only after we have written
|
// we know about length and lastMod only after we have written
|
||||||
// the new content.
|
// the new content.
|
||||||
|
@ -759,12 +569,15 @@ protected boolean processEntry(CanonicalTreeParser base,
|
||||||
enterSubtree = false;
|
enterSubtree = false;
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
if (nonTree(modeB))
|
if (nonTree(modeB)) {
|
||||||
add(tw.getRawPath(), base, DirCacheEntry.STAGE_1, EPOCH, 0);
|
add(tw.getRawPath(), base, DirCacheEntry.STAGE_1, EPOCH, 0);
|
||||||
if (nonTree(modeO))
|
}
|
||||||
|
if (nonTree(modeO)) {
|
||||||
add(tw.getRawPath(), ours, DirCacheEntry.STAGE_2, EPOCH, 0);
|
add(tw.getRawPath(), ours, DirCacheEntry.STAGE_2, EPOCH, 0);
|
||||||
if (nonTree(modeT))
|
}
|
||||||
|
if (nonTree(modeT)) {
|
||||||
add(tw.getRawPath(), theirs, DirCacheEntry.STAGE_3, EPOCH, 0);
|
add(tw.getRawPath(), theirs, DirCacheEntry.STAGE_3, EPOCH, 0);
|
||||||
|
}
|
||||||
unmergedPaths.add(tw.getPathString());
|
unmergedPaths.add(tw.getPathString());
|
||||||
enterSubtree = false;
|
enterSubtree = false;
|
||||||
return true;
|
return true;
|
||||||
|
@ -774,8 +587,9 @@ protected boolean processEntry(CanonicalTreeParser base,
|
||||||
// tells us we are in a subtree because of index or working-dir).
|
// tells us we are in a subtree because of index or working-dir).
|
||||||
// If they are both folders no content-merge is required - we can
|
// If they are both folders no content-merge is required - we can
|
||||||
// return here.
|
// return here.
|
||||||
if (!nonTree(modeO))
|
if (!nonTree(modeO)) {
|
||||||
return true;
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
// ours and theirs are both files, just fall out of the if block
|
// ours and theirs are both files, just fall out of the if block
|
||||||
// and do the content merge
|
// and do the content merge
|
||||||
|
@ -806,16 +620,16 @@ protected boolean processEntry(CanonicalTreeParser base,
|
||||||
} else if (!attributes[T_OURS].canBeContentMerged()) {
|
} else if (!attributes[T_OURS].canBeContentMerged()) {
|
||||||
// File marked as binary
|
// File marked as binary
|
||||||
switch (getContentMergeStrategy()) {
|
switch (getContentMergeStrategy()) {
|
||||||
case OURS:
|
case OURS:
|
||||||
keep(ourDce);
|
keep(ourDce);
|
||||||
return true;
|
return true;
|
||||||
case THEIRS:
|
case THEIRS:
|
||||||
DirCacheEntry theirEntry = add(tw.getRawPath(), theirs,
|
DirCacheEntry theirEntry = add(tw.getRawPath(), theirs,
|
||||||
DirCacheEntry.STAGE_0, EPOCH, 0);
|
DirCacheEntry.STAGE_0, EPOCH, 0);
|
||||||
addToCheckout(tw.getPathString(), theirEntry, attributes);
|
addToCheckout(tw.getPathString(), theirEntry, attributes);
|
||||||
return true;
|
return true;
|
||||||
default:
|
default:
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
add(tw.getRawPath(), base, DirCacheEntry.STAGE_1, EPOCH, 0);
|
add(tw.getRawPath(), base, DirCacheEntry.STAGE_1, EPOCH, 0);
|
||||||
add(tw.getRawPath(), ours, DirCacheEntry.STAGE_2, EPOCH, 0);
|
add(tw.getRawPath(), ours, DirCacheEntry.STAGE_2, EPOCH, 0);
|
||||||
|
@ -837,18 +651,18 @@ protected boolean processEntry(CanonicalTreeParser base,
|
||||||
getContentMergeStrategy());
|
getContentMergeStrategy());
|
||||||
} catch (BinaryBlobException e) {
|
} catch (BinaryBlobException e) {
|
||||||
switch (getContentMergeStrategy()) {
|
switch (getContentMergeStrategy()) {
|
||||||
case OURS:
|
case OURS:
|
||||||
keep(ourDce);
|
keep(ourDce);
|
||||||
return true;
|
return true;
|
||||||
case THEIRS:
|
case THEIRS:
|
||||||
DirCacheEntry theirEntry = add(tw.getRawPath(), theirs,
|
DirCacheEntry theirEntry = add(tw.getRawPath(), theirs,
|
||||||
DirCacheEntry.STAGE_0, EPOCH, 0);
|
DirCacheEntry.STAGE_0, EPOCH, 0);
|
||||||
addToCheckout(tw.getPathString(), theirEntry, attributes);
|
addToCheckout(tw.getPathString(), theirEntry, attributes);
|
||||||
return true;
|
return true;
|
||||||
default:
|
default:
|
||||||
result = new MergeResult<>(Collections.emptyList());
|
result = new MergeResult<>(Collections.emptyList());
|
||||||
result.setContainsConflicts(true);
|
result.setContainsConflicts(true);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (ignoreConflicts) {
|
if (ignoreConflicts) {
|
||||||
|
@ -859,11 +673,9 @@ protected boolean processEntry(CanonicalTreeParser base,
|
||||||
if (result.containsConflicts() && !ignoreConflicts) {
|
if (result.containsConflicts() && !ignoreConflicts) {
|
||||||
unmergedPaths.add(currentPath);
|
unmergedPaths.add(currentPath);
|
||||||
}
|
}
|
||||||
modifiedFiles.add(currentPath);
|
workTreeUpdater.markAsModified(currentPath);
|
||||||
addCheckoutMetadata(cleanupMetadata, currentPath,
|
// Entry is null - only adds the metadata.
|
||||||
attributes[T_OURS]);
|
addToCheckout(currentPath, null, attributes);
|
||||||
addCheckoutMetadata(checkoutMetadata, currentPath,
|
|
||||||
attributes[T_THEIRS]);
|
|
||||||
} else if (modeO != modeT) {
|
} else if (modeO != modeT) {
|
||||||
// OURS or THEIRS has been deleted
|
// OURS or THEIRS has been deleted
|
||||||
if (((modeO != 0 && !tw.idEqual(T_BASE, T_OURS)) || (modeT != 0 && !tw
|
if (((modeO != 0 && !tw.idEqual(T_BASE, T_OURS)) || (modeT != 0 && !tw
|
||||||
|
@ -975,8 +787,9 @@ private MergeResult<RawText> contentMerge(CanonicalTreeParser base,
|
||||||
}
|
}
|
||||||
|
|
||||||
private boolean isIndexDirty() {
|
private boolean isIndexDirty() {
|
||||||
if (inCore)
|
if (inCore) {
|
||||||
return false;
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
final int modeI = tw.getRawMode(T_INDEX);
|
final int modeI = tw.getRawMode(T_INDEX);
|
||||||
final int modeO = tw.getRawMode(T_OURS);
|
final int modeO = tw.getRawMode(T_OURS);
|
||||||
|
@ -984,37 +797,42 @@ private boolean isIndexDirty() {
|
||||||
// Index entry has to match ours to be considered clean
|
// Index entry has to match ours to be considered clean
|
||||||
final boolean isDirty = nonTree(modeI)
|
final boolean isDirty = nonTree(modeI)
|
||||||
&& !(modeO == modeI && tw.idEqual(T_INDEX, T_OURS));
|
&& !(modeO == modeI && tw.idEqual(T_INDEX, T_OURS));
|
||||||
if (isDirty)
|
if (isDirty) {
|
||||||
failingPaths
|
failingPaths
|
||||||
.put(tw.getPathString(), MergeFailureReason.DIRTY_INDEX);
|
.put(tw.getPathString(), MergeFailureReason.DIRTY_INDEX);
|
||||||
|
}
|
||||||
return isDirty;
|
return isDirty;
|
||||||
}
|
}
|
||||||
|
|
||||||
private boolean isWorktreeDirty(WorkingTreeIterator work,
|
private boolean isWorktreeDirty(WorkingTreeIterator work,
|
||||||
DirCacheEntry ourDce) throws IOException {
|
DirCacheEntry ourDce) throws IOException {
|
||||||
if (work == null)
|
if (work == null) {
|
||||||
return false;
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
final int modeF = tw.getRawMode(T_FILE);
|
final int modeF = tw.getRawMode(T_FILE);
|
||||||
final int modeO = tw.getRawMode(T_OURS);
|
final int modeO = tw.getRawMode(T_OURS);
|
||||||
|
|
||||||
// Worktree entry has to match ours to be considered clean
|
// Worktree entry has to match ours to be considered clean
|
||||||
boolean isDirty;
|
boolean isDirty;
|
||||||
if (ourDce != null)
|
if (ourDce != null) {
|
||||||
isDirty = work.isModified(ourDce, true, reader);
|
isDirty = work.isModified(ourDce, true, reader);
|
||||||
else {
|
} else {
|
||||||
isDirty = work.isModeDifferent(modeO);
|
isDirty = work.isModeDifferent(modeO);
|
||||||
if (!isDirty && nonTree(modeF))
|
if (!isDirty && nonTree(modeF)) {
|
||||||
isDirty = !tw.idEqual(T_FILE, T_OURS);
|
isDirty = !tw.idEqual(T_FILE, T_OURS);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ignore existing empty directories
|
// Ignore existing empty directories
|
||||||
if (isDirty && modeF == FileMode.TYPE_TREE
|
if (isDirty && modeF == FileMode.TYPE_TREE
|
||||||
&& modeO == FileMode.TYPE_MISSING)
|
&& modeO == FileMode.TYPE_MISSING) {
|
||||||
isDirty = false;
|
isDirty = false;
|
||||||
if (isDirty)
|
}
|
||||||
|
if (isDirty) {
|
||||||
failingPaths.put(tw.getPathString(),
|
failingPaths.put(tw.getPathString(),
|
||||||
MergeFailureReason.DIRTY_WORKTREE);
|
MergeFailureReason.DIRTY_WORKTREE);
|
||||||
|
}
|
||||||
return isDirty;
|
return isDirty;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1029,14 +847,12 @@ private boolean isWorktreeDirty(WorkingTreeIterator work,
|
||||||
* @param theirs
|
* @param theirs
|
||||||
* @param result
|
* @param result
|
||||||
* @param attributes
|
* @param attributes
|
||||||
* @throws FileNotFoundException
|
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
private void updateIndex(CanonicalTreeParser base,
|
private void updateIndex(CanonicalTreeParser base,
|
||||||
CanonicalTreeParser ours, CanonicalTreeParser theirs,
|
CanonicalTreeParser ours, CanonicalTreeParser theirs,
|
||||||
MergeResult<RawText> result, Attributes attributes)
|
MergeResult<RawText> result, Attributes attributes)
|
||||||
throws FileNotFoundException,
|
throws IOException {
|
||||||
IOException {
|
|
||||||
TemporaryBuffer rawMerged = null;
|
TemporaryBuffer rawMerged = null;
|
||||||
try {
|
try {
|
||||||
rawMerged = doMerge(result);
|
rawMerged = doMerge(result);
|
||||||
|
@ -1055,21 +871,17 @@ private void updateIndex(CanonicalTreeParser base,
|
||||||
|
|
||||||
// No conflict occurred, the file will contain fully merged content.
|
// No conflict occurred, the file will contain fully merged content.
|
||||||
// The index will be populated with the new merged version.
|
// The index will be populated with the new merged version.
|
||||||
DirCacheEntry dce = new DirCacheEntry(tw.getPathString());
|
Instant lastModified =
|
||||||
|
mergedFile == null ? null : nonNullRepo().getFS().lastModifiedInstant(mergedFile);
|
||||||
// Set the mode for the new content. Fall back to REGULAR_FILE if
|
// Set the mode for the new content. Fall back to REGULAR_FILE if
|
||||||
// we can't merge modes of OURS and THEIRS.
|
// we can't merge modes of OURS and THEIRS.
|
||||||
int newMode = mergeFileModes(tw.getRawMode(0), tw.getRawMode(1),
|
int newMode = mergeFileModes(tw.getRawMode(0), tw.getRawMode(1),
|
||||||
tw.getRawMode(2));
|
tw.getRawMode(2));
|
||||||
dce.setFileMode(newMode == FileMode.MISSING.getBits()
|
FileMode mode = newMode == FileMode.MISSING.getBits()
|
||||||
? FileMode.REGULAR_FILE : FileMode.fromBits(newMode));
|
? FileMode.REGULAR_FILE : FileMode.fromBits(newMode);
|
||||||
if (mergedFile != null) {
|
workTreeUpdater.insertToIndex(rawMerged.openInputStream(), tw.getPathString().getBytes(UTF_8), mode,
|
||||||
dce.setLastModified(
|
DirCacheEntry.STAGE_0, lastModified, (int) rawMerged.length(),
|
||||||
nonNullRepo().getFS().lastModifiedInstant(mergedFile));
|
attributes.get(Constants.ATTR_MERGE));
|
||||||
dce.setLength((int) mergedFile.length());
|
|
||||||
}
|
|
||||||
dce.setObjectId(insertMergeResult(rawMerged, attributes));
|
|
||||||
builder.add(dce);
|
|
||||||
} finally {
|
} finally {
|
||||||
if (rawMerged != null) {
|
if (rawMerged != null) {
|
||||||
rawMerged.destroy();
|
rawMerged.destroy();
|
||||||
|
@ -1085,34 +897,30 @@ private void updateIndex(CanonicalTreeParser base,
|
||||||
* @param attributes
|
* @param attributes
|
||||||
* the files .gitattributes entries
|
* the files .gitattributes entries
|
||||||
* @return the working tree file to which the merged content was written.
|
* @return the working tree file to which the merged content was written.
|
||||||
* @throws FileNotFoundException
|
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
private File writeMergedFile(TemporaryBuffer rawMerged,
|
private File writeMergedFile(TemporaryBuffer rawMerged,
|
||||||
Attributes attributes)
|
Attributes attributes)
|
||||||
throws FileNotFoundException, IOException {
|
throws IOException {
|
||||||
File workTree = nonNullRepo().getWorkTree();
|
File workTree = nonNullRepo().getWorkTree();
|
||||||
FS fs = nonNullRepo().getFS();
|
FS fs = nonNullRepo().getFS();
|
||||||
File of = new File(workTree, tw.getPathString());
|
File of = new File(workTree, tw.getPathString());
|
||||||
File parentFolder = of.getParentFile();
|
File parentFolder = of.getParentFile();
|
||||||
|
EolStreamType eol = workTreeUpdater.detectCheckoutStreamType(attributes);
|
||||||
if (!fs.exists(parentFolder)) {
|
if (!fs.exists(parentFolder)) {
|
||||||
parentFolder.mkdirs();
|
parentFolder.mkdirs();
|
||||||
}
|
}
|
||||||
EolStreamType streamType = EolStreamTypeUtil.detectStreamType(
|
StreamLoader contentLoader = WorkTreeUpdater.createStreamLoader(rawMerged::openInputStream,
|
||||||
OperationType.CHECKOUT_OP, workingTreeOptions,
|
rawMerged.length());
|
||||||
attributes);
|
workTreeUpdater.updateFileWithContent(contentLoader,
|
||||||
try (OutputStream os = EolStreamTypeUtil.wrapOutputStream(
|
eol, tw.getSmudgeCommand(attributes), of.getPath(), of, false);
|
||||||
new BufferedOutputStream(new FileOutputStream(of)),
|
|
||||||
streamType)) {
|
|
||||||
rawMerged.writeTo(os, null);
|
|
||||||
}
|
|
||||||
return of;
|
return of;
|
||||||
}
|
}
|
||||||
|
|
||||||
private TemporaryBuffer doMerge(MergeResult<RawText> result)
|
private TemporaryBuffer doMerge(MergeResult<RawText> result)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
TemporaryBuffer.LocalFile buf = new TemporaryBuffer.LocalFile(
|
TemporaryBuffer.LocalFile buf = new TemporaryBuffer.LocalFile(
|
||||||
db != null ? nonNullRepo().getDirectory() : null, inCoreLimit);
|
db != null ? nonNullRepo().getDirectory() : null, workTreeUpdater.getInCoreFileSizeLimit());
|
||||||
boolean success = false;
|
boolean success = false;
|
||||||
try {
|
try {
|
||||||
new MergeFormatter().formatMerge(buf, result,
|
new MergeFormatter().formatMerge(buf, result,
|
||||||
|
@ -1127,16 +935,6 @@ private TemporaryBuffer doMerge(MergeResult<RawText> result)
|
||||||
return buf;
|
return buf;
|
||||||
}
|
}
|
||||||
|
|
||||||
private ObjectId insertMergeResult(TemporaryBuffer buf,
|
|
||||||
Attributes attributes) throws IOException {
|
|
||||||
InputStream in = buf.openInputStream();
|
|
||||||
try (LfsInputStream is = LfsFactory.getInstance().applyCleanFilter(
|
|
||||||
getRepository(), in,
|
|
||||||
buf.length(), attributes.get(Constants.ATTR_MERGE))) {
|
|
||||||
return getObjectInserter().insert(OBJ_BLOB, is.getLength(), is);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Try to merge filemodes. If only ours or theirs have changed the mode
|
* Try to merge filemodes. If only ours or theirs have changed the mode
|
||||||
* (compared to base) we choose that one. If ours and theirs have equal
|
* (compared to base) we choose that one. If ours and theirs have equal
|
||||||
|
@ -1154,22 +952,26 @@ private ObjectId insertMergeResult(TemporaryBuffer buf,
|
||||||
* conflict
|
* conflict
|
||||||
*/
|
*/
|
||||||
private int mergeFileModes(int modeB, int modeO, int modeT) {
|
private int mergeFileModes(int modeB, int modeO, int modeT) {
|
||||||
if (modeO == modeT)
|
if (modeO == modeT) {
|
||||||
return modeO;
|
return modeO;
|
||||||
if (modeB == modeO)
|
}
|
||||||
|
if (modeB == modeO) {
|
||||||
// Base equal to Ours -> chooses Theirs if that is not missing
|
// Base equal to Ours -> chooses Theirs if that is not missing
|
||||||
return (modeT == FileMode.MISSING.getBits()) ? modeO : modeT;
|
return (modeT == FileMode.MISSING.getBits()) ? modeO : modeT;
|
||||||
if (modeB == modeT)
|
}
|
||||||
|
if (modeB == modeT) {
|
||||||
// Base equal to Theirs -> chooses Ours if that is not missing
|
// Base equal to Theirs -> chooses Ours if that is not missing
|
||||||
return (modeO == FileMode.MISSING.getBits()) ? modeT : modeO;
|
return (modeO == FileMode.MISSING.getBits()) ? modeT : modeO;
|
||||||
|
}
|
||||||
return FileMode.MISSING.getBits();
|
return FileMode.MISSING.getBits();
|
||||||
}
|
}
|
||||||
|
|
||||||
private RawText getRawText(ObjectId id,
|
private RawText getRawText(ObjectId id,
|
||||||
Attributes attributes)
|
Attributes attributes)
|
||||||
throws IOException, BinaryBlobException {
|
throws IOException, BinaryBlobException {
|
||||||
if (id.equals(ObjectId.zeroId()))
|
if (id.equals(ObjectId.zeroId())) {
|
||||||
return new RawText(new byte[] {});
|
return new RawText(new byte[]{});
|
||||||
|
}
|
||||||
|
|
||||||
ObjectLoader loader = LfsFactory.getInstance().applySmudgeFilter(
|
ObjectLoader loader = LfsFactory.getInstance().applySmudgeFilter(
|
||||||
getRepository(), reader.open(id, OBJ_BLOB),
|
getRepository(), reader.open(id, OBJ_BLOB),
|
||||||
|
@ -1233,7 +1035,7 @@ public List<String> getUnmergedPaths() {
|
||||||
* superset of the files listed by {@link #getUnmergedPaths()}.
|
* superset of the files listed by {@link #getUnmergedPaths()}.
|
||||||
*/
|
*/
|
||||||
public List<String> getModifiedFiles() {
|
public List<String> getModifiedFiles() {
|
||||||
return modifiedFiles;
|
return workTreeUpdater != null ? workTreeUpdater.getModifiedFiles() : modifiedFiles;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -1247,7 +1049,7 @@ public List<String> getModifiedFiles() {
|
||||||
* for this path.
|
* for this path.
|
||||||
*/
|
*/
|
||||||
public Map<String, DirCacheEntry> getToBeCheckedOut() {
|
public Map<String, DirCacheEntry> getToBeCheckedOut() {
|
||||||
return toBeCheckedOut;
|
return workTreeUpdater.getToBeCheckedOut();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -1297,7 +1099,6 @@ public boolean failed() {
|
||||||
*/
|
*/
|
||||||
public void setDirCache(DirCache dc) {
|
public void setDirCache(DirCache dc) {
|
||||||
this.dircache = dc;
|
this.dircache = dc;
|
||||||
implicitDirCache = false;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -1352,53 +1153,48 @@ public void setWorkingTreeIterator(WorkingTreeIterator workingTreeIterator) {
|
||||||
protected boolean mergeTrees(AbstractTreeIterator baseTree,
|
protected boolean mergeTrees(AbstractTreeIterator baseTree,
|
||||||
RevTree headTree, RevTree mergeTree, boolean ignoreConflicts)
|
RevTree headTree, RevTree mergeTree, boolean ignoreConflicts)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
|
try {
|
||||||
|
workTreeUpdater = inCore ?
|
||||||
|
WorkTreeUpdater.createInCoreWorkTreeUpdater(db, dircache, getObjectInserter()) :
|
||||||
|
WorkTreeUpdater.createWorkTreeUpdater(db, dircache);
|
||||||
|
dircache = workTreeUpdater.getLockedDirCache();
|
||||||
|
tw = new NameConflictTreeWalk(db, reader);
|
||||||
|
|
||||||
builder = dircache.builder();
|
tw.addTree(baseTree);
|
||||||
DirCacheBuildIterator buildIt = new DirCacheBuildIterator(builder);
|
tw.setHead(tw.addTree(headTree));
|
||||||
|
tw.addTree(mergeTree);
|
||||||
tw = new NameConflictTreeWalk(db, reader);
|
DirCacheBuildIterator buildIt = workTreeUpdater.createDirCacheBuildIterator();
|
||||||
tw.addTree(baseTree);
|
int dciPos = tw.addTree(buildIt);
|
||||||
tw.setHead(tw.addTree(headTree));
|
if (workingTreeIterator != null) {
|
||||||
tw.addTree(mergeTree);
|
tw.addTree(workingTreeIterator);
|
||||||
int dciPos = tw.addTree(buildIt);
|
workingTreeIterator.setDirCacheIterator(tw, dciPos);
|
||||||
if (workingTreeIterator != null) {
|
} else {
|
||||||
tw.addTree(workingTreeIterator);
|
tw.setFilter(TreeFilter.ANY_DIFF);
|
||||||
workingTreeIterator.setDirCacheIterator(tw, dciPos);
|
|
||||||
} else {
|
|
||||||
tw.setFilter(TreeFilter.ANY_DIFF);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!mergeTreeWalk(tw, ignoreConflicts)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!inCore) {
|
|
||||||
// No problem found. The only thing left to be done is to
|
|
||||||
// checkout all files from "theirs" which have been selected to
|
|
||||||
// go into the new index.
|
|
||||||
checkout();
|
|
||||||
|
|
||||||
// All content-merges are successfully done. If we can now write the
|
|
||||||
// new index we are on quite safe ground. Even if the checkout of
|
|
||||||
// files coming from "theirs" fails the user can work around such
|
|
||||||
// failures by checking out the index again.
|
|
||||||
if (!builder.commit()) {
|
|
||||||
cleanUp();
|
|
||||||
throw new IndexWriteException();
|
|
||||||
}
|
}
|
||||||
builder = null;
|
|
||||||
|
|
||||||
} else {
|
if (!mergeTreeWalk(tw, ignoreConflicts)) {
|
||||||
builder.finish();
|
return false;
|
||||||
builder = null;
|
}
|
||||||
}
|
|
||||||
|
|
||||||
if (getUnmergedPaths().isEmpty() && !failed()) {
|
workTreeUpdater.writeWorkTreeChanges(true);
|
||||||
resultTree = dircache.writeTree(getObjectInserter());
|
if (getUnmergedPaths().isEmpty() && !failed()) {
|
||||||
return true;
|
WorkTreeUpdater.Result result = workTreeUpdater.writeIndexChanges();
|
||||||
|
resultTree = result.treeId;
|
||||||
|
modifiedFiles = result.modifiedFiles;
|
||||||
|
for (String f : result.failedToDelete) {
|
||||||
|
failingPaths.put(f, MergeFailureReason.COULD_NOT_DELETE);
|
||||||
|
}
|
||||||
|
return result.failedToDelete.isEmpty();
|
||||||
|
}
|
||||||
|
resultTree = null;
|
||||||
|
return false;
|
||||||
|
} finally {
|
||||||
|
if(modifiedFiles.isEmpty()) {
|
||||||
|
modifiedFiles = workTreeUpdater.getModifiedFiles();
|
||||||
|
}
|
||||||
|
workTreeUpdater.close();
|
||||||
|
workTreeUpdater = null;
|
||||||
}
|
}
|
||||||
resultTree = null;
|
|
||||||
return false;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -1419,8 +1215,8 @@ protected boolean mergeTreeWalk(TreeWalk treeWalk, boolean ignoreConflicts)
|
||||||
boolean hasAttributeNodeProvider = treeWalk
|
boolean hasAttributeNodeProvider = treeWalk
|
||||||
.getAttributesNodeProvider() != null;
|
.getAttributesNodeProvider() != null;
|
||||||
while (treeWalk.next()) {
|
while (treeWalk.next()) {
|
||||||
Attributes[] attributes = { NO_ATTRIBUTES, NO_ATTRIBUTES,
|
Attributes[] attributes = {NO_ATTRIBUTES, NO_ATTRIBUTES,
|
||||||
NO_ATTRIBUTES };
|
NO_ATTRIBUTES};
|
||||||
if (hasAttributeNodeProvider) {
|
if (hasAttributeNodeProvider) {
|
||||||
attributes[T_BASE] = treeWalk.getAttributes(T_BASE);
|
attributes[T_BASE] = treeWalk.getAttributes(T_BASE);
|
||||||
attributes[T_OURS] = treeWalk.getAttributes(T_OURS);
|
attributes[T_OURS] = treeWalk.getAttributes(T_OURS);
|
||||||
|
@ -1434,11 +1230,12 @@ protected boolean mergeTreeWalk(TreeWalk treeWalk, boolean ignoreConflicts)
|
||||||
hasWorkingTreeIterator ? treeWalk.getTree(T_FILE,
|
hasWorkingTreeIterator ? treeWalk.getTree(T_FILE,
|
||||||
WorkingTreeIterator.class) : null,
|
WorkingTreeIterator.class) : null,
|
||||||
ignoreConflicts, attributes)) {
|
ignoreConflicts, attributes)) {
|
||||||
cleanUp();
|
workTreeUpdater.revertModifiedFiles();
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (treeWalk.isSubtree() && enterSubtree)
|
if (treeWalk.isSubtree() && enterSubtree) {
|
||||||
treeWalk.enterSubtree();
|
treeWalk.enterSubtree();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,693 @@
|
||||||
|
/*
|
||||||
|
* Copyright (C) 2022, Google Inc. and others
|
||||||
|
*
|
||||||
|
* This program and the accompanying materials are made available under the
|
||||||
|
* terms of the Eclipse Distribution License v. 1.0 which is available at
|
||||||
|
* https://www.eclipse.org/org/documents/edl-v10.php.
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: BSD-3-Clause
|
||||||
|
*/
|
||||||
|
package org.eclipse.jgit.util;
|
||||||
|
|
||||||
|
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
|
||||||
|
|
||||||
|
import java.io.BufferedInputStream;
|
||||||
|
import java.io.Closeable;
|
||||||
|
import java.io.File;
|
||||||
|
import java.io.FileOutputStream;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.InputStream;
|
||||||
|
import java.io.OutputStream;
|
||||||
|
import java.nio.file.Files;
|
||||||
|
import java.nio.file.StandardCopyOption;
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.LinkedList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.TreeMap;
|
||||||
|
import org.eclipse.jgit.annotations.Nullable;
|
||||||
|
import org.eclipse.jgit.attributes.Attribute;
|
||||||
|
import org.eclipse.jgit.attributes.Attributes;
|
||||||
|
import org.eclipse.jgit.dircache.DirCache;
|
||||||
|
import org.eclipse.jgit.dircache.DirCacheBuildIterator;
|
||||||
|
import org.eclipse.jgit.dircache.DirCacheBuilder;
|
||||||
|
import org.eclipse.jgit.dircache.DirCacheCheckout;
|
||||||
|
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
|
||||||
|
import org.eclipse.jgit.dircache.DirCacheEntry;
|
||||||
|
import org.eclipse.jgit.errors.IndexWriteException;
|
||||||
|
import org.eclipse.jgit.errors.LargeObjectException;
|
||||||
|
import org.eclipse.jgit.errors.NoWorkTreeException;
|
||||||
|
import org.eclipse.jgit.internal.JGitText;
|
||||||
|
import org.eclipse.jgit.lib.Config;
|
||||||
|
import org.eclipse.jgit.lib.ConfigConstants;
|
||||||
|
import org.eclipse.jgit.lib.Constants;
|
||||||
|
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
|
||||||
|
import org.eclipse.jgit.lib.FileMode;
|
||||||
|
import org.eclipse.jgit.lib.ObjectId;
|
||||||
|
import org.eclipse.jgit.lib.ObjectInserter;
|
||||||
|
import org.eclipse.jgit.lib.ObjectLoader;
|
||||||
|
import org.eclipse.jgit.lib.ObjectReader;
|
||||||
|
import org.eclipse.jgit.lib.ObjectStream;
|
||||||
|
import org.eclipse.jgit.lib.Repository;
|
||||||
|
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
|
||||||
|
import org.eclipse.jgit.treewalk.WorkingTreeOptions;
|
||||||
|
import org.eclipse.jgit.util.LfsFactory.LfsInputStream;
|
||||||
|
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handles work tree updates on both the checkout and the index.
|
||||||
|
* <p>
|
||||||
|
* You should use a single instance for all of your file changes. In case of an error, make sure
|
||||||
|
* your instance is released, and initiate a new one if necessary.
|
||||||
|
*/
|
||||||
|
public class WorkTreeUpdater implements Closeable {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The result of writing the index changes.
|
||||||
|
*/
|
||||||
|
public static class Result {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Files modified during this operation.
|
||||||
|
*/
|
||||||
|
public List<String> modifiedFiles = new LinkedList<>();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Files in this list were failed to be deleted.
|
||||||
|
*/
|
||||||
|
public List<String> failedToDelete = new LinkedList<>();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Modified tree ID if any, or null otherwise.
|
||||||
|
*/
|
||||||
|
public ObjectId treeId = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
Result result = new Result();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The repository this handler operates on.
|
||||||
|
*/
|
||||||
|
@Nullable
|
||||||
|
private final Repository repo;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set to true if this operation should work in-memory. The repo's dircache and
|
||||||
|
* workingtree are not touched by this method. Eventually needed files are
|
||||||
|
* created as temporary files and a new empty, in-memory dircache will be
|
||||||
|
* used instead the repo's one. Often used for bare repos where the repo
|
||||||
|
* doesn't even have a workingtree and dircache.
|
||||||
|
*/
|
||||||
|
private final boolean inCore;
|
||||||
|
|
||||||
|
private final ObjectInserter inserter;
|
||||||
|
private final ObjectReader reader;
|
||||||
|
private DirCache dirCache;
|
||||||
|
private boolean implicitDirCache = false;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builder to update the dir cache during this operation.
|
||||||
|
*/
|
||||||
|
private DirCacheBuilder builder = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The {@link WorkingTreeOptions} are needed to determine line endings for affected files.
|
||||||
|
*/
|
||||||
|
private WorkingTreeOptions workingTreeOptions;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The size limit (bytes) which controls a file to be stored in {@code Heap} or {@code LocalFile}
|
||||||
|
* during the operation.
|
||||||
|
*/
|
||||||
|
private int inCoreFileSizeLimit;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If the operation has nothing to do for a file but check it out at the end of the operation, it
|
||||||
|
* can be added here.
|
||||||
|
*/
|
||||||
|
private final Map<String, DirCacheEntry> toBeCheckedOut = new HashMap<>();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Files in this list will be deleted from the local copy at the end of the operation.
|
||||||
|
*/
|
||||||
|
private final TreeMap<String, File> toBeDeleted = new TreeMap<>();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Keeps {@link CheckoutMetadata} for {@link #checkout()}.
|
||||||
|
*/
|
||||||
|
private Map<String, CheckoutMetadata> checkoutMetadata;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Keeps {@link CheckoutMetadata} for {@link #revertModifiedFiles()}.
|
||||||
|
*/
|
||||||
|
private Map<String, CheckoutMetadata> cleanupMetadata;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Whether the changes were successfully written
|
||||||
|
*/
|
||||||
|
private boolean indexChangesWritten = false;
|
||||||
|
|
||||||
|
/**
 * Creates an updater that writes both the file system and the index.
 *
 * @param repo the {@link org.eclipse.jgit.lib.Repository}.
 * @param dirCache if set, use the provided dir cache. Otherwise, use the default repository one
 */
private WorkTreeUpdater(Repository repo, DirCache dirCache) {
    this.inCore = false;
    this.repo = repo;
    this.dirCache = dirCache;
    // Non-inCore mode owns its inserter/reader pair, created from the repo.
    this.inserter = repo.newObjectInserter();
    this.reader = inserter.newReader();
    this.workingTreeOptions = repo.getConfig().get(WorkingTreeOptions.KEY);
    this.checkoutMetadata = new HashMap<>();
    this.cleanupMetadata = new HashMap<>();
    this.inCoreFileSizeLimit = setInCoreFileSizeLimit(repo.getConfig());
}
|
||||||
|
|
||||||
|
/**
 * Creates a handler that updates both the work tree and the index.
 *
 * @param repo the {@link org.eclipse.jgit.lib.Repository}.
 * @param dirCache if set, use the provided dir cache. Otherwise, use the default repository one
 * @return an IO handler.
 */
public static WorkTreeUpdater createWorkTreeUpdater(Repository repo, DirCache dirCache) {
    return new WorkTreeUpdater(repo, dirCache);
}
|
||||||
|
|
||||||
|
/**
 * Creates an in-core (index-only) updater; no working-tree files are touched.
 *
 * @param repo the {@link org.eclipse.jgit.lib.Repository}.
 * @param dirCache if set, use the provided dir cache. Otherwise, creates a new one
 * @param oi to use for writing the modified objects with.
 */
private WorkTreeUpdater(Repository repo, DirCache dirCache, ObjectInserter oi) {
    this.inCore = true;
    this.repo = repo;
    this.dirCache = dirCache;
    // The caller owns the inserter; derive a reader from it so freshly
    // inserted objects are visible.
    this.inserter = oi;
    this.reader = oi.newReader();
    // repo may legitimately be null in-core; read config only when present.
    if (repo != null) {
        this.inCoreFileSizeLimit = setInCoreFileSizeLimit(repo.getConfig());
    }
}
|
||||||
|
|
||||||
|
/**
 * Creates an in-core handler that updates only the index, never the file system.
 *
 * @param repo the {@link org.eclipse.jgit.lib.Repository}.
 * @param dirCache if set, use the provided dir cache. Otherwise, creates a new one
 * @param oi to use for writing the modified objects with.
 * @return an IO handler.
 */
public static WorkTreeUpdater createInCoreWorkTreeUpdater(Repository repo, DirCache dirCache,
    ObjectInserter oi) {
  return new WorkTreeUpdater(repo, dirCache, oi);
}
|
||||||
|
|
||||||
|
/**
 * Something that can supply an {@link InputStream}.
 */
@FunctionalInterface
public interface StreamSupplier {

    /**
     * Loads the input stream.
     *
     * @return the loaded stream
     * @throws IOException if any reading error occurs
     */
    InputStream load() throws IOException;
}
|
||||||
|
|
||||||
|
/**
 * We write the patch result to a {@link org.eclipse.jgit.util.TemporaryBuffer} and then use
 * {@link DirCacheCheckout}.getContent() to run the result through the CR-LF and smudge filters.
 * DirCacheCheckout needs an ObjectLoader, not a TemporaryBuffer, so this class bridges between
 * the two, making any Stream provided by a {@link StreamSupplier} look like an ordinary git blob
 * to DirCacheCheckout.
 */
public static class StreamLoader extends ObjectLoader {

    // Package-visible: read directly by insertResult() for the LFS clean filter.
    private final StreamSupplier data;

    // Declared content length in bytes; not re-validated against the stream.
    private final long size;

    private StreamLoader(StreamSupplier data, long length) {
        this.data = data;
        this.size = length;
    }

    @Override
    public int getType() {
        // Always a blob: this loader only ever represents file content.
        return Constants.OBJ_BLOB;
    }

    @Override
    public long getSize() {
        return size;
    }

    @Override
    public boolean isLarge() {
        // Force streaming access; cached-bytes access is unsupported below.
        return true;
    }

    @Override
    public byte[] getCachedBytes() throws LargeObjectException {
        throw new LargeObjectException();
    }

    @Override
    public ObjectStream openStream() throws IOException {
        return new ObjectStream.Filter(getType(), getSize(), new BufferedInputStream(data.load()));
    }
}
|
||||||
|
|
||||||
|
/**
 * Creates stream loader for the given supplier.
 *
 * @param supplier to wrap
 * @param length of the supplied content
 * @return the result stream loader
 */
public static StreamLoader createStreamLoader(StreamSupplier supplier, long length) {
  return new StreamLoader(supplier, length);
}
|
||||||
|
|
||||||
|
/**
 * Reads the in-core size limit from {@code merge.inCoreLimit} (default 10 MiB).
 * NOTE(review): despite the "set" prefix this method only computes the value;
 * callers assign it to {@link #inCoreFileSizeLimit} themselves.
 *
 * @param config to read the limit from
 * @return the configured limit in bytes
 */
private static int setInCoreFileSizeLimit(Config config) {
  return config.getInt(
      ConfigConstants.CONFIG_MERGE_SECTION, ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 10 << 20);
}
|
||||||
|
|
||||||
|
/**
 * Gets the size limit for in-core files in this config.
 *
 * @return the size in bytes
 */
public int getInCoreFileSizeLimit() {
  return inCoreFileSizeLimit;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets dir cache for the repo. Locked if not inCore.
|
||||||
|
*
|
||||||
|
* @return the result dir cache
|
||||||
|
* @throws IOException is case the dir cache cannot be read
|
||||||
|
*/
|
||||||
|
public DirCache getLockedDirCache() throws IOException {
|
||||||
|
if (dirCache == null) {
|
||||||
|
implicitDirCache = true;
|
||||||
|
if (inCore) {
|
||||||
|
dirCache = DirCache.newInCore();
|
||||||
|
} else {
|
||||||
|
dirCache = nonNullNonBareRepo().lockDirCache();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (builder == null) {
|
||||||
|
builder = dirCache.builder();
|
||||||
|
}
|
||||||
|
return dirCache;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Creates build iterator for the handler's builder.
 *
 * <p>Requires {@link #getLockedDirCache()} to have been called first so that
 * {@code builder} is initialized.
 *
 * @return the iterator
 */
public DirCacheBuildIterator createDirCacheBuildIterator() {
  return new DirCacheBuildIterator(builder);
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes the changes to the WorkTree (but not the index).
|
||||||
|
*
|
||||||
|
* @param shouldCheckoutTheirs before committing the changes
|
||||||
|
* @throws IOException if any of the writes fail
|
||||||
|
*/
|
||||||
|
public void writeWorkTreeChanges(boolean shouldCheckoutTheirs) throws IOException {
|
||||||
|
handleDeletedFiles();
|
||||||
|
|
||||||
|
if (inCore) {
|
||||||
|
builder.finish();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (shouldCheckoutTheirs) {
|
||||||
|
// No problem found. The only thing left to be done is to
|
||||||
|
// check out all files from "theirs" which have been selected to
|
||||||
|
// go into the new index.
|
||||||
|
checkout();
|
||||||
|
}
|
||||||
|
|
||||||
|
// All content operations are successfully done. If we can now write the
|
||||||
|
// new index we are on quite safe ground. Even if the checkout of
|
||||||
|
// files coming from "theirs" fails the user can work around such
|
||||||
|
// failures by checking out the index again.
|
||||||
|
if (!builder.commit()) {
|
||||||
|
revertModifiedFiles();
|
||||||
|
throw new IndexWriteException();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Writes the changes to the index.
 *
 * @return the Result of the operation.
 * @throws IOException if any of the writes fail
 */
public Result writeIndexChanges() throws IOException {
  // getLockedDirCache() guarantees the dir cache and builder exist.
  result.treeId = getLockedDirCache().writeTree(inserter);
  // Once set, revertModifiedFiles() becomes a no-op for non-inCore mode.
  indexChangesWritten = true;
  return result;
}
|
||||||
|
|
||||||
|
/**
 * Adds a {@link DirCacheEntry} for direct checkout and remembers its {@link CheckoutMetadata}.
 *
 * @param path of the entry
 * @param entry to add; may be null if only the metadata should be recorded
 * @param cleanupStreamType to use for the cleanup metadata
 * @param cleanupSmudgeCommand to use for the cleanup metadata
 * @param checkoutStreamType to use for the checkout metadata
 * @param checkoutSmudgeCommand to use for the checkout metadata
 * @since 6.1
 */
public void addToCheckout(
    String path, DirCacheEntry entry, EolStreamType cleanupStreamType,
    String cleanupSmudgeCommand, EolStreamType checkoutStreamType, String checkoutSmudgeCommand) {
  if (entry != null) {
    // In some cases, we just want to add the metadata.
    toBeCheckedOut.put(path, entry);
  }
  addCheckoutMetadata(cleanupMetadata, path, cleanupStreamType, cleanupSmudgeCommand);
  addCheckoutMetadata(checkoutMetadata, path, checkoutStreamType, checkoutSmudgeCommand);
}
|
||||||
|
|
||||||
|
/**
 * Get a map which maps the paths of files which have to be checked out because the operation
 * created new fully-merged content for this file into the index.
 *
 * <p>This means: the operation wrote a new stage 0 entry for this path.</p>
 *
 * <p>NOTE(review): returns the internal mutable map, not a copy; callers can
 * modify this handler's state through it.
 *
 * @return the map
 */
public Map<String, DirCacheEntry> getToBeCheckedOut() {
  return toBeCheckedOut;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deletes the given file
|
||||||
|
* <p>
|
||||||
|
* Note the actual deletion is only done in {@link #writeWorkTreeChanges}
|
||||||
|
*
|
||||||
|
* @param path of the file to be deleted
|
||||||
|
* @param file to be deleted
|
||||||
|
* @param streamType to use for cleanup metadata
|
||||||
|
* @param smudgeCommand to use for cleanup metadata
|
||||||
|
* @throws IOException if the file cannot be deleted
|
||||||
|
*/
|
||||||
|
public void deleteFile(String path, File file, EolStreamType streamType, String smudgeCommand)
|
||||||
|
throws IOException {
|
||||||
|
toBeDeleted.put(path, file);
|
||||||
|
if (file != null && file.isFile()) {
|
||||||
|
addCheckoutMetadata(cleanupMetadata, path, streamType, smudgeCommand);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remembers the {@link CheckoutMetadata} for the given path; it may be needed in {@link
|
||||||
|
* #checkout()} or in {@link #revertModifiedFiles()}.
|
||||||
|
*
|
||||||
|
* @param map to add the metadata to
|
||||||
|
* @param path of the current node
|
||||||
|
* @param streamType to use for the metadata
|
||||||
|
* @param smudgeCommand to use for the metadata
|
||||||
|
* @since 6.1
|
||||||
|
*/
|
||||||
|
private void addCheckoutMetadata(
|
||||||
|
Map<String, CheckoutMetadata> map, String path, EolStreamType streamType,
|
||||||
|
String smudgeCommand) {
|
||||||
|
if (inCore || map == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
map.put(path, new CheckoutMetadata(streamType, smudgeCommand));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Detects if CRLF conversion has been configured.
|
||||||
|
* <p></p>
|
||||||
|
* See {@link EolStreamTypeUtil#detectStreamType} for more info.
|
||||||
|
*
|
||||||
|
* @param attributes of the file for which the type is to be detected
|
||||||
|
* @return the detected type
|
||||||
|
*/
|
||||||
|
public EolStreamType detectCheckoutStreamType(Attributes attributes) {
|
||||||
|
if (inCore) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return EolStreamTypeUtil.detectStreamType(
|
||||||
|
OperationType.CHECKOUT_OP, workingTreeOptions, attributes);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void handleDeletedFiles() {
|
||||||
|
// Iterate in reverse so that "folder/file" is deleted before
|
||||||
|
// "folder". Otherwise, this could result in a failing path because
|
||||||
|
// of a non-empty directory, for which delete() would fail.
|
||||||
|
for (String path : toBeDeleted.descendingKeySet()) {
|
||||||
|
File file = inCore ? null : toBeDeleted.get(path);
|
||||||
|
if (file != null && !file.delete()) {
|
||||||
|
if (!file.isDirectory()) {
|
||||||
|
result.failedToDelete.add(path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Marks the given path as modified in the operation.
 *
 * @param path to mark as modified
 */
public void markAsModified(String path) {
  result.modifiedFiles.add(path);
}
|
||||||
|
|
||||||
|
/**
 * Gets the list of files which were modified in this operation.
 *
 * <p>NOTE(review): returns the internal mutable list, not a copy.
 *
 * @return the list
 */
public List<String> getModifiedFiles() {
  return result.modifiedFiles;
}
|
||||||
|
|
||||||
|
// Materializes every entry queued in toBeCheckedOut in the working tree.
// Gitlinks become (empty) directories; all other entries are checked out
// through DirCacheCheckout with their recorded checkout metadata and are
// then tracked as modified.
private void checkout() throws NoWorkTreeException, IOException {
  for (Map.Entry<String, DirCacheEntry> entry : toBeCheckedOut.entrySet()) {
    DirCacheEntry dirCacheEntry = entry.getValue();
    if (dirCacheEntry.getFileMode() == FileMode.GITLINK) {
      new File(nonNullNonBareRepo().getWorkTree(), entry.getKey()).mkdirs();
    } else {
      DirCacheCheckout.checkoutEntry(
          repo, dirCacheEntry, reader, false, checkoutMetadata.get(entry.getKey()));
      result.modifiedFiles.add(entry.getKey());
    }
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reverts any uncommitted changes in the worktree. We know that for all modified files the
|
||||||
|
* old content was in the old index and the index contained only stage 0. In case if inCore
|
||||||
|
* operation just clear the history of modified files.
|
||||||
|
*
|
||||||
|
* @throws java.io.IOException in case the cleaning up failed
|
||||||
|
*/
|
||||||
|
public void revertModifiedFiles() throws IOException {
|
||||||
|
if (inCore) {
|
||||||
|
result.modifiedFiles.clear();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (indexChangesWritten) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
for (String path : result.modifiedFiles) {
|
||||||
|
DirCacheEntry entry = dirCache.getEntry(path);
|
||||||
|
if (entry != null) {
|
||||||
|
DirCacheCheckout.checkoutEntry(
|
||||||
|
repo, entry, reader, false, cleanupMetadata.get(path));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
public void close() throws IOException {
  // Only unlock the dir cache if this handler created/locked it itself;
  // a caller-provided dir cache stays under the caller's control.
  if (implicitDirCache) {
    dirCache.unlock();
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Updates the file in the checkout with the given content.
|
||||||
|
*
|
||||||
|
* @param resultStreamLoader with the content to be updated
|
||||||
|
* @param streamType for parsing the content
|
||||||
|
* @param smudgeCommand for formatting the content
|
||||||
|
* @param path of the file to be updated
|
||||||
|
* @param file to be updated
|
||||||
|
* @param safeWrite whether the content should be written to a buffer first
|
||||||
|
* @throws IOException if the {@link CheckoutMetadata} cannot be determined
|
||||||
|
*/
|
||||||
|
public void updateFileWithContent(
|
||||||
|
StreamLoader resultStreamLoader,
|
||||||
|
EolStreamType streamType,
|
||||||
|
String smudgeCommand,
|
||||||
|
String path,
|
||||||
|
File file,
|
||||||
|
boolean safeWrite)
|
||||||
|
throws IOException {
|
||||||
|
if (inCore) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
CheckoutMetadata checkoutMetadata = new CheckoutMetadata(streamType, smudgeCommand);
|
||||||
|
if (safeWrite) {
|
||||||
|
try (org.eclipse.jgit.util.TemporaryBuffer buffer =
|
||||||
|
new org.eclipse.jgit.util.TemporaryBuffer.LocalFile(null)) {
|
||||||
|
// Write to a buffer and copy to the file only if everything was fine.
|
||||||
|
DirCacheCheckout.getContent(
|
||||||
|
repo, path, checkoutMetadata, resultStreamLoader, null, buffer);
|
||||||
|
InputStream bufIn = buffer.openInputStream();
|
||||||
|
Files.copy(bufIn, file.toPath(), StandardCopyOption.REPLACE_EXISTING);
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
OutputStream outputStream = new FileOutputStream(file);
|
||||||
|
DirCacheCheckout.getContent(
|
||||||
|
repo, path, checkoutMetadata, resultStreamLoader, null, outputStream);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a path with the given content, and adds it to the specified stage to the index builder
|
||||||
|
*
|
||||||
|
* @param inputStream with the content to be updated
|
||||||
|
* @param path of the file to be updated
|
||||||
|
* @param fileMode of the modified file
|
||||||
|
* @param entryStage of the new entry
|
||||||
|
* @param lastModified instant of the modified file
|
||||||
|
* @param len of the content
|
||||||
|
* @param lfsAttribute for checking for LFS enablement
|
||||||
|
* @return the entry which was added to the index
|
||||||
|
* @throws IOException if inserting the content fails
|
||||||
|
*/
|
||||||
|
public DirCacheEntry insertToIndex(
|
||||||
|
InputStream inputStream,
|
||||||
|
byte[] path,
|
||||||
|
FileMode fileMode,
|
||||||
|
int entryStage,
|
||||||
|
Instant lastModified,
|
||||||
|
int len,
|
||||||
|
Attribute lfsAttribute) throws IOException {
|
||||||
|
StreamLoader contentLoader = createStreamLoader(() -> inputStream, len);
|
||||||
|
return insertToIndex(contentLoader, path, fileMode, entryStage, lastModified, len,
|
||||||
|
lfsAttribute);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Creates a path with the given content, and adds it to the specified stage to the index builder
 *
 * @param resultStreamLoader with the content to be updated
 * @param path of the file to be updated
 * @param fileMode of the modified file
 * @param entryStage of the new entry
 * @param lastModified instant of the modified file
 * @param len of the content
 * @param lfsAttribute for checking for LFS enablement
 * @return the entry which was added to the index
 * @throws IOException if inserting the content fails
 */
public DirCacheEntry insertToIndex(
    StreamLoader resultStreamLoader,
    byte[] path,
    FileMode fileMode,
    int entryStage,
    Instant lastModified,
    int len,
    Attribute lfsAttribute) throws IOException {
  // First write the blob (through the LFS clean filter if enabled), then
  // register the resulting object id in the index builder.
  return addExistingToIndex(insertResult(resultStreamLoader, lfsAttribute),
      path, fileMode, entryStage, lastModified, len);
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a path with the specified stage to the index builder
|
||||||
|
*
|
||||||
|
* @param objectId of the existing object to add
|
||||||
|
* @param path of the modified file
|
||||||
|
* @param fileMode of the modified file
|
||||||
|
* @param entryStage of the new entry
|
||||||
|
* @param lastModified instant of the modified file
|
||||||
|
* @param len of the modified file content
|
||||||
|
* @return the entry which was added to the index
|
||||||
|
*/
|
||||||
|
public DirCacheEntry addExistingToIndex(
|
||||||
|
ObjectId objectId,
|
||||||
|
byte[] path,
|
||||||
|
FileMode fileMode,
|
||||||
|
int entryStage,
|
||||||
|
Instant lastModified,
|
||||||
|
int len) {
|
||||||
|
DirCacheEntry dce = new DirCacheEntry(path, entryStage);
|
||||||
|
dce.setFileMode(fileMode);
|
||||||
|
if (lastModified != null) {
|
||||||
|
dce.setLastModified(lastModified);
|
||||||
|
}
|
||||||
|
dce.setLength(inCore ? 0 : len);
|
||||||
|
|
||||||
|
dce.setObjectId(objectId);
|
||||||
|
builder.add(dce);
|
||||||
|
return dce;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Inserts the loader's content as a blob, running it through the LFS clean
// filter first when the lfs attribute enables it. The LfsInputStream is
// closed by try-with-resources; is.getLength() may differ from the loader's
// declared size when LFS replaces the content with a pointer.
private ObjectId insertResult(StreamLoader resultStreamLoader, Attribute lfsAttribute)
    throws IOException {
  try (LfsInputStream is =
      org.eclipse.jgit.util.LfsFactory.getInstance()
          .applyCleanFilter(
              repo,
              resultStreamLoader.data.load(),
              resultStreamLoader.size,
              lfsAttribute)) {
    return inserter.insert(OBJ_BLOB, is.getLength(), is);
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets non-null repository instance
|
||||||
|
*
|
||||||
|
* @return non-null repository instance
|
||||||
|
* @throws java.lang.NullPointerException if the handler was constructed without a repository.
|
||||||
|
*/
|
||||||
|
private Repository nonNullRepo() throws NullPointerException {
|
||||||
|
if (repo == null) {
|
||||||
|
throw new NullPointerException(JGitText.get().repositoryIsRequired);
|
||||||
|
}
|
||||||
|
return repo;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets non-null and non-bare repository instance
|
||||||
|
*
|
||||||
|
* @return non-null and non-bare repository instance
|
||||||
|
* @throws java.lang.NullPointerException if the handler was constructed without a repository.
|
||||||
|
* @throws NoWorkTreeException if the handler was constructed with a bare repository
|
||||||
|
*/
|
||||||
|
private Repository nonNullNonBareRepo() throws NullPointerException, NoWorkTreeException {
|
||||||
|
if (nonNullRepo().isBare()) {
|
||||||
|
throw new NoWorkTreeException();
|
||||||
|
}
|
||||||
|
return repo;
|
||||||
|
}
|
||||||
|
}
|
Loading…
Reference in New Issue