+ * 1. in the root of the working tree
+ * 2. in the index
+ * 3. in the HEAD, for bare repositories this is the only place
+ *    that is searched
+ * <p>
+ * Values from the .lfsconfig are used only if not specified in another git
+ * config file to allow local override without modification of a committed file.
+ */
+public class LfsConfig {
+	private Repository db;
+	private Config delegate;
+
+	/**
+	 * Create a new instance of the LfsConfig.
+	 *
+	 * @param db
+	 *            the associated repo
+	 * @throws IOException
+	 */
+	public LfsConfig(Repository db) throws IOException {
+		this.db = db;
+		delegate = this.load();
+	}
+
+	/**
+	 * Read the .lfsconfig file from the repository
+	 *
+	 * @return The loaded lfs config or null if it does not exist
+	 *
+	 * @throws IOException
+	 */
+	private Config load() throws IOException {
+		Config result = null;
+
+		if (!db.isBare()) {
+			result = loadFromWorkingTree();
+			if (result == null) {
+				result = loadFromIndex();
+			}
+		}
+
+		if (result == null) {
+			result = loadFromHead();
+		}
+
+		if (result == null) {
+			result = emptyConfig();
+		}
+
+		return result;
+	}
+
+	/**
+	 * Try to read the lfs config from a file called .lfsconfig at the top level
+	 * of the working tree.
+	 *
+	 * @return the config, or <code>null</code>
+ * @throws IOException
+ */
+ @Nullable
+ private Config loadFromWorkingTree()
+ throws IOException {
+ File lfsConfig = db.getFS().resolve(db.getWorkTree(),
+ Constants.DOT_LFS_CONFIG);
+ if (lfsConfig.exists() && lfsConfig.isFile()) {
+ FileBasedConfig config = new FileBasedConfig(lfsConfig, db.getFS());
+ try {
+ config.load();
+ return config;
+ } catch (ConfigInvalidException e) {
+ throw new LfsConfigInvalidException(
+ LfsText.get().dotLfsConfigReadFailed, e);
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Try to read the lfs config from an entry called .lfsconfig contained in
+ * the index.
+ *
+	 * @return the config, or <code>null</code>
+	 *         if the entry does not exist
+ * @throws IOException
+ */
+ @Nullable
+ private Config loadFromIndex()
+ throws IOException {
+ try {
+ DirCacheEntry entry = db.readDirCache()
+ .getEntry(Constants.DOT_LFS_CONFIG);
+ if (entry != null) {
+ return new BlobBasedConfig(null, db, entry.getObjectId());
+ }
+ } catch (ConfigInvalidException e) {
+ throw new LfsConfigInvalidException(
+ LfsText.get().dotLfsConfigReadFailed, e);
+ }
+ return null;
+ }
+
+ /**
+ * Try to read the lfs config from an entry called .lfsconfig contained in
+ * the head revision.
+ *
+	 * @return the config, or <code>null</code>
+	 *         if the file does not exist
+ * @throws IOException
+ */
+ @Nullable
+ private Config loadFromHead() throws IOException {
+ try (RevWalk revWalk = new RevWalk(db)) {
+ ObjectId headCommitId = db.resolve(HEAD);
+ if (headCommitId == null) {
+ return null;
+ }
+ RevCommit commit = revWalk.parseCommit(headCommitId);
+ RevTree tree = commit.getTree();
+ TreeWalk treewalk = TreeWalk.forPath(db, Constants.DOT_LFS_CONFIG,
+ tree);
+ if (treewalk != null) {
+ return new BlobBasedConfig(null, db, treewalk.getObjectId(0));
+ }
+ } catch (ConfigInvalidException e) {
+ throw new LfsConfigInvalidException(
+ LfsText.get().dotLfsConfigReadFailed, e);
+ }
+ return null;
+ }
+
+ /**
+ * Create an empty config as fallback to avoid null pointer checks.
+ *
+ * @return an empty config
+ */
+ private Config emptyConfig() {
+ return new Config();
+ }
+
+ /**
+ * Get string value or null if not found.
+ *
+ * First tries to find the value in the git config files. If not found tries
+ * to find data in .lfsconfig.
+ *
+ * @param section
+ * the section
+ * @param subsection
+ * the subsection for the value
+ * @param name
+ * the key name
+	 * @return a String value from the config, <code>null</code>
+	 *         if not found
+ */
+ public String getString(final String section, final String subsection,
+ final String name) {
+ String result = db.getConfig().getString(section, subsection, name);
+ if (result == null) {
+ result = delegate.getString(section, subsection, name);
+ }
+ return result;
+ }
+}
diff --git a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/LfsConnectionFactory.java b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/LfsConnectionFactory.java
index 5a17d411c..12b688d15 100644
--- a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/LfsConnectionFactory.java
+++ b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/LfsConnectionFactory.java
@@ -45,7 +45,6 @@
* Provides means to get a valid LFS connection for a given repository.
*/
public class LfsConnectionFactory {
-
private static final int SSH_AUTH_TIMEOUT_SECONDS = 30;
private static final String SCHEME_HTTPS = "https"; //$NON-NLS-1$
private static final String SCHEME_SSH = "ssh"; //$NON-NLS-1$
@@ -104,19 +103,19 @@ public static HttpConnection getLfsConnection(Repository db, String method,
* additional headers that can be used to connect to LFS server
* @return the URL for the LFS server. e.g.
* "https://github.com/github/git-lfs.git/info/lfs"
- * @throws LfsConfigInvalidException
- * if the LFS config is invalid
+ * @throws IOException
+ * if the LFS config is invalid or cannot be accessed
* @see
* Server Discovery documentation
*/
- static String getLfsUrl(Repository db, String purpose,
+ private static String getLfsUrl(Repository db, String purpose,
+			Map<String, String> additionalHeaders) throws IOException {
	 * @return <code>false</code>
	 *         if the merge will fail because the index entry
* didn't match ours or the working-dir file was dirty and a
* conflict occurred
@@ -607,12 +619,12 @@ protected void addDeletion(String path, boolean isFile,
* @throws org.eclipse.jgit.errors.IncorrectObjectTypeException
* @throws org.eclipse.jgit.errors.CorruptObjectException
* @throws java.io.IOException
- * @since 4.9
+ * @since 6.1
*/
protected boolean processEntry(CanonicalTreeParser base,
CanonicalTreeParser ours, CanonicalTreeParser theirs,
DirCacheBuildIterator index, WorkingTreeIterator work,
- boolean ignoreConflicts, Attributes attributes)
+ boolean ignoreConflicts, Attributes[] attributes)
throws MissingObjectException, IncorrectObjectTypeException,
CorruptObjectException, IOException {
enterSubtree = true;
@@ -729,7 +741,7 @@ protected boolean processEntry(CanonicalTreeParser base,
// Base, ours, and theirs all contain a folder: don't delete
return true;
}
- addDeletion(tw.getPathString(), nonTree(modeO), attributes);
+ addDeletion(tw.getPathString(), nonTree(modeO), attributes[T_OURS]);
return true;
}
@@ -772,7 +784,7 @@ protected boolean processEntry(CanonicalTreeParser base,
if (nonTree(modeO) && nonTree(modeT)) {
// Check worktree before modifying files
boolean worktreeDirty = isWorktreeDirty(work, ourDce);
- if (!attributes.canBeContentMerged() && worktreeDirty) {
+ if (!attributes[T_OURS].canBeContentMerged() && worktreeDirty) {
return false;
}
@@ -791,7 +803,7 @@ protected boolean processEntry(CanonicalTreeParser base,
mergeResults.put(tw.getPathString(), result);
unmergedPaths.add(tw.getPathString());
return true;
- } else if (!attributes.canBeContentMerged()) {
+ } else if (!attributes[T_OURS].canBeContentMerged()) {
// File marked as binary
switch (getContentMergeStrategy()) {
case OURS:
@@ -842,13 +854,16 @@ protected boolean processEntry(CanonicalTreeParser base,
if (ignoreConflicts) {
result.setContainsConflicts(false);
}
- updateIndex(base, ours, theirs, result, attributes);
+ updateIndex(base, ours, theirs, result, attributes[T_OURS]);
String currentPath = tw.getPathString();
if (result.containsConflicts() && !ignoreConflicts) {
unmergedPaths.add(currentPath);
}
modifiedFiles.add(currentPath);
- addCheckoutMetadata(currentPath, attributes);
+ addCheckoutMetadata(cleanupMetadata, currentPath,
+ attributes[T_OURS]);
+ addCheckoutMetadata(checkoutMetadata, currentPath,
+ attributes[T_THEIRS]);
} else if (modeO != modeT) {
// OURS or THEIRS has been deleted
if (((modeO != 0 && !tw.idEqual(T_BASE, T_OURS)) || (modeT != 0 && !tw
@@ -881,7 +896,8 @@ protected boolean processEntry(CanonicalTreeParser base,
// markers). But also stage 0 of the index is filled
// with that content.
result.setContainsConflicts(false);
- updateIndex(base, ours, theirs, result, attributes);
+ updateIndex(base, ours, theirs, result,
+ attributes[T_OURS]);
} else {
add(tw.getRawPath(), base, DirCacheEntry.STAGE_1, EPOCH,
0);
@@ -896,11 +912,9 @@ protected boolean processEntry(CanonicalTreeParser base,
if (isWorktreeDirty(work, ourDce)) {
return false;
}
- if (nonTree(modeT)) {
- if (e != null) {
- addToCheckout(tw.getPathString(), e,
- attributes);
- }
+ if (nonTree(modeT) && e != null) {
+ addToCheckout(tw.getPathString(), e,
+ attributes);
}
}
@@ -945,14 +959,16 @@ private static MergeResult
@@ -556,25 +585,51 @@ public AttributesNodeProvider getAttributesNodeProvider() {
*/
@Override
public Attributes getAttributes() {
- if (attrs != null)
- return attrs;
+ return getAttributes(headIndex);
+ }
+ /**
+ * Retrieves the git attributes based on the given tree.
+ *
+ * @param index
+ * of the tree to use as base for the attributes
+ * @return the attributes
+ * @since 6.1
+ */
+ public Attributes getAttributes(int index) {
+ int attrIndex = index + 1;
+ Attributes result = attrs[attrIndex];
+ if (result != null) {
+ return result;
+ }
if (attributesNodeProvider == null) {
- // The work tree should have a AttributesNodeProvider to be able to
- // retrieve the info and global attributes node
throw new IllegalStateException(
"The tree walk should have one AttributesNodeProvider set in order to compute the git attributes."); //$NON-NLS-1$
}
try {
- // Lazy create the attributesHandler on the first access of
- // attributes. This requires the info, global and root
- // attributes nodes
- if (attributesHandler == null) {
- attributesHandler = new AttributesHandler(this);
+ AttributesHandler handler = attributesHandlers[attrIndex];
+ if (handler == null) {
+ if (index < 0) {
+ // Legacy behavior (headIndex not set, getAttributes() above
+ // called)
+ handler = new AttributesHandler(this, () -> {
+ return getTree(CanonicalTreeParser.class);
+ });
+ } else {
+ handler = new AttributesHandler(this, () -> {
+ AbstractTreeIterator tree = trees[index];
+ if (tree instanceof CanonicalTreeParser) {
+ return (CanonicalTreeParser) tree;
+ }
+ return null;
+ });
+ }
+ attributesHandlers[attrIndex] = handler;
}
- attrs = attributesHandler.getAttributes();
- return attrs;
+ result = handler.getAttributes();
+ attrs[attrIndex] = result;
+ return result;
} catch (IOException e) {
throw new JGitInternalException("Error while parsing attributes", //$NON-NLS-1$
e);
@@ -595,11 +650,34 @@ public Attributes getAttributes() {
*/
@Nullable
public EolStreamType getEolStreamType(OperationType opType) {
- if (attributesNodeProvider == null || config == null)
+ if (attributesNodeProvider == null || config == null) {
return null;
- return EolStreamTypeUtil.detectStreamType(
- opType != null ? opType : operationType,
- config.get(WorkingTreeOptions.KEY), getAttributes());
+ }
+ OperationType op = opType != null ? opType : operationType;
+ return EolStreamTypeUtil.detectStreamType(op,
+ config.get(WorkingTreeOptions.KEY), getAttributes());
+ }
+
+ /**
+ * Get the EOL stream type of the current entry for checking out using the
+ * config and {@link #getAttributes()}.
+ *
+ * @param tree
+ * index of the tree the check-out is to be from
+ * @return the EOL stream type of the current entry using the config and
+ * {@link #getAttributes()}. Note that this method may return null
+ * if the {@link org.eclipse.jgit.treewalk.TreeWalk} is not based on
+ * a working tree
+ * @since 6.1
+ */
+ @Nullable
+ public EolStreamType getCheckoutEolStreamType(int tree) {
+ if (attributesNodeProvider == null || config == null) {
+ return null;
+ }
+ Attributes attr = getAttributes(tree);
+ return EolStreamTypeUtil.detectStreamType(OperationType.CHECKOUT_OP,
+ config.get(WorkingTreeOptions.KEY), attr);
}
/**
@@ -607,7 +685,8 @@ public EolStreamType getEolStreamType(OperationType opType) {
*/
public void reset() {
attrs = null;
- attributesHandler = null;
+ attributesHandlers = null;
+ headIndex = -1;
trees = NO_TREES;
advance = false;
depth = 0;
@@ -651,7 +730,9 @@ public void reset(AnyObjectId id) throws MissingObjectException,
advance = false;
depth = 0;
- attrs = null;
+ attrs = new Attributes[2];
+ attributesHandlers = new AttributesHandler[2];
+ headIndex = -1;
}
/**
@@ -701,7 +782,14 @@ public void reset(AnyObjectId... ids) throws MissingObjectException,
trees = r;
advance = false;
depth = 0;
- attrs = null;
+ if (oldLen == newLen) {
+ Arrays.fill(attrs, null);
+ Arrays.fill(attributesHandlers, null);
+ } else {
+ attrs = new Attributes[newLen + 1];
+ attributesHandlers = new AttributesHandler[newLen + 1];
+ }
+ headIndex = -1;
}
/**
@@ -758,6 +846,16 @@ public int addTree(AbstractTreeIterator p) {
p.matchShift = 0;
trees = newTrees;
+ if (attrs == null) {
+ attrs = new Attributes[n + 2];
+ } else {
+ attrs = Arrays.copyOf(attrs, n + 2);
+ }
+ if (attributesHandlers == null) {
+ attributesHandlers = new AttributesHandler[n + 2];
+ } else {
+ attributesHandlers = Arrays.copyOf(attributesHandlers, n + 2);
+ }
return n;
}
@@ -800,7 +898,7 @@ public boolean next() throws MissingObjectException,
}
for (;;) {
- attrs = null;
+ Arrays.fill(attrs, null);
final AbstractTreeIterator t = min();
if (t.eof()) {
if (depth > 0) {
@@ -1255,7 +1353,7 @@ public boolean isPostChildren() {
*/
public void enterSubtree() throws MissingObjectException,
IncorrectObjectTypeException, CorruptObjectException, IOException {
- attrs = null;
+ Arrays.fill(attrs, null);
final AbstractTreeIterator ch = currentHead;
final AbstractTreeIterator[] tmp = new AbstractTreeIterator[trees.length];
for (int i = 0; i < trees.length; i++) {
@@ -1374,11 +1472,12 @@ public