Merge "WorkTreeUpdater: re-format and clean-up"

Han-Wen Nienhuys 2022-08-18 11:22:46 -04:00 committed by Gerrit Code Review @ Eclipse.org
commit d718127a7e
1 changed file with 257 additions and 213 deletions


@@ -25,7 +25,10 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import org.eclipse.jgit.annotations.NonNull;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.attributes.Attribute;
import org.eclipse.jgit.attributes.Attributes;
@@ -96,43 +99,48 @@ public static class Result {
private final Repository repo;
/**
* Set to true if this operation should work in-memory. The repo's dircache and
* workingtree are not touched by this method. Eventually needed files are
* created as temporary files and a new empty, in-memory dircache will be
* used instead the repo's one. Often used for bare repos where the repo
* Set to true if this operation should work in-memory. The repo's dircache
* and workingtree are not touched by this method. Eventually needed files
* are created as temporary files and a new empty, in-memory dircache will
* be used instead the repo's one. Often used for bare repos where the repo
* doesn't even have a workingtree and dircache.
*/
private final boolean inCore;
private final ObjectInserter inserter;
private final ObjectReader reader;
private DirCache dirCache;
private boolean implicitDirCache = false;
/**
* Builder to update the dir cache during this operation.
*/
private DirCacheBuilder builder = null;
private DirCacheBuilder builder;
/**
* The {@link WorkingTreeOptions} are needed to determine line endings for affected files.
* The {@link WorkingTreeOptions} are needed to determine line endings for
* affected files.
*/
private WorkingTreeOptions workingTreeOptions;
/**
* The size limit (bytes) which controls a file to be stored in {@code Heap} or {@code LocalFile}
* during the operation.
* The size limit (bytes) which controls a file to be stored in {@code Heap}
* or {@code LocalFile} during the operation.
*/
private int inCoreFileSizeLimit;
/**
* If the operation has nothing to do for a file but check it out at the end of the operation, it
* can be added here.
* If the operation has nothing to do for a file but check it out at the end
* of the operation, it can be added here.
*/
private final Map<String, DirCacheEntry> toBeCheckedOut = new HashMap<>();
/**
* Files in this list will be deleted from the local copy at the end of the operation.
* Files in this list will be deleted from the local copy at the end of the
* operation.
*/
private final TreeMap<String, File> toBeDeleted = new TreeMap<>();
@@ -147,46 +155,56 @@ public static class Result {
private Map<String, CheckoutMetadata> cleanupMetadata;
/**
* Whether the changes were successfully written
* Whether the changes were successfully written.
*/
private boolean indexChangesWritten = false;
private boolean indexChangesWritten;
/**
* @param repo the {@link org.eclipse.jgit.lib.Repository}.
* @param dirCache if set, use the provided dir cache. Otherwise, use the default repository one
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, use the default
* repository one
*/
private WorkTreeUpdater(
Repository repo,
DirCache dirCache) {
private WorkTreeUpdater(Repository repo, DirCache dirCache) {
this.repo = repo;
this.dirCache = dirCache;
this.inCore = false;
this.inserter = repo.newObjectInserter();
this.reader = inserter.newReader();
this.workingTreeOptions = repo.getConfig().get(WorkingTreeOptions.KEY);
Config config = repo.getConfig();
this.workingTreeOptions = config.get(WorkingTreeOptions.KEY);
this.inCoreFileSizeLimit = getInCoreFileSizeLimit(config);
this.checkoutMetadata = new HashMap<>();
this.cleanupMetadata = new HashMap<>();
this.inCoreFileSizeLimit = setInCoreFileSizeLimit(repo.getConfig());
}
/**
* @param repo the {@link org.eclipse.jgit.lib.Repository}.
* @param dirCache if set, use the provided dir cache. Otherwise, use the default repository one
* @return an IO handler.
* Creates a new {@link WorkTreeUpdater} for the given repository.
*
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, use the default
* repository one
* @return the {@link WorkTreeUpdater}.
*/
public static WorkTreeUpdater createWorkTreeUpdater(Repository repo, DirCache dirCache) {
public static WorkTreeUpdater createWorkTreeUpdater(Repository repo,
DirCache dirCache) {
return new WorkTreeUpdater(repo, dirCache);
}
/**
* @param repo the {@link org.eclipse.jgit.lib.Repository}.
* @param dirCache if set, use the provided dir cache. Otherwise, creates a new one
* @param oi to use for writing the modified objects with.
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, creates a new
* one
* @param oi
* to use for writing the modified objects with.
*/
private WorkTreeUpdater(
Repository repo,
DirCache dirCache,
private WorkTreeUpdater(Repository repo, DirCache dirCache,
ObjectInserter oi) {
this.repo = repo;
this.dirCache = dirCache;
@@ -195,18 +213,24 @@ private WorkTreeUpdater(
this.inCore = true;
this.reader = oi.newReader();
if (repo != null) {
this.inCoreFileSizeLimit = setInCoreFileSizeLimit(repo.getConfig());
this.inCoreFileSizeLimit = getInCoreFileSizeLimit(repo.getConfig());
}
}
/**
* @param repo the {@link org.eclipse.jgit.lib.Repository}.
* @param dirCache if set, use the provided dir cache. Otherwise, creates a new one
* @param oi to use for writing the modified objects with.
* @return an IO handler.
* Creates a new {@link WorkTreeUpdater} that works in memory only.
*
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, creates a new
* one
* @param oi
* to use for writing the modified objects with.
* @return the {@link WorkTreeUpdater}
*/
public static WorkTreeUpdater createInCoreWorkTreeUpdater(Repository repo, DirCache dirCache,
ObjectInserter oi) {
public static WorkTreeUpdater createInCoreWorkTreeUpdater(Repository repo,
DirCache dirCache, ObjectInserter oi) {
return new WorkTreeUpdater(repo, dirCache, oi);
}
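// Usage sketch, not from this commit (class visibility and the variable repo
// are assumptions): the in-core factory is the one to use for bare
// repositories, as the field comment above explains.
WorkTreeUpdater updater = repo.isBare()
        ? WorkTreeUpdater.createInCoreWorkTreeUpdater(repo, null,
                repo.newObjectInserter())
        : WorkTreeUpdater.createWorkTreeUpdater(repo, null);
// Passing null for dirCache falls back to the repository's dir cache, or to a
// fresh in-memory one in the in-core case, per the Javadoc above.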
@@ -219,17 +243,15 @@ public interface StreamSupplier {
* Loads the input stream.
*
* @return the loaded stream
* @throws IOException if any reading error occurs
* @throws IOException
* if any reading error occurs
*/
InputStream load() throws IOException;
}
/**
* We write the patch result to a {@link org.eclipse.jgit.util.TemporaryBuffer} and then use
* {@link DirCacheCheckout}.getContent() to run the result through the CR-LF and smudge filters.
* DirCacheCheckout needs an ObjectLoader, not a TemporaryBuffer, so this class bridges between
* the two, making any Stream provided by a {@link StreamSupplier} look like an ordinary git blob
* to DirCacheCheckout.
* We want to use DirCacheCheckout for its CR-LF and smudge filters, but DirCacheCheckout needs an
* ObjectLoader rather than InputStream. This class provides a bridge between the two.
*/
public static class StreamLoader extends ObjectLoader {
@@ -264,24 +286,28 @@ public byte[] getCachedBytes() throws LargeObjectException {
@Override
public ObjectStream openStream() throws IOException {
return new ObjectStream.Filter(getType(), getSize(), new BufferedInputStream(data.load()));
return new ObjectStream.Filter(getType(), getSize(),
new BufferedInputStream(data.load()));
}
}
/**
* Creates stream loader for the given supplier.
*
* @param supplier to wrap
* @param length of the supplied content
* @param supplier
* to wrap
* @param length
* of the supplied content
* @return the result stream loader
*/
public static StreamLoader createStreamLoader(StreamSupplier supplier, long length) {
public static StreamLoader createStreamLoader(StreamSupplier supplier,
long length) {
return new StreamLoader(supplier, length);
}
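// Illustrative only (not part of the diff): wrapping in-memory bytes in a
// StreamLoader so DirCacheCheckout can treat them like an ordinary blob.
byte[] patched = "new content\n"
        .getBytes(java.nio.charset.StandardCharsets.UTF_8);
StreamLoader loader = WorkTreeUpdater.createStreamLoader(
        () -> new java.io.ByteArrayInputStream(patched), patched.length);
// The lambda is the StreamSupplier; it is only invoked when the loader's
// stream is actually opened.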
private static int setInCoreFileSizeLimit(Config config) {
return config.getInt(
ConfigConstants.CONFIG_MERGE_SECTION, ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 10 << 20);
private static int getInCoreFileSizeLimit(Config config) {
return config.getInt(ConfigConstants.CONFIG_MERGE_SECTION,
ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 10 << 20);
}
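// If I read the constants right, this limit corresponds to the
// merge.inCoreLimit option, defaulting to 10 << 20 bytes (10 MiB). A hedged
// sketch of lowering it:
StoredConfig config = repo.getConfig();
config.setInt(ConfigConstants.CONFIG_MERGE_SECTION, null,
        ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 1 << 20); // 1 MiB
config.save(); // larger merge results then spill to temporary files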
/**
@@ -297,7 +323,8 @@ public int getInCoreFileSizeLimit() {
* Gets dir cache for the repo. Locked if not inCore.
*
* @return the result dir cache
* @throws IOException is case the dir cache cannot be read
* @throws IOException
* is case the dir cache cannot be read
*/
public DirCache getLockedDirCache() throws IOException {
if (dirCache == null) {
@@ -305,7 +332,7 @@ public DirCache getLockedDirCache() throws IOException {
if (inCore) {
dirCache = DirCache.newInCore();
} else {
dirCache = nonNullNonBareRepo().lockDirCache();
dirCache = nonNullRepo().lockDirCache();
}
}
if (builder == null) {
@@ -315,21 +342,25 @@ public DirCache getLockedDirCache() throws IOException {
}
/**
* Creates build iterator for the handler's builder.
* Creates a {@link DirCacheBuildIterator} for the builder of this
* {@link WorkTreeUpdater}.
*
* @return the iterator
* @return the {@link DirCacheBuildIterator}
*/
public DirCacheBuildIterator createDirCacheBuildIterator() {
return new DirCacheBuildIterator(builder);
}
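// Sketch of how a caller might combine the two methods above (baseTreeId and
// the per-path handling are placeholders): lock the index first so the
// builder exists, then walk it next to another tree.
updater.getLockedDirCache(); // locks the index and creates the builder
try (TreeWalk walk = new TreeWalk(repo)) {
    walk.addTree(baseTreeId); // placeholder: some tree to compare against
    walk.addTree(updater.createDirCacheBuildIterator());
    walk.setRecursive(true);
    while (walk.next()) {
        // decide per path what to keep, check out or delete
    }
}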
/**
* Writes the changes to the WorkTree (but not the index).
* Writes the changes to the working tree (but not to the index).
*
* @param shouldCheckoutTheirs before committing the changes
* @throws IOException if any of the writes fail
* @param shouldCheckoutTheirs
* before committing the changes
* @throws IOException
* if any of the writes fail
*/
public void writeWorkTreeChanges(boolean shouldCheckoutTheirs) throws IOException {
public void writeWorkTreeChanges(boolean shouldCheckoutTheirs)
throws IOException {
handleDeletedFiles();
if (inCore) {
@@ -356,8 +387,9 @@ public void writeWorkTreeChanges(boolean shouldCheckoutTheirs) throws IOExceptio
/**
* Writes the changes to the index.
*
* @return the Result of the operation.
* @throws IOException if any of the writes fail
* @return the {@link Result} of the operation.
* @throws IOException
* if any of the writes fail
*/
public Result writeIndexChanges() throws IOException {
result.treeId = getLockedDirCache().writeTree(inserter);
@@ -366,32 +398,42 @@ public Result writeIndexChanges() throws IOException {
}
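// Sketch of the typical end of an operation (updater is the illustrative
// instance from the earlier sketches):
updater.writeWorkTreeChanges(true); // true: also check out remembered entries
WorkTreeUpdater.Result result = updater.writeIndexChanges();
// Per the fields used above, the Result carries the written tree id and the
// list of modified files.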
/**
* Adds a {@link DirCacheEntry} for direct checkout and remembers its {@link CheckoutMetadata}.
* Adds a {@link DirCacheEntry} for direct checkout and remembers its
* {@link CheckoutMetadata}.
*
* @param path of the entry
* @param entry to add
* @param cleanupStreamType to use for the cleanup metadata
* @param cleanupSmudgeCommand to use for the cleanup metadata
* @param checkoutStreamType to use for the checkout metadata
* @param checkoutSmudgeCommand to use for the checkout metadata
* @since 6.1
* @param path
* of the entry
* @param entry
* to add
* @param cleanupStreamType
* to use for the cleanup metadata
* @param cleanupSmudgeCommand
* to use for the cleanup metadata
* @param checkoutStreamType
* to use for the checkout metadata
* @param checkoutSmudgeCommand
* to use for the checkout metadata
*/
public void addToCheckout(
String path, DirCacheEntry entry, EolStreamType cleanupStreamType,
String cleanupSmudgeCommand, EolStreamType checkoutStreamType, String checkoutSmudgeCommand) {
public void addToCheckout(String path, DirCacheEntry entry,
EolStreamType cleanupStreamType, String cleanupSmudgeCommand,
EolStreamType checkoutStreamType, String checkoutSmudgeCommand) {
if (entry != null) {
// In some cases, we just want to add the metadata.
toBeCheckedOut.put(path, entry);
}
addCheckoutMetadata(cleanupMetadata, path, cleanupStreamType, cleanupSmudgeCommand);
addCheckoutMetadata(checkoutMetadata, path, checkoutStreamType, checkoutSmudgeCommand);
addCheckoutMetadata(cleanupMetadata, path, cleanupStreamType,
cleanupSmudgeCommand);
addCheckoutMetadata(checkoutMetadata, path, checkoutStreamType,
checkoutSmudgeCommand);
}
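// Illustrative call (attributes, mergedEntry, path and smudgeCommand are
// placeholders): register a merged entry for checkout together with its EOL
// handling for both cleanup and checkout.
EolStreamType checkoutType = updater.detectCheckoutStreamType(attributes);
updater.addToCheckout(path, mergedEntry,
        EolStreamType.DIRECT, null,    // cleanup: raw content, no smudge
        checkoutType, smudgeCommand);  // checkout: honour CR-LF / smudge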
/**
* Get a map which maps the paths of files which have to be checked out because the operation
* created new fully-merged content for this file into the index.
*
* <p>This means: the operation wrote a new stage 0 entry for this path.</p>
* Gets a map which maps the paths of files which have to be checked out
* because the operation created new fully-merged content for this file into
* the index.
* <p>
* This means: the operation wrote a new stage 0 entry for this path.
* </p>
*
* @return the map
*/
@@ -400,37 +442,43 @@ public Map<String, DirCacheEntry> getToBeCheckedOut() {
}
/**
* Deletes the given file
* Remembers the given file to be deleted.
* <p>
* Note the actual deletion is only done in {@link #writeWorkTreeChanges}
* Note the actual deletion is only done in {@link #writeWorkTreeChanges}.
*
* @param path of the file to be deleted
* @param file to be deleted
* @param streamType to use for cleanup metadata
* @param smudgeCommand to use for cleanup metadata
* @throws IOException if the file cannot be deleted
* @param path
* of the file to be deleted
* @param file
* to be deleted
* @param streamType
* to use for cleanup metadata
* @param smudgeCommand
* to use for cleanup metadata
*/
public void deleteFile(String path, File file, EolStreamType streamType, String smudgeCommand)
throws IOException {
public void deleteFile(String path, File file, EolStreamType streamType,
String smudgeCommand) {
toBeDeleted.put(path, file);
if (file != null && file.isFile()) {
addCheckoutMetadata(cleanupMetadata, path, streamType, smudgeCommand);
addCheckoutMetadata(cleanupMetadata, path, streamType,
smudgeCommand);
}
}
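// Illustrative call (path is a placeholder): schedule a work-tree file for
// removal; as noted above, the actual delete only happens in
// writeWorkTreeChanges().
updater.deleteFile(path,
        new File(repo.getWorkTree(), path),
        EolStreamType.DIRECT, null);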
/**
* Remembers the {@link CheckoutMetadata} for the given path; it may be needed in {@link
* #checkout()} or in {@link #revertModifiedFiles()}.
* Remembers the {@link CheckoutMetadata} for the given path; it may be
* needed in {@link #checkout()} or in {@link #revertModifiedFiles()}.
*
* @param map to add the metadata to
* @param path of the current node
* @param streamType to use for the metadata
* @param smudgeCommand to use for the metadata
* @since 6.1
* @param map
* to add the metadata to
* @param path
* of the current node
* @param streamType
* to use for the metadata
* @param smudgeCommand
* to use for the metadata
*/
private void addCheckoutMetadata(
Map<String, CheckoutMetadata> map, String path, EolStreamType streamType,
String smudgeCommand) {
private void addCheckoutMetadata(Map<String, CheckoutMetadata> map,
String path, EolStreamType streamType, String smudgeCommand) {
if (inCore || map == null) {
return;
}
@@ -439,18 +487,20 @@ private void addCheckoutMetadata(
/**
* Detects if CRLF conversion has been configured.
* <p></p>
* <p>
* </p>
* See {@link EolStreamTypeUtil#detectStreamType} for more info.
*
* @param attributes of the file for which the type is to be detected
* @param attributes
* of the file for which the type is to be detected
* @return the detected type
*/
public EolStreamType detectCheckoutStreamType(Attributes attributes) {
if (inCore) {
return null;
}
return EolStreamTypeUtil.detectStreamType(
OperationType.CHECKOUT_OP, workingTreeOptions, attributes);
return EolStreamTypeUtil.detectStreamType(OperationType.CHECKOUT_OP,
workingTreeOptions, attributes);
}
private void handleDeletedFiles() {
@@ -470,7 +520,8 @@ private void handleDeletedFiles() {
/**
* Marks the given path as modified in the operation.
*
* @param path to mark as modified
* @param path
* to mark as modified
*/
public void markAsModified(String path) {
result.modifiedFiles.add(path);
@@ -486,17 +537,15 @@ public List<String> getModifiedFiles() {
}
private void checkout() throws NoWorkTreeException, IOException {
// Iterate in reverse so that "folder/file" is deleted before
// "folder". Otherwise, this could result in a failing path because
// of a non-empty directory, for which delete() would fail.
for (Map.Entry<String, DirCacheEntry> entry : toBeCheckedOut.entrySet()) {
for (Map.Entry<String, DirCacheEntry> entry : toBeCheckedOut
.entrySet()) {
DirCacheEntry dirCacheEntry = entry.getValue();
if (dirCacheEntry.getFileMode() == FileMode.GITLINK) {
new File(nonNullNonBareRepo().getWorkTree(), entry.getKey()).mkdirs();
new File(nonNullRepo().getWorkTree(), entry.getKey())
.mkdirs();
} else {
DirCacheCheckout.checkoutEntry(
repo, dirCacheEntry, reader, false,
checkoutMetadata.get(entry.getKey()),
DirCacheCheckout.checkoutEntry(repo, dirCacheEntry, reader,
false, checkoutMetadata.get(entry.getKey()),
workingTreeOptions);
result.modifiedFiles.add(entry.getKey());
}
@@ -504,11 +553,13 @@ private void checkout() throws NoWorkTreeException, IOException {
}
/**
* Reverts any uncommitted changes in the worktree. We know that for all modified files the
* old content was in the old index and the index contained only stage 0. In case if inCore
* operation just clear the history of modified files.
* Reverts any uncommitted changes in the worktree. We know that for all
* modified files the old content was in the old index and the index
* contained only stage 0. In case of inCore operation just clear the
* history of modified files.
*
* @throws java.io.IOException in case the cleaning up failed
* @throws IOException
* in case the cleaning up failed
*/
public void revertModifiedFiles() throws IOException {
if (inCore) {
@@ -521,9 +572,8 @@ public void revertModifiedFiles() throws IOException {
for (String path : result.modifiedFiles) {
DirCacheEntry entry = dirCache.getEntry(path);
if (entry != null) {
DirCacheCheckout.checkoutEntry(
repo, entry, reader, false, cleanupMetadata.get(path),
workingTreeOptions);
DirCacheCheckout.checkoutEntry(repo, entry, reader, false,
cleanupMetadata.get(path), workingTreeOptions);
}
}
}
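// A hedged error-handling sketch built on the method above: if the operation
// fails after files were already touched, restore them from the old index.
try {
    updater.writeWorkTreeChanges(true);
    updater.writeIndexChanges();
} catch (IOException e) {
    updater.revertModifiedFiles(); // falls back to the stage-0 index content
    throw e;
}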
@@ -538,22 +588,24 @@ public void close() throws IOException {
/**
* Updates the file in the checkout with the given content.
*
* @param resultStreamLoader with the content to be updated
* @param streamType for parsing the content
* @param smudgeCommand for formatting the content
* @param path of the file to be updated
* @param file to be updated
* @param safeWrite whether the content should be written to a buffer first
* @throws IOException if the {@link CheckoutMetadata} cannot be determined
* @param resultStreamLoader
* with the content to be updated
* @param streamType
* for parsing the content
* @param smudgeCommand
* for formatting the content
* @param path
* of the file to be updated
* @param file
* to be updated
* @param safeWrite
* whether the content should be written to a buffer first
* @throws IOException
* if the file cannot be updated
*/
public void updateFileWithContent(
StreamLoader resultStreamLoader,
EolStreamType streamType,
String smudgeCommand,
String path,
File file,
boolean safeWrite)
throws IOException {
public void updateFileWithContent(StreamLoader resultStreamLoader,
EolStreamType streamType, String smudgeCommand, String path,
File file, boolean safeWrite) throws IOException {
if (inCore) {
return;
}
@@ -584,74 +636,85 @@ public void updateFileWithContent(
}
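// Illustrative call (loader, checkoutType, smudgeCommand and path as in the
// earlier sketches): write merged content through the CR-LF and smudge
// filters into the work-tree file, buffering it first.
updater.updateFileWithContent(loader, checkoutType, smudgeCommand, path,
        new File(repo.getWorkTree(), path), true /* safeWrite */);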
/**
* Creates a path with the given content, and adds it to the specified stage to the index builder
* Creates a path with the given content, and adds it to the specified stage
* to the index builder.
*
* @param inputStream with the content to be updated
* @param path of the file to be updated
* @param fileMode of the modified file
* @param entryStage of the new entry
* @param lastModified instant of the modified file
* @param len of the content
* @param lfsAttribute for checking for LFS enablement
* @param inputStream
* with the content to be updated
* @param path
* of the file to be updated
* @param fileMode
* of the modified file
* @param entryStage
* of the new entry
* @param lastModified
* instant of the modified file
* @param len
* of the content
* @param lfsAttribute
* for checking for LFS enablement
* @return the entry which was added to the index
* @throws IOException if inserting the content fails
* @throws IOException
* if inserting the content fails
*/
public DirCacheEntry insertToIndex(
InputStream inputStream,
byte[] path,
FileMode fileMode,
int entryStage,
Instant lastModified,
int len,
public DirCacheEntry insertToIndex(InputStream inputStream, byte[] path,
FileMode fileMode, int entryStage, Instant lastModified, int len,
Attribute lfsAttribute) throws IOException {
StreamLoader contentLoader = createStreamLoader(() -> inputStream, len);
return insertToIndex(contentLoader, path, fileMode, entryStage, lastModified, len,
lfsAttribute);
return insertToIndex(contentLoader, path, fileMode, entryStage,
lastModified, len, lfsAttribute);
}
/**
* Creates a path with the given content, and adds it to the specified stage to the index builder
* Creates a path with the given content, and adds it to the specified stage
* to the index builder.
*
* @param resultStreamLoader with the content to be updated
* @param path of the file to be updated
* @param fileMode of the modified file
* @param entryStage of the new entry
* @param lastModified instant of the modified file
* @param len of the content
* @param lfsAttribute for checking for LFS enablement
* @param resultStreamLoader
* with the content to be updated
* @param path
* of the file to be updated
* @param fileMode
* of the modified file
* @param entryStage
* of the new entry
* @param lastModified
* instant of the modified file
* @param len
* of the content
* @param lfsAttribute
* for checking for LFS enablement
* @return the entry which was added to the index
* @throws IOException if inserting the content fails
* @throws IOException
* if inserting the content fails
*/
public DirCacheEntry insertToIndex(
StreamLoader resultStreamLoader,
byte[] path,
FileMode fileMode,
int entryStage,
Instant lastModified,
int len,
Attribute lfsAttribute) throws IOException {
return addExistingToIndex(insertResult(resultStreamLoader, lfsAttribute),
path, fileMode, entryStage, lastModified, len);
public DirCacheEntry insertToIndex(StreamLoader resultStreamLoader,
byte[] path, FileMode fileMode, int entryStage,
Instant lastModified, int len, Attribute lfsAttribute)
throws IOException {
return addExistingToIndex(
insertResult(resultStreamLoader, lfsAttribute), path, fileMode,
entryStage, lastModified, len);
}
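// Illustrative call: add the merged content as a stage-0 index entry.
// lfsAttribute is a placeholder for the file's lfs attribute; my assumption
// is that it may be null when LFS is not involved.
DirCacheEntry entry = updater.insertToIndex(loader,
        path.getBytes(java.nio.charset.StandardCharsets.UTF_8),
        FileMode.REGULAR_FILE, DirCacheEntry.STAGE_0,
        java.time.Instant.now(), (int) loader.getSize(), lfsAttribute);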
/**
* Adds a path with the specified stage to the index builder
* Adds a path with the specified stage to the index builder.
*
* @param objectId of the existing object to add
* @param path of the modified file
* @param fileMode of the modified file
* @param entryStage of the new entry
* @param lastModified instant of the modified file
* @param len of the modified file content
* @param objectId
* of the existing object to add
* @param path
* of the modified file
* @param fileMode
* of the modified file
* @param entryStage
* of the new entry
* @param lastModified
* instant of the modified file
* @param len
* of the modified file content
* @return the entry which was added to the index
*/
public DirCacheEntry addExistingToIndex(
ObjectId objectId,
byte[] path,
FileMode fileMode,
int entryStage,
Instant lastModified,
int len) {
public DirCacheEntry addExistingToIndex(ObjectId objectId, byte[] path,
FileMode fileMode, int entryStage, Instant lastModified, int len) {
DirCacheEntry dce = new DirCacheEntry(path, entryStage);
dce.setFileMode(fileMode);
if (lastModified != null) {
@@ -664,44 +727,25 @@ public DirCacheEntry addExistingToIndex(
return dce;
}
private ObjectId insertResult(StreamLoader resultStreamLoader, Attribute lfsAttribute)
throws IOException {
try (LfsInputStream is =
org.eclipse.jgit.util.LfsFactory.getInstance()
.applyCleanFilter(
repo,
resultStreamLoader.data.load(),
resultStreamLoader.size,
lfsAttribute)) {
private ObjectId insertResult(StreamLoader resultStreamLoader,
Attribute lfsAttribute) throws IOException {
try (LfsInputStream is = LfsFactory.getInstance().applyCleanFilter(repo,
resultStreamLoader.data.load(), resultStreamLoader.size,
lfsAttribute)) {
return inserter.insert(OBJ_BLOB, is.getLength(), is);
}
}
/**
* Gets non-null repository instance
* Gets the non-null repository instance of this {@link WorkTreeUpdater}.
*
* @return non-null repository instance
* @throws java.lang.NullPointerException if the handler was constructed without a repository.
* @throws NullPointerException
* if the handler was constructed without a repository.
*/
@NonNull
private Repository nonNullRepo() throws NullPointerException {
if (repo == null) {
throw new NullPointerException(JGitText.get().repositoryIsRequired);
}
return repo;
}
/**
* Gets non-null and non-bare repository instance
*
* @return non-null and non-bare repository instance
* @throws java.lang.NullPointerException if the handler was constructed without a repository.
* @throws NoWorkTreeException if the handler was constructed with a bare repository
*/
private Repository nonNullNonBareRepo() throws NullPointerException, NoWorkTreeException {
if (nonNullRepo().isBare()) {
throw new NoWorkTreeException();
}
return repo;
return Objects.requireNonNull(repo,
() -> JGitText.get().repositoryIsRequired);
}
}