Merge branch 'master' into stable-6.3

* master:
  Move WorkTreeUpdater to merge package
  WorkTreeUpdater: use DirCacheCheckout#StreamSupplier
  DirCacheCheckout#getContent: also take InputStream supplier
  WorkTreeUpdater: remove safeWrite option

Change-Id: I8be570dbc4ad0d0b46046b85cbda24c3adcba170
Matthias Sohn 2022-09-06 15:58:47 +02:00
commit 370d1170e8
4 changed files with 110 additions and 186 deletions
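
The practical effect for callers: content that previously had to be wrapped in a WorkTreeUpdater.StreamLoader (an ObjectLoader bridge carrying an explicit length) can now be passed to DirCacheCheckout#getContent as a plain InputStream supplier. A minimal before/after sketch, lifted from the apply call sites in the diff below:

// Before: bridge supplier and length through an ObjectLoader subclass.
DirCacheCheckout.getContent(repository, path, checkOut,
		WorkTreeUpdater.createStreamLoader(buffer::openInputStream,
				buffer.length()),
		null, output);

// After: pass the supplier directly; no up-front length is required.
DirCacheCheckout.getContent(repository, path, checkOut,
		buffer::openInputStream, null, output);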

View File

@@ -58,7 +58,6 @@
import org.eclipse.jgit.util.FileUtils;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
-import org.eclipse.jgit.util.WorkTreeUpdater;
import org.eclipse.jgit.util.StringUtils;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
@@ -398,7 +397,7 @@ private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
}
private void applyBinary(Repository repository, String path, File f,
-FileHeader fh, WorkTreeUpdater.StreamSupplier loader, ObjectId id,
+FileHeader fh, DirCacheCheckout.StreamSupplier loader, ObjectId id,
CheckoutMetadata checkOut)
throws PatchApplyException, IOException {
if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
@@ -430,9 +429,7 @@ private void applyBinary(Repository repository, String path, File f,
hunk.getBuffer(), start,
length))))) {
DirCacheCheckout.getContent(repository, path, checkOut,
-WorkTreeUpdater.createStreamLoader(() -> inflated,
-hunk.getSize()),
-null, out);
+() -> inflated, null, out);
if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) {
throw new PatchApplyException(MessageFormat.format(
JGitText.get().applyBinaryResultOidWrong,
@@ -463,8 +460,7 @@ private void applyBinary(Repository repository, String path, File f,
SHA1InputStream hashed = new SHA1InputStream(hash,
input)) {
DirCacheCheckout.getContent(repository, path, checkOut,
-WorkTreeUpdater.createStreamLoader(() -> hashed, finalSize),
-null, out);
+() -> hashed, null, out);
if (!fh.getNewId().toObjectId()
.equals(hash.toObjectId())) {
throw new PatchApplyException(MessageFormat.format(
@@ -632,9 +628,7 @@ && canApplyAt(hunkLines, newLines, 0)) {
}
try (OutputStream output = new FileOutputStream(f)) {
DirCacheCheckout.getContent(repository, path, checkOut,
-WorkTreeUpdater.createStreamLoader(buffer::openInputStream,
-buffer.length()),
-null, output);
+buffer::openInputStream, null, output);
}
} finally {
buffer.destroy();

View File

@@ -21,6 +21,7 @@
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.StandardCopyOption;
import java.text.MessageFormat;
@@ -1605,6 +1606,60 @@ public static void getContent(Repository repo, String path,
CheckoutMetadata checkoutMetadata, ObjectLoader ol,
WorkingTreeOptions opt, OutputStream os)
throws IOException {
+getContent(repo, path, checkoutMetadata, ol::openStream, opt, os);
+}
+/**
+* Something that can supply an {@link InputStream}.
+*
+* @since 6.3
+*/
+public interface StreamSupplier {
+/**
+* Loads the input stream.
+*
+* @return the loaded stream
+* @throws IOException
+* if any reading error occurs
+*/
+InputStream load() throws IOException;
+}
+/**
+* Return filtered content for blob contents. EOL handling and smudge-filter
+* handling are applied in the same way as it would be done during a
+* checkout.
+*
+* @param repo
+* the repository
+* @param path
+* the path used to determine the correct filters for the object
+* @param checkoutMetadata
+* containing
+* <ul>
+* <li>smudgeFilterCommand to be run for smudging the object</li>
+* <li>eolStreamType used for stream conversion (can be
+* null)</li>
+* </ul>
+* @param inputStream
+* A supplier for the raw content of the object. Each call should
+* yield a fresh stream of the same object.
+* @param opt
+* the working tree options where only 'core.autocrlf' is used
+* for EOL handling if 'checkoutMetadata.eolStreamType' is not
+* valid
+* @param os
+* the output stream the filtered content is written to. The
+* caller is responsible to close the stream.
+* @throws IOException
+* @since 6.3
+*/
+public static void getContent(Repository repo, String path,
+CheckoutMetadata checkoutMetadata, StreamSupplier inputStream,
+WorkingTreeOptions opt, OutputStream os)
+throws IOException {
EolStreamType nonNullEolStreamType;
if (checkoutMetadata.eolStreamType != null) {
nonNullEolStreamType = checkoutMetadata.eolStreamType;
@@ -1618,21 +1673,23 @@ public static void getContent(Repository repo, String path,
if (checkoutMetadata.smudgeFilterCommand != null) {
if (FilterCommandRegistry
.isRegistered(checkoutMetadata.smudgeFilterCommand)) {
-runBuiltinFilterCommand(repo, checkoutMetadata, ol,
+runBuiltinFilterCommand(repo, checkoutMetadata, inputStream,
channel);
} else {
-runExternalFilterCommand(repo, path, checkoutMetadata, ol,
+runExternalFilterCommand(repo, path, checkoutMetadata, inputStream,
channel);
}
} else {
-ol.copyTo(channel);
+try (InputStream in = inputStream.load()) {
+in.transferTo(channel);
+}
}
}
}
// Run an external filter command
private static void runExternalFilterCommand(Repository repo, String path,
-CheckoutMetadata checkoutMetadata, ObjectLoader ol,
+CheckoutMetadata checkoutMetadata, StreamSupplier inputStream,
OutputStream channel) throws IOException {
FS fs = repo.getFS();
ProcessBuilder filterProcessBuilder = fs.runInShell(
@@ -1644,7 +1701,9 @@ private static void runExternalFilterCommand(Repository repo, String path,
int rc;
try {
// TODO: wire correctly with AUTOCRLF
-result = fs.execute(filterProcessBuilder, ol.openStream());
+try (InputStream in = inputStream.load()) {
+result = fs.execute(filterProcessBuilder, in);
+}
rc = result.getRc();
if (rc == 0) {
result.getStdout().writeTo(channel,
@@ -1665,31 +1724,35 @@ private static void runExternalFilterCommand(Repository repo, String path,
// Run a builtin filter command
private static void runBuiltinFilterCommand(Repository repo,
-CheckoutMetadata checkoutMetadata, ObjectLoader ol,
+CheckoutMetadata checkoutMetadata, StreamSupplier inputStream,
OutputStream channel) throws MissingObjectException, IOException {
boolean isMandatory = repo.getConfig().getBoolean(
ConfigConstants.CONFIG_FILTER_SECTION,
ConfigConstants.CONFIG_SECTION_LFS,
ConfigConstants.CONFIG_KEY_REQUIRED, false);
FilterCommand command = null;
-try {
-command = FilterCommandRegistry.createFilterCommand(
-checkoutMetadata.smudgeFilterCommand, repo, ol.openStream(),
-channel);
-} catch (IOException e) {
-LOG.error(JGitText.get().failedToDetermineFilterDefinition, e);
-if (!isMandatory) {
-// In case an IOException occurred during creating of the
-// command then proceed as if there would not have been a
-// builtin filter (only if the filter is not mandatory).
-ol.copyTo(channel);
-} else {
-throw e;
+try (InputStream in = inputStream.load()) {
+try {
+command = FilterCommandRegistry.createFilterCommand(
+checkoutMetadata.smudgeFilterCommand, repo, in,
+channel);
+} catch (IOException e) {
+LOG.error(JGitText.get().failedToDetermineFilterDefinition, e);
+if (!isMandatory) {
+// In case an IOException occurred during creating of the
+// command then proceed as if there would not have been a
+// builtin filter (only if the filter is not mandatory).
+try (InputStream again = inputStream.load()) {
+again.transferTo(channel);
+}
+} else {
+throw e;
+}
+}
+}
-if (command != null) {
-while (command.run() != -1) {
-// loop as long as command.run() tells there is work to do
+if (command != null) {
+while (command.run() != -1) {
+// loop as long as command.run() tells there is work to do
}
}
}
}
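
A note on the StreamSupplier contract: getContent may call load() more than once (the builtin-filter fallback above re-reads the content when filter creation fails), so each call must return a fresh stream over the same bytes. A hypothetical compliant caller backed by a TemporaryBuffer, whose openInputStream() returns a new stream per call (repo, path, metadata, options, target, and contentBytes are assumed context):

TemporaryBuffer buf = new TemporaryBuffer.LocalFile(null);
try {
	buf.write(contentBytes); // fill the buffer with the raw content
	buf.close(); // flush before reading
	try (OutputStream os = new FileOutputStream(target)) {
		DirCacheCheckout.getContent(repo, path, metadata,
				buf::openInputStream, options, os);
	}
} finally {
	buf.destroy(); // release the backing file, if any
}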

View File

@@ -60,8 +60,6 @@
import org.eclipse.jgit.treewalk.filter.TreeFilter;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.LfsFactory;
-import org.eclipse.jgit.util.WorkTreeUpdater;
-import org.eclipse.jgit.util.WorkTreeUpdater.StreamLoader;
import org.eclipse.jgit.util.TemporaryBuffer;
/**
@@ -912,10 +910,8 @@ private File writeMergedFile(TemporaryBuffer rawMerged,
if (!fs.exists(parentFolder)) {
parentFolder.mkdirs();
}
-StreamLoader contentLoader = WorkTreeUpdater.createStreamLoader(rawMerged::openInputStream,
-rawMerged.length());
-workTreeUpdater.updateFileWithContent(contentLoader,
-eol, tw.getSmudgeCommand(attributes), of.getPath(), of, false);
+workTreeUpdater.updateFileWithContent(rawMerged::openInputStream,
+eol, tw.getSmudgeCommand(attributes), of.getPath(), of);
return of;
}
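
Because StreamSupplier has a single abstract method, InputStream load(), the call above satisfies it with a bare method reference; rawMerged::openInputStream works since TemporaryBuffer#openInputStream produces a fresh stream on every call. Equivalent illustrative forms (someFile is a placeholder):

DirCacheCheckout.StreamSupplier fromBuffer = rawMerged::openInputStream;
DirCacheCheckout.StreamSupplier fromFile = () -> new FileInputStream(someFile);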

View File

@@ -7,19 +7,16 @@
*
* SPDX-License-Identifier: BSD-3-Clause
*/
-package org.eclipse.jgit.util;
+package org.eclipse.jgit.merge;
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
-import java.io.BufferedInputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.nio.file.Files;
-import java.nio.file.StandardCopyOption;
import java.time.Instant;
import java.util.HashMap;
import java.util.LinkedList;
@@ -36,25 +33,23 @@
import org.eclipse.jgit.dircache.DirCacheBuildIterator;
import org.eclipse.jgit.dircache.DirCacheBuilder;
import org.eclipse.jgit.dircache.DirCacheCheckout;
+import org.eclipse.jgit.dircache.DirCacheCheckout.StreamSupplier;
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.errors.IndexWriteException;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.NoWorkTreeException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
-import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
-import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
-import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
import org.eclipse.jgit.treewalk.WorkingTreeOptions;
import org.eclipse.jgit.util.LfsFactory;
import org.eclipse.jgit.util.LfsFactory.LfsInputStream;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
@@ -64,10 +59,8 @@
* You should use a single instance for all of your file changes. In case of an
* error, make sure your instance is released, and initiate a new one if
* necessary.
-*
-* @since 6.3
*/
-public class WorkTreeUpdater implements Closeable {
+class WorkTreeUpdater implements Closeable {
/**
* The result of writing the index changes.
@@ -247,77 +240,6 @@ public static WorkTreeUpdater createInCoreWorkTreeUpdater(Repository repo,
return new WorkTreeUpdater(repo, dirCache, oi);
}
-/**
-* Something that can supply an {@link InputStream}.
-*/
-public interface StreamSupplier {
-/**
-* Loads the input stream.
-*
-* @return the loaded stream
-* @throws IOException
-* if any reading error occurs
-*/
-InputStream load() throws IOException;
-}
-/**
-* We want to use DirCacheCheckout for its CR-LF and smudge filters, but DirCacheCheckout needs an
-* ObjectLoader rather than InputStream. This class provides a bridge between the two.
-*/
-public static class StreamLoader extends ObjectLoader {
-private final StreamSupplier data;
-private final long size;
-private StreamLoader(StreamSupplier data, long length) {
-this.data = data;
-this.size = length;
-}
-@Override
-public int getType() {
-return Constants.OBJ_BLOB;
-}
-@Override
-public long getSize() {
-return size;
-}
-@Override
-public boolean isLarge() {
-return true;
-}
-@Override
-public byte[] getCachedBytes() throws LargeObjectException {
-throw new LargeObjectException();
-}
-@Override
-public ObjectStream openStream() throws IOException {
-return new ObjectStream.Filter(getType(), getSize(),
-new BufferedInputStream(data.load()));
-}
-}
-/**
-* Creates stream loader for the given supplier.
-*
-* @param supplier
-* to wrap
-* @param length
-* of the supplied content
-* @return the result stream loader
-*/
-public static StreamLoader createStreamLoader(StreamSupplier supplier,
-long length) {
-return new StreamLoader(supplier, length);
-}
private static int getInCoreFileSizeLimit(Config config) {
return config.getInt(ConfigConstants.CONFIG_MERGE_SECTION,
ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 10 << 20);
@@ -601,8 +523,8 @@ public void close() throws IOException {
/**
* Updates the file in the checkout with the given content.
*
-* @param resultStreamLoader
-* with the content to be updated
+* @param inputStream
+* the content to be updated
* @param streamType
* for parsing the content
* @param smudgeCommand
@@ -611,40 +533,21 @@ public void close() throws IOException {
* of the file to be updated
* @param file
* to be updated
-* @param safeWrite
-* whether the content should be written to a buffer first
* @throws IOException
* if the file cannot be updated
*/
-public void updateFileWithContent(StreamLoader resultStreamLoader,
+public void updateFileWithContent(StreamSupplier inputStream,
EolStreamType streamType, String smudgeCommand, String path,
-File file, boolean safeWrite) throws IOException {
+File file) throws IOException {
if (inCore) {
return;
}
CheckoutMetadata metadata = new CheckoutMetadata(streamType,
smudgeCommand);
-if (safeWrite) {
-// Write to a buffer and copy to the file only if everything was
-// fine.
-TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
-try {
-try (TemporaryBuffer buf = buffer) {
-DirCacheCheckout.getContent(repo, path, metadata,
-resultStreamLoader, workingTreeOptions, buf);
-}
-try (InputStream bufIn = buffer.openInputStream()) {
-Files.copy(bufIn, file.toPath(),
-StandardCopyOption.REPLACE_EXISTING);
-}
-} finally {
-buffer.destroy();
-}
-return;
-}
try (OutputStream outputStream = new FileOutputStream(file)) {
DirCacheCheckout.getContent(repo, path, metadata,
-resultStreamLoader, workingTreeOptions, outputStream);
+inputStream, workingTreeOptions, outputStream);
}
}
@@ -652,8 +555,8 @@ public void updateFileWithContent(StreamLoader resultStreamLoader,
* Creates a path with the given content, and adds it to the specified stage
* to the index builder.
*
-* @param inputStream
-* with the content to be updated
+* @param input
+* the content to be updated
* @param path
* of the file to be updated
* @param fileMode
@@ -670,43 +573,12 @@ public void updateFileWithContent(StreamLoader resultStreamLoader,
* @throws IOException
* if inserting the content fails
*/
-public DirCacheEntry insertToIndex(InputStream inputStream, byte[] path,
-FileMode fileMode, int entryStage, Instant lastModified, int len,
-Attribute lfsAttribute) throws IOException {
-StreamLoader contentLoader = createStreamLoader(() -> inputStream, len);
-return insertToIndex(contentLoader, path, fileMode, entryStage,
-lastModified, len, lfsAttribute);
-}
-/**
-* Creates a path with the given content, and adds it to the specified stage
-* to the index builder.
-*
-* @param resultStreamLoader
-* with the content to be updated
-* @param path
-* of the file to be updated
-* @param fileMode
-* of the modified file
-* @param entryStage
-* of the new entry
-* @param lastModified
-* instant of the modified file
-* @param len
-* of the content
-* @param lfsAttribute
-* for checking for LFS enablement
-* @return the entry which was added to the index
-* @throws IOException
-* if inserting the content fails
-*/
-public DirCacheEntry insertToIndex(StreamLoader resultStreamLoader,
+public DirCacheEntry insertToIndex(InputStream input,
byte[] path, FileMode fileMode, int entryStage,
Instant lastModified, int len, Attribute lfsAttribute)
throws IOException {
-return addExistingToIndex(
-insertResult(resultStreamLoader, lfsAttribute), path, fileMode,
-entryStage, lastModified, len);
+return addExistingToIndex(insertResult(input, lfsAttribute, len), path,
+fileMode, entryStage, lastModified, len);
}
/**
@@ -734,16 +606,15 @@ public DirCacheEntry addExistingToIndex(ObjectId objectId, byte[] path,
dce.setLastModified(lastModified);
}
dce.setLength(inCore ? 0 : len);
dce.setObjectId(objectId);
builder.add(dce);
return dce;
}
-private ObjectId insertResult(StreamLoader resultStreamLoader,
-Attribute lfsAttribute) throws IOException {
+private ObjectId insertResult(InputStream input,
+Attribute lfsAttribute, long length) throws IOException {
try (LfsInputStream is = LfsFactory.getInstance().applyCleanFilter(repo,
-resultStreamLoader.data.load(), resultStreamLoader.size,
+input, length,
lfsAttribute)) {
return inserter.insert(OBJ_BLOB, is.getLength(), is);
}
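
With StreamLoader removed, the only consumer that still needs an explicit size is the LFS clean filter, so insertToIndex and insertResult now carry the length as a plain parameter beside the stream. A hypothetical call from within the merge package (updater, pathBytes, and lfsAttribute are assumed context; content is an illustrative byte[]):

DirCacheEntry entry = updater.insertToIndex(
		new ByteArrayInputStream(content), pathBytes,
		FileMode.REGULAR_FILE, DirCacheEntry.STAGE_0,
		Instant.now(), content.length, lfsAttribute);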