From 6d2bcb6043c1f32dc7fd7029a29d671babf271cf Mon Sep 17 00:00:00 2001
From: Han-Wen Nienhuys
Date: Tue, 30 Aug 2022 10:07:21 +0200
Subject: [PATCH] DirCacheCheckout#getContent: also take InputStream supplier

This lets us use DirCacheCheckout for routines that want to write files
into the worktree whose content is not available as a git object.

DirCacheCheckout#getContent takes an InputStream supplier rather than an
InputStream: if filtering fails with an IOException, the data is placed
unfiltered in the checkout. This means that the stream has to be read
again, from the start.

Use it in this way in ApplyCommand. This use is incorrect, though: the
same InputStream is returned twice, so if the read were retried, the
stream would return 0 bytes. It doesn't really matter, because in either
case the SHA-1 will not match up, and the patch fails.

Change-Id: I2efa9a6da06806ff79b155032fe4b34be8fec09e
---
 .../org/eclipse/jgit/api/ApplyCommand.java |   6 +-
 .../jgit/dircache/DirCacheCheckout.java    | 109 ++++++++++++++----
 2 files changed, 88 insertions(+), 27 deletions(-)

diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java
index 64fba98b4..c955d7983 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java
@@ -398,7 +398,7 @@ private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
 	}
 
 	private void applyBinary(Repository repository, String path, File f,
-			FileHeader fh, WorkTreeUpdater.StreamSupplier loader, ObjectId id,
+			FileHeader fh, DirCacheCheckout.StreamSupplier loader, ObjectId id,
 			CheckoutMetadata checkOut) throws PatchApplyException, IOException {
 		if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
@@ -430,9 +430,7 @@ private void applyBinary(Repository repository, String path, File f,
 						hunk.getBuffer(), start, length))))) {
 			DirCacheCheckout.getContent(repository, path, checkOut,
-					WorkTreeUpdater.createStreamLoader(() -> inflated,
-							hunk.getSize()),
-					null, out);
+					() -> inflated, null, out);
 			if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) {
 				throw new PatchApplyException(MessageFormat.format(
 						JGitText.get().applyBinaryResultOidWrong,
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
index 2365c90d0..1fb81b71e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
@@ -21,6 +21,7 @@
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.file.StandardCopyOption;
 import java.text.MessageFormat;
@@ -1605,6 +1606,60 @@ public static void getContent(Repository repo, String path,
 			CheckoutMetadata checkoutMetadata, ObjectLoader ol,
 			WorkingTreeOptions opt, OutputStream os)
 			throws IOException {
+		getContent(repo, path, checkoutMetadata, ol::openStream, opt, os);
+	}
+
+
+	/**
+	 * Something that can supply an {@link InputStream}.
+	 *
+	 * @since 6.3
+	 */
+	public interface StreamSupplier {
+
+		/**
+		 * Loads the input stream.
+		 *
+		 * @return the loaded stream
+		 * @throws IOException
+		 *             if any reading error occurs
+		 */
+		InputStream load() throws IOException;
+	}
+
+	/**
+	 * Return filtered content for blob contents. EOL handling and smudge-filter
+	 * handling are applied in the same way as it would be done during a
+	 * checkout.
+	 *
+	 * @param repo
+	 *            the repository
+	 * @param path
+	 *            the path used to determine the correct filters for the object
+	 * @param checkoutMetadata
+	 *            containing
+	 *
+	 * @param inputStream
+	 *            A supplier for the raw content of the object. Each call should
+	 *            yield a fresh stream of the same object.
+	 * @param opt
+	 *            the working tree options where only 'core.autocrlf' is used
+	 *            for EOL handling if 'checkoutMetadata.eolStreamType' is not
+	 *            valid
+	 * @param os
+	 *            the output stream the filtered content is written to. The
+	 *            caller is responsible to close the stream.
+	 * @throws IOException
+	 * @since 6.3
+	 */
+	public static void getContent(Repository repo, String path,
+			CheckoutMetadata checkoutMetadata, StreamSupplier inputStream,
+			WorkingTreeOptions opt, OutputStream os)
+			throws IOException {
 		EolStreamType nonNullEolStreamType;
 		if (checkoutMetadata.eolStreamType != null) {
 			nonNullEolStreamType = checkoutMetadata.eolStreamType;
@@ -1618,21 +1673,23 @@ public static void getContent(Repository repo, String path,
 			if (checkoutMetadata.smudgeFilterCommand != null) {
 				if (FilterCommandRegistry
 						.isRegistered(checkoutMetadata.smudgeFilterCommand)) {
-					runBuiltinFilterCommand(repo, checkoutMetadata, ol,
+					runBuiltinFilterCommand(repo, checkoutMetadata, inputStream,
 							channel);
 				} else {
-					runExternalFilterCommand(repo, path, checkoutMetadata, ol,
+					runExternalFilterCommand(repo, path, checkoutMetadata, inputStream,
 							channel);
 				}
 			} else {
-				ol.copyTo(channel);
+				try (InputStream in = inputStream.load()) {
+					in.transferTo(channel);
+				}
 			}
 		}
 	}
 
 	// Run an external filter command
 	private static void runExternalFilterCommand(Repository repo, String path,
-			CheckoutMetadata checkoutMetadata, ObjectLoader ol,
+			CheckoutMetadata checkoutMetadata, StreamSupplier inputStream,
 			OutputStream channel) throws IOException {
 		FS fs = repo.getFS();
 		ProcessBuilder filterProcessBuilder = fs.runInShell(
@@ -1644,7 +1701,9 @@ private static void runExternalFilterCommand(Repository repo, String path,
 		int rc;
 		try {
 			// TODO: wire correctly with AUTOCRLF
-			result = fs.execute(filterProcessBuilder, ol.openStream());
+			try (InputStream in = inputStream.load()) {
+				result = fs.execute(filterProcessBuilder, in);
+			}
 			rc = result.getRc();
 			if (rc == 0) {
 				result.getStdout().writeTo(channel,
@@ -1665,31 +1724,35 @@ private static void runExternalFilterCommand(Repository repo, String path,
 
 	// Run a builtin filter command
 	private static void runBuiltinFilterCommand(Repository repo,
-			CheckoutMetadata checkoutMetadata, ObjectLoader ol,
+			CheckoutMetadata checkoutMetadata, StreamSupplier inputStream,
 			OutputStream channel) throws MissingObjectException, IOException {
 		boolean isMandatory = repo.getConfig().getBoolean(
 				ConfigConstants.CONFIG_FILTER_SECTION,
 				ConfigConstants.CONFIG_SECTION_LFS,
 				ConfigConstants.CONFIG_KEY_REQUIRED, false);
 		FilterCommand command = null;
-		try {
-			command = FilterCommandRegistry.createFilterCommand(
-					checkoutMetadata.smudgeFilterCommand, repo, ol.openStream(),
-					channel);
-		} catch (IOException e) {
-			LOG.error(JGitText.get().failedToDetermineFilterDefinition, e);
-			if (!isMandatory) {
-				// In case an IOException occurred during creating of the
-				// command then proceed as if there would not have been a
-				// builtin filter (only if the filter is not mandatory).
-				ol.copyTo(channel);
-			} else {
-				throw e;
+		try (InputStream in = inputStream.load()) {
+			try {
+				command = FilterCommandRegistry.createFilterCommand(
+						checkoutMetadata.smudgeFilterCommand, repo, in,
+						channel);
+			} catch (IOException e) {
+				LOG.error(JGitText.get().failedToDetermineFilterDefinition, e);
+				if (!isMandatory) {
+					// In case an IOException occurred during creating of the
+					// command then proceed as if there would not have been a
+					// builtin filter (only if the filter is not mandatory).
+					try (InputStream again = inputStream.load()) {
+						again.transferTo(channel);
+					}
+				} else {
+					throw e;
+				}
 			}
-		}
-		if (command != null) {
-			while (command.run() != -1) {
-				// loop as long as command.run() tells there is work to do
+			if (command != null) {
+				while (command.run() != -1) {
+					// loop as long as command.run() tells there is work to do
+				}
 			}
 		}
 	}
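
A minimal usage sketch for the new overload, not part of the patch: the
class and method names below (StreamSupplierExample, writeFiltered) are
hypothetical, and the CheckoutMetadata constructor arguments are assumed
rather than taken from this change. The point is that the supplier opens
a fresh stream on every load() call, so getContent can fall back to
copying the unfiltered content when a filter fails.

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    import org.eclipse.jgit.dircache.DirCacheCheckout;
    import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
    import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
    import org.eclipse.jgit.lib.Repository;
    import org.eclipse.jgit.treewalk.WorkingTreeOptions;

    class StreamSupplierExample {
        // Writes in-memory bytes through the same EOL / smudge-filter
        // pipeline a checkout would use. The lambda opens a NEW stream on
        // every load() call, so getContent can safely re-read the content
        // if filtering fails and the unfiltered fallback is taken.
        static void writeFiltered(Repository repo, String path, byte[] raw,
                OutputStream out) throws IOException {
            DirCacheCheckout.StreamSupplier fresh =
                    () -> new ByteArrayInputStream(raw);
            DirCacheCheckout.getContent(repo, path,
                    new CheckoutMetadata(EolStreamType.AUTO_LF,
                            null /* no smudge filter */),
                    fresh, repo.getConfig().get(WorkingTreeOptions.KEY), out);
        }
    }

By contrast, the "() -> inflated" supplier in ApplyCommand above hands out
the same already-open stream on every call, so a retried read sees no
bytes; the subsequent SHA-1 comparison still rejects the result.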