Merge branch 'stable-6.3'

* stable-6.3:
  [benchmarks] Remove profiler configuration
  Add SHA1 benchmark
  [benchmarks] Set version of maven-compiler-plugin to 3.8.1
  Fix running JMH benchmarks
  Add option to allow using JDK's SHA1 implementation
  Fix API breakage caused by extracting WorkTreeUpdater
  Extract Exception -> HTTP status code mapping for reuse
  Don't handle internal git errors as an HTTP error
  Ignore IllegalStateException if JVM is already shutting down
  Allow to perform PackedBatchRefUpdate without locking loose refs

Change-Id: Ib58879be292c54a2a7f4936ac0986997985c822b
Matthias Sohn 2022-11-16 10:15:30 +01:00
commit 0fb9d26eff
22 changed files with 1622 additions and 1183 deletions

View File

@@ -37,13 +37,17 @@
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-generator-annprocess</artifactId>
<version>${jmh.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.eclipse.jgit</groupId>
<artifactId>org.eclipse.jgit</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jgit</groupId>
<artifactId>org.eclipse.jgit.junit</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<build>

View File

@@ -69,8 +69,6 @@ public FileSnapshot testCreateFileSnapshot() {
public static void main(String[] args) throws RunnerException {
Options opt = new OptionsBuilder()
.include(CreateFileSnapshotBenchmark.class.getSimpleName())
// .addProfiler(StackProfiler.class)
// .addProfiler(GCProfiler.class)
.forks(1).jvmArgs("-ea").build();
new Runner(opt).run();
}

View File

@@ -128,8 +128,6 @@ public static void main(String[] args) throws RunnerException {
Options opt = new OptionsBuilder()
.include(FileMoveBenchmark.class
.getSimpleName())
// .addProfiler(StackProfiler.class)
// .addProfiler(GCProfiler.class)
.forks(1).jvmArgs("-ea").build();
new Runner(opt).run();
}

View File

@@ -56,8 +56,6 @@ public FileStore testLookupFileStore() throws IOException {
public static void main(String[] args) throws RunnerException {
Options opt = new OptionsBuilder()
.include(LookupFileStoreBenchmark.class.getSimpleName())
.addProfiler(StackProfiler.class)
// .addProfiler(GCProfiler.class)
.forks(1).jvmArgs("-ea").build();
new Runner(opt).run();
}

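The hunks above drop the committed profiler configuration, including the stray active StackProfiler in LookupFileStoreBenchmark. When a profiled run is wanted again, JMH can attach a profiler per invocation instead of keeping it in the source; a minimal sketch, assuming JMH's bundled GCProfiler:

import org.openjdk.jmh.profile.GCProfiler;

Options opt = new OptionsBuilder()
        .include(LookupFileStoreBenchmark.class.getSimpleName())
        .addProfiler(GCProfiler.class) // opt in for this run only
        .forks(1).jvmArgs("-ea").build();
new Runner(opt).run();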
View File

@@ -0,0 +1,99 @@
/*
* Copyright (C) 2022, Matthias Sohn <matthias.sohn@sap.com> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.benchmarks;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.concurrent.TimeUnit;
import org.eclipse.jgit.junit.MockSystemReader;
import org.eclipse.jgit.util.SystemReader;
import org.eclipse.jgit.util.sha1.SHA1;
import org.eclipse.jgit.util.sha1.SHA1.Sha1Implementation;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
@State(Scope.Thread)
public class SHA1Benchmark {
@State(Scope.Benchmark)
public static class BenchmarkState {
@Param({ "1", "2", "4", "8", "16", "32", "64" })
int size;
@Param({ "false", "true" })
boolean detectCollision;
@Param({ "java", "jdkNative" })
String impl;
private SecureRandom rnd;
byte[] content;
@Setup
public void setupBenchmark() {
SystemReader.setInstance(new MockSystemReader());
if (impl.equalsIgnoreCase(Sha1Implementation.JDKNATIVE.name())) {
System.setProperty("org.eclipse.jgit.util.sha1.implementation",
Sha1Implementation.JDKNATIVE.name());
}
content = new byte[size * 1024];
try {
rnd = SecureRandom.getInstanceStrong();
} catch (NoSuchAlgorithmException e) {
// ignore
}
rnd.nextBytes(content);
}
@TearDown
public void teardown() {
SystemReader.setInstance(null);
rnd = null;
}
}
@Benchmark
@BenchmarkMode({ Mode.AverageTime })
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@Warmup(iterations = 2, time = 100, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = 2, time = 5, timeUnit = TimeUnit.SECONDS)
@Fork(1)
public void testSHA1(Blackhole blackhole, BenchmarkState state) {
SHA1 hash = SHA1.newInstance();
hash.setDetectCollision(state.detectCollision);
hash.update(state.content);
blackhole.consume(hash.digest());
}
public static void main(String[] args) throws RunnerException {
Options opt = new OptionsBuilder()
.include(SHA1Benchmark.class.getSimpleName())
.forks(1).jvmArgs("-ea").build();
new Runner(opt).run();
}
}

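To measure a single point of the 7×2×2 parameter matrix instead of the full sweep, the runner options can pin @Param values; a sketch using the parameter names declared above:

Options opt = new OptionsBuilder()
        .include(SHA1Benchmark.class.getSimpleName())
        .param("size", "16")           // KiB of random input, from the @Param list
        .param("impl", "jdkNative")    // or "java" for the collision-detecting version
        .param("detectCollision", "false")
        .forks(1).jvmArgs("-ea").build();
new Runner(opt).run();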
View File

@@ -71,8 +71,6 @@ public SimpleLruCache<String, String> testCacheRead() {
public static void main(String[] args) throws RunnerException {
Options opt = new OptionsBuilder()
.include(SimpleLruCacheBenchmark.class.getSimpleName())
// .addProfiler(StackProfiler.class)
// .addProfiler(GCProfiler.class)
.forks(1).jvmArgs("-ea").build();
new Runner(opt).run();
}

View File

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<component id="org.eclipse.jgit.http.server" version="2">
<resource path="src/org/eclipse/jgit/http/server/UploadPackErrorHandler.java" type="org.eclipse.jgit.http.server.UploadPackErrorHandler">
<filter id="1210056707">
<message_arguments>
<message_argument value="6.1.1"/>
<message_argument value="statusCodeForThrowable(Throwable)"/>
</message_arguments>
</filter>
</resource>
</component>

View File

@@ -9,13 +9,19 @@
*/
package org.eclipse.jgit.http.server;
import static javax.servlet.http.HttpServletResponse.SC_FORBIDDEN;
import static javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jgit.errors.PackProtocolException;
import org.eclipse.jgit.transport.ServiceMayNotContinueException;
import org.eclipse.jgit.transport.UploadPack;
import org.eclipse.jgit.transport.resolver.ServiceNotEnabledException;
/**
* Handle git-upload-pack errors.
@@ -34,6 +40,24 @@
* @since 5.6
*/
public interface UploadPackErrorHandler {
/**
* Maps a thrown git related Exception to an appropriate HTTP status code.
*
* @param error
* The thrown Exception.
* @return the HTTP status code as an int
* @since 6.1.1
*/
public static int statusCodeForThrowable(Throwable error) {
if (error instanceof ServiceNotEnabledException) {
return SC_FORBIDDEN;
}
if (error instanceof PackProtocolException) {
// Internal git errors are not errors from an HTTP standpoint.
return SC_OK;
}
return SC_INTERNAL_SERVER_ERROR;
}
/**
* @param req
* The HTTP request

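Because the mapping is now a static interface method, embedders can reuse the default policy from a custom handler. A hedged sketch (the (req, rsp, r) shape follows the interface's single upload method introduced in JGit 5.6; the logging call is illustrative):

UploadPackErrorHandler loggingHandler = (req, rsp, r) -> {
    try {
        r.upload(); // run the git-upload-pack exchange
    } catch (ServiceMayNotContinueException e) {
        rsp.sendError(e.getStatusCode(), e.getMessage());
    } catch (Throwable t) {
        LOG.warn("upload-pack failed", t); // hypothetical logger
        rsp.sendError(UploadPackErrorHandler.statusCodeForThrowable(t));
    }
};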
View File

@@ -10,9 +10,7 @@
package org.eclipse.jgit.http.server;
import static javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import static javax.servlet.http.HttpServletResponse.SC_FORBIDDEN;
import static javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
import static javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED;
import static javax.servlet.http.HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE;
import static org.eclipse.jgit.http.server.GitSmartHttpTools.UPLOAD_PACK;
@@ -23,6 +21,7 @@
import static org.eclipse.jgit.http.server.ServletUtils.consumeRequestBody;
import static org.eclipse.jgit.http.server.ServletUtils.getInputStream;
import static org.eclipse.jgit.http.server.ServletUtils.getRepository;
import static org.eclipse.jgit.http.server.UploadPackErrorHandler.statusCodeForThrowable;
import static org.eclipse.jgit.util.HttpSupport.HDR_USER_AGENT;
import java.io.IOException;
@@ -49,7 +48,6 @@
import org.eclipse.jgit.transport.ServiceMayNotContinueException;
import org.eclipse.jgit.transport.UploadPack;
import org.eclipse.jgit.transport.UploadPackInternalServerErrorException;
import org.eclipse.jgit.transport.WantNotValidException;
import org.eclipse.jgit.transport.resolver.ServiceNotAuthorizedException;
import org.eclipse.jgit.transport.resolver.ServiceNotEnabledException;
import org.eclipse.jgit.transport.resolver.UploadPackFactory;
@@ -153,16 +151,6 @@ public void destroy() {
}
}
private static int statusCodeForThrowable(Throwable error) {
if (error instanceof ServiceNotEnabledException) {
return SC_FORBIDDEN;
}
if (error instanceof WantNotValidException) {
return SC_BAD_REQUEST;
}
return SC_INTERNAL_SERVER_ERROR;
}
private final UploadPackErrorHandler handler;
UploadPackServlet(@Nullable UploadPackErrorHandler handler) {

View File

@@ -537,9 +537,9 @@ public void testFetchBySHA1Unreachable() throws Exception {
Collections.singletonList(
new RefSpec(unreachableCommit.name()))));
assertTrue(e.getMessage().contains(
"Bad Request"));
"want " + unreachableCommit.name() + " not valid"));
}
assertLastRequestStatusCode(400);
assertLastRequestStatusCode(200);
}
@Test
@@ -560,9 +560,9 @@ protected Map<String, Ref> getAdvertisedRefs(Repository repository,
() -> t.fetch(NullProgressMonitor.INSTANCE,
Collections.singletonList(new RefSpec(A.name()))));
assertTrue(
e.getMessage().contains("Bad Request"));
e.getMessage().contains("want " + A.name() + " not valid"));
}
assertLastRequestStatusCode(400);
assertLastRequestStatusCode(200);
}
@Test
@@ -1610,9 +1610,9 @@ public void testInvalidWant() throws Exception {
fail("Server accepted want " + id.name());
} catch (TransportException err) {
assertTrue(err.getMessage()
.contains("Bad Request"));
.contains("want " + id.name() + " not valid"));
}
assertLastRequestStatusCode(400);
assertLastRequestStatusCode(200);
}
@Test

View File

@@ -14,6 +14,7 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import java.io.IOException;
@@ -22,11 +23,20 @@
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.eclipse.jgit.junit.MockSystemReader;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.util.IO;
import org.junit.Test;
import org.eclipse.jgit.util.SystemReader;
import org.eclipse.jgit.util.sha1.SHA1.Sha1Implementation;
import org.junit.After;
import org.junit.Before;
import org.junit.experimental.theories.DataPoints;
import org.junit.experimental.theories.Theories;
import org.junit.experimental.theories.Theory;
import org.junit.runner.RunWith;
@RunWith(Theories.class)
public class SHA1Test {
private static final String TEST1 = "abc";
@@ -34,7 +44,32 @@ public class SHA1Test {
private static final String TEST2b = "jkijkljklmklmnlmnomnopnopq";
private static final String TEST2 = TEST2a + TEST2b;
@Test
@DataPoints
public static Sha1Implementation[] getDataPoints() {
return new Sha1Implementation[] { Sha1Implementation.JAVA,
Sha1Implementation.JDKNATIVE };
}
private Sha1Implementation sha1Implementation;
public SHA1Test(Sha1Implementation impl) {
this.sha1Implementation = impl;
}
@Before
public void setUp() throws Exception {
MockSystemReader mockSystemReader = new MockSystemReader();
SystemReader.setInstance(mockSystemReader);
System.setProperty("org.eclipse.jgit.util.sha1.implementation",
sha1Implementation.name());
}
@After
public void tearDown() {
SystemReader.setInstance(null);
}
@Theory
public void test0() throws NoSuchAlgorithmException {
ObjectId exp = ObjectId
.fromString("da39a3ee5e6b4b0d3255bfef95601890afd80709");
@@ -56,7 +91,7 @@ public void test0() throws NoSuchAlgorithmException {
assertEquals(exp, s2);
}
@Test
@Theory
public void test1() throws NoSuchAlgorithmException {
ObjectId exp = ObjectId
.fromString("a9993e364706816aba3e25717850c26c9cd0d89d");
@@ -78,7 +113,7 @@ public void test1() throws NoSuchAlgorithmException {
assertEquals(exp, s2);
}
@Test
@Theory
public void test2() throws NoSuchAlgorithmException {
ObjectId exp = ObjectId
.fromString("84983e441c3bd26ebaae4aa1f95129e5e54670f1");
@@ -100,9 +135,13 @@ public void test2() throws NoSuchAlgorithmException {
assertEquals(exp, s2);
}
@Test
@Theory
public void shatteredCollision()
throws IOException, NoSuchAlgorithmException {
assumeFalse(
System.getProperty("org.eclipse.jgit.util.sha1.implementation")
.equalsIgnoreCase("jdkNative"));
byte[] pdf1 = read("shattered-1.pdf", 422435);
byte[] pdf2 = read("shattered-2.pdf", 422435);
MessageDigest md;
@@ -149,8 +188,12 @@ public void shatteredCollision()
}
}
@Test
@Theory
public void shatteredStoredInGitBlob() throws IOException {
assumeFalse(
System.getProperty("org.eclipse.jgit.util.sha1.implementation")
.equalsIgnoreCase("jdkNative"));
byte[] pdf1 = read("shattered-1.pdf", 422435);
byte[] pdf2 = read("shattered-2.pdf", 422435);
@@ -158,8 +201,10 @@ public void shatteredStoredInGitBlob() throws IOException {
// the Git blob header permutes the data enough for this specific
// attack example to not be detected as a collision. (A different file
// pair that takes the Git header into account however, would.)
ObjectId id1 = blob(pdf1, SHA1.newInstance().setDetectCollision(true));
ObjectId id2 = blob(pdf2, SHA1.newInstance().setDetectCollision(true));
ObjectId id1 = blob(pdf1,
SHA1.newInstance().setDetectCollision(true));
ObjectId id2 = blob(pdf2,
SHA1.newInstance().setDetectCollision(true));
assertEquals(
ObjectId.fromString("ba9aaa145ccd24ef760cf31c74d8f7ca1a2e47b0"),
@@ -169,8 +214,12 @@ public void shatteredStoredInGitBlob() throws IOException {
id2);
}
@Test
@Theory
public void detectsShatteredByDefault() throws IOException {
assumeFalse(
System.getProperty("org.eclipse.jgit.util.sha1.implementation")
.equalsIgnoreCase("jdkNative"));
assumeTrue(System.getProperty("org.eclipse.jgit.util.sha1.detectCollision") == null);
assumeTrue(System.getProperty("org.eclipse.jgit.util.sha1.safeHash") == null);

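The assumeFalse guards skip the collision tests when the jdkNative backend is selected, since only the pure-Java implementation detects collisions. Callers that rely on detection handle it roughly like this (a sketch; Sha1CollisionException is what JGit's digest methods throw on detection):

SHA1 h = SHA1.newInstance().setDetectCollision(true);
h.update(pdf1); // e.g. the shattered-1.pdf bytes used above
try {
    ObjectId id = h.toObjectId();
} catch (Sha1CollisionException e) {
    // input matches a known cryptanalytic attack pattern; reject it
}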
View File

@@ -1,11 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<component id="org.eclipse.jgit" version="2">
<resource path="src/org/eclipse/jgit/transport/Transport.java" type="org.eclipse.jgit.transport.Transport">
<filter id="336658481">
<message_arguments>
<message_argument value="org.eclipse.jgit.transport.Transport"/>
<message_argument value="DEFAULT_PUSH_USE_BITMAPS"/>
</message_arguments>
</filter>
</resource>
</component>

View File

@@ -86,10 +86,16 @@
*/
class PackedBatchRefUpdate extends BatchRefUpdate {
private RefDirectory refdb;
private boolean shouldLockLooseRefs;
PackedBatchRefUpdate(RefDirectory refdb) {
super(refdb);
this.refdb = refdb;
this(refdb, true);
}
PackedBatchRefUpdate(RefDirectory refdb, boolean shouldLockLooseRefs) {
super(refdb);
this.refdb = refdb;
this.shouldLockLooseRefs = shouldLockLooseRefs;
}
/** {@inheritDoc} */
@@ -155,7 +161,7 @@ public void execute(RevWalk walk, ProgressMonitor monitor,
refdb.inProcessPackedRefsLock.lock();
try {
PackedRefList oldPackedList;
if (!refdb.isInClone()) {
if (!refdb.isInClone() && shouldLockLooseRefs) {
locks = lockLooseRefs(pending);
if (locks == null) {
return;

View File

@@ -586,6 +586,21 @@ public PackedBatchRefUpdate newBatchUpdate() {
return new PackedBatchRefUpdate(this);
}
/**
* Create a new batch update to attempt on this database.
*
* @param shouldLockLooseRefs
* whether loose refs should be locked during the batch ref
* update. Note that this should only be set to {@code false} if
* the application using this ensures that no other ref updates
* run concurrently to avoid lost updates caused by a race. In
* such cases it can improve performance.
* @return a new batch update object
*/
public PackedBatchRefUpdate newBatchUpdate(boolean shouldLockLooseRefs) {
return new PackedBatchRefUpdate(this, shouldLockLooseRefs);
}
/** {@inheritDoc} */
@Override
public boolean performsAtomicTransactions() {

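A caller that already serializes all ref updates externally can skip the loose-ref locks; a sketch under that assumption (RefDirectory is internal API, and newId stands for an object already present in the repository):

RefDirectory refdb = (RefDirectory) repository.getRefDatabase();
PackedBatchRefUpdate batch = refdb.newBatchUpdate(false); // no loose-ref locking
batch.addCommand(new ReceiveCommand(ObjectId.zeroId(), newId, "refs/heads/imported"));
try (RevWalk rw = new RevWalk(repository)) {
    batch.execute(rw, NullProgressMonitor.INSTANCE);
}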
View File

@@ -537,6 +537,13 @@ public final class ConfigConstants {
*/
public static final String CONFIG_KEY_SUPPORTSATOMICFILECREATION = "supportsatomicfilecreation";
/**
* The "sha1Implementation" key in the "core" section
*
* @since 5.13.2
*/
public static final String SHA1_IMPLEMENTATION = "sha1implementation";
/**
* The "noprefix" key in the "diff" section
* @since 3.0

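The new key mirrors the org.eclipse.jgit.util.sha1.implementation system property, which takes precedence when both are set. A sketch of forcing the MessageDigest-backed implementation from code:

// The system property wins over core.sha1Implementation in the user config.
System.setProperty("org.eclipse.jgit.util.sha1.implementation", "jdkNative");
SHA1 sha1 = SHA1.newInstance(); // returns the JDK-native variant
sha1.update("abc".getBytes(StandardCharsets.US_ASCII));
ObjectId id = sha1.toObjectId();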
View File

@@ -3,7 +3,8 @@
* Copyright (C) 2010-2012, Matthias Sohn <matthias.sohn@sap.com>
* Copyright (C) 2012, Research In Motion Limited
* Copyright (C) 2017, Obeo (mathieu.cartaud@obeo.fr)
* Copyright (C) 2018, 2022 Thomas Wolf <twolf@apache.org> and others
* Copyright (C) 2018, 2022 Thomas Wolf <twolf@apache.org>
* Copyright (C) 2022, Google Inc. and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@@ -20,16 +21,26 @@
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_ALGORITHM;
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import org.eclipse.jgit.annotations.NonNull;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.attributes.Attribute;
import org.eclipse.jgit.attributes.Attributes;
import org.eclipse.jgit.diff.DiffAlgorithm;
import org.eclipse.jgit.diff.DiffAlgorithm.SupportedAlgorithm;
@@ -38,16 +49,24 @@
import org.eclipse.jgit.diff.Sequence;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheBuildIterator;
import org.eclipse.jgit.dircache.DirCacheBuilder;
import org.eclipse.jgit.dircache.DirCacheCheckout;
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
import org.eclipse.jgit.dircache.DirCacheCheckout.StreamSupplier;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.errors.BinaryBlobException;
import org.eclipse.jgit.errors.IndexWriteException;
import org.eclipse.jgit.errors.NoWorkTreeException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevTree;
import org.eclipse.jgit.storage.pack.PackConfig;
@@ -56,17 +75,609 @@
import org.eclipse.jgit.treewalk.CanonicalTreeParser;
import org.eclipse.jgit.treewalk.NameConflictTreeWalk;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
import org.eclipse.jgit.treewalk.WorkingTreeIterator;
import org.eclipse.jgit.treewalk.WorkingTreeOptions;
import org.eclipse.jgit.treewalk.filter.TreeFilter;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.LfsFactory;
import org.eclipse.jgit.util.LfsFactory.LfsInputStream;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
/**
* A three-way merger performing a content-merge if necessary
*/
public class ResolveMerger extends ThreeWayMerger {
/**
* Handles work tree updates on both the checkout and the index.
* <p>
* You should use a single instance for all of your file changes. In case of
* an error, make sure your instance is released, and initiate a new one if
* necessary.
*
* @since 6.3.1
*/
protected static class WorkTreeUpdater implements Closeable {
/**
* The result of writing the index changes.
*/
public static class Result {
private final List<String> modifiedFiles = new LinkedList<>();
private final List<String> failedToDelete = new LinkedList<>();
private ObjectId treeId = null;
/**
* @return Modified tree ID if any, or null otherwise.
*/
public ObjectId getTreeId() {
return treeId;
}
/**
* @return Files that couldn't be deleted.
*/
public List<String> getFailedToDelete() {
return failedToDelete;
}
/**
* @return Files modified during this operation.
*/
public List<String> getModifiedFiles() {
return modifiedFiles;
}
}
Result result = new Result();
/**
* The repository this handler operates on.
*/
@Nullable
private final Repository repo;
/**
* Set to true if this operation should work in-memory. The repo's
* dircache and workingtree are not touched by this method. Eventually
* needed files are created as temporary files and a new empty,
* in-memory dircache will be used instead of the repo's one. Often used
* for bare repos where the repo doesn't even have a workingtree and
* dircache.
*/
private final boolean inCore;
private final ObjectInserter inserter;
private final ObjectReader reader;
private DirCache dirCache;
private boolean implicitDirCache = false;
/**
* Builder to update the dir cache during this operation.
*/
private DirCacheBuilder builder;
/**
* The {@link WorkingTreeOptions} are needed to determine line endings
* for affected files.
*/
private WorkingTreeOptions workingTreeOptions;
/**
* The size limit (bytes) which controls a file to be stored in
* {@code Heap} or {@code LocalFile} during the operation.
*/
private int inCoreFileSizeLimit;
/**
* If the operation has nothing to do for a file but check it out at the
* end of the operation, it can be added here.
*/
private final Map<String, DirCacheEntry> toBeCheckedOut = new HashMap<>();
/**
* Files in this list will be deleted from the local copy at the end of
* the operation.
*/
private final TreeMap<String, File> toBeDeleted = new TreeMap<>();
/**
* Keeps {@link CheckoutMetadata} for {@link #checkout()}.
*/
private Map<String, CheckoutMetadata> checkoutMetadataByPath;
/**
* Keeps {@link CheckoutMetadata} for {@link #revertModifiedFiles()}.
*/
private Map<String, CheckoutMetadata> cleanupMetadataByPath;
/**
* Whether the changes were successfully written.
*/
private boolean indexChangesWritten;
/**
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, use the
* default repository one
*/
private WorkTreeUpdater(Repository repo, DirCache dirCache) {
this.repo = repo;
this.dirCache = dirCache;
this.inCore = false;
this.inserter = repo.newObjectInserter();
this.reader = inserter.newReader();
Config config = repo.getConfig();
this.workingTreeOptions = config.get(WorkingTreeOptions.KEY);
this.inCoreFileSizeLimit = getInCoreFileSizeLimit(config);
this.checkoutMetadataByPath = new HashMap<>();
this.cleanupMetadataByPath = new HashMap<>();
}
/**
* Creates a new {@link WorkTreeUpdater} for the given repository.
*
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, use the
* default repository one
* @return the {@link WorkTreeUpdater}.
*/
public static WorkTreeUpdater createWorkTreeUpdater(Repository repo,
DirCache dirCache) {
return new WorkTreeUpdater(repo, dirCache);
}
/**
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, creates a
* new one
* @param oi
* to use for writing the modified objects with.
*/
private WorkTreeUpdater(Repository repo, DirCache dirCache,
ObjectInserter oi) {
this.repo = repo;
this.dirCache = dirCache;
this.inserter = oi;
this.inCore = true;
this.reader = oi.newReader();
if (repo != null) {
this.inCoreFileSizeLimit = getInCoreFileSizeLimit(
repo.getConfig());
}
}
/**
* Creates a new {@link WorkTreeUpdater} that works in memory only.
*
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, creates a
* new one
* @param oi
* to use for writing the modified objects with.
* @return the {@link WorkTreeUpdater}
*/
public static WorkTreeUpdater createInCoreWorkTreeUpdater(
Repository repo, DirCache dirCache, ObjectInserter oi) {
return new WorkTreeUpdater(repo, dirCache, oi);
}
private static int getInCoreFileSizeLimit(Config config) {
return config.getInt(ConfigConstants.CONFIG_MERGE_SECTION,
ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 10 << 20);
}
/**
* Gets the size limit for in-core files in this config.
*
* @return the size
*/
public int getInCoreFileSizeLimit() {
return inCoreFileSizeLimit;
}
/**
* Gets dir cache for the repo. Locked if not inCore.
*
* @return the result dir cache
* @throws IOException
* in case the dir cache cannot be read
*/
public DirCache getLockedDirCache() throws IOException {
if (dirCache == null) {
implicitDirCache = true;
if (inCore) {
dirCache = DirCache.newInCore();
} else {
dirCache = nonNullRepo().lockDirCache();
}
}
if (builder == null) {
builder = dirCache.builder();
}
return dirCache;
}
/**
* Creates a {@link DirCacheBuildIterator} for the builder of this
* {@link WorkTreeUpdater}.
*
* @return the {@link DirCacheBuildIterator}
*/
public DirCacheBuildIterator createDirCacheBuildIterator() {
return new DirCacheBuildIterator(builder);
}
/**
* Writes the changes to the working tree (but not to the index).
*
* @param shouldCheckoutTheirs
* before committing the changes
* @throws IOException
* if any of the writes fail
*/
public void writeWorkTreeChanges(boolean shouldCheckoutTheirs)
throws IOException {
handleDeletedFiles();
if (inCore) {
builder.finish();
return;
}
if (shouldCheckoutTheirs) {
// No problem found. The only thing left to be done is to
// check out all files from "theirs" which have been selected to
// go into the new index.
checkout();
}
// All content operations are successfully done. If we can now write
// the
// new index we are on quite safe ground. Even if the checkout of
// files coming from "theirs" fails the user can work around such
// failures by checking out the index again.
if (!builder.commit()) {
revertModifiedFiles();
throw new IndexWriteException();
}
}
/**
* Writes the changes to the index.
*
* @return the {@link Result} of the operation.
* @throws IOException
* if any of the writes fail
*/
public Result writeIndexChanges() throws IOException {
result.treeId = getLockedDirCache().writeTree(inserter);
indexChangesWritten = true;
return result;
}
/**
* Adds a {@link DirCacheEntry} for direct checkout and remembers its
* {@link CheckoutMetadata}.
*
* @param path
* of the entry
* @param entry
* to add
* @param cleanupStreamType
* to use for the cleanup metadata
* @param cleanupSmudgeCommand
* to use for the cleanup metadata
* @param checkoutStreamType
* to use for the checkout metadata
* @param checkoutSmudgeCommand
* to use for the checkout metadata
*/
public void addToCheckout(String path, DirCacheEntry entry,
EolStreamType cleanupStreamType, String cleanupSmudgeCommand,
EolStreamType checkoutStreamType,
String checkoutSmudgeCommand) {
if (entry != null) {
// In some cases, we just want to add the metadata.
toBeCheckedOut.put(path, entry);
}
addCheckoutMetadata(cleanupMetadataByPath, path, cleanupStreamType,
cleanupSmudgeCommand);
addCheckoutMetadata(checkoutMetadataByPath, path,
checkoutStreamType, checkoutSmudgeCommand);
}
/**
* Gets a map which maps the paths of files which have to be checked out
* because the operation created new fully-merged content for this file
* into the index.
* <p>
* This means: the operation wrote a new stage 0 entry for this path.
* </p>
*
* @return the map
*/
public Map<String, DirCacheEntry> getToBeCheckedOut() {
return toBeCheckedOut;
}
/**
* Remembers the given file to be deleted.
* <p>
* Note the actual deletion is only done in
* {@link #writeWorkTreeChanges}.
*
* @param path
* of the file to be deleted
* @param file
* to be deleted
* @param streamType
* to use for cleanup metadata
* @param smudgeCommand
* to use for cleanup metadata
*/
public void deleteFile(String path, File file, EolStreamType streamType,
String smudgeCommand) {
toBeDeleted.put(path, file);
if (file != null && file.isFile()) {
addCheckoutMetadata(cleanupMetadataByPath, path, streamType,
smudgeCommand);
}
}
/**
* Remembers the {@link CheckoutMetadata} for the given path; it may be
* needed in {@link #checkout()} or in {@link #revertModifiedFiles()}.
*
* @param map
* to add the metadata to
* @param path
* of the current node
* @param streamType
* to use for the metadata
* @param smudgeCommand
* to use for the metadata
*/
private void addCheckoutMetadata(Map<String, CheckoutMetadata> map,
String path, EolStreamType streamType, String smudgeCommand) {
if (inCore || map == null) {
return;
}
map.put(path, new CheckoutMetadata(streamType, smudgeCommand));
}
/**
* Detects if CRLF conversion has been configured.
* <p>
* </p>
* See {@link EolStreamTypeUtil#detectStreamType} for more info.
*
* @param attributes
* of the file for which the type is to be detected
* @return the detected type
*/
public EolStreamType detectCheckoutStreamType(Attributes attributes) {
if (inCore) {
return null;
}
return EolStreamTypeUtil.detectStreamType(OperationType.CHECKOUT_OP,
workingTreeOptions, attributes);
}
private void handleDeletedFiles() {
// Iterate in reverse so that "folder/file" is deleted before
// "folder". Otherwise, this could result in a failing path because
// of a non-empty directory, for which delete() would fail.
for (String path : toBeDeleted.descendingKeySet()) {
File file = inCore ? null : toBeDeleted.get(path);
if (file != null && !file.delete()) {
if (!file.isDirectory()) {
result.failedToDelete.add(path);
}
}
}
}
/**
* Marks the given path as modified in the operation.
*
* @param path
* to mark as modified
*/
public void markAsModified(String path) {
result.modifiedFiles.add(path);
}
/**
* Gets the list of files which were modified in this operation.
*
* @return the list
*/
public List<String> getModifiedFiles() {
return result.modifiedFiles;
}
private void checkout() throws NoWorkTreeException, IOException {
for (Map.Entry<String, DirCacheEntry> entry : toBeCheckedOut
.entrySet()) {
DirCacheEntry dirCacheEntry = entry.getValue();
if (dirCacheEntry.getFileMode() == FileMode.GITLINK) {
new File(nonNullRepo().getWorkTree(), entry.getKey())
.mkdirs();
} else {
DirCacheCheckout.checkoutEntry(repo, dirCacheEntry, reader,
false, checkoutMetadataByPath.get(entry.getKey()),
workingTreeOptions);
result.modifiedFiles.add(entry.getKey());
}
}
}
/**
* Reverts any uncommitted changes in the worktree. We know that for all
* modified files the old content was in the old index and the index
* contained only stage 0. In case of inCore operation just clear the
* history of modified files.
*
* @throws IOException
* in case the cleaning up failed
*/
public void revertModifiedFiles() throws IOException {
if (inCore) {
result.modifiedFiles.clear();
return;
}
if (indexChangesWritten) {
return;
}
for (String path : result.modifiedFiles) {
DirCacheEntry entry = dirCache.getEntry(path);
if (entry != null) {
DirCacheCheckout.checkoutEntry(repo, entry, reader, false,
cleanupMetadataByPath.get(path),
workingTreeOptions);
}
}
}
@Override
public void close() throws IOException {
if (implicitDirCache) {
dirCache.unlock();
}
}
/**
* Updates the file in the checkout with the given content.
*
* @param inputStream
* the content to be updated
* @param streamType
* for parsing the content
* @param smudgeCommand
* for formatting the content
* @param path
* of the file to be updated
* @param file
* to be updated
* @throws IOException
* if the file cannot be updated
*/
public void updateFileWithContent(StreamSupplier inputStream,
EolStreamType streamType, String smudgeCommand, String path,
File file) throws IOException {
if (inCore) {
return;
}
CheckoutMetadata metadata = new CheckoutMetadata(streamType,
smudgeCommand);
try (OutputStream outputStream = new FileOutputStream(file)) {
DirCacheCheckout.getContent(repo, path, metadata, inputStream,
workingTreeOptions, outputStream);
}
}
/**
* Creates a path with the given content, and adds it to the specified
* stage to the index builder.
*
* @param input
* the content to be updated
* @param path
* of the file to be updated
* @param fileMode
* of the modified file
* @param entryStage
* of the new entry
* @param lastModified
* instant of the modified file
* @param len
* of the content
* @param lfsAttribute
* for checking for LFS enablement
* @return the entry which was added to the index
* @throws IOException
* if inserting the content fails
*/
public DirCacheEntry insertToIndex(InputStream input, byte[] path,
FileMode fileMode, int entryStage, Instant lastModified,
int len, Attribute lfsAttribute) throws IOException {
return addExistingToIndex(insertResult(input, lfsAttribute, len),
path, fileMode, entryStage, lastModified, len);
}
/**
* Adds a path with the specified stage to the index builder.
*
* @param objectId
* of the existing object to add
* @param path
* of the modified file
* @param fileMode
* of the modified file
* @param entryStage
* of the new entry
* @param lastModified
* instant of the modified file
* @param len
* of the modified file content
* @return the entry which was added to the index
*/
public DirCacheEntry addExistingToIndex(ObjectId objectId, byte[] path,
FileMode fileMode, int entryStage, Instant lastModified,
int len) {
DirCacheEntry dce = new DirCacheEntry(path, entryStage);
dce.setFileMode(fileMode);
if (lastModified != null) {
dce.setLastModified(lastModified);
}
dce.setLength(inCore ? 0 : len);
dce.setObjectId(objectId);
builder.add(dce);
return dce;
}
private ObjectId insertResult(InputStream input, Attribute lfsAttribute,
long length) throws IOException {
try (LfsInputStream is = LfsFactory.getInstance()
.applyCleanFilter(repo, input, length, lfsAttribute)) {
return inserter.insert(OBJ_BLOB, is.getLength(), is);
}
}
/**
* Gets the non-null repository instance of this
* {@link WorkTreeUpdater}.
*
* @return non-null repository instance
* @throws NullPointerException
* if the handler was constructed without a repository.
*/
@NonNull
private Repository nonNullRepo() throws NullPointerException {
return Objects.requireNonNull(repo,
() -> JGitText.get().repositoryIsRequired);
}
}
/**
* If the merge fails (means: not stopped because of unresolved conflicts)
* this enum is used to explain why it failed

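Since WorkTreeUpdater is now a protected member of ResolveMerger, only merger subclasses can reach it. A rough usage sketch under that assumption (repo is a non-bare Repository; the staging calls are elided):

try (WorkTreeUpdater updater = WorkTreeUpdater.createWorkTreeUpdater(repo, null)) {
    updater.getLockedDirCache();        // locks the repository index
    // ... stage entries via insertToIndex()/addExistingToIndex() ...
    updater.writeWorkTreeChanges(true); // check out "theirs", commit the builder
    WorkTreeUpdater.Result result = updater.writeIndexChanges();
    List<String> touched = result.getModifiedFiles();
}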
View File

@@ -1,635 +0,0 @@
/*
* Copyright (C) 2022, Google Inc. and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.merge;
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.time.Instant;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import org.eclipse.jgit.annotations.NonNull;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.attributes.Attribute;
import org.eclipse.jgit.attributes.Attributes;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheBuildIterator;
import org.eclipse.jgit.dircache.DirCacheBuilder;
import org.eclipse.jgit.dircache.DirCacheCheckout;
import org.eclipse.jgit.dircache.DirCacheCheckout.StreamSupplier;
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.errors.IndexWriteException;
import org.eclipse.jgit.errors.NoWorkTreeException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
import org.eclipse.jgit.treewalk.WorkingTreeOptions;
import org.eclipse.jgit.util.LfsFactory;
import org.eclipse.jgit.util.LfsFactory.LfsInputStream;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
/**
* Handles work tree updates on both the checkout and the index.
* <p>
* You should use a single instance for all of your file changes. In case of an
* error, make sure your instance is released, and initiate a new one if
* necessary.
*/
class WorkTreeUpdater implements Closeable {
/**
* The result of writing the index changes.
*/
public static class Result {
private final List<String> modifiedFiles = new LinkedList<>();
private final List<String> failedToDelete = new LinkedList<>();
private ObjectId treeId = null;
/**
* @return Modified tree ID if any, or null otherwise.
*/
public ObjectId getTreeId() {
return treeId;
}
/**
* @return Files that couldn't be deleted.
*/
public List<String> getFailedToDelete() {
return failedToDelete;
}
/**
* @return Files modified during this operation.
*/
public List<String> getModifiedFiles() {
return modifiedFiles;
}
}
Result result = new Result();
/**
* The repository this handler operates on.
*/
@Nullable
private final Repository repo;
/**
* Set to true if this operation should work in-memory. The repo's dircache
* and workingtree are not touched by this method. Eventually needed files
* are created as temporary files and a new empty, in-memory dircache will
* be used instead of the repo's one. Often used for bare repos where the repo
* doesn't even have a workingtree and dircache.
*/
private final boolean inCore;
private final ObjectInserter inserter;
private final ObjectReader reader;
private DirCache dirCache;
private boolean implicitDirCache = false;
/**
* Builder to update the dir cache during this operation.
*/
private DirCacheBuilder builder;
/**
* The {@link WorkingTreeOptions} are needed to determine line endings for
* affected files.
*/
private WorkingTreeOptions workingTreeOptions;
/**
* The size limit (bytes) which controls a file to be stored in {@code Heap}
* or {@code LocalFile} during the operation.
*/
private int inCoreFileSizeLimit;
/**
* If the operation has nothing to do for a file but check it out at the end
* of the operation, it can be added here.
*/
private final Map<String, DirCacheEntry> toBeCheckedOut = new HashMap<>();
/**
* Files in this list will be deleted from the local copy at the end of the
* operation.
*/
private final TreeMap<String, File> toBeDeleted = new TreeMap<>();
/**
* Keeps {@link CheckoutMetadata} for {@link #checkout()}.
*/
private Map<String, CheckoutMetadata> checkoutMetadataByPath;
/**
* Keeps {@link CheckoutMetadata} for {@link #revertModifiedFiles()}.
*/
private Map<String, CheckoutMetadata> cleanupMetadataByPath;
/**
* Whether the changes were successfully written.
*/
private boolean indexChangesWritten;
/**
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, use the default
* repository one
*/
private WorkTreeUpdater(Repository repo, DirCache dirCache) {
this.repo = repo;
this.dirCache = dirCache;
this.inCore = false;
this.inserter = repo.newObjectInserter();
this.reader = inserter.newReader();
Config config = repo.getConfig();
this.workingTreeOptions = config.get(WorkingTreeOptions.KEY);
this.inCoreFileSizeLimit = getInCoreFileSizeLimit(config);
this.checkoutMetadataByPath = new HashMap<>();
this.cleanupMetadataByPath = new HashMap<>();
}
/**
* Creates a new {@link WorkTreeUpdater} for the given repository.
*
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, use the default
* repository one
* @return the {@link WorkTreeUpdater}.
*/
public static WorkTreeUpdater createWorkTreeUpdater(Repository repo,
DirCache dirCache) {
return new WorkTreeUpdater(repo, dirCache);
}
/**
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, creates a new
* one
* @param oi
* to use for writing the modified objects with.
*/
private WorkTreeUpdater(Repository repo, DirCache dirCache,
ObjectInserter oi) {
this.repo = repo;
this.dirCache = dirCache;
this.inserter = oi;
this.inCore = true;
this.reader = oi.newReader();
if (repo != null) {
this.inCoreFileSizeLimit = getInCoreFileSizeLimit(repo.getConfig());
}
}
/**
* Creates a new {@link WorkTreeUpdater} that works in memory only.
*
* @param repo
* the {@link Repository}.
* @param dirCache
* if set, use the provided dir cache. Otherwise, creates a new
* one
* @param oi
* to use for writing the modified objects with.
* @return the {@link WorkTreeUpdater}
*/
public static WorkTreeUpdater createInCoreWorkTreeUpdater(Repository repo,
DirCache dirCache, ObjectInserter oi) {
return new WorkTreeUpdater(repo, dirCache, oi);
}
private static int getInCoreFileSizeLimit(Config config) {
return config.getInt(ConfigConstants.CONFIG_MERGE_SECTION,
ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 10 << 20);
}
/**
* Gets the size limit for in-core files in this config.
*
* @return the size
*/
public int getInCoreFileSizeLimit() {
return inCoreFileSizeLimit;
}
/**
* Gets dir cache for the repo. Locked if not inCore.
*
* @return the result dir cache
* @throws IOException
* in case the dir cache cannot be read
*/
public DirCache getLockedDirCache() throws IOException {
if (dirCache == null) {
implicitDirCache = true;
if (inCore) {
dirCache = DirCache.newInCore();
} else {
dirCache = nonNullRepo().lockDirCache();
}
}
if (builder == null) {
builder = dirCache.builder();
}
return dirCache;
}
/**
* Creates a {@link DirCacheBuildIterator} for the builder of this
* {@link WorkTreeUpdater}.
*
* @return the {@link DirCacheBuildIterator}
*/
public DirCacheBuildIterator createDirCacheBuildIterator() {
return new DirCacheBuildIterator(builder);
}
/**
* Writes the changes to the working tree (but not to the index).
*
* @param shouldCheckoutTheirs
* before committing the changes
* @throws IOException
* if any of the writes fail
*/
public void writeWorkTreeChanges(boolean shouldCheckoutTheirs)
throws IOException {
handleDeletedFiles();
if (inCore) {
builder.finish();
return;
}
if (shouldCheckoutTheirs) {
// No problem found. The only thing left to be done is to
// check out all files from "theirs" which have been selected to
// go into the new index.
checkout();
}
// All content operations are successfully done. If we can now write the
// new index we are on quite safe ground. Even if the checkout of
// files coming from "theirs" fails the user can work around such
// failures by checking out the index again.
if (!builder.commit()) {
revertModifiedFiles();
throw new IndexWriteException();
}
}
/**
* Writes the changes to the index.
*
* @return the {@link Result} of the operation.
* @throws IOException
* if any of the writes fail
*/
public Result writeIndexChanges() throws IOException {
result.treeId = getLockedDirCache().writeTree(inserter);
indexChangesWritten = true;
return result;
}
/**
* Adds a {@link DirCacheEntry} for direct checkout and remembers its
* {@link CheckoutMetadata}.
*
* @param path
* of the entry
* @param entry
* to add
* @param cleanupStreamType
* to use for the cleanup metadata
* @param cleanupSmudgeCommand
* to use for the cleanup metadata
* @param checkoutStreamType
* to use for the checkout metadata
* @param checkoutSmudgeCommand
* to use for the checkout metadata
*/
public void addToCheckout(String path, DirCacheEntry entry,
EolStreamType cleanupStreamType, String cleanupSmudgeCommand,
EolStreamType checkoutStreamType, String checkoutSmudgeCommand) {
if (entry != null) {
// In some cases, we just want to add the metadata.
toBeCheckedOut.put(path, entry);
}
addCheckoutMetadata(cleanupMetadataByPath, path, cleanupStreamType,
cleanupSmudgeCommand);
addCheckoutMetadata(checkoutMetadataByPath, path, checkoutStreamType,
checkoutSmudgeCommand);
}
/**
* Gets a map which maps the paths of files which have to be checked out
* because the operation created new fully-merged content for this file into
* the index.
* <p>
* This means: the operation wrote a new stage 0 entry for this path.
* </p>
*
* @return the map
*/
public Map<String, DirCacheEntry> getToBeCheckedOut() {
return toBeCheckedOut;
}
/**
* Remembers the given file to be deleted.
* <p>
* Note the actual deletion is only done in {@link #writeWorkTreeChanges}.
*
* @param path
* of the file to be deleted
* @param file
* to be deleted
* @param streamType
* to use for cleanup metadata
* @param smudgeCommand
* to use for cleanup metadata
*/
public void deleteFile(String path, File file, EolStreamType streamType,
String smudgeCommand) {
toBeDeleted.put(path, file);
if (file != null && file.isFile()) {
addCheckoutMetadata(cleanupMetadataByPath, path, streamType,
smudgeCommand);
}
}
/**
* Remembers the {@link CheckoutMetadata} for the given path; it may be
* needed in {@link #checkout()} or in {@link #revertModifiedFiles()}.
*
* @param map
* to add the metadata to
* @param path
* of the current node
* @param streamType
* to use for the metadata
* @param smudgeCommand
* to use for the metadata
*/
private void addCheckoutMetadata(Map<String, CheckoutMetadata> map,
String path, EolStreamType streamType, String smudgeCommand) {
if (inCore || map == null) {
return;
}
map.put(path, new CheckoutMetadata(streamType, smudgeCommand));
}
/**
* Detects if CRLF conversion has been configured.
* <p>
* </p>
* See {@link EolStreamTypeUtil#detectStreamType} for more info.
*
* @param attributes
* of the file for which the type is to be detected
* @return the detected type
*/
public EolStreamType detectCheckoutStreamType(Attributes attributes) {
if (inCore) {
return null;
}
return EolStreamTypeUtil.detectStreamType(OperationType.CHECKOUT_OP,
workingTreeOptions, attributes);
}
private void handleDeletedFiles() {
// Iterate in reverse so that "folder/file" is deleted before
// "folder". Otherwise, this could result in a failing path because
// of a non-empty directory, for which delete() would fail.
for (String path : toBeDeleted.descendingKeySet()) {
File file = inCore ? null : toBeDeleted.get(path);
if (file != null && !file.delete()) {
if (!file.isDirectory()) {
result.failedToDelete.add(path);
}
}
}
}
/**
* Marks the given path as modified in the operation.
*
* @param path
* to mark as modified
*/
public void markAsModified(String path) {
result.modifiedFiles.add(path);
}
/**
* Gets the list of files which were modified in this operation.
*
* @return the list
*/
public List<String> getModifiedFiles() {
return result.modifiedFiles;
}
private void checkout() throws NoWorkTreeException, IOException {
for (Map.Entry<String, DirCacheEntry> entry : toBeCheckedOut
.entrySet()) {
DirCacheEntry dirCacheEntry = entry.getValue();
if (dirCacheEntry.getFileMode() == FileMode.GITLINK) {
new File(nonNullRepo().getWorkTree(), entry.getKey())
.mkdirs();
} else {
DirCacheCheckout.checkoutEntry(repo, dirCacheEntry, reader,
false, checkoutMetadataByPath.get(entry.getKey()),
workingTreeOptions);
result.modifiedFiles.add(entry.getKey());
}
}
}
/**
* Reverts any uncommitted changes in the worktree. We know that for all
* modified files the old content was in the old index and the index
* contained only stage 0. In case of inCore operation just clear the
* history of modified files.
*
* @throws IOException
* in case the cleaning up failed
*/
public void revertModifiedFiles() throws IOException {
if (inCore) {
result.modifiedFiles.clear();
return;
}
if (indexChangesWritten) {
return;
}
for (String path : result.modifiedFiles) {
DirCacheEntry entry = dirCache.getEntry(path);
if (entry != null) {
DirCacheCheckout.checkoutEntry(repo, entry, reader, false,
cleanupMetadataByPath.get(path), workingTreeOptions);
}
}
}
@Override
public void close() throws IOException {
if (implicitDirCache) {
dirCache.unlock();
}
}
/**
* Updates the file in the checkout with the given content.
*
* @param inputStream
* the content to be updated
* @param streamType
* for parsing the content
* @param smudgeCommand
* for formatting the content
* @param path
* of the file to be updated
* @param file
* to be updated
* @throws IOException
* if the file cannot be updated
*/
public void updateFileWithContent(StreamSupplier inputStream,
EolStreamType streamType, String smudgeCommand, String path,
File file) throws IOException {
if (inCore) {
return;
}
CheckoutMetadata metadata = new CheckoutMetadata(streamType,
smudgeCommand);
try (OutputStream outputStream = new FileOutputStream(file)) {
DirCacheCheckout.getContent(repo, path, metadata,
inputStream, workingTreeOptions, outputStream);
}
}
/**
* Creates a path with the given content, and adds it to the specified stage
* to the index builder.
*
* @param input
* the content to be updated
* @param path
* of the file to be updated
* @param fileMode
* of the modified file
* @param entryStage
* of the new entry
* @param lastModified
* instant of the modified file
* @param len
* of the content
* @param lfsAttribute
* for checking for LFS enablement
* @return the entry which was added to the index
* @throws IOException
* if inserting the content fails
*/
public DirCacheEntry insertToIndex(InputStream input,
byte[] path, FileMode fileMode, int entryStage,
Instant lastModified, int len, Attribute lfsAttribute)
throws IOException {
return addExistingToIndex(insertResult(input, lfsAttribute, len), path,
fileMode, entryStage, lastModified, len);
}
/**
* Adds a path with the specified stage to the index builder.
*
* @param objectId
* of the existing object to add
* @param path
* of the modified file
* @param fileMode
* of the modified file
* @param entryStage
* of the new entry
* @param lastModified
* instant of the modified file
* @param len
* of the modified file content
* @return the entry which was added to the index
*/
public DirCacheEntry addExistingToIndex(ObjectId objectId, byte[] path,
FileMode fileMode, int entryStage, Instant lastModified, int len) {
DirCacheEntry dce = new DirCacheEntry(path, entryStage);
dce.setFileMode(fileMode);
if (lastModified != null) {
dce.setLastModified(lastModified);
}
dce.setLength(inCore ? 0 : len);
dce.setObjectId(objectId);
builder.add(dce);
return dce;
}
private ObjectId insertResult(InputStream input,
Attribute lfsAttribute, long length) throws IOException {
try (LfsInputStream is = LfsFactory.getInstance().applyCleanFilter(repo,
input, length,
lfsAttribute)) {
return inserter.insert(OBJ_BLOB, is.getLength(), is);
}
}
/**
* Gets the non-null repository instance of this {@link WorkTreeUpdater}.
*
* @return non-null repository instance
* @throws NullPointerException
* if the handler was constructed without a repository.
*/
@NonNull
private Repository nonNullRepo() throws NullPointerException {
return Objects.requireNonNull(repo,
() -> JGitText.get().repositoryIsRequired);
}
}

View File

@@ -299,14 +299,19 @@ public static final class FileStoreAttributes {
static {
// Shut down the SAVE_RUNNER on System.exit()
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
SAVE_RUNNER.shutdownNow();
SAVE_RUNNER.awaitTermination(100, TimeUnit.MILLISECONDS);
} catch (Exception e) {
// Ignore; we're shutting down
}
}));
try {
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
SAVE_RUNNER.shutdownNow();
SAVE_RUNNER.awaitTermination(100,
TimeUnit.MILLISECONDS);
} catch (Exception e) {
// Ignore; we're shutting down
}
}));
} catch (IllegalStateException e) {
// ignore - may fail if shutdown is already in progress
}
}
/**

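Runtime.addShutdownHook throws IllegalStateException("Shutdown in progress") once the JVM has begun exiting, so any registration that can race shutdown needs the same guard; the generic shape (not JGit API):

static void addShutdownHookSafely(Runnable task) {
    try {
        Runtime.getRuntime().addShutdownHook(new Thread(task));
    } catch (IllegalStateException e) {
        // shutdown already in progress; skip, the process is exiting anyway
    }
}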
View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2017, Google Inc. and others
* Copyright (C) 2022, Matthias Sohn <matthias.sohn@sap.com> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@@ -7,99 +7,96 @@
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.sha1;
import static java.lang.Integer.lowestOneBit;
import static java.lang.Integer.numberOfTrailingZeros;
import static java.lang.Integer.rotateLeft;
import static java.lang.Integer.rotateRight;
import java.io.IOException;
import java.security.MessageDigest;
import java.text.MessageFormat;
import java.util.Arrays;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.MutableObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.util.NB;
import org.eclipse.jgit.util.SystemReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Pure Java implementation of SHA-1 from FIPS 180-1 / RFC 3174.
*
* SHA-1 interface from FIPS 180-1 / RFC 3174 with optional collision detection.
* Some implementations may not support collision detection.
* <p>
* See <a href="https://tools.ietf.org/html/rfc3174">RFC 3174</a>.
* <p>
* Unlike MessageDigest, this implementation includes the algorithm used by
* {@code sha1dc} to detect cryptanalytic collision attacks against SHA-1, such
* as the one used by <a href="https://shattered.it/">SHAttered</a>. See
* <a href="https://github.com/cr-marcstevens/sha1collisiondetection">
* sha1collisiondetection</a> for more information.
* <p>
* When detectCollision is true (default), this implementation throws
* {@link org.eclipse.jgit.util.sha1.Sha1CollisionException} from any digest
* method if a potential collision was detected.
*
* @since 4.7
*/
public class SHA1 {
private static final Logger LOG = LoggerFactory.getLogger(SHA1.class);
private static final boolean DETECT_COLLISIONS;
public abstract class SHA1 {
/**
* SHA1 implementations available in JGit
*/
public enum Sha1Implementation {
/**
* {@link SHA1Java} implemented in Java, supports collision detection.
*/
JAVA(SHA1Java.class),
/**
* Native implementation based on JDK's {@link MessageDigest}.
*/
JDKNATIVE(SHA1Native.class);
static {
SystemReader sr = SystemReader.getInstance();
String v = sr.getProperty("org.eclipse.jgit.util.sha1.detectCollision"); //$NON-NLS-1$
DETECT_COLLISIONS = v != null ? Boolean.parseBoolean(v) : true;
private final String implClassName;
private Sha1Implementation(Class implClass) {
this.implClassName = implClass.getName();
}
@Override
public String toString() {
return implClassName;
}
}
private static final Sha1Implementation SHA1_IMPLEMENTATION = fromConfig();
private static Sha1Implementation fromConfig() {
try {
return SystemReader.getInstance().getUserConfig().getEnum(
ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.SHA1_IMPLEMENTATION,
Sha1Implementation.JAVA);
} catch (ConfigInvalidException | IOException e) {
return Sha1Implementation.JAVA;
}
}
private static Sha1Implementation getImplementation() {
String fromSystemProperty = System
.getProperty("org.eclipse.jgit.util.sha1.implementation"); //$NON-NLS-1$
if (fromSystemProperty == null) {
return SHA1_IMPLEMENTATION;
}
if (fromSystemProperty
.equalsIgnoreCase(Sha1Implementation.JAVA.name())) {
return Sha1Implementation.JAVA;
}
if (fromSystemProperty
.equalsIgnoreCase(Sha1Implementation.JDKNATIVE.name())) {
return Sha1Implementation.JDKNATIVE;
}
return SHA1_IMPLEMENTATION;
}
/**
* Create a new context to compute a SHA-1 hash of data.
* <p>
* If {@code core.sha1Implementation = jdkNative} in the user level global
* git configuration or the system property
* {@code org.eclipse.jgit.util.sha1.implementation = jdkNative} it will
* create an object using the implementation in the JDK. If both are set the
* system property takes precedence. Otherwise the pure Java implementation
* will be used which supports collision detection but is slower.
*
* @return a new context to compute a SHA-1 hash of data.
*/
public static SHA1 newInstance() {
return new SHA1();
}
public static SHA1 newInstance() {
if (getImplementation() == Sha1Implementation.JDKNATIVE) {
return new SHA1Native();
}
return new SHA1Java();
}
private final State h = new State();
private final int[] w = new int[80];
/** Buffer to accumulate partial blocks to 64 byte alignment. */
private final byte[] buffer = new byte[64];
/** Total number of bytes in the message. */
private long length;
private boolean detectCollision = DETECT_COLLISIONS;
private boolean foundCollision;
private final int[] w2 = new int[80];
private final State state58 = new State();
private final State state65 = new State();
private final State hIn = new State();
private final State hTmp = new State();
private SHA1() {
h.init();
}
/**
 * Enable likely collision detection.
 * <p>
 * Default is {@code true}.
 * <p>
 * May also be set by system property:
 * {@code -Dorg.eclipse.jgit.util.sha1.detectCollision=true}.
 *
 * @param detect
 *            a boolean.
 * @return {@code this}
 */
public SHA1 setDetectCollision(boolean detect) {
detectCollision = detect;
return this;
}
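For orientation, a minimal usage sketch of this API: one instance per message, with reset() required before reuse because the digest methods finish the hash.

import java.nio.charset.StandardCharsets;

import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.util.sha1.SHA1;

public class Sha1UsageDemo {
    public static void main(String[] args) {
        byte[] data = "hello".getBytes(StandardCharsets.UTF_8);

        SHA1 sha1 = SHA1.newInstance();
        sha1.update(data);
        ObjectId id = sha1.toObjectId();
        System.out.println(id.name()); // 40-character lower-case hex

        sha1.reset(); // required before hashing another message
        sha1.update(data, 0, data.length);
        byte[] raw = sha1.digest(); // the same hash as 20 raw bytes
        System.out.println(raw.length);
    }
}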
@ -107,14 +104,7 @@ public SHA1 setDetectCollision(boolean detect) {
/**
*
* @param b a byte.
*/
public void update(byte b) {
int bufferLen = (int) (length & 63);
length++;
buffer[bufferLen] = b;
if (bufferLen == 63) {
compress(buffer, 0);
}
}
public abstract void update(byte b);
@ -122,9 +112,7 @@ public void update(byte b) {
/**
 * Update the digest computation by adding bytes to the message.
* @param in
* input array of bytes.
*/
public void update(byte[] in) {
update(in, 0, in.length);
}
public abstract void update(byte[] in);
@ -136,344 +124,7 @@ public void update(byte[] in) {
/**
 * Update the digest computation by adding bytes to the message.
* @param len
* number of bytes to hash.
*/
public void update(byte[] in, int p, int len) {
// SHA-1 compress can only process whole 64 byte blocks.
// Hold partial updates in buffer, whose length is the low bits.
int bufferLen = (int) (length & 63);
length += len;
if (bufferLen > 0) {
int n = Math.min(64 - bufferLen, len);
System.arraycopy(in, p, buffer, bufferLen, n);
p += n;
len -= n;
if (bufferLen + n < 64) {
return;
}
compress(buffer, 0);
}
while (len >= 64) {
compress(in, p);
p += 64;
len -= 64;
}
if (len > 0) {
System.arraycopy(in, p, buffer, 0, len);
}
}
public abstract void update(byte[] in, int p, int len);
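Because only complete 64-byte blocks are compressed and partial input is buffered, splitting a message across update() calls must not change the digest. A small sketch checking that property (run with -ea to enable the assertion; the split points are arbitrary):

import org.eclipse.jgit.util.sha1.SHA1;

public class Sha1ChunkingDemo {
    public static void main(String[] args) {
        byte[] msg = new byte[150]; // two full blocks plus 22 bytes
        for (int i = 0; i < msg.length; i++) {
            msg[i] = (byte) i;
        }

        SHA1 oneShot = SHA1.newInstance();
        oneShot.update(msg);

        SHA1 chunked = SHA1.newInstance();
        chunked.update(msg, 0, 10);  // buffered: less than one block
        chunked.update(msg, 10, 70); // fills the buffer, compresses, buffers the rest
        chunked.update(msg, 80, 70); // continues from the buffered tail
        assert oneShot.toObjectId().equals(chunked.toObjectId());
    }
}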
private void compress(byte[] block, int p) {
initBlock(block, p);
int ubcDvMask = detectCollision ? UbcCheck.check(w) : 0;
compress();
while (ubcDvMask != 0) {
int b = numberOfTrailingZeros(lowestOneBit(ubcDvMask));
UbcCheck.DvInfo dv = UbcCheck.DV[b];
for (int i = 0; i < 80; i++) {
w2[i] = w[i] ^ dv.dm[i];
}
recompress(dv.testt);
if (eq(hTmp, h)) {
foundCollision = true;
break;
}
ubcDvMask &= ~(1 << b);
}
}
private void initBlock(byte[] block, int p) {
for (int t = 0; t < 16; t++) {
w[t] = NB.decodeInt32(block, p + (t << 2));
}
// RFC 3174 6.1.b, extend state vector to 80 words.
for (int t = 16; t < 80; t++) {
int x = w[t - 3] ^ w[t - 8] ^ w[t - 14] ^ w[t - 16];
w[t] = rotateLeft(x, 1); // S^1(...)
}
}
private void compress() {
// Method 1 from RFC 3174 section 6.1.
// Method 2 (circular queue of 16 words) is slower.
int a = h.a, b = h.b, c = h.c, d = h.d, e = h.e;
// @formatter:off
e += s1(a, b, c, d,w[ 0]); b = rotateLeft( b, 30);
d += s1(e, a, b, c,w[ 1]); a = rotateLeft( a, 30);
c += s1(d, e, a, b,w[ 2]); e = rotateLeft( e, 30);
b += s1(c, d, e, a,w[ 3]); d = rotateLeft( d, 30);
a += s1(b, c, d, e,w[ 4]); c = rotateLeft( c, 30);
e += s1(a, b, c, d,w[ 5]); b = rotateLeft( b, 30);
d += s1(e, a, b, c,w[ 6]); a = rotateLeft( a, 30);
c += s1(d, e, a, b,w[ 7]); e = rotateLeft( e, 30);
b += s1(c, d, e, a,w[ 8]); d = rotateLeft( d, 30);
a += s1(b, c, d, e,w[ 9]); c = rotateLeft( c, 30);
e += s1(a, b, c, d,w[ 10]); b = rotateLeft( b, 30);
d += s1(e, a, b, c,w[ 11]); a = rotateLeft( a, 30);
c += s1(d, e, a, b,w[ 12]); e = rotateLeft( e, 30);
b += s1(c, d, e, a,w[ 13]); d = rotateLeft( d, 30);
a += s1(b, c, d, e,w[ 14]); c = rotateLeft( c, 30);
e += s1(a, b, c, d,w[ 15]); b = rotateLeft( b, 30);
d += s1(e, a, b, c,w[ 16]); a = rotateLeft( a, 30);
c += s1(d, e, a, b,w[ 17]); e = rotateLeft( e, 30);
b += s1(c, d, e, a,w[ 18]); d = rotateLeft( d, 30);
a += s1(b, c, d, e,w[ 19]); c = rotateLeft( c, 30);
e += s2(a, b, c, d,w[ 20]); b = rotateLeft( b, 30);
d += s2(e, a, b, c,w[ 21]); a = rotateLeft( a, 30);
c += s2(d, e, a, b,w[ 22]); e = rotateLeft( e, 30);
b += s2(c, d, e, a,w[ 23]); d = rotateLeft( d, 30);
a += s2(b, c, d, e,w[ 24]); c = rotateLeft( c, 30);
e += s2(a, b, c, d,w[ 25]); b = rotateLeft( b, 30);
d += s2(e, a, b, c,w[ 26]); a = rotateLeft( a, 30);
c += s2(d, e, a, b,w[ 27]); e = rotateLeft( e, 30);
b += s2(c, d, e, a,w[ 28]); d = rotateLeft( d, 30);
a += s2(b, c, d, e,w[ 29]); c = rotateLeft( c, 30);
e += s2(a, b, c, d,w[ 30]); b = rotateLeft( b, 30);
d += s2(e, a, b, c,w[ 31]); a = rotateLeft( a, 30);
c += s2(d, e, a, b,w[ 32]); e = rotateLeft( e, 30);
b += s2(c, d, e, a,w[ 33]); d = rotateLeft( d, 30);
a += s2(b, c, d, e,w[ 34]); c = rotateLeft( c, 30);
e += s2(a, b, c, d,w[ 35]); b = rotateLeft( b, 30);
d += s2(e, a, b, c,w[ 36]); a = rotateLeft( a, 30);
c += s2(d, e, a, b,w[ 37]); e = rotateLeft( e, 30);
b += s2(c, d, e, a,w[ 38]); d = rotateLeft( d, 30);
a += s2(b, c, d, e,w[ 39]); c = rotateLeft( c, 30);
e += s3(a, b, c, d,w[ 40]); b = rotateLeft( b, 30);
d += s3(e, a, b, c,w[ 41]); a = rotateLeft( a, 30);
c += s3(d, e, a, b,w[ 42]); e = rotateLeft( e, 30);
b += s3(c, d, e, a,w[ 43]); d = rotateLeft( d, 30);
a += s3(b, c, d, e,w[ 44]); c = rotateLeft( c, 30);
e += s3(a, b, c, d,w[ 45]); b = rotateLeft( b, 30);
d += s3(e, a, b, c,w[ 46]); a = rotateLeft( a, 30);
c += s3(d, e, a, b,w[ 47]); e = rotateLeft( e, 30);
b += s3(c, d, e, a,w[ 48]); d = rotateLeft( d, 30);
a += s3(b, c, d, e,w[ 49]); c = rotateLeft( c, 30);
e += s3(a, b, c, d,w[ 50]); b = rotateLeft( b, 30);
d += s3(e, a, b, c,w[ 51]); a = rotateLeft( a, 30);
c += s3(d, e, a, b,w[ 52]); e = rotateLeft( e, 30);
b += s3(c, d, e, a,w[ 53]); d = rotateLeft( d, 30);
a += s3(b, c, d, e,w[ 54]); c = rotateLeft( c, 30);
e += s3(a, b, c, d,w[ 55]); b = rotateLeft( b, 30);
d += s3(e, a, b, c,w[ 56]); a = rotateLeft( a, 30);
c += s3(d, e, a, b,w[ 57]); e = rotateLeft( e, 30);
state58.save(a, b, c, d, e);
b += s3(c, d, e, a,w[ 58]); d = rotateLeft( d, 30);
a += s3(b, c, d, e,w[ 59]); c = rotateLeft( c, 30);
e += s4(a, b, c, d,w[ 60]); b = rotateLeft( b, 30);
d += s4(e, a, b, c,w[ 61]); a = rotateLeft( a, 30);
c += s4(d, e, a, b,w[ 62]); e = rotateLeft( e, 30);
b += s4(c, d, e, a,w[ 63]); d = rotateLeft( d, 30);
a += s4(b, c, d, e,w[ 64]); c = rotateLeft( c, 30);
state65.save(a, b, c, d, e);
e += s4(a, b, c, d,w[ 65]); b = rotateLeft( b, 30);
d += s4(e, a, b, c,w[ 66]); a = rotateLeft( a, 30);
c += s4(d, e, a, b,w[ 67]); e = rotateLeft( e, 30);
b += s4(c, d, e, a,w[ 68]); d = rotateLeft( d, 30);
a += s4(b, c, d, e,w[ 69]); c = rotateLeft( c, 30);
e += s4(a, b, c, d,w[ 70]); b = rotateLeft( b, 30);
d += s4(e, a, b, c,w[ 71]); a = rotateLeft( a, 30);
c += s4(d, e, a, b,w[ 72]); e = rotateLeft( e, 30);
b += s4(c, d, e, a,w[ 73]); d = rotateLeft( d, 30);
a += s4(b, c, d, e,w[ 74]); c = rotateLeft( c, 30);
e += s4(a, b, c, d,w[ 75]); b = rotateLeft( b, 30);
d += s4(e, a, b, c,w[ 76]); a = rotateLeft( a, 30);
c += s4(d, e, a, b,w[ 77]); e = rotateLeft( e, 30);
b += s4(c, d, e, a,w[ 78]); d = rotateLeft( d, 30);
a += s4(b, c, d, e,w[ 79]); c = rotateLeft( c, 30);
// @formatter:on
h.save(h.a + a, h.b + b, h.c + c, h.d + d, h.e + e);
}
private void recompress(int t) {
State s;
switch (t) {
case 58:
s = state58;
break;
case 65:
s = state65;
break;
default:
throw new IllegalStateException();
}
int a = s.a, b = s.b, c = s.c, d = s.d, e = s.e;
// @formatter:off
if (t == 65) {
{ c = rotateRight( c, 30); a -= s4(b, c, d, e,w2[ 64]);}
{ d = rotateRight( d, 30); b -= s4(c, d, e, a,w2[ 63]);}
{ e = rotateRight( e, 30); c -= s4(d, e, a, b,w2[ 62]);}
{ a = rotateRight( a, 30); d -= s4(e, a, b, c,w2[ 61]);}
{ b = rotateRight( b, 30); e -= s4(a, b, c, d,w2[ 60]);}
{ c = rotateRight( c, 30); a -= s3(b, c, d, e,w2[ 59]);}
{ d = rotateRight( d, 30); b -= s3(c, d, e, a,w2[ 58]);}
}
{ e = rotateRight( e, 30); c -= s3(d, e, a, b,w2[ 57]);}
{ a = rotateRight( a, 30); d -= s3(e, a, b, c,w2[ 56]);}
{ b = rotateRight( b, 30); e -= s3(a, b, c, d,w2[ 55]);}
{ c = rotateRight( c, 30); a -= s3(b, c, d, e,w2[ 54]);}
{ d = rotateRight( d, 30); b -= s3(c, d, e, a,w2[ 53]);}
{ e = rotateRight( e, 30); c -= s3(d, e, a, b,w2[ 52]);}
{ a = rotateRight( a, 30); d -= s3(e, a, b, c,w2[ 51]);}
{ b = rotateRight( b, 30); e -= s3(a, b, c, d,w2[ 50]);}
{ c = rotateRight( c, 30); a -= s3(b, c, d, e,w2[ 49]);}
{ d = rotateRight( d, 30); b -= s3(c, d, e, a,w2[ 48]);}
{ e = rotateRight( e, 30); c -= s3(d, e, a, b,w2[ 47]);}
{ a = rotateRight( a, 30); d -= s3(e, a, b, c,w2[ 46]);}
{ b = rotateRight( b, 30); e -= s3(a, b, c, d,w2[ 45]);}
{ c = rotateRight( c, 30); a -= s3(b, c, d, e,w2[ 44]);}
{ d = rotateRight( d, 30); b -= s3(c, d, e, a,w2[ 43]);}
{ e = rotateRight( e, 30); c -= s3(d, e, a, b,w2[ 42]);}
{ a = rotateRight( a, 30); d -= s3(e, a, b, c,w2[ 41]);}
{ b = rotateRight( b, 30); e -= s3(a, b, c, d,w2[ 40]);}
{ c = rotateRight( c, 30); a -= s2(b, c, d, e,w2[ 39]);}
{ d = rotateRight( d, 30); b -= s2(c, d, e, a,w2[ 38]);}
{ e = rotateRight( e, 30); c -= s2(d, e, a, b,w2[ 37]);}
{ a = rotateRight( a, 30); d -= s2(e, a, b, c,w2[ 36]);}
{ b = rotateRight( b, 30); e -= s2(a, b, c, d,w2[ 35]);}
{ c = rotateRight( c, 30); a -= s2(b, c, d, e,w2[ 34]);}
{ d = rotateRight( d, 30); b -= s2(c, d, e, a,w2[ 33]);}
{ e = rotateRight( e, 30); c -= s2(d, e, a, b,w2[ 32]);}
{ a = rotateRight( a, 30); d -= s2(e, a, b, c,w2[ 31]);}
{ b = rotateRight( b, 30); e -= s2(a, b, c, d,w2[ 30]);}
{ c = rotateRight( c, 30); a -= s2(b, c, d, e,w2[ 29]);}
{ d = rotateRight( d, 30); b -= s2(c, d, e, a,w2[ 28]);}
{ e = rotateRight( e, 30); c -= s2(d, e, a, b,w2[ 27]);}
{ a = rotateRight( a, 30); d -= s2(e, a, b, c,w2[ 26]);}
{ b = rotateRight( b, 30); e -= s2(a, b, c, d,w2[ 25]);}
{ c = rotateRight( c, 30); a -= s2(b, c, d, e,w2[ 24]);}
{ d = rotateRight( d, 30); b -= s2(c, d, e, a,w2[ 23]);}
{ e = rotateRight( e, 30); c -= s2(d, e, a, b,w2[ 22]);}
{ a = rotateRight( a, 30); d -= s2(e, a, b, c,w2[ 21]);}
{ b = rotateRight( b, 30); e -= s2(a, b, c, d,w2[ 20]);}
{ c = rotateRight( c, 30); a -= s1(b, c, d, e,w2[ 19]);}
{ d = rotateRight( d, 30); b -= s1(c, d, e, a,w2[ 18]);}
{ e = rotateRight( e, 30); c -= s1(d, e, a, b,w2[ 17]);}
{ a = rotateRight( a, 30); d -= s1(e, a, b, c,w2[ 16]);}
{ b = rotateRight( b, 30); e -= s1(a, b, c, d,w2[ 15]);}
{ c = rotateRight( c, 30); a -= s1(b, c, d, e,w2[ 14]);}
{ d = rotateRight( d, 30); b -= s1(c, d, e, a,w2[ 13]);}
{ e = rotateRight( e, 30); c -= s1(d, e, a, b,w2[ 12]);}
{ a = rotateRight( a, 30); d -= s1(e, a, b, c,w2[ 11]);}
{ b = rotateRight( b, 30); e -= s1(a, b, c, d,w2[ 10]);}
{ c = rotateRight( c, 30); a -= s1(b, c, d, e,w2[ 9]);}
{ d = rotateRight( d, 30); b -= s1(c, d, e, a,w2[ 8]);}
{ e = rotateRight( e, 30); c -= s1(d, e, a, b,w2[ 7]);}
{ a = rotateRight( a, 30); d -= s1(e, a, b, c,w2[ 6]);}
{ b = rotateRight( b, 30); e -= s1(a, b, c, d,w2[ 5]);}
{ c = rotateRight( c, 30); a -= s1(b, c, d, e,w2[ 4]);}
{ d = rotateRight( d, 30); b -= s1(c, d, e, a,w2[ 3]);}
{ e = rotateRight( e, 30); c -= s1(d, e, a, b,w2[ 2]);}
{ a = rotateRight( a, 30); d -= s1(e, a, b, c,w2[ 1]);}
{ b = rotateRight( b, 30); e -= s1(a, b, c, d,w2[ 0]);}
hIn.save(a, b, c, d, e);
a = s.a; b = s.b; c = s.c; d = s.d; e = s.e;
if (t == 58) {
{ b += s3(c, d, e, a,w2[ 58]); d = rotateLeft( d, 30);}
{ a += s3(b, c, d, e,w2[ 59]); c = rotateLeft( c, 30);}
{ e += s4(a, b, c, d,w2[ 60]); b = rotateLeft( b, 30);}
{ d += s4(e, a, b, c,w2[ 61]); a = rotateLeft( a, 30);}
{ c += s4(d, e, a, b,w2[ 62]); e = rotateLeft( e, 30);}
{ b += s4(c, d, e, a,w2[ 63]); d = rotateLeft( d, 30);}
{ a += s4(b, c, d, e,w2[ 64]); c = rotateLeft( c, 30);}
}
{ e += s4(a, b, c, d,w2[ 65]); b = rotateLeft( b, 30);}
{ d += s4(e, a, b, c,w2[ 66]); a = rotateLeft( a, 30);}
{ c += s4(d, e, a, b,w2[ 67]); e = rotateLeft( e, 30);}
{ b += s4(c, d, e, a,w2[ 68]); d = rotateLeft( d, 30);}
{ a += s4(b, c, d, e,w2[ 69]); c = rotateLeft( c, 30);}
{ e += s4(a, b, c, d,w2[ 70]); b = rotateLeft( b, 30);}
{ d += s4(e, a, b, c,w2[ 71]); a = rotateLeft( a, 30);}
{ c += s4(d, e, a, b,w2[ 72]); e = rotateLeft( e, 30);}
{ b += s4(c, d, e, a,w2[ 73]); d = rotateLeft( d, 30);}
{ a += s4(b, c, d, e,w2[ 74]); c = rotateLeft( c, 30);}
{ e += s4(a, b, c, d,w2[ 75]); b = rotateLeft( b, 30);}
{ d += s4(e, a, b, c,w2[ 76]); a = rotateLeft( a, 30);}
{ c += s4(d, e, a, b,w2[ 77]); e = rotateLeft( e, 30);}
{ b += s4(c, d, e, a,w2[ 78]); d = rotateLeft( d, 30);}
{ a += s4(b, c, d, e,w2[ 79]); c = rotateLeft( c, 30);}
// @formatter:on
hTmp.save(hIn.a + a, hIn.b + b, hIn.c + c, hIn.d + d, hIn.e + e);
}
private static int s1(int a, int b, int c, int d, int w_t) {
return rotateLeft(a, 5)
// f: 0 <= t <= 19
+ ((b & c) | ((~b) & d))
+ 0x5A827999 + w_t;
}
private static int s2(int a, int b, int c, int d, int w_t) {
return rotateLeft(a, 5)
// f: 20 <= t <= 39
+ (b ^ c ^ d)
+ 0x6ED9EBA1 + w_t;
}
private static int s3(int a, int b, int c, int d, int w_t) {
return rotateLeft(a, 5)
// f: 40 <= t <= 59
+ ((b & c) | (b & d) | (c & d))
+ 0x8F1BBCDC + w_t;
}
private static int s4(int a, int b, int c, int d, int w_t) {
return rotateLeft(a, 5)
// f: 60 <= t <= 79
+ (b ^ c ^ d)
+ 0xCA62C1D6 + w_t;
}
private static boolean eq(State q, State r) {
return q.a == r.a
&& q.b == r.b
&& q.c == r.c
&& q.d == r.d
&& q.e == r.e;
}
private void finish() {
int bufferLen = (int) (length & 63);
if (bufferLen > 55) {
// Last block is too small; pad, compress, pad another block.
buffer[bufferLen++] = (byte) 0x80;
Arrays.fill(buffer, bufferLen, 64, (byte) 0);
compress(buffer, 0);
Arrays.fill(buffer, 0, 56, (byte) 0);
} else {
// Last block can hold padding and length.
buffer[bufferLen++] = (byte) 0x80;
Arrays.fill(buffer, bufferLen, 56, (byte) 0);
}
// SHA-1 appends the length of the message in bits after the
// padding block (above). Here length is in bytes. Multiply by
// 8 by shifting by 3 as part of storing the 64 bit byte length
// into the two words expected in the trailer.
NB.encodeInt32(buffer, 56, (int) (length >>> (32 - 3)));
NB.encodeInt32(buffer, 60, (int) (length << 3));
compress(buffer, 0);
if (foundCollision) {
ObjectId id = h.toObjectId();
LOG.warn(MessageFormat.format(JGitText.get().sha1CollisionDetected,
id.name()));
throw new Sha1CollisionException(id);
}
}
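The two trailer words hold the 64-bit message length in bits: shifting the byte count left by 3 multiplies by 8, and length >>> (32 - 3) is the high word of that product. A tiny sketch of the same arithmetic; trailerWords is a hypothetical helper, not part of this class:

public class Sha1TrailerDemo {
    // High and low 32-bit words of (byteLength * 8), as finish() writes
    // them at buffer[56..63].
    static int[] trailerWords(long byteLength) {
        int hi = (int) (byteLength >>> 29); // same as >>> (32 - 3)
        int lo = (int) (byteLength << 3);
        return new int[] { hi, lo };
    }

    public static void main(String[] args) {
        long len = 0x1_0000_0000L; // a 4 GiB message
        int[] w = trailerWords(len);
        System.out.println(w[0] + " " + w[1]); // 8 0: 4 GiB * 8 = 2^35 bits
    }
}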
@ -484,17 +135,7 @@ private void finish() {
/**
 * Finish the digest and return the resulting hash.
* @throws org.eclipse.jgit.util.sha1.Sha1CollisionException
* if a collision was detected and safeHash is false.
*/
public byte[] digest() throws Sha1CollisionException {
finish();
byte[] b = new byte[20];
NB.encodeInt32(b, 0, h.a);
NB.encodeInt32(b, 4, h.b);
NB.encodeInt32(b, 8, h.c);
NB.encodeInt32(b, 12, h.d);
NB.encodeInt32(b, 16, h.e);
return b;
}
public abstract byte[] digest() throws Sha1CollisionException;
@ -505,10 +146,7 @@ public byte[] digest() throws Sha1CollisionException {
/**
 * Finish the digest and return the resulting hash.
* @throws org.eclipse.jgit.util.sha1.Sha1CollisionException
* if a collision was detected and safeHash is false.
*/
public ObjectId toObjectId() throws Sha1CollisionException {
finish();
return h.toObjectId();
}
public abstract ObjectId toObjectId() throws Sha1CollisionException;
@ -520,60 +158,42 @@ public ObjectId toObjectId() throws Sha1CollisionException {
/**
 * Finish the digest and return the resulting hash.
* @throws org.eclipse.jgit.util.sha1.Sha1CollisionException
* if a collision was detected and safeHash is false.
*/
public void digest(MutableObjectId id) throws Sha1CollisionException {
finish();
id.set(h.a, h.b, h.c, h.d, h.e);
}
public abstract void digest(MutableObjectId id)
throws Sha1CollisionException;
/**
* Check if a collision was detected.
*
* <p>
* This method only returns an accurate result after the digest was obtained
* through {@link #digest()}, {@link #digest(MutableObjectId)} or
* {@link #toObjectId()}, as the hashing function must finish processing to
* know the final state.
*
* @return {@code true} if a likely collision was detected.
*/
public boolean hasCollision() {
return foundCollision;
}
/**
 * Check if a collision was detected. This method only returns an accurate
 * result after the digest was obtained through {@link #digest()},
 * {@link #digest(MutableObjectId)} or {@link #toObjectId()}, as the hashing
 * function must finish processing to know the final state.
 * <p>
 * Implementations not supporting collision detection always return
 * {@code false}.
 *
 * @return {@code true} if a likely collision was detected.
 */
public abstract boolean hasCollision();
/**
* Reset this instance to compute another hash.
*
* @return {@code this}.
*/
public SHA1 reset() {
h.init();
length = 0;
foundCollision = false;
return this;
}
public abstract SHA1 reset();
/**
 * Enable likely collision detection.
 * <p>
 * Default for implementations supporting collision detection is
 * {@code true}.
 * <p>
 * Implementations not supporting collision detection ignore calls to this
 * method.
 *
 * @param detect
 *            a boolean.
 * @return {@code this}
 */
public abstract SHA1 setDetectCollision(boolean detect);
private static final class State {
int a;
int b;
int c;
int d;
int e;
final void init() {
// Magic initialization constants defined by FIPS180.
save(0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0);
}
final void save(int a1, int b1, int c1, int d1, int e1) {
a = a1;
b = b1;
c = c1;
d = d1;
e = e1;
}
ObjectId toObjectId() {
return new ObjectId(a, b, c, d, e);
}
}
}
}
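Callers of this abstract API that must produce a hash even for crafted input can either catch the collision signal or disable detection. A hedged sketch of both patterns; hashOrNull and hashUnchecked are hypothetical helper names:

import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.util.sha1.SHA1;
import org.eclipse.jgit.util.sha1.Sha1CollisionException;

public class Sha1CollisionDemo {
    static ObjectId hashOrNull(byte[] data) {
        SHA1 sha1 = SHA1.newInstance();
        sha1.update(data);
        try {
            return sha1.toObjectId();
        } catch (Sha1CollisionException e) {
            // Raised only by implementations that support detection (the
            // pure Java one) and only for inputs crafted to collide.
            System.err.println("likely SHA-1 collision attack: " + e.getMessage());
            return null;
        }
    }

    static ObjectId hashUnchecked(byte[] data) {
        // Detection off: never throws; hasCollision() then stays false.
        SHA1 sha1 = SHA1.newInstance().setDetectCollision(false);
        sha1.update(data);
        return sha1.toObjectId();
    }
}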

View File

@ -0,0 +1,579 @@
/*
* Copyright (C) 2017, Google Inc. and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.sha1;
import static java.lang.Integer.lowestOneBit;
import static java.lang.Integer.numberOfTrailingZeros;
import static java.lang.Integer.rotateLeft;
import static java.lang.Integer.rotateRight;
import java.text.MessageFormat;
import java.util.Arrays;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.MutableObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.util.NB;
import org.eclipse.jgit.util.SystemReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Pure Java implementation of SHA-1 from FIPS 180-1 / RFC 3174.
*
* <p>
* See <a href="https://tools.ietf.org/html/rfc3174">RFC 3174</a>.
* <p>
* Unlike MessageDigest, this implementation includes the algorithm used by
* {@code sha1dc} to detect cryptanalytic collision attacks against SHA-1, such
* as the one used by <a href="https://shattered.it/">SHAttered</a>. See
* <a href="https://github.com/cr-marcstevens/sha1collisiondetection">
* sha1collisiondetection</a> for more information.
* <p>
* When detectCollision is true (default), this implementation throws
* {@link org.eclipse.jgit.util.sha1.Sha1CollisionException} from any digest
* method if a potential collision was detected.
*
* @since 4.7
*/
class SHA1Java extends SHA1 {
private static final Logger LOG = LoggerFactory.getLogger(SHA1Java.class);
private static final boolean DETECT_COLLISIONS;
static {
SystemReader sr = SystemReader.getInstance();
String v = sr.getProperty("org.eclipse.jgit.util.sha1.detectCollision"); //$NON-NLS-1$
DETECT_COLLISIONS = v != null ? Boolean.parseBoolean(v) : true;
}
private final State h = new State();
private final int[] w = new int[80];
/** Buffer to accumulate partial blocks to 64 byte alignment. */
private final byte[] buffer = new byte[64];
/** Total number of bytes in the message. */
private long length;
private boolean detectCollision = DETECT_COLLISIONS;
private boolean foundCollision;
private final int[] w2 = new int[80];
private final State state58 = new State();
private final State state65 = new State();
private final State hIn = new State();
private final State hTmp = new State();
SHA1Java() {
h.init();
}
/**
* Enable likely collision detection.
* <p>
* Default is {@code true}.
* <p>
* May also be set by system property:
* {@code -Dorg.eclipse.jgit.util.sha1.detectCollision=true}.
*
* @param detect
* a boolean.
* @return {@code this}
*/
@Override
public SHA1 setDetectCollision(boolean detect) {
detectCollision = detect;
return this;
}
/**
* Update the digest computation by adding a byte.
*
* @param b a byte.
*/
@Override
public void update(byte b) {
int bufferLen = (int) (length & 63);
length++;
buffer[bufferLen] = b;
if (bufferLen == 63) {
compress(buffer, 0);
}
}
/**
* Update the digest computation by adding bytes to the message.
*
* @param in
* input array of bytes.
*/
@Override
public void update(byte[] in) {
update(in, 0, in.length);
}
/**
* Update the digest computation by adding bytes to the message.
*
* @param in
* input array of bytes.
* @param p
* offset to start at from {@code in}.
* @param len
* number of bytes to hash.
*/
@Override
public void update(byte[] in, int p, int len) {
// SHA-1 compress can only process whole 64 byte blocks.
// Hold partial updates in buffer, whose length is the low bits.
int bufferLen = (int) (length & 63);
length += len;
if (bufferLen > 0) {
int n = Math.min(64 - bufferLen, len);
System.arraycopy(in, p, buffer, bufferLen, n);
p += n;
len -= n;
if (bufferLen + n < 64) {
return;
}
compress(buffer, 0);
}
while (len >= 64) {
compress(in, p);
p += 64;
len -= 64;
}
if (len > 0) {
System.arraycopy(in, p, buffer, 0, len);
}
}
private void compress(byte[] block, int p) {
initBlock(block, p);
int ubcDvMask = detectCollision ? UbcCheck.check(w) : 0;
compress();
while (ubcDvMask != 0) {
int b = numberOfTrailingZeros(lowestOneBit(ubcDvMask));
UbcCheck.DvInfo dv = UbcCheck.DV[b];
for (int i = 0; i < 80; i++) {
w2[i] = w[i] ^ dv.dm[i];
}
recompress(dv.testt);
if (eq(hTmp, h)) {
foundCollision = true;
break;
}
ubcDvMask &= ~(1 << b);
}
}
private void initBlock(byte[] block, int p) {
for (int t = 0; t < 16; t++) {
w[t] = NB.decodeInt32(block, p + (t << 2));
}
// RFC 3174 6.1.b, extend state vector to 80 words.
for (int t = 16; t < 80; t++) {
int x = w[t - 3] ^ w[t - 8] ^ w[t - 14] ^ w[t - 16];
w[t] = rotateLeft(x, 1); // S^1(...)
}
}
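The second loop above is the message-schedule expansion from RFC 3174 section 6.1.b; writing $S^1$ for a one-bit left rotation, it computes

\[ W_t = S^1\big(W_{t-3} \oplus W_{t-8} \oplus W_{t-14} \oplus W_{t-16}\big), \qquad 16 \le t \le 79. \]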
private void compress() {
// Method 1 from RFC 3174 section 6.1.
// Method 2 (circular queue of 16 words) is slower.
int a = h.a, b = h.b, c = h.c, d = h.d, e = h.e;
// @formatter:off
e += s1(a, b, c, d,w[ 0]); b = rotateLeft( b, 30);
d += s1(e, a, b, c,w[ 1]); a = rotateLeft( a, 30);
c += s1(d, e, a, b,w[ 2]); e = rotateLeft( e, 30);
b += s1(c, d, e, a,w[ 3]); d = rotateLeft( d, 30);
a += s1(b, c, d, e,w[ 4]); c = rotateLeft( c, 30);
e += s1(a, b, c, d,w[ 5]); b = rotateLeft( b, 30);
d += s1(e, a, b, c,w[ 6]); a = rotateLeft( a, 30);
c += s1(d, e, a, b,w[ 7]); e = rotateLeft( e, 30);
b += s1(c, d, e, a,w[ 8]); d = rotateLeft( d, 30);
a += s1(b, c, d, e,w[ 9]); c = rotateLeft( c, 30);
e += s1(a, b, c, d,w[ 10]); b = rotateLeft( b, 30);
d += s1(e, a, b, c,w[ 11]); a = rotateLeft( a, 30);
c += s1(d, e, a, b,w[ 12]); e = rotateLeft( e, 30);
b += s1(c, d, e, a,w[ 13]); d = rotateLeft( d, 30);
a += s1(b, c, d, e,w[ 14]); c = rotateLeft( c, 30);
e += s1(a, b, c, d,w[ 15]); b = rotateLeft( b, 30);
d += s1(e, a, b, c,w[ 16]); a = rotateLeft( a, 30);
c += s1(d, e, a, b,w[ 17]); e = rotateLeft( e, 30);
b += s1(c, d, e, a,w[ 18]); d = rotateLeft( d, 30);
a += s1(b, c, d, e,w[ 19]); c = rotateLeft( c, 30);
e += s2(a, b, c, d,w[ 20]); b = rotateLeft( b, 30);
d += s2(e, a, b, c,w[ 21]); a = rotateLeft( a, 30);
c += s2(d, e, a, b,w[ 22]); e = rotateLeft( e, 30);
b += s2(c, d, e, a,w[ 23]); d = rotateLeft( d, 30);
a += s2(b, c, d, e,w[ 24]); c = rotateLeft( c, 30);
e += s2(a, b, c, d,w[ 25]); b = rotateLeft( b, 30);
d += s2(e, a, b, c,w[ 26]); a = rotateLeft( a, 30);
c += s2(d, e, a, b,w[ 27]); e = rotateLeft( e, 30);
b += s2(c, d, e, a,w[ 28]); d = rotateLeft( d, 30);
a += s2(b, c, d, e,w[ 29]); c = rotateLeft( c, 30);
e += s2(a, b, c, d,w[ 30]); b = rotateLeft( b, 30);
d += s2(e, a, b, c,w[ 31]); a = rotateLeft( a, 30);
c += s2(d, e, a, b,w[ 32]); e = rotateLeft( e, 30);
b += s2(c, d, e, a,w[ 33]); d = rotateLeft( d, 30);
a += s2(b, c, d, e,w[ 34]); c = rotateLeft( c, 30);
e += s2(a, b, c, d,w[ 35]); b = rotateLeft( b, 30);
d += s2(e, a, b, c,w[ 36]); a = rotateLeft( a, 30);
c += s2(d, e, a, b,w[ 37]); e = rotateLeft( e, 30);
b += s2(c, d, e, a,w[ 38]); d = rotateLeft( d, 30);
a += s2(b, c, d, e,w[ 39]); c = rotateLeft( c, 30);
e += s3(a, b, c, d,w[ 40]); b = rotateLeft( b, 30);
d += s3(e, a, b, c,w[ 41]); a = rotateLeft( a, 30);
c += s3(d, e, a, b,w[ 42]); e = rotateLeft( e, 30);
b += s3(c, d, e, a,w[ 43]); d = rotateLeft( d, 30);
a += s3(b, c, d, e,w[ 44]); c = rotateLeft( c, 30);
e += s3(a, b, c, d,w[ 45]); b = rotateLeft( b, 30);
d += s3(e, a, b, c,w[ 46]); a = rotateLeft( a, 30);
c += s3(d, e, a, b,w[ 47]); e = rotateLeft( e, 30);
b += s3(c, d, e, a,w[ 48]); d = rotateLeft( d, 30);
a += s3(b, c, d, e,w[ 49]); c = rotateLeft( c, 30);
e += s3(a, b, c, d,w[ 50]); b = rotateLeft( b, 30);
d += s3(e, a, b, c,w[ 51]); a = rotateLeft( a, 30);
c += s3(d, e, a, b,w[ 52]); e = rotateLeft( e, 30);
b += s3(c, d, e, a,w[ 53]); d = rotateLeft( d, 30);
a += s3(b, c, d, e,w[ 54]); c = rotateLeft( c, 30);
e += s3(a, b, c, d,w[ 55]); b = rotateLeft( b, 30);
d += s3(e, a, b, c,w[ 56]); a = rotateLeft( a, 30);
c += s3(d, e, a, b,w[ 57]); e = rotateLeft( e, 30);
state58.save(a, b, c, d, e);
b += s3(c, d, e, a,w[ 58]); d = rotateLeft( d, 30);
a += s3(b, c, d, e,w[ 59]); c = rotateLeft( c, 30);
e += s4(a, b, c, d,w[ 60]); b = rotateLeft( b, 30);
d += s4(e, a, b, c,w[ 61]); a = rotateLeft( a, 30);
c += s4(d, e, a, b,w[ 62]); e = rotateLeft( e, 30);
b += s4(c, d, e, a,w[ 63]); d = rotateLeft( d, 30);
a += s4(b, c, d, e,w[ 64]); c = rotateLeft( c, 30);
state65.save(a, b, c, d, e);
e += s4(a, b, c, d,w[ 65]); b = rotateLeft( b, 30);
d += s4(e, a, b, c,w[ 66]); a = rotateLeft( a, 30);
c += s4(d, e, a, b,w[ 67]); e = rotateLeft( e, 30);
b += s4(c, d, e, a,w[ 68]); d = rotateLeft( d, 30);
a += s4(b, c, d, e,w[ 69]); c = rotateLeft( c, 30);
e += s4(a, b, c, d,w[ 70]); b = rotateLeft( b, 30);
d += s4(e, a, b, c,w[ 71]); a = rotateLeft( a, 30);
c += s4(d, e, a, b,w[ 72]); e = rotateLeft( e, 30);
b += s4(c, d, e, a,w[ 73]); d = rotateLeft( d, 30);
a += s4(b, c, d, e,w[ 74]); c = rotateLeft( c, 30);
e += s4(a, b, c, d,w[ 75]); b = rotateLeft( b, 30);
d += s4(e, a, b, c,w[ 76]); a = rotateLeft( a, 30);
c += s4(d, e, a, b,w[ 77]); e = rotateLeft( e, 30);
b += s4(c, d, e, a,w[ 78]); d = rotateLeft( d, 30);
a += s4(b, c, d, e,w[ 79]); c = rotateLeft( c, 30);
// @formatter:on
h.save(h.a + a, h.b + b, h.c + c, h.d + d, h.e + e);
}
private void recompress(int t) {
State s;
switch (t) {
case 58:
s = state58;
break;
case 65:
s = state65;
break;
default:
throw new IllegalStateException();
}
int a = s.a, b = s.b, c = s.c, d = s.d, e = s.e;
// @formatter:off
if (t == 65) {
{ c = rotateRight( c, 30); a -= s4(b, c, d, e,w2[ 64]);}
{ d = rotateRight( d, 30); b -= s4(c, d, e, a,w2[ 63]);}
{ e = rotateRight( e, 30); c -= s4(d, e, a, b,w2[ 62]);}
{ a = rotateRight( a, 30); d -= s4(e, a, b, c,w2[ 61]);}
{ b = rotateRight( b, 30); e -= s4(a, b, c, d,w2[ 60]);}
{ c = rotateRight( c, 30); a -= s3(b, c, d, e,w2[ 59]);}
{ d = rotateRight( d, 30); b -= s3(c, d, e, a,w2[ 58]);}
}
{ e = rotateRight( e, 30); c -= s3(d, e, a, b,w2[ 57]);}
{ a = rotateRight( a, 30); d -= s3(e, a, b, c,w2[ 56]);}
{ b = rotateRight( b, 30); e -= s3(a, b, c, d,w2[ 55]);}
{ c = rotateRight( c, 30); a -= s3(b, c, d, e,w2[ 54]);}
{ d = rotateRight( d, 30); b -= s3(c, d, e, a,w2[ 53]);}
{ e = rotateRight( e, 30); c -= s3(d, e, a, b,w2[ 52]);}
{ a = rotateRight( a, 30); d -= s3(e, a, b, c,w2[ 51]);}
{ b = rotateRight( b, 30); e -= s3(a, b, c, d,w2[ 50]);}
{ c = rotateRight( c, 30); a -= s3(b, c, d, e,w2[ 49]);}
{ d = rotateRight( d, 30); b -= s3(c, d, e, a,w2[ 48]);}
{ e = rotateRight( e, 30); c -= s3(d, e, a, b,w2[ 47]);}
{ a = rotateRight( a, 30); d -= s3(e, a, b, c,w2[ 46]);}
{ b = rotateRight( b, 30); e -= s3(a, b, c, d,w2[ 45]);}
{ c = rotateRight( c, 30); a -= s3(b, c, d, e,w2[ 44]);}
{ d = rotateRight( d, 30); b -= s3(c, d, e, a,w2[ 43]);}
{ e = rotateRight( e, 30); c -= s3(d, e, a, b,w2[ 42]);}
{ a = rotateRight( a, 30); d -= s3(e, a, b, c,w2[ 41]);}
{ b = rotateRight( b, 30); e -= s3(a, b, c, d,w2[ 40]);}
{ c = rotateRight( c, 30); a -= s2(b, c, d, e,w2[ 39]);}
{ d = rotateRight( d, 30); b -= s2(c, d, e, a,w2[ 38]);}
{ e = rotateRight( e, 30); c -= s2(d, e, a, b,w2[ 37]);}
{ a = rotateRight( a, 30); d -= s2(e, a, b, c,w2[ 36]);}
{ b = rotateRight( b, 30); e -= s2(a, b, c, d,w2[ 35]);}
{ c = rotateRight( c, 30); a -= s2(b, c, d, e,w2[ 34]);}
{ d = rotateRight( d, 30); b -= s2(c, d, e, a,w2[ 33]);}
{ e = rotateRight( e, 30); c -= s2(d, e, a, b,w2[ 32]);}
{ a = rotateRight( a, 30); d -= s2(e, a, b, c,w2[ 31]);}
{ b = rotateRight( b, 30); e -= s2(a, b, c, d,w2[ 30]);}
{ c = rotateRight( c, 30); a -= s2(b, c, d, e,w2[ 29]);}
{ d = rotateRight( d, 30); b -= s2(c, d, e, a,w2[ 28]);}
{ e = rotateRight( e, 30); c -= s2(d, e, a, b,w2[ 27]);}
{ a = rotateRight( a, 30); d -= s2(e, a, b, c,w2[ 26]);}
{ b = rotateRight( b, 30); e -= s2(a, b, c, d,w2[ 25]);}
{ c = rotateRight( c, 30); a -= s2(b, c, d, e,w2[ 24]);}
{ d = rotateRight( d, 30); b -= s2(c, d, e, a,w2[ 23]);}
{ e = rotateRight( e, 30); c -= s2(d, e, a, b,w2[ 22]);}
{ a = rotateRight( a, 30); d -= s2(e, a, b, c,w2[ 21]);}
{ b = rotateRight( b, 30); e -= s2(a, b, c, d,w2[ 20]);}
{ c = rotateRight( c, 30); a -= s1(b, c, d, e,w2[ 19]);}
{ d = rotateRight( d, 30); b -= s1(c, d, e, a,w2[ 18]);}
{ e = rotateRight( e, 30); c -= s1(d, e, a, b,w2[ 17]);}
{ a = rotateRight( a, 30); d -= s1(e, a, b, c,w2[ 16]);}
{ b = rotateRight( b, 30); e -= s1(a, b, c, d,w2[ 15]);}
{ c = rotateRight( c, 30); a -= s1(b, c, d, e,w2[ 14]);}
{ d = rotateRight( d, 30); b -= s1(c, d, e, a,w2[ 13]);}
{ e = rotateRight( e, 30); c -= s1(d, e, a, b,w2[ 12]);}
{ a = rotateRight( a, 30); d -= s1(e, a, b, c,w2[ 11]);}
{ b = rotateRight( b, 30); e -= s1(a, b, c, d,w2[ 10]);}
{ c = rotateRight( c, 30); a -= s1(b, c, d, e,w2[ 9]);}
{ d = rotateRight( d, 30); b -= s1(c, d, e, a,w2[ 8]);}
{ e = rotateRight( e, 30); c -= s1(d, e, a, b,w2[ 7]);}
{ a = rotateRight( a, 30); d -= s1(e, a, b, c,w2[ 6]);}
{ b = rotateRight( b, 30); e -= s1(a, b, c, d,w2[ 5]);}
{ c = rotateRight( c, 30); a -= s1(b, c, d, e,w2[ 4]);}
{ d = rotateRight( d, 30); b -= s1(c, d, e, a,w2[ 3]);}
{ e = rotateRight( e, 30); c -= s1(d, e, a, b,w2[ 2]);}
{ a = rotateRight( a, 30); d -= s1(e, a, b, c,w2[ 1]);}
{ b = rotateRight( b, 30); e -= s1(a, b, c, d,w2[ 0]);}
hIn.save(a, b, c, d, e);
a = s.a; b = s.b; c = s.c; d = s.d; e = s.e;
if (t == 58) {
{ b += s3(c, d, e, a,w2[ 58]); d = rotateLeft( d, 30);}
{ a += s3(b, c, d, e,w2[ 59]); c = rotateLeft( c, 30);}
{ e += s4(a, b, c, d,w2[ 60]); b = rotateLeft( b, 30);}
{ d += s4(e, a, b, c,w2[ 61]); a = rotateLeft( a, 30);}
{ c += s4(d, e, a, b,w2[ 62]); e = rotateLeft( e, 30);}
{ b += s4(c, d, e, a,w2[ 63]); d = rotateLeft( d, 30);}
{ a += s4(b, c, d, e,w2[ 64]); c = rotateLeft( c, 30);}
}
{ e += s4(a, b, c, d,w2[ 65]); b = rotateLeft( b, 30);}
{ d += s4(e, a, b, c,w2[ 66]); a = rotateLeft( a, 30);}
{ c += s4(d, e, a, b,w2[ 67]); e = rotateLeft( e, 30);}
{ b += s4(c, d, e, a,w2[ 68]); d = rotateLeft( d, 30);}
{ a += s4(b, c, d, e,w2[ 69]); c = rotateLeft( c, 30);}
{ e += s4(a, b, c, d,w2[ 70]); b = rotateLeft( b, 30);}
{ d += s4(e, a, b, c,w2[ 71]); a = rotateLeft( a, 30);}
{ c += s4(d, e, a, b,w2[ 72]); e = rotateLeft( e, 30);}
{ b += s4(c, d, e, a,w2[ 73]); d = rotateLeft( d, 30);}
{ a += s4(b, c, d, e,w2[ 74]); c = rotateLeft( c, 30);}
{ e += s4(a, b, c, d,w2[ 75]); b = rotateLeft( b, 30);}
{ d += s4(e, a, b, c,w2[ 76]); a = rotateLeft( a, 30);}
{ c += s4(d, e, a, b,w2[ 77]); e = rotateLeft( e, 30);}
{ b += s4(c, d, e, a,w2[ 78]); d = rotateLeft( d, 30);}
{ a += s4(b, c, d, e,w2[ 79]); c = rotateLeft( c, 30);}
// @formatter:on
hTmp.save(hIn.a + a, hIn.b + b, hIn.c + c, hIn.d + d, hIn.e + e);
}
private static int s1(int a, int b, int c, int d, int w_t) {
return rotateLeft(a, 5)
// f: 0 <= t <= 19
+ ((b & c) | ((~b) & d))
+ 0x5A827999 + w_t;
}
private static int s2(int a, int b, int c, int d, int w_t) {
return rotateLeft(a, 5)
// f: 20 <= t <= 39
+ (b ^ c ^ d)
+ 0x6ED9EBA1 + w_t;
}
private static int s3(int a, int b, int c, int d, int w_t) {
return rotateLeft(a, 5)
// f: 40 <= t <= 59
+ ((b & c) | (b & d) | (c & d))
+ 0x8F1BBCDC + w_t;
}
private static int s4(int a, int b, int c, int d, int w_t) {
return rotateLeft(a, 5)
// f: 60 <= t <= 79
+ (b ^ c ^ d)
+ 0xCA62C1D6 + w_t;
}
private static boolean eq(State q, State r) {
return q.a == r.a
&& q.b == r.b
&& q.c == r.c
&& q.d == r.d
&& q.e == r.e;
}
private void finish() {
int bufferLen = (int) (length & 63);
if (bufferLen > 55) {
// Last block is too small; pad, compress, pad another block.
buffer[bufferLen++] = (byte) 0x80;
Arrays.fill(buffer, bufferLen, 64, (byte) 0);
compress(buffer, 0);
Arrays.fill(buffer, 0, 56, (byte) 0);
} else {
// Last block can hold padding and length.
buffer[bufferLen++] = (byte) 0x80;
Arrays.fill(buffer, bufferLen, 56, (byte) 0);
}
// SHA-1 appends the length of the message in bits after the
// padding block (above). Here length is in bytes. Multiply by
// 8 by shifting by 3 as part of storing the 64 bit byte length
// into the two words expected in the trailer.
NB.encodeInt32(buffer, 56, (int) (length >>> (32 - 3)));
NB.encodeInt32(buffer, 60, (int) (length << 3));
compress(buffer, 0);
if (foundCollision) {
ObjectId id = h.toObjectId();
LOG.warn(MessageFormat.format(JGitText.get().sha1CollisionDetected,
id.name()));
throw new Sha1CollisionException(id);
}
}
/**
* Finish the digest and return the resulting hash.
* <p>
* Once {@code digest()} is called, this instance should be discarded.
*
* @return the bytes for the resulting hash.
* @throws org.eclipse.jgit.util.sha1.Sha1CollisionException
* if a collision was detected and safeHash is false.
*/
@Override
public byte[] digest() throws Sha1CollisionException {
finish();
byte[] b = new byte[20];
NB.encodeInt32(b, 0, h.a);
NB.encodeInt32(b, 4, h.b);
NB.encodeInt32(b, 8, h.c);
NB.encodeInt32(b, 12, h.d);
NB.encodeInt32(b, 16, h.e);
return b;
}
/**
* Finish the digest and return the resulting hash.
* <p>
* Once {@code digest()} is called, this instance should be discarded.
*
* @return the ObjectId for the resulting hash.
* @throws org.eclipse.jgit.util.sha1.Sha1CollisionException
* if a collision was detected and safeHash is false.
*/
@Override
public ObjectId toObjectId() throws Sha1CollisionException {
finish();
return h.toObjectId();
}
/**
* Finish the digest and return the resulting hash.
* <p>
* Once {@code digest()} is called, this instance should be discarded.
*
* @param id
* destination to copy the digest to.
* @throws org.eclipse.jgit.util.sha1.Sha1CollisionException
* if a collision was detected and safeHash is false.
*/
@Override
public void digest(MutableObjectId id) throws Sha1CollisionException {
finish();
id.set(h.a, h.b, h.c, h.d, h.e);
}
/**
* Check if a collision was detected.
*
* <p>
* This method only returns an accurate result after the digest was obtained
* through {@link #digest()}, {@link #digest(MutableObjectId)} or
* {@link #toObjectId()}, as the hashing function must finish processing to
* know the final state.
*
* @return {@code true} if a likely collision was detected.
*/
@Override
public boolean hasCollision() {
return foundCollision;
}
/**
* Reset this instance to compute another hash.
*
* @return {@code this}.
*/
@Override
public SHA1 reset() {
h.init();
length = 0;
foundCollision = false;
return this;
}
private static final class State {
int a;
int b;
int c;
int d;
int e;
final void init() {
// Magic initialization constants defined by FIPS180.
save(0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0);
}
final void save(int a1, int b1, int c1, int d1, int e1) {
a = a1;
b = b1;
c = c1;
d = d1;
e = e1;
}
ObjectId toObjectId() {
return new ObjectId(a, b, c, d, e);
}
}
}
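A quick sanity check for this implementation: the FIPS 180 initialization constants in State.init() plus a single compression of the padding block must yield the well-known SHA-1 of the empty message:

import org.eclipse.jgit.util.sha1.SHA1;

public class Sha1EmptyDigestCheck {
    public static void main(String[] args) {
        // No update() calls: finish() pads and compresses an empty message.
        String hex = SHA1.newInstance().toObjectId().name();
        if (!"da39a3ee5e6b4b0d3255bfef95601890afd80709".equals(hex)) {
            throw new AssertionError(hex);
        }
        System.out.println("ok: " + hex);
    }
}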

View File

@ -0,0 +1,75 @@
/*
* Copyright (C) 2022, Matthias Sohn <matthias.sohn@sap.com> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.util.sha1;
import java.security.MessageDigest;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.MutableObjectId;
import org.eclipse.jgit.lib.ObjectId;
/**
 * SHA1 implementation delegating to the JDK's {@link MessageDigest}. It does
 * not support collision detection but is faster than the pure Java
 * implementation.
*/
class SHA1Native extends SHA1 {
private final MessageDigest md;
SHA1Native() {
md = Constants.newMessageDigest();
}
@Override
public void update(byte b) {
md.update(b);
}
@Override
public void update(byte[] in) {
md.update(in);
}
@Override
public void update(byte[] in, int p, int len) {
md.update(in, p, len);
}
@Override
public byte[] digest() throws Sha1CollisionException {
return md.digest();
}
@Override
public ObjectId toObjectId() throws Sha1CollisionException {
return ObjectId.fromRaw(md.digest());
}
@Override
public void digest(MutableObjectId id) throws Sha1CollisionException {
id.fromRaw(md.digest());
}
@Override
public SHA1 reset() {
md.reset();
return this;
}
@Override
public SHA1 setDetectCollision(boolean detect) {
return this;
}
@Override
public boolean hasCollision() {
return false;
}
}
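A rough, unscientific way to compare the two implementations outside a benchmark harness (a JMH benchmark is the right tool; timings vary by JVM and input). The property toggle works because it is re-read on every newInstance() call:

import org.eclipse.jgit.util.sha1.SHA1;

public class Sha1ImplComparison {
    static long hashNanos(byte[] data, int rounds) {
        long start = System.nanoTime();
        for (int i = 0; i < rounds; i++) {
            SHA1 sha1 = SHA1.newInstance();
            sha1.update(data);
            sha1.digest();
        }
        return System.nanoTime() - start;
    }

    public static void main(String[] args) {
        byte[] data = new byte[1 << 20]; // 1 MiB

        System.setProperty("org.eclipse.jgit.util.sha1.implementation", "java");
        long pureJava = hashNanos(data, 50);

        System.setProperty("org.eclipse.jgit.util.sha1.implementation", "jdkNative");
        long jdkNative = hashNanos(data, 50);

        System.out.printf("pure Java: %d ms, JDK: %d ms%n",
                pureJava / 1_000_000, jdkNative / 1_000_000);
    }
}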

View File

@ -16,7 +16,7 @@
import org.eclipse.jgit.lib.ObjectId;
/**
* Thrown by {@link org.eclipse.jgit.util.sha1.SHA1} if it detects a likely hash
* Thrown by {@link org.eclipse.jgit.util.sha1.SHA1Java} if it detects a likely hash
* collision.
*
* @since 4.7