Merge branch 'master' into stable-6.0

* master:
  Fix checkout of files with mixed line endings on text=auto eol=crlf
  Don't rely on an implicit default character set
  Fix bad indentation in pom.xml
  Minor code-clean-up in OpenSshConfigFile
  Remove use of deprecated getAllRefs() in UploadPack
  DFS block cache: fix lock issue and support parallel index loading
  JSch: fix service publication for ServiceLoader
  Set JSch global config values only if not set already
  Fix missing peel-part in lsRefsV2 for loose annotated tags
  DFS block cache: allow multiple passes for blocks before eviction
  Fix RevWalk.getMergedInto() ignores annotated tags
  Optimize RevWalk.getMergedInto()
  GarbageCollectCommand: add numberOfBitmaps to statistics
  reftable: drop code for truncated reads
  reftable: pass on invalid object ID in conversion
  Update eclipse-jarsigner-plugin to 1.3.2
  Fix running benchmarks from bazel
  Update eclipse-jarsigner-plugin to 1.3.2
  Add org.bouncycastle.bcutil to p2 repository

Change-Id: Icaa36ded0439853a05ce21de9282e69d87a32284
Matthias Sohn 2021-10-27 14:48:15 +02:00
commit e21da0ddbc
41 changed files with 749 additions and 224 deletions

View File

@@ -8,6 +8,8 @@ jmh_java_benchmarks(
name = "benchmarks",
srcs = SRCS,
deps = [
"//lib:javaewah",
"//lib:slf4j-api",
"//org.eclipse.jgit:jgit",
],
)
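
With the missing runtime dependencies declared, the generated JMH runner resolves at run time again. A hedged invocation, assuming the BUILD file above lives in JGit's org.eclipse.jgit.benchmarks package:

    bazel run //org.eclipse.jgit.benchmarks:benchmarks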

View File

@@ -147,6 +147,12 @@
<bundle id="org.bouncycastle.bcprov.source">
<category name="JGit-dependency-bundles"/>
</bundle>
<bundle id="org.bouncycastle.bcutil">
<category name="JGit-dependency-bundles"/>
</bundle>
<bundle id="org.bouncycastle.bcutil.source">
<category name="JGit-dependency-bundles"/>
</bundle>
<bundle id="org.kohsuke.args4j">
<category name="JGit-dependency-bundles"/>
</bundle>

View File

@@ -105,15 +105,15 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<inherited>true</inherited>
<executions>
<execution>
<id>attach-sources</id>
<phase>process-classes</phase>
<goals>
<goal>jar</goal>
</goals>
<artifactId>maven-source-plugin</artifactId>
<inherited>true</inherited>
<executions>
<execution>
<id>attach-sources</id>
<phase>process-classes</phase>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<archive>
<manifestFile>${source-bundle-manifest}</manifestFile>

View File

@@ -0,0 +1,24 @@
/*
* Copyright (C) 2021, Thomas Wolf <thomas.wolf@paranor.ch> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.transport.ssh.jsch;
import static org.junit.Assert.assertNotNull;
import org.eclipse.jgit.transport.SshSessionFactory;
import org.junit.Test;
public class ServiceLoaderTest {
@Test
public void testDefaultFactoryFound() {
SshSessionFactory defaultFactory = SshSessionFactory.getInstance();
assertNotNull(defaultFactory);
}
}
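
The test only passes if the JSch factory is actually published for java.util.ServiceLoader, i.e. listed in a provider-configuration file on the class path. A minimal sketch of the lookup that SshSessionFactory.getInstance() ultimately relies on (standard ServiceLoader API; the registered implementation class is treated as opaque):

    import java.util.ServiceLoader;
    import org.eclipse.jgit.transport.SshSessionFactory;

    // META-INF/services/org.eclipse.jgit.transport.SshSessionFactory must name
    // the implementation; ServiceLoader then instantiates it on demand.
    for (SshSessionFactory factory : ServiceLoader.load(SshSessionFactory.class)) {
        System.out.println("found: " + factory.getClass().getName());
    }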

View File

@@ -96,15 +96,15 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<inherited>true</inherited>
<executions>
<execution>
<id>attach-sources</id>
<phase>process-classes</phase>
<goals>
<goal>jar</goal>
</goals>
<artifactId>maven-source-plugin</artifactId>
<inherited>true</inherited>
<executions>
<execution>
<id>attach-sources</id>
<phase>process-classes</phase>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<archive>
<manifestFile>${source-bundle-manifest}</manifestFile>

View File

@@ -45,6 +45,7 @@
import org.eclipse.jgit.transport.SshSessionFactory;
import org.eclipse.jgit.transport.URIish;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -417,14 +418,26 @@ protected JSch getJSch(OpenSshConfig.Host hc, FS fs) throws JSchException {
*/
protected JSch createDefaultJSch(FS fs) throws JSchException {
final JSch jsch = new JSch();
JSch.setConfig("ssh-rsa", JSch.getConfig("signature.rsa")); //$NON-NLS-1$ //$NON-NLS-2$
JSch.setConfig("ssh-dss", JSch.getConfig("signature.dss")); //$NON-NLS-1$ //$NON-NLS-2$
// See https://bugs.eclipse.org/bugs/show_bug.cgi?id=537790 and
// https://bugs.eclipse.org/bugs/show_bug.cgi?id=576604
copyGlobalConfigIfNotSet("signature.rsa", "ssh-rsa"); //$NON-NLS-1$ //$NON-NLS-2$
copyGlobalConfigIfNotSet("signature.dss", "ssh-dss"); //$NON-NLS-1$ //$NON-NLS-2$
configureJSch(jsch);
knownHosts(jsch, fs);
identities(jsch, fs);
return jsch;
}
private void copyGlobalConfigIfNotSet(String from, String to) {
String toValue = JSch.getConfig(to);
if (StringUtils.isEmptyOrNull(toValue)) {
String fromValue = JSch.getConfig(from);
if (!StringUtils.isEmptyOrNull(fromValue)) {
JSch.setConfig(to, fromValue);
}
}
}
private static void knownHosts(JSch sch, FS fs) throws JSchException {
final File home = fs.userHome();
if (home == null)
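
JSch.setConfig mutates JVM-wide state, so an application embedding JGit may already have overridden these algorithm mappings; the guard above keeps JGit from clobbering them. The same set-only-if-unset pattern in isolation, as a minimal sketch against JSch's public static config API:

    import com.jcraft.jsch.JSch;

    // Copy a global JSch default onto another key, but only when the target
    // key has not already been configured by the host application.
    static void copyIfUnset(String from, String to) {
        String current = JSch.getConfig(to);
        if (current == null || current.isEmpty()) {
            String value = JSch.getConfig(from);
            if (value != null && !value.isEmpty()) {
                JSch.setConfig(to, value);
            }
        }
    }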

View File

@@ -83,7 +83,8 @@ public void testCheckoutCRLF() throws Exception {
testCheckout(TEXT_CRLF, AUTO_CRLF, "\r\n", "\r\n");
testCheckout(TEXT_CRLF, AUTO_CRLF, "\n\r", "\r\n\r");
testCheckout(TEXT_CRLF, AUTO_CRLF, "\n\r\n", "\r\n\r\n");
testCheckout(null, AUTO_CRLF, "\n\r\n", "\n\r\n");
testCheckout(TEXT_CRLF, null, "\n\r\n", "\r\n\r\n");
testCheckout(TEXT_CRLF, AUTO_CRLF, "\r\n\r", "\r\n\r");
testCheckout(TEXT_CRLF, AUTO_CRLF, "a\nb\n", "a\r\nb\r\n");

View File

@@ -39,7 +39,7 @@ public void testGConeCommit() throws Exception {
Date expire = GitDateParser.parse("now", null, SystemReader
.getInstance().getLocale());
Properties res = git.gc().setExpire(expire).call();
assertTrue(res.size() == 7);
assertTrue(res.size() == 8);
}
@Test
@@ -57,6 +57,6 @@ public void testGCmoreCommits() throws Exception {
.setExpire(
GitDateParser.parse("now", null, SystemReader
.getInstance().getLocale())).call();
assertTrue(res.size() == 7);
assertTrue(res.size() == 8);
}
}

View File

@@ -324,7 +324,7 @@ public void testPushAfterGC() throws Exception {
// run a gc to ensure we have a bitmap index
Properties res = git1.gc().setExpire(null).call();
assertEquals(7, res.size());
assertEquals(8, res.size());
// create another commit so we have something else to push
writeTrashFile("b", "content of b");

View File

@@ -16,13 +16,22 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.LongStream;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.junit.TestRng;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.revwalk.RevCommit;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -103,10 +112,95 @@ public void weirdBlockSize() throws Exception {
}
}
@SuppressWarnings("resource")
@Test
public void hasCacheHotMap() throws Exception {
Map<PackExt, Integer> cacheHotMap = new HashMap<>();
// Pack index will be kept in cache longer.
cacheHotMap.put(PackExt.INDEX, Integer.valueOf(3));
DfsBlockCache.reconfigure(new DfsBlockCacheConfig().setBlockSize(512)
.setBlockLimit(512 * 4).setCacheHotMap(cacheHotMap));
cache = DfsBlockCache.getInstance();
DfsRepositoryDescription repo = new DfsRepositoryDescription("test");
InMemoryRepository r1 = new InMemoryRepository(repo);
byte[] content = rng.nextBytes(424242);
ObjectId id;
try (ObjectInserter ins = r1.newObjectInserter()) {
id = ins.insert(OBJ_BLOB, content);
ins.flush();
}
try (ObjectReader rdr = r1.newObjectReader()) {
byte[] actual = rdr.open(id, OBJ_BLOB).getBytes();
assertTrue(Arrays.equals(content, actual));
}
// All cache entries are hot and cache is at capacity.
assertTrue(LongStream.of(cache.getHitCount()).sum() > 0);
assertEquals(99, cache.getFillPercentage());
InMemoryRepository r2 = new InMemoryRepository(repo);
content = rng.nextBytes(424242);
try (ObjectInserter ins = r2.newObjectInserter()) {
ins.insert(OBJ_BLOB, content);
ins.flush();
}
assertEquals(0, LongStream.of(cache.getMissCount()).sum());
assertTrue(cache.getEvictions()[PackExt.PACK.getPosition()] > 0);
assertEquals(0, cache.getEvictions()[PackExt.INDEX.getPosition()]);
}
@SuppressWarnings("resource")
@Test
public void noConcurrencySerializedReads() throws Exception {
DfsRepositoryDescription repo = new DfsRepositoryDescription("test");
InMemoryRepository r1 = new InMemoryRepository(repo);
TestRepository<InMemoryRepository> repository = new TestRepository<>(
r1);
RevCommit commit = repository.branch("/refs/ref1").commit()
.add("blob1", "blob1").create();
repository.branch("/refs/ref2").commit().add("blob2", "blob2")
.parent(commit).create();
new DfsGarbageCollector(r1).pack(null);
// Reset cache with concurrency Level at 1 i.e. no concurrency.
DfsBlockCache.reconfigure(new DfsBlockCacheConfig().setBlockSize(512)
.setBlockLimit(1 << 20).setConcurrencyLevel(1));
cache = DfsBlockCache.getInstance();
DfsReader reader = (DfsReader) r1.newObjectReader();
ExecutorService pool = Executors.newFixedThreadPool(10);
for (DfsPackFile pack : r1.getObjectDatabase().getPacks()) {
// Only load non-garbage pack with bitmap.
if (pack.isGarbage()) {
continue;
}
asyncRun(pool, () -> pack.getBitmapIndex(reader));
asyncRun(pool, () -> pack.getPackIndex(reader));
asyncRun(pool, () -> pack.getBitmapIndex(reader));
}
pool.shutdown();
pool.awaitTermination(500, TimeUnit.MILLISECONDS);
assertTrue("Threads did not complete, likely due to a deadlock.",
pool.isTerminated());
assertEquals(1, cache.getMissCount()[PackExt.BITMAP_INDEX.ordinal()]);
assertEquals(1, cache.getMissCount()[PackExt.INDEX.ordinal()]);
}
private void resetCache() {
DfsBlockCache.reconfigure(new DfsBlockCacheConfig()
.setBlockSize(512)
DfsBlockCache.reconfigure(new DfsBlockCacheConfig().setBlockSize(512)
.setBlockLimit(1 << 20));
cache = DfsBlockCache.getInstance();
}
private void asyncRun(ExecutorService pool, Callable<?> call) {
pool.execute(() -> {
try {
call.call();
} catch (Exception e) {
// Ignore.
}
});
}
}

View File

@@ -25,7 +25,9 @@
import static org.junit.Assert.fail;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Collection;
@@ -133,20 +135,21 @@ public void testConvert() throws Exception {
assertTrue(db.getRefDatabase().hasFastTipsWithSha1());
}
@Test
public void testConvertToRefdir() throws Exception {
public void testConvertBrokenObjectId() throws Exception {
db.convertToPackedRefs(false, false);
assertTrue(db.getRefDatabase() instanceof RefDirectory);
Ref h = db.exactRef("HEAD");
assertTrue(h.isSymbolic());
assertEquals("refs/heads/master", h.getTarget().getName());
new File(db.getDirectory(), "refs/heads").mkdirs();
Ref b = db.exactRef("refs/heads/b");
assertFalse(b.isSymbolic());
assertTrue(b.isPeeled());
assertEquals(bCommit, b.getObjectId().name());
String invalidId = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef";
File headFile = new File(db.getDirectory(), "refs/heads/broken");
try (OutputStream os = new FileOutputStream(headFile)) {
os.write(Constants.encodeASCII(invalidId + "\n"));
}
assertFalse(db.getRefDatabase().hasFastTipsWithSha1());
Ref r = db.exactRef("refs/heads/broken");
assertNotNull(r);
db.convertToReftable(true, false);
}
@Test

View File

@@ -941,6 +941,12 @@ public void logScan() throws IOException {
}
assertFalse(lc.next());
}
for (Ref exp : refs) {
try (LogCursor lc = t.seekLog(exp.getName())) {
assertTrue("has " + exp.getName(), lc.next());
}
}
}
@Test

View File

@@ -337,6 +337,34 @@ public void testCheckoutWithLFAutoEolCrLf() throws Exception {
"first line\r\nsecond line\r\n", "f text=auto eol=crlf");
}
@Test
public void testCheckoutMixedAutoEolCrLf() throws Exception {
checkoutLineEndings("first line\nsecond line\r\n",
"first line\nsecond line\r\n", "f text=auto eol=crlf");
}
@Test
public void testCheckoutMixedAutoEolLf() throws Exception {
checkoutLineEndings("first line\nsecond line\r\n",
"first line\nsecond line\r\n", "f text=auto eol=lf");
}
@Test
public void testCheckoutMixedTextCrLf() throws Exception {
// Huh? Is this a bug in git? Both git 2.18.0 and git 2.33.0 do
// write the file with CRLF (and consequently report the file as
// modified in "git status" after check-out), however the CRLF in the
// repository is _not_ replaced by LF with eol=lf (see test below).
checkoutLineEndings("first line\nsecond line\r\n",
"first line\r\nsecond line\r\n", "f text eol=crlf");
}
@Test
public void testCheckoutMixedTextLf() throws Exception {
checkoutLineEndings("first line\nsecond line\r\nfoo",
"first line\nsecond line\r\nfoo", "f text eol=lf");
}
private DirCacheCheckout resetHard(RevCommit commit)
throws NoWorkTreeException,
CorruptObjectException, IOException {

View File

@@ -99,7 +99,7 @@ public void testIsMergedIntoAny() throws Exception {
createBranch(commit(commit(a)), b);
createBranch(commit(commit(i)), c);
assertTrue( rw.isMergedIntoAny(a, getRefs()));
assertTrue(rw.isMergedIntoAny(a, getRefs()));
}
@Test
@@ -125,4 +125,23 @@ public void testIsMergedIntoAll() throws Exception {
assertTrue(rw.isMergedIntoAll(a, getRefs()));
}
@Test
public void testMergeIntoAnnotatedTag() throws Exception {
/*
* a
* |
* b
* / \
* c v1 (annotated tag)
*/
String c = "refs/heads/c";
String v1 = "refs/tags/v1";
final RevCommit a = commit();
final RevCommit b = commit(a);
createBranch(commit(b), c);
createBranch(tag("v1", b), v1);
assertTrue(rw.isMergedIntoAll(a, getRefs()));
}
}

View File

@@ -82,20 +82,33 @@ public void findBranchesReachableManyTimes() throws Exception {
* a b
* | |
* c d
* | \
* f e
* | /
* g
*/
RevCommit a = commit();
RevCommit b = commit();
RevCommit c = commit(a);
RevCommit d = commit(b);
RevCommit f = commit(d);
RevCommit e = commit(d);
RevCommit g = commit(f, e);
Ref branchA = branch("a", a);
Ref branchB = branch("b", b);
Ref branchC = branch("c", c);
Ref branchD = branch("d", d);
Ref branchE = branch("e", e);
Ref branchF = branch("f", f);
Ref branchG = branch("g", g);
assertContains(a, asList(branchA, branchC));
assertContains(b, asList(branchB, branchD));
assertContains(b, asList(branchB, branchD, branchE, branchF, branchG));
assertContains(c, asList(branchC));
assertContains(d, asList(branchD));
assertContains(d, asList(branchD, branchE, branchF, branchG));
assertContains(e, asList(branchE, branchG));
assertContains(f, asList(branchF, branchG));
assertContains(g, asList(branchG));
}
private Ref branch(String name, RevCommit dst) throws Exception {

View File

@@ -0,0 +1,123 @@
/*
* Copyright (C) 2021, Saša Živkov <sasa.zivkov@sap.com> and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
* https://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.eclipse.jgit.transport;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Objects;
import java.util.function.Consumer;
import org.eclipse.jgit.internal.storage.file.FileRepository;
import org.eclipse.jgit.junit.LocalDiskRepositoryTestCase;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.Sets;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevTag;
import org.junit.Before;
import org.junit.Test;
// TODO: refactor UploadPackTest to run against both DfsRepository and FileRepository
public class UploadPackLsRefsFileRepositoryTest
extends LocalDiskRepositoryTestCase {
private FileRepository server;
private TestRepository<FileRepository> remote;
@Before
@Override
public void setUp() throws Exception {
super.setUp();
server = createWorkRepository();
remote = new TestRepository<>(server);
}
@Test
public void testV2LsRefsPeel() throws Exception {
RevCommit tip = remote.commit().message("message").create();
remote.update("master", tip);
server.updateRef("HEAD").link("refs/heads/master");
RevTag tag = remote.tag("tag", tip);
remote.update("refs/tags/tag", tag);
ByteArrayInputStream recvStream = uploadPackV2("command=ls-refs\n",
PacketLineIn.delimiter(), "peel", PacketLineIn.end());
PacketLineIn pckIn = new PacketLineIn(recvStream);
assertThat(pckIn.readString(),
is(tip.toObjectId().getName() + " HEAD"));
assertThat(pckIn.readString(),
is(tip.toObjectId().getName() + " refs/heads/master"));
assertThat(pckIn.readString(), is(tag.toObjectId().getName()
+ " refs/tags/tag peeled:" + tip.toObjectId().getName()));
assertTrue(PacketLineIn.isEnd(pckIn.readString()));
}
private ByteArrayInputStream uploadPackV2(String... inputLines)
throws Exception {
return uploadPackV2(null, inputLines);
}
private ByteArrayInputStream uploadPackV2(
Consumer<UploadPack> postConstructionSetup, String... inputLines)
throws Exception {
ByteArrayInputStream recvStream = uploadPackV2Setup(
postConstructionSetup, inputLines);
PacketLineIn pckIn = new PacketLineIn(recvStream);
// drain capabilities
while (!PacketLineIn.isEnd(pckIn.readString())) {
// do nothing
}
return recvStream;
}
private ByteArrayInputStream uploadPackV2Setup(
Consumer<UploadPack> postConstructionSetup, String... inputLines)
throws Exception {
ByteArrayInputStream send = linesAsInputStream(inputLines);
server.getConfig().setString("protocol", null, "version", "2");
UploadPack up = new UploadPack(server);
if (postConstructionSetup != null) {
postConstructionSetup.accept(up);
}
up.setExtraParameters(Sets.of("version=2"));
ByteArrayOutputStream recv = new ByteArrayOutputStream();
up.upload(send, recv, null);
return new ByteArrayInputStream(recv.toByteArray());
}
private static ByteArrayInputStream linesAsInputStream(String... inputLines)
throws IOException {
try (ByteArrayOutputStream send = new ByteArrayOutputStream()) {
PacketLineOut pckOut = new PacketLineOut(send);
for (String line : inputLines) {
Objects.requireNonNull(line);
if (PacketLineIn.isEnd(line)) {
pckOut.end();
} else if (PacketLineIn.isDelimiter(line)) {
pckOut.writeDelim();
} else {
pckOut.writeString(line);
}
}
return new ByteArrayInputStream(send.toByteArray());
}
}
}

View File

@@ -19,7 +19,6 @@
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
@@ -182,7 +181,7 @@ public void testReadPipePosixCommandFailure()
FS.readPipe(fs.userHome(),
new String[] { "/bin/sh", "-c", "exit 1" },
Charset.defaultCharset().name());
SystemReader.getInstance().getDefaultCharset().name());
}
@Test(expected = CommandFailedException.class)
@@ -192,7 +191,7 @@ public void testReadPipeCommandStartFailure()
FS.readPipe(fs.userHome(),
new String[] { "this-command-does-not-exist" },
Charset.defaultCharset().name());
SystemReader.getInstance().getDefaultCharset().name());
}
@Test

View File

@@ -34,7 +34,7 @@ public void test() throws IOException {
assertNoCrLf("\r\n\r", "\n\r");
assertNoCrLf("\r\n\r\r", "\r\n\r\r");
assertNoCrLf("\r\n\r\n", "\r\n\r\n");
assertNoCrLf("\r\n\r\n\r", "\n\r\n\r");
assertNoCrLf("\n\r\n\r", "\n\r\n\r");
assertNoCrLf("\0\n", "\0\n");
}

View File

@@ -440,6 +440,7 @@ lockOnNotHeld=Lock on {0} not held.
lockStreamClosed=Output to lock on {0} already closed
lockStreamMultiple=Output to lock on {0} already opened
logInconsistentFiletimeDiff={}: inconsistent duration from file timestamps on {}, {}: {} > {}, but diff = {}. Aborting measurement at resolution {}.
logInvalidDefaultCharset=System property "native.encoding" specifies unknown character set: {}
logLargerFiletimeDiff={}: inconsistent duration from file timestamps on {}, {}: diff = {} > {} (last good value). Aborting measurement.
logSmallerFiletime={}: got smaller file timestamp on {}, {}: {} < {}. Aborting measurement at resolution {}.
logXDGConfigHomeInvalid=Environment variable XDG_CONFIG_HOME contains an invalid path {}

View File

@@ -221,6 +221,7 @@ public Properties getStatistics() throws GitAPIException {
@SuppressWarnings("boxing")
private static Properties toProperties(RepoStatistics stats) {
Properties p = new Properties();
p.put("numberOfBitmaps", stats.numberOfBitmaps); //$NON-NLS-1$
p.put("numberOfLooseObjects", stats.numberOfLooseObjects); //$NON-NLS-1$
p.put("numberOfLooseRefs", stats.numberOfLooseRefs); //$NON-NLS-1$
p.put("numberOfPackedObjects", stats.numberOfPackedObjects); //$NON-NLS-1$

View File

@@ -12,13 +12,13 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.concurrent.Callable;
import org.eclipse.jgit.api.errors.AbortedByHookException;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.ProcessResult;
import org.eclipse.jgit.util.SystemReader;
import org.eclipse.jgit.util.io.TeeOutputStream;
/**
@@ -171,7 +171,8 @@ protected void doRun() throws AbortedByHookException, IOException {
getStdinArgs());
if (result.isExecutedWithError()) {
handleError(new String(errorByteArray.toByteArray(),
Charset.defaultCharset().name()), result);
SystemReader.getInstance().getDefaultCharset().name()),
result);
}
}

View File

@@ -468,6 +468,7 @@ public static JGitText get() {
/***/ public String lockStreamClosed;
/***/ public String lockStreamMultiple;
/***/ public String logInconsistentFiletimeDiff;
/***/ public String logInvalidDefaultCharset;
/***/ public String logLargerFiletimeDiff;
/***/ public String logSmallerFiletime;
/***/ public String logXDGConfigHomeInvalid;

View File

@@ -39,9 +39,10 @@
* It's too expensive during object access to be accurate with a least recently
* used (LRU) algorithm. Strictly ordering every read is a lot of overhead that
* typically doesn't yield a corresponding benefit to the application. This
* cache implements a clock replacement algorithm, giving each block one chance
* to have been accessed during a sweep of the cache to save itself from
* eviction.
* cache implements a clock replacement algorithm, giving each block at least
* one chance to have been accessed during a sweep of the cache to save itself
* from eviction. The number of sweep chances is configurable per pack
* extension.
* <p>
* Entities created by the cache are held under hard references, preventing the
* Java VM from clearing anything. Blocks are discarded by the replacement
@@ -161,6 +162,9 @@ public static DfsBlockCache getInstance() {
/** Current position of the clock. */
private Ref clockHand;
/** Limits of cache hot count per pack file extension. */
private final int[] cacheHotLimits = new int[PackExt.values().length];
@SuppressWarnings("unchecked")
private DfsBlockCache(DfsBlockCacheConfig cfg) {
tableSize = tableSize(cfg);
@@ -196,6 +200,15 @@ private DfsBlockCache(DfsBlockCacheConfig cfg) {
liveBytes = new AtomicReference<>(newCounters());
refLockWaitTime = cfg.getRefLockWaitTimeConsumer();
for (int i = 0; i < PackExt.values().length; ++i) {
Integer limit = cfg.getCacheHotMap().get(PackExt.values()[i]);
if (limit != null && limit.intValue() > 0) {
cacheHotLimits[i] = limit.intValue();
} else {
cacheHotLimits[i] = DfsBlockCacheConfig.DEFAULT_CACHE_HOT_MAX;
}
}
}
boolean shouldCopyThroughCache(long length) {
@@ -394,7 +407,7 @@ DfsBlock getOrLoad(BlockBasedFile file, long position, DfsReader ctx,
}
Ref<DfsBlock> ref = new Ref<>(key, position, v.size(), v);
ref.hot = true;
ref.markHotter();
for (;;) {
HashEntry n = new HashEntry(clean(e2), ref);
if (table.compareAndSet(slot, e2, n)) {
@@ -424,10 +437,10 @@ private void reserveSpace(long reserve, DfsStreamKey key) {
Ref prev = clockHand;
Ref hand = clockHand.next;
do {
if (hand.hot) {
// Value was recently touched. Clear
// hot and give it another chance.
hand.hot = false;
if (hand.isHot()) {
// Value was recently touched. Cache is still hot so
// give it another chance, but cool it down a bit.
hand.markColder();
prev = hand;
hand = hand.next;
continue;
@@ -525,7 +538,7 @@ <T> Ref<T> getOrLoadRef(
}
getStat(statMiss, key).incrementAndGet();
ref = loader.load();
ref.hot = true;
ref.markHotter();
// Reserve after loading to get the size of the object
reserveSpace(ref.size, key);
for (;;) {
@@ -568,7 +581,7 @@ <T> Ref<T> put(DfsStreamKey key, long pos, long size, T v) {
}
ref = new Ref<>(key, pos, size, v);
ref.hot = true;
ref.markHotter();
for (;;) {
HashEntry n = new HashEntry(clean(e2), ref);
if (table.compareAndSet(slot, e2, n)) {
@@ -692,7 +705,8 @@ static final class Ref<T> {
final long size;
volatile T value;
Ref next;
volatile boolean hot;
private volatile int hotCount;
Ref(DfsStreamKey key, long position, long size, T v) {
this.key = key;
@@ -704,7 +718,7 @@ static final class Ref<T> {
T get() {
T v = value;
if (v != null) {
hot = true;
markHotter();
}
return v;
}
@@ -712,6 +726,20 @@ T get() {
boolean has() {
return value != null;
}
void markHotter() {
int cap = DfsBlockCache
.getInstance().cacheHotLimits[key.packExtPos];
hotCount = Math.min(cap, hotCount + 1);
}
void markColder() {
hotCount = Math.max(0, hotCount - 1);
}
boolean isHot() {
return hotCount > 0;
}
}
@FunctionalInterface

View File

@@ -18,9 +18,12 @@
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_STREAM_RATIO;
import java.text.MessageFormat;
import java.util.Collections;
import java.util.Map;
import java.util.function.Consumer;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.lib.Config;
/**
@@ -34,6 +37,9 @@ public class DfsBlockCacheConfig {
/** 1024 {@link #KB} (number of bytes in one mebibyte/megabyte) */
public static final int MB = 1024 * KB;
/** Default number of max cache hits. */
public static final int DEFAULT_CACHE_HOT_MAX = 1;
private long blockLimit;
private int blockSize;
private double streamRatio;
@@ -41,6 +47,8 @@ public class DfsBlockCacheConfig {
private Consumer<Long> refLock;
private Map<PackExt, Integer> cacheHotMap;
/**
* Create a default configuration.
*/
@@ -49,6 +57,7 @@ public DfsBlockCacheConfig() {
setBlockSize(64 * KB);
setStreamRatio(0.30);
setConcurrencyLevel(32);
cacheHotMap = Collections.emptyMap();
}
/**
@@ -184,6 +193,28 @@ public DfsBlockCacheConfig setRefLockWaitTimeConsumer(Consumer<Long> c) {
return this;
}
/**
* Get the map of hot count per pack extension for {@code DfsBlockCache}.
*
* @return map of hot count per pack extension for {@code DfsBlockCache}.
*/
public Map<PackExt, Integer> getCacheHotMap() {
return cacheHotMap;
}
/**
* Set the map of hot count per pack extension for {@code DfsBlockCache}.
*
* @param cacheHotMap
* map of hot count per pack extension for {@code DfsBlockCache}.
* @return {@code this}
*/
public DfsBlockCacheConfig setCacheHotMap(
Map<PackExt, Integer> cacheHotMap) {
this.cacheHotMap = Collections.unmodifiableMap(cacheHotMap);
return this;
}
/**
* Update properties by setting fields from the configuration.
* <p>
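
Wiring the new option up, as a minimal sketch (the values are illustrative, not tuned recommendations): INDEX blocks get three clock sweeps before becoming eviction candidates, while every other extension keeps DEFAULT_CACHE_HOT_MAX:

    import java.util.HashMap;
    import java.util.Map;
    import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache;
    import org.eclipse.jgit.internal.storage.dfs.DfsBlockCacheConfig;
    import org.eclipse.jgit.internal.storage.pack.PackExt;

    Map<PackExt, Integer> hot = new HashMap<>();
    hot.put(PackExt.INDEX, Integer.valueOf(3)); // pack indexes survive 3 sweeps
    DfsBlockCache.reconfigure(new DfsBlockCacheConfig()
            .setBlockLimit(256 * DfsBlockCacheConfig.MB)
            .setCacheHotMap(hot));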

View File

@@ -59,13 +59,6 @@ public final class DfsPackFile extends BlockBasedFile {
private static final int REC_SIZE = Constants.OBJECT_ID_LENGTH + 8;
private static final long REF_POSITION = 0;
/**
* Lock for initialization of {@link #index} and {@link #corruptObjects}.
* <p>
* This lock ensures only one thread can perform the initialization work.
*/
private final Object initLock = new Object();
/** Index mapping {@link ObjectId} to position within the pack stream. */
private volatile PackIndex index;
@@ -84,6 +77,9 @@ public final class DfsPackFile extends BlockBasedFile {
*/
private volatile LongList corruptObjects;
/** Lock for {@link #corruptObjects}. */
private final Object corruptObjectsLock = new Object();
/**
* Construct a reader for an existing packfile.
*
@@ -155,35 +151,26 @@ private PackIndex idx(DfsReader ctx) throws IOException {
Repository.getGlobalListenerList()
.dispatch(new BeforeDfsPackIndexLoadedEvent(this));
synchronized (initLock) {
if (index != null) {
return index;
try {
DfsStreamKey idxKey = desc.getStreamKey(INDEX);
AtomicBoolean cacheHit = new AtomicBoolean(true);
DfsBlockCache.Ref<PackIndex> idxref = cache.getOrLoadRef(idxKey,
REF_POSITION, () -> {
cacheHit.set(false);
return loadPackIndex(ctx, idxKey);
});
if (cacheHit.get()) {
ctx.stats.idxCacheHit++;
}
try {
DfsStreamKey idxKey = desc.getStreamKey(INDEX);
AtomicBoolean cacheHit = new AtomicBoolean(true);
DfsBlockCache.Ref<PackIndex> idxref = cache.getOrLoadRef(
idxKey,
REF_POSITION,
() -> {
cacheHit.set(false);
return loadPackIndex(ctx, idxKey);
});
if (cacheHit.get()) {
ctx.stats.idxCacheHit++;
}
PackIndex idx = idxref.get();
if (index == null && idx != null) {
index = idx;
}
return index;
} catch (IOException e) {
invalid = true;
invalidatingCause = e;
throw e;
PackIndex idx = idxref.get();
if (index == null && idx != null) {
index = idx;
}
return index;
} catch (IOException e) {
invalid = true;
invalidatingCause = e;
throw e;
}
}
@@ -191,7 +178,17 @@ final boolean isGarbage() {
return desc.getPackSource() == UNREACHABLE_GARBAGE;
}
PackBitmapIndex getBitmapIndex(DfsReader ctx) throws IOException {
/**
* Get the BitmapIndex for this PackFile.
*
* @param ctx
* reader context to support reading from the backing store if
* the index is not already loaded in memory.
* @return the BitmapIndex.
* @throws java.io.IOException
* the bitmap index is not available, or is corrupt.
*/
public PackBitmapIndex getBitmapIndex(DfsReader ctx) throws IOException {
if (invalid || isGarbage() || !desc.hasFileExt(BITMAP_INDEX)) {
return null;
}
@@ -200,31 +197,21 @@ PackBitmapIndex getBitmapIndex(DfsReader ctx) throws IOException {
return bitmapIndex;
}
synchronized (initLock) {
if (bitmapIndex != null) {
return bitmapIndex;
}
PackIndex idx = idx(ctx);
PackReverseIndex revidx = getReverseIdx(ctx);
DfsStreamKey bitmapKey = desc.getStreamKey(BITMAP_INDEX);
AtomicBoolean cacheHit = new AtomicBoolean(true);
DfsBlockCache.Ref<PackBitmapIndex> idxref = cache.getOrLoadRef(
bitmapKey,
REF_POSITION,
() -> {
cacheHit.set(false);
return loadBitmapIndex(ctx, bitmapKey, idx, revidx);
});
if (cacheHit.get()) {
ctx.stats.bitmapCacheHit++;
}
PackBitmapIndex bmidx = idxref.get();
if (bitmapIndex == null && bmidx != null) {
bitmapIndex = bmidx;
}
return bitmapIndex;
DfsStreamKey bitmapKey = desc.getStreamKey(BITMAP_INDEX);
AtomicBoolean cacheHit = new AtomicBoolean(true);
DfsBlockCache.Ref<PackBitmapIndex> idxref = cache
.getOrLoadRef(bitmapKey, REF_POSITION, () -> {
cacheHit.set(false);
return loadBitmapIndex(ctx, bitmapKey);
});
if (cacheHit.get()) {
ctx.stats.bitmapCacheHit++;
}
PackBitmapIndex bmidx = idxref.get();
if (bitmapIndex == null && bmidx != null) {
bitmapIndex = bmidx;
}
return bitmapIndex;
}
PackReverseIndex getReverseIdx(DfsReader ctx) throws IOException {
@@ -232,31 +219,23 @@ PackReverseIndex getReverseIdx(DfsReader ctx) throws IOException {
return reverseIndex;
}
synchronized (initLock) {
if (reverseIndex != null) {
return reverseIndex;
}
PackIndex idx = idx(ctx);
DfsStreamKey revKey = new DfsStreamKey.ForReverseIndex(
desc.getStreamKey(INDEX));
AtomicBoolean cacheHit = new AtomicBoolean(true);
DfsBlockCache.Ref<PackReverseIndex> revref = cache.getOrLoadRef(
revKey,
REF_POSITION,
() -> {
cacheHit.set(false);
return loadReverseIdx(ctx, revKey, idx);
});
if (cacheHit.get()) {
ctx.stats.ridxCacheHit++;
}
PackReverseIndex revidx = revref.get();
if (reverseIndex == null && revidx != null) {
reverseIndex = revidx;
}
return reverseIndex;
PackIndex idx = idx(ctx);
DfsStreamKey revKey = new DfsStreamKey.ForReverseIndex(
desc.getStreamKey(INDEX));
AtomicBoolean cacheHit = new AtomicBoolean(true);
DfsBlockCache.Ref<PackReverseIndex> revref = cache.getOrLoadRef(revKey,
REF_POSITION, () -> {
cacheHit.set(false);
return loadReverseIdx(ctx, revKey, idx);
});
if (cacheHit.get()) {
ctx.stats.ridxCacheHit++;
}
PackReverseIndex revidx = revref.get();
if (reverseIndex == null && revidx != null) {
reverseIndex = revidx;
}
return reverseIndex;
}
/**
@@ -1003,7 +982,7 @@ boolean isCorrupt(long offset) {
private void setCorrupt(long offset) {
LongList list = corruptObjects;
if (list == null) {
synchronized (initLock) {
synchronized (corruptObjectsLock) {
list = corruptObjects;
if (list == null) {
list = new LongList();
@@ -1066,11 +1045,8 @@ private DfsBlockCache.Ref<PackReverseIndex> loadReverseIdx(
revidx);
}
private DfsBlockCache.Ref<PackBitmapIndex> loadBitmapIndex(
DfsReader ctx,
DfsStreamKey bitmapKey,
PackIndex idx,
PackReverseIndex revidx) throws IOException {
private DfsBlockCache.Ref<PackBitmapIndex> loadBitmapIndex(DfsReader ctx,
DfsStreamKey bitmapKey) throws IOException {
ctx.stats.readBitmap++;
long start = System.nanoTime();
try (ReadableChannel rc = ctx.db.openFile(desc, BITMAP_INDEX)) {
@@ -1086,7 +1062,8 @@ private DfsBlockCache.Ref<PackBitmapIndex> loadBitmapIndex(
bs = wantSize;
}
in = new BufferedInputStream(in, bs);
bmidx = PackBitmapIndex.read(in, idx, revidx);
bmidx = PackBitmapIndex.read(in, () -> idx(ctx),
() -> getReverseIdx(ctx));
} finally {
size = rc.position();
ctx.stats.readBitmapIdxBytes += size;

View File

@@ -28,6 +28,7 @@
import java.util.stream.Collectors;
import org.eclipse.jgit.annotations.NonNull;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.events.RefsChangedEvent;
import org.eclipse.jgit.internal.storage.reftable.MergedReftable;
import org.eclipse.jgit.internal.storage.reftable.ReftableBatchRefUpdate;
@@ -592,15 +593,20 @@ private static Ref refForWrite(RevWalk rw, Ref r) throws IOException {
r.getTarget().getName(), null));
}
ObjectId newId = r.getObjectId();
RevObject obj = rw.parseAny(newId);
RevObject peel = null;
if (obj instanceof RevTag) {
peel = rw.peel(obj);
try {
RevObject obj = rw.parseAny(newId);
if (obj instanceof RevTag) {
peel = rw.peel(obj);
}
} catch (MissingObjectException e) {
/* ignore this error and copy the dangling object ID into reftable too. */
}
if (peel != null) {
return new ObjectIdRef.PeeledTag(PACKED, r.getName(), newId,
peel.copy());
}
return new ObjectIdRef.PeeledTag(PACKED, r.getName(), newId,
peel.copy());
}
return new ObjectIdRef.PeeledNonTag(PACKED, r.getName(), newId);
}

View File

@@ -57,11 +57,10 @@ public abstract class PackBitmapIndex {
* @throws CorruptObjectException
* the stream does not contain a valid pack bitmap index.
*/
public static PackBitmapIndex open(
File idxFile, PackIndex packIndex, PackReverseIndex reverseIndex)
public static PackBitmapIndex open(File idxFile, PackIndex packIndex,
PackReverseIndex reverseIndex)
throws IOException {
try (SilentFileInputStream fd = new SilentFileInputStream(
idxFile)) {
try (SilentFileInputStream fd = new SilentFileInputStream(idxFile)) {
try {
return read(fd, packIndex, reverseIndex);
} catch (IOException ioe) {
@@ -94,10 +93,39 @@ public static PackBitmapIndex open(
* @throws CorruptObjectException
* the stream does not contain a valid pack bitmap index.
*/
public static PackBitmapIndex read(
InputStream fd, PackIndex packIndex, PackReverseIndex reverseIndex)
public static PackBitmapIndex read(InputStream fd, PackIndex packIndex,
PackReverseIndex reverseIndex) throws IOException {
return new PackBitmapIndexV1(fd, () -> packIndex, () -> reverseIndex);
}
/**
* Read an existing pack bitmap index file from a buffered stream.
* <p>
* The format of the file will be automatically detected and a proper access
* implementation for that format will be constructed and returned to the
* caller. The file may or may not be held open by the returned instance.
*
* @param fd
* stream to read the bitmap index file from. The stream must be
* buffered as some small IOs are performed against the stream.
* The caller is responsible for closing the stream.
* @param packIndexSupplier
* the supplier for pack index for the corresponding pack file.
* @param reverseIndexSupplier
* the supplier for pack reverse index for the corresponding pack
* file.
* @return a copy of the index in-memory.
* @throws java.io.IOException
* the stream cannot be read.
* @throws CorruptObjectException
* the stream does not contain a valid pack bitmap index.
*/
public static PackBitmapIndex read(InputStream fd,
SupplierWithIOException<PackIndex> packIndexSupplier,
SupplierWithIOException<PackReverseIndex> reverseIndexSupplier)
throws IOException {
return new PackBitmapIndexV1(fd, packIndex, reverseIndex);
return new PackBitmapIndexV1(fd, packIndexSupplier,
reverseIndexSupplier);
}
/** Footer checksum applied on the bottom of the pack file. */
@@ -121,7 +149,8 @@ public static PackBitmapIndex read(
* @throws java.lang.IllegalArgumentException
* when the item is not found.
*/
public abstract ObjectId getObject(int position) throws IllegalArgumentException;
public abstract ObjectId getObject(int position)
throws IllegalArgumentException;
/**
* Returns a bitmap containing positions for objects that have the given Git
@@ -161,4 +190,19 @@ public abstract EWAHCompressedBitmap ofObjectType(
* @return the number of bitmaps in this bitmap index.
*/
public abstract int getBitmapCount();
/**
* Supplier that propagates IOException.
*
* @param <T>
* the return type which is expected from {@link #get()}
*/
@FunctionalInterface
public interface SupplierWithIOException<T> {
/**
* @return result
* @throws IOException
*/
T get() throws IOException;
}
}
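
Because the suppliers run only when the bitmap code first dereferences them, DfsPackFile (earlier in this merge) can drop its shared initLock and let pack index, reverse index, and bitmap index load in parallel. A hedged caller sketch; the two load helpers are hypothetical stand-ins for however the indexes are obtained:

    // in: a BufferedInputStream over the .bitmap file
    PackBitmapIndex bitmaps = PackBitmapIndex.read(in,
            () -> loadPackIndex(),      // hypothetical helper, may throw IOException
            () -> loadReverseIndex());  // hypothetical helper, may throw IOException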

View File

@@ -14,8 +14,11 @@
import java.io.IOException;
import java.io.InputStream;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
@@ -46,11 +49,13 @@ class PackBitmapIndexV1 extends BasePackBitmapIndex {
private final ObjectIdOwnerMap<StoredBitmap> bitmaps;
PackBitmapIndexV1(final InputStream fd, PackIndex packIndex,
PackReverseIndex reverseIndex) throws IOException {
PackBitmapIndexV1(final InputStream fd,
SupplierWithIOException<PackIndex> packIndexSupplier,
SupplierWithIOException<PackReverseIndex> reverseIndexSupplier)
throws IOException {
// An entry is object id, xor offset, flag byte, and a length encoded
// bitmap. The object id is an int32 of the nth position sorted by name.
super(new ObjectIdOwnerMap<StoredBitmap>());
this.packIndex = packIndex;
this.reverseIndex = reverseIndex;
this.bitmaps = getBitmaps();
final byte[] scratch = new byte[32];
@@ -97,10 +102,10 @@ class PackBitmapIndexV1 extends BasePackBitmapIndex {
this.blobs = readBitmap(dataInput);
this.tags = readBitmap(dataInput);
// An entry is object id, xor offset, flag byte, and a length encoded
// bitmap. The object id is an int32 of the nth position sorted by name.
// Read full bitmap from storage first.
List<IdxPositionBitmap> idxPositionBitmapList = new ArrayList<>();
// The xor offset is a single byte offset back in the list of entries.
StoredBitmap[] recentBitmaps = new StoredBitmap[MAX_XOR_OFFSET];
IdxPositionBitmap[] recentBitmaps = new IdxPositionBitmap[MAX_XOR_OFFSET];
for (int i = 0; i < (int) numEntries; i++) {
IO.readFully(fd, scratch, 0, 6);
int nthObjectId = NB.decodeInt32(scratch, 0);
@@ -108,38 +113,58 @@ class PackBitmapIndexV1 extends BasePackBitmapIndex {
int flags = scratch[5];
EWAHCompressedBitmap bitmap = readBitmap(dataInput);
if (nthObjectId < 0)
if (nthObjectId < 0) {
throw new IOException(MessageFormat.format(
JGitText.get().invalidId, String.valueOf(nthObjectId)));
if (xorOffset < 0)
}
if (xorOffset < 0) {
throw new IOException(MessageFormat.format(
JGitText.get().invalidId, String.valueOf(xorOffset)));
if (xorOffset > MAX_XOR_OFFSET)
}
if (xorOffset > MAX_XOR_OFFSET) {
throw new IOException(MessageFormat.format(
JGitText.get().expectedLessThanGot,
String.valueOf(MAX_XOR_OFFSET),
String.valueOf(xorOffset)));
if (xorOffset > i)
}
if (xorOffset > i) {
throw new IOException(MessageFormat.format(
JGitText.get().expectedLessThanGot, String.valueOf(i),
String.valueOf(xorOffset)));
ObjectId objectId = packIndex.getObjectId(nthObjectId);
StoredBitmap xorBitmap = null;
}
IdxPositionBitmap xorIdxPositionBitmap = null;
if (xorOffset > 0) {
int index = (i - xorOffset);
xorBitmap = recentBitmaps[index % recentBitmaps.length];
if (xorBitmap == null)
xorIdxPositionBitmap = recentBitmaps[index
% recentBitmaps.length];
if (xorIdxPositionBitmap == null) {
throw new IOException(MessageFormat.format(
JGitText.get().invalidId,
String.valueOf(xorOffset)));
}
}
StoredBitmap sb = new StoredBitmap(
objectId, bitmap, xorBitmap, flags);
bitmaps.add(sb);
recentBitmaps[i % recentBitmaps.length] = sb;
IdxPositionBitmap idxPositionBitmap = new IdxPositionBitmap(
nthObjectId, xorIdxPositionBitmap, bitmap, flags);
idxPositionBitmapList.add(idxPositionBitmap);
recentBitmaps[i % recentBitmaps.length] = idxPositionBitmap;
}
this.packIndex = packIndexSupplier.get();
for (int i = 0; i < idxPositionBitmapList.size(); ++i) {
IdxPositionBitmap idxPositionBitmap = idxPositionBitmapList.get(i);
ObjectId objectId = packIndex
.getObjectId(idxPositionBitmap.nthObjectId);
StoredBitmap sb = new StoredBitmap(objectId,
idxPositionBitmap.bitmap,
idxPositionBitmap.getXorStoredBitmap(),
idxPositionBitmap.flags);
// Save the StoredBitmap for a possible future XorStoredBitmap
// reference.
idxPositionBitmap.sb = sb;
bitmaps.add(sb);
}
this.reverseIndex = reverseIndexSupplier.get();
}
/** {@inheritDoc} */
@@ -214,4 +239,34 @@ private static EWAHCompressedBitmap readBitmap(DataInput dataInput)
bitmap.deserialize(dataInput);
return bitmap;
}
/**
* Temporary holder of object position in pack index and other metadata for
* {@code StoredBitmap}.
*/
private static final class IdxPositionBitmap {
int nthObjectId;
IdxPositionBitmap xorIdxPositionBitmap;
EWAHCompressedBitmap bitmap;
int flags;
StoredBitmap sb;
IdxPositionBitmap(int nthObjectId,
@Nullable IdxPositionBitmap xorIdxPositionBitmap,
EWAHCompressedBitmap bitmap, int flags) {
this.nthObjectId = nthObjectId;
this.xorIdxPositionBitmap = xorIdxPositionBitmap;
this.bitmap = bitmap;
this.flags = flags;
}
StoredBitmap getXorStoredBitmap() {
return xorIdxPositionBitmap == null ? null
: xorIdxPositionBitmap.sb;
}
}
}

View File

@@ -59,7 +59,6 @@
class BlockReader {
private byte blockType;
private long endPosition;
private boolean truncated;
private byte[] buf;
private int bufLen;
@@ -79,10 +78,6 @@ byte type() {
return blockType;
}
boolean truncated() {
return truncated;
}
long endPosition() {
return endPosition;
}
@@ -298,16 +293,8 @@ private void parseBlockStart(BlockSource src, long pos, int fileBlockSize)
// Log blocks must be inflated after the header.
long deflatedSize = inflateBuf(src, pos, blockLen, fileBlockSize);
endPosition = pos + 4 + deflatedSize;
}
if (bufLen < blockLen) {
if (blockType != INDEX_BLOCK_TYPE) {
throw invalidBlock();
}
// Its OK during sequential scan for an index block to have been
// partially read and be truncated in-memory. This happens when
// the index block is larger than the file's blockSize. Caller
// will break out of its scan loop once it sees the blockType.
truncated = true;
} else if (bufLen < blockLen) {
readBlockIntoBuf(src, pos, blockLen);
} else if (bufLen > blockLen) {
bufLen = blockLen;
}
@@ -372,7 +359,7 @@ private void setupEmptyFileBlock() {
}
void verifyIndex() throws IOException {
if (blockType != INDEX_BLOCK_TYPE || truncated) {
if (blockType != INDEX_BLOCK_TYPE) {
throw invalidBlock();
}
}

View File

@@ -435,7 +435,7 @@ private BlockReader readBlock(long pos, long end) throws IOException {
BlockReader b = new BlockReader();
b.readBlock(src, pos, sz);
if (b.type() == INDEX_BLOCK_TYPE && !b.truncated()) {
if (b.type() == INDEX_BLOCK_TYPE) {
if (indexCache == null) {
indexCache = new LongMap<>();
}

View File

@@ -210,7 +210,7 @@ private List<HostEntry> parse(BufferedReader reader)
// The man page doesn't say so, but the openssh parser (readconf.c)
// starts out in active mode and thus always applies any lines that
// occur before the first host block. We gather those options in a
// HostEntry for DEFAULT_NAME.
// HostEntry.
HostEntry defaults = new HostEntry();
HostEntry current = defaults;
entries.add(defaults);
@@ -309,8 +309,7 @@ private List<String> parseList(String argument) {
* @return the validated and possibly sanitized value
*/
protected String validate(String key, String value) {
if (String.CASE_INSENSITIVE_ORDER.compare(key,
SshConstants.PREFERRED_AUTHENTICATIONS) == 0) {
if (SshConstants.PREFERRED_AUTHENTICATIONS.equalsIgnoreCase(key)) {
return stripWhitespace(value);
}
return value;

View File

@@ -528,6 +528,7 @@ private List<Ref> getMergedInto(RevCommit needle, Collection<Ref> haystacks,
Enum returnStrategy, ProgressMonitor monitor) throws IOException {
List<Ref> result = new ArrayList<>();
List<RevCommit> uninteresting = new ArrayList<>();
List<RevCommit> marked = new ArrayList<>();
RevFilter oldRF = filter;
TreeFilter oldTF = treeFilter;
try {
@@ -540,22 +541,25 @@ private List<Ref> getMergedInto(RevCommit needle, Collection<Ref> haystacks,
return result;
}
monitor.update(1);
RevObject o = parseAny(r.getObjectId());
RevObject o = peel(parseAny(r.getObjectId()));
if (!(o instanceof RevCommit)) {
continue;
}
RevCommit c = (RevCommit) o;
resetRetain(RevFlag.UNINTERESTING);
reset(UNINTERESTING | TEMP_MARK);
markStart(c);
boolean commitFound = false;
RevCommit next;
while ((next = next()) != null) {
if (References.isSameObject(next, needle)) {
if (References.isSameObject(next, needle)
|| (next.flags & TEMP_MARK) != 0) {
result.add(r);
if (returnStrategy == GetMergedIntoStrategy.RETURN_ON_FIRST_FOUND) {
return result;
}
commitFound = true;
c.flags |= TEMP_MARK;
marked.add(c);
break;
}
}
@@ -571,6 +575,9 @@ private List<Ref> getMergedInto(RevCommit needle, Collection<Ref> haystacks,
roots.addAll(uninteresting);
filter = oldRF;
treeFilter = oldTF;
for (RevCommit c : marked) {
c.flags &= ~TEMP_MARK;
}
}
return result;
}
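
getMergedInto itself is private; the memoization surfaces through RevWalk's public reachability checks, where TEMP_MARK ensures history shared between refs is walked only once. A minimal usage sketch (walk, commit, and refs assumed to be set up):

    // Which refs contain commit c? Each positive answer marks the walked tip,
    // so later refs reaching it short-circuit instead of re-walking to c.
    boolean inAny = revWalk.isMergedIntoAny(c, refs);
    boolean inAll = revWalk.isMergedIntoAll(c, refs);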

View File

@@ -57,6 +57,7 @@
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -406,14 +407,16 @@ public final Map<String, Ref> getAdvertisedRefs() {
* were advertised.
*/
public void setAdvertisedRefs(@Nullable Map<String, Ref> allRefs) {
if (allRefs != null)
if (allRefs != null) {
refs = allRefs;
else
refs = db.getAllRefs();
if (refFilter == RefFilter.DEFAULT)
} else {
refs = getAllRefs();
}
if (refFilter == RefFilter.DEFAULT) {
refs = transferConfig.getRefFilter().filter(refs);
else
} else {
refs = refFilter.filter(refs);
}
}
/**
@@ -864,6 +867,20 @@ public PackStatistics getStatistics() {
return statistics;
}
/**
* Extract the full list of refs from the ref-db.
*
* @return Map of all refname/ref
*/
private Map<String, Ref> getAllRefs() {
try {
return db.getRefDatabase().getRefs().stream().collect(
Collectors.toMap(Ref::getName, Function.identity()));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private Map<String, Ref> getAdvertisedOrDefaultRefs() throws IOException {
if (refs != null) {
return refs;
@@ -1092,6 +1109,7 @@ private void lsRefsV2(PacketLineOut pckOut) throws IOException {
rawOut.stopBuffering();
PacketLineOutRefAdvertiser adv = new PacketLineOutRefAdvertiser(pckOut);
adv.init(db);
adv.setUseProtocolV2(true);
if (req.getPeel()) {
adv.setDerefTags(true);

View File

@@ -23,7 +23,6 @@
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.Charset;
import java.nio.file.AccessDeniedException;
import java.nio.file.FileStore;
import java.nio.file.Files;
@@ -1507,7 +1506,7 @@ protected File discoverGitSystemConfig() {
try {
v = readPipe(gitExe.getParentFile(),
new String[] { gitExe.getPath(), "--version" }, //$NON-NLS-1$
Charset.defaultCharset().name());
SystemReader.getInstance().getDefaultCharset().name());
} catch (CommandFailedException e) {
LOG.warn(e.getMessage());
return null;
@@ -1527,7 +1526,7 @@ protected File discoverGitSystemConfig() {
w = readPipe(gitExe.getParentFile(),
new String[] { gitExe.getPath(), "config", "--system", //$NON-NLS-1$ //$NON-NLS-2$
"--edit" }, //$NON-NLS-1$
Charset.defaultCharset().name(), env);
SystemReader.getInstance().getDefaultCharset().name(), env);
} catch (CommandFailedException e) {
LOG.warn(e.getMessage());
return null;

View File

@@ -17,7 +17,6 @@
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.FileStore;
import java.nio.file.FileSystemException;
@@ -119,8 +118,8 @@ private static int readUmask() {
new String[] { "sh", "-c", "umask" }, //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
null, null);
try (BufferedReader lineRead = new BufferedReader(
new InputStreamReader(p.getInputStream(), Charset
.defaultCharset().name()))) {
new InputStreamReader(p.getInputStream(), SystemReader
.getInstance().getDefaultCharset().name()))) {
if (p.waitFor() == 0) {
String s = lineRead.readLine();
if (s != null && s.matches("0?\\d{3}")) { //$NON-NLS-1$
@@ -150,7 +149,8 @@ protected File discoverGitExe() {
try {
String w = readPipe(userHome(),
new String[]{"bash", "--login", "-c", "which git"}, // //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
Charset.defaultCharset().name());
SystemReader.getInstance().getDefaultCharset()
.name());
if (!StringUtils.isEmptyOrNull(w)) {
gitExe = new File(w);
}
@@ -168,7 +168,8 @@ protected File discoverGitExe() {
try {
String w = readPipe(userHome(),
new String[] { "xcode-select", "-p" }, //$NON-NLS-1$ //$NON-NLS-2$
Charset.defaultCharset().name());
SystemReader.getInstance().getDefaultCharset()
.name());
if (StringUtils.isEmptyOrNull(w)) {
gitExe = null;
} else {

View File

@@ -13,7 +13,6 @@
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
@@ -150,8 +149,10 @@ protected File discoverGitExe() {
String w;
try {
w = readPipe(userHome(),
new String[]{"bash", "--login", "-c", "which git"}, // //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
Charset.defaultCharset().name());
new String[] { "bash", "--login", "-c", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"which git" }, // //$NON-NLS-1$
SystemReader.getInstance().getDefaultCharset()
.name());
} catch (CommandFailedException e) {
LOG.warn(e.getMessage());
return null;

View File

@@ -1160,7 +1160,7 @@ public static String decodeNoFallback(final Charset cs,
// Try the default character set. A small group of people
// might actually use the same (or very similar) locale.
Charset defcs = Charset.defaultCharset();
Charset defcs = SystemReader.getInstance().getDefaultCharset();
if (!defcs.equals(cs) && !defcs.equals(UTF_8)) {
try {
return decode(b, defcs);

View File

@@ -17,6 +17,9 @@
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.UnsupportedCharsetException;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -198,6 +201,8 @@ public static void setInstance(SystemReader newReader) {
private AtomicReference<FileBasedConfig> jgitConfig = new AtomicReference<>();
private volatile Charset defaultCharset;
private void init() {
// Creating ObjectChecker must be deferred. Unit tests change
// behavior of is{Windows,MacOS} in constructor of subclass.
@@ -438,6 +443,35 @@ public Locale getLocale() {
return Locale.getDefault();
}
/**
* Retrieves the default {@link Charset} depending on the system locale.
*
* @return the {@link Charset}
* @since 6.0
* @see <a href="https://openjdk.java.net/jeps/400">JEP 400</a>
*/
public Charset getDefaultCharset() {
Charset result = defaultCharset;
if (result == null) {
// JEP 400: Java 18 populates this system property.
String encoding = getProperty("native.encoding"); //$NON-NLS-1$
try {
if (!StringUtils.isEmptyOrNull(encoding)) {
result = Charset.forName(encoding);
}
} catch (IllegalCharsetNameException
| UnsupportedCharsetException e) {
LOG.error(JGitText.get().logInvalidDefaultCharset, encoding);
}
if (result == null) {
// This is always UTF-8 on Java >= 18.
result = Charset.defaultCharset();
}
defaultCharset = result;
}
return result;
}
/**
* Returns a simple date format instance as specified by the given pattern.
*
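
The mechanical replacement applied across this merge follows from JEP 400: on Java 18 and later, Charset.defaultCharset() is always UTF-8, and the locale-dependent encoding survives only in the native.encoding property. Side by side, as a minimal sketch:

    import java.nio.charset.Charset;
    import org.eclipse.jgit.util.SystemReader;

    Charset jvm = Charset.defaultCharset();             // UTF-8 on Java >= 18
    Charset locale = SystemReader.getInstance()
            .getDefaultCharset();                       // honors native.encoding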

View File

@@ -137,6 +137,9 @@ private int buffer(byte[] b, int off, int len) throws IOException {
private void decideMode() throws IOException {
if (detectBinary) {
isBinary = RawText.isBinary(binbuf, binbufcnt);
if (!isBinary) {
isBinary = RawText.isCrLfText(binbuf, binbufcnt);
}
detectBinary = false;
}
int cachedLen = binbufcnt;
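
Marking CRLF-bearing content as binary makes the stream pass it through unchanged, which is what fixes checkout of files with mixed line endings: conversion now only applies to purely LF content. The same decision in isolation, as a minimal sketch over RawText's static helpers:

    import org.eclipse.jgit.diff.RawText;

    // Skip line-ending conversion when the buffer is real binary, or when it
    // already contains CRLF (mixed endings must round-trip unchanged).
    static boolean passThrough(byte[] buf, int len) {
        return RawText.isBinary(buf, len) || RawText.isCrLfText(buf, len);
    }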

View File

@@ -827,8 +827,8 @@
</compilerArgs>
<annotationProcessorPaths>
<path>
<groupId>com.google.errorprone</groupId>
<artifactId>error_prone_core</artifactId>
<groupId>com.google.errorprone</groupId>
<artifactId>error_prone_core</artifactId>
<version>2.9.0</version>
</path>
</annotationProcessorPaths>