Exclude refs/tags from bitmap commit selection

Commit db77610 ensured that all refs/tags commits are added to the
primary GC pack by including them in that pack's PackWriter
"interesting" object set.

Unfortunately, all commit objects in the "interesting" set are
selected as commits for which bitmap indices will be built. In a
repository like chromium with lots of tags, this changed the number of
bitmaps created from <700 to >10000. That puts huge memory pressure on
the GC task.
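
The following rough sketch (assumed helper names, simplified from the
setupTipCommitBitmaps() loop changed below) shows why this happens:
every commit in the want/"interesting" set is peeled and treated as a
bitmap tip candidate, so lightweight-tag commits look exactly like
branch tips.

  import java.io.IOException;
  import java.util.HashSet;
  import java.util.Set;

  import org.eclipse.jgit.lib.AnyObjectId;
  import org.eclipse.jgit.revwalk.RevCommit;
  import org.eclipse.jgit.revwalk.RevObject;
  import org.eclipse.jgit.revwalk.RevWalk;

  // Sketch only, not JGit code; mirrors the tip-selection loop in
  // PackWriterBitmapPreparer.
  class TipSelectionSketch {
    // Peel every "interesting" id; each one that resolves to a commit
    // becomes a candidate for bitmap selection.
    static Set<RevCommit> tipCandidates(RevWalk rw,
        Set<? extends AnyObjectId> want) throws IOException {
      Set<RevCommit> peeledWant = new HashSet<>();
      for (AnyObjectId id : want) {
        RevObject ro = rw.peel(rw.parseAny(id));
        if (ro instanceof RevCommit) {
          peeledWant.add((RevCommit) ro);
        }
      }
      return peeledWant;
    }
  }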

This change restores the original behavior of ignoring tags when
selecting commits for bitmaps.

In the "uninteresting" set, commits for refs/heads and refs/tags for
unannotated tags can not be differentiated. We instead identify
refs/tags commits by passing their ObjectIds as a new "noBitmaps"
parameter to the PackWriter.preparePack() methods.
PackWriterBitmapPreparer.setupTipCommitBitmaps() can then use that
"noBitmaps" parameter to exclude those commits.

Change-Id: Icd287c6b04fc1e48de773033fe432a9b0e904ac5
Signed-off-by: Terry Parker <tparker@google.com>
Terry Parker 2017-05-18 01:30:14 -07:00
parent 69d5e89e99
commit c46c720e99
7 changed files with 159 additions and 59 deletions

View File

@@ -711,7 +711,7 @@ private static PackIndex writePack(FileRepository repo, RevWalk walk,
}
ObjectWalk ow = walk.toObjectWalkWithSameObjects();
pw.preparePack(NullProgressMonitor.INSTANCE, ow, want, have);
pw.preparePack(NullProgressMonitor.INSTANCE, ow, want, have, NONE);
String id = pw.computeName().getName();
File packdir = new File(repo.getObjectsDirectory(), "pack");
File packFile = new File(packdir, "pack-" + id + ".pack");

View File

@@ -69,6 +69,15 @@ public class GcCommitSelectionTest extends GcTestCase {
@Test
public void testBitmapSpansNoMerges() throws Exception {
testBitmapSpansNoMerges(false);
}
@Test
public void testBitmapSpansNoMergesWithTags() throws Exception {
testBitmapSpansNoMerges(true);
}
private void testBitmapSpansNoMerges(boolean withTags) throws Exception {
/*
* Commit counts -> expected bitmap counts for history without merges.
* The top 100 contiguous commits should always have bitmaps, and the
@@ -89,7 +98,10 @@ public void testBitmapSpansNoMerges() throws Exception {
assertTrue(nextCommitCount > currentCommits); // programming error
for (int i = currentCommits; i < nextCommitCount; i++) {
String str = "A" + i;
bb.commit().message(str).add(str, str).create();
RevCommit rc = bb.commit().message(str).add(str, str).create();
if (withTags) {
tr.lightweightTag(str, rc);
}
}
currentCommits = nextCommitCount;
@@ -233,7 +245,7 @@ public void testSelectionOrderingWithChains() throws Exception {
m8, m9);
PackWriterBitmapPreparer preparer = newPeparer(m9, commits);
List<BitmapCommit> selection = new ArrayList<>(
preparer.selectCommits(commits.size()));
preparer.selectCommits(commits.size(), PackWriter.NONE));
// Verify that the output is ordered by the separate "chains"
String[] expected = { m0.name(), m1.name(), m2.name(), m4.name(),

View File

@@ -53,6 +53,7 @@
import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
import static org.eclipse.jgit.internal.storage.pack.PackWriter.NONE;
import java.io.IOException;
import java.util.ArrayList;
@@ -111,7 +112,8 @@ public class DfsGarbageCollector {
private List<DfsPackFile> packsBefore;
private List<DfsPackFile> expiredGarbagePacks;
private Set<ObjectId> allHeads;
private Set<ObjectId> allHeadsAndTags;
private Set<ObjectId> allTags;
private Set<ObjectId> nonHeads;
private Set<ObjectId> txnHeads;
private Set<ObjectId> tagTargets;
@@ -241,23 +243,36 @@ public boolean pack(ProgressMonitor pm) throws IOException {
Collection<Ref> refsBefore = getAllRefs();
readPacksBefore();
allHeads = new HashSet<>();
Set<ObjectId> allHeads = new HashSet<>();
allHeadsAndTags = new HashSet<>();
allTags = new HashSet<>();
nonHeads = new HashSet<>();
txnHeads = new HashSet<>();
tagTargets = new HashSet<>();
for (Ref ref : refsBefore) {
if (ref.isSymbolic() || ref.getObjectId() == null)
if (ref.isSymbolic() || ref.getObjectId() == null) {
continue;
if (isHead(ref) || isTag(ref))
}
if (isHead(ref)) {
allHeads.add(ref.getObjectId());
else if (RefTreeNames.isRefTree(refdb, ref.getName()))
} else if (isTag(ref)) {
allTags.add(ref.getObjectId());
} else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
txnHeads.add(ref.getObjectId());
else
} else {
nonHeads.add(ref.getObjectId());
if (ref.getPeeledObjectId() != null)
}
if (ref.getPeeledObjectId() != null) {
tagTargets.add(ref.getPeeledObjectId());
}
}
tagTargets.addAll(allHeads);
// Don't exclude tags that are also branch tips.
allTags.removeAll(allHeads);
allHeadsAndTags.addAll(allHeads);
allHeadsAndTags.addAll(allTags);
// Hoist all branch tips and tags earlier in the pack file
tagTargets.addAll(allHeadsAndTags);
boolean rollback = true;
try {
@@ -413,12 +428,12 @@ private List<DfsPackDescription> toPrune() {
}
private void packHeads(ProgressMonitor pm) throws IOException {
if (allHeads.isEmpty())
if (allHeadsAndTags.isEmpty())
return;
try (PackWriter pw = newPackWriter()) {
pw.setTagTargets(tagTargets);
pw.preparePack(pm, allHeads, PackWriter.NONE);
pw.preparePack(pm, allHeadsAndTags, NONE, NONE, allTags);
if (0 < pw.getObjectCount())
writePack(GC, pw, pm,
estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC));
@@ -432,7 +447,7 @@ private void packRest(ProgressMonitor pm) throws IOException {
try (PackWriter pw = newPackWriter()) {
for (ObjectIdSet packedObjs : newPackObj)
pw.excludeObjects(packedObjs);
pw.preparePack(pm, nonHeads, allHeads);
pw.preparePack(pm, nonHeads, allHeadsAndTags);
if (0 < pw.getObjectCount())
writePack(GC_REST, pw, pm,
estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC_REST));
@@ -446,7 +461,7 @@ private void packRefTreeGraph(ProgressMonitor pm) throws IOException {
try (PackWriter pw = newPackWriter()) {
for (ObjectIdSet packedObjs : newPackObj)
pw.excludeObjects(packedObjs);
pw.preparePack(pm, txnHeads, PackWriter.NONE);
pw.preparePack(pm, txnHeads, NONE);
if (0 < pw.getObjectCount())
writePack(GC_TXN, pw, pm, 0 /* unknown pack size */);
}

View File

@@ -729,7 +729,9 @@ public Collection<PackFile> repack() throws IOException {
long time = System.currentTimeMillis();
Collection<Ref> refsBefore = getAllRefs();
Set<ObjectId> allHeadsAndTags = new HashSet<>();
Set<ObjectId> allHeads = new HashSet<>();
Set<ObjectId> allTags = new HashSet<>();
Set<ObjectId> nonHeads = new HashSet<>();
Set<ObjectId> txnHeads = new HashSet<>();
Set<ObjectId> tagTargets = new HashSet<>();
@@ -739,16 +741,21 @@ public Collection<PackFile> repack() throws IOException {
for (Ref ref : refsBefore) {
checkCancelled();
nonHeads.addAll(listRefLogObjects(ref, 0));
if (ref.isSymbolic() || ref.getObjectId() == null)
if (ref.isSymbolic() || ref.getObjectId() == null) {
continue;
if (isHead(ref) || isTag(ref))
}
if (isHead(ref)) {
allHeads.add(ref.getObjectId());
else if (RefTreeNames.isRefTree(refdb, ref.getName()))
} else if (isTag(ref)) {
allTags.add(ref.getObjectId());
} else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
txnHeads.add(ref.getObjectId());
else
} else {
nonHeads.add(ref.getObjectId());
if (ref.getPeeledObjectId() != null)
}
if (ref.getPeeledObjectId() != null) {
tagTargets.add(ref.getPeeledObjectId());
}
}
List<ObjectIdSet> excluded = new LinkedList<>();
@@ -758,13 +765,19 @@ else if (RefTreeNames.isRefTree(refdb, ref.getName()))
excluded.add(f.getIndex());
}
tagTargets.addAll(allHeads);
// Don't exclude tags that are also branch tips
allTags.removeAll(allHeads);
allHeadsAndTags.addAll(allHeads);
allHeadsAndTags.addAll(allTags);
// Hoist all branch tips and tags earlier in the pack file
tagTargets.addAll(allHeadsAndTags);
nonHeads.addAll(indexObjects);
List<PackFile> ret = new ArrayList<>(2);
PackFile heads = null;
if (!allHeads.isEmpty()) {
heads = writePack(allHeads, Collections.<ObjectId> emptySet(),
if (!allHeadsAndTags.isEmpty()) {
heads = writePack(allHeadsAndTags, PackWriter.NONE, allTags,
tagTargets, excluded);
if (heads != null) {
ret.add(heads);
@@ -772,12 +785,14 @@ else if (RefTreeNames.isRefTree(refdb, ref.getName()))
}
}
if (!nonHeads.isEmpty()) {
PackFile rest = writePack(nonHeads, allHeads, tagTargets, excluded);
PackFile rest = writePack(nonHeads, allHeadsAndTags, PackWriter.NONE,
tagTargets, excluded);
if (rest != null)
ret.add(rest);
}
if (!txnHeads.isEmpty()) {
PackFile txn = writePack(txnHeads, PackWriter.NONE, null, excluded);
PackFile txn = writePack(txnHeads, PackWriter.NONE, PackWriter.NONE,
null, excluded);
if (txn != null)
ret.add(txn);
}
@@ -961,8 +976,9 @@ private Set<ObjectId> listNonHEADIndexObjects()
}
private PackFile writePack(@NonNull Set<? extends ObjectId> want,
@NonNull Set<? extends ObjectId> have, Set<ObjectId> tagTargets,
List<ObjectIdSet> excludeObjects) throws IOException {
@NonNull Set<? extends ObjectId> have, @NonNull Set<ObjectId> tags,
Set<ObjectId> tagTargets, List<ObjectIdSet> excludeObjects)
throws IOException {
checkCancelled();
File tmpPack = null;
Map<PackExt, File> tmpExts = new TreeMap<>(
@@ -988,12 +1004,13 @@ public int compare(PackExt o1, PackExt o2) {
// prepare the PackWriter
pw.setDeltaBaseAsOffset(true);
pw.setReuseDeltaCommits(false);
if (tagTargets != null)
if (tagTargets != null) {
pw.setTagTargets(tagTargets);
}
if (excludeObjects != null)
for (ObjectIdSet idx : excludeObjects)
pw.excludeObjects(idx);
pw.preparePack(pm, want, have);
pw.preparePack(pm, want, have, PackWriter.NONE, tags);
if (pw.getObjectCount() == 0)
return null;
checkCancelled();

View File

@@ -233,7 +233,9 @@ public static Iterable<PackWriter> getInstances() {
private List<CachedPack> cachedPacks = new ArrayList<>(2);
private Set<ObjectId> tagTargets = Collections.emptySet();
private Set<ObjectId> tagTargets = NONE;
private Set<? extends ObjectId> excludeFromBitmapSelection = NONE;
private ObjectIdSet[] excludeInPacks;
@@ -712,8 +714,7 @@ public void preparePack(@NonNull Iterator<RevObject> objectsSource)
public void preparePack(ProgressMonitor countingMonitor,
@NonNull Set<? extends ObjectId> want,
@NonNull Set<? extends ObjectId> have) throws IOException {
preparePack(countingMonitor,
want, have, Collections.<ObjectId> emptySet());
preparePack(countingMonitor, want, have, NONE, NONE);
}
/**
@@ -721,9 +722,9 @@ public void preparePack(ProgressMonitor countingMonitor,
* <p>
* Like {@link #preparePack(ProgressMonitor, Set, Set)} but also allows
* specifying commits that should not be walked past ("shallow" commits).
* The caller is responsible for filtering out commits that should not
* be shallow any more ("unshallow" commits as in {@link #setShallowPack})
* from the shallow set.
* The caller is responsible for filtering out commits that should not be
* shallow any more ("unshallow" commits as in {@link #setShallowPack}) from
* the shallow set.
*
* @param countingMonitor
* progress during object enumeration.
@@ -731,27 +732,67 @@ public void preparePack(ProgressMonitor countingMonitor,
* objects of interest, ancestors of which will be included in
* the pack. Must not be {@code null}.
* @param have
* objects whose ancestors (up to and including
* {@code shallow} commits) do not need to be included in the
* pack because they are already available from elsewhere.
* Must not be {@code null}.
* objects whose ancestors (up to and including {@code shallow}
* commits) do not need to be included in the pack because they
* are already available from elsewhere. Must not be
* {@code null}.
* @param shallow
* commits indicating the boundary of the history marked with
* {@code have}. Shallow commits have parents but those
* parents are considered not to be already available.
* Parents of {@code shallow} commits and earlier generations
* will be included in the pack if requested by {@code want}.
* Must not be {@code null}.
* {@code have}. Shallow commits have parents but those parents
* are considered not to be already available. Parents of
* {@code shallow} commits and earlier generations will be
* included in the pack if requested by {@code want}. Must not be
* {@code null}.
* @throws IOException
* an I/O problem occured while reading objects.
* an I/O problem occurred while reading objects.
*/
public void preparePack(ProgressMonitor countingMonitor,
@NonNull Set<? extends ObjectId> want,
@NonNull Set<? extends ObjectId> have,
@NonNull Set<? extends ObjectId> shallow) throws IOException {
preparePack(countingMonitor, want, have, shallow, NONE);
}
/**
* Prepare the list of objects to be written to the pack stream.
* <p>
* Like {@link #preparePack(ProgressMonitor, Set, Set)} but also allows
* specifying commits that should not be walked past ("shallow" commits).
* The caller is responsible for filtering out commits that should not be
* shallow any more ("unshallow" commits as in {@link #setShallowPack}) from
* the shallow set.
*
* @param countingMonitor
* progress during object enumeration.
* @param want
* objects of interest, ancestors of which will be included in
* the pack. Must not be {@code null}.
* @param have
* objects whose ancestors (up to and including {@code shallow}
* commits) do not need to be included in the pack because they
* are already available from elsewhere. Must not be
* {@code null}.
* @param shallow
* commits indicating the boundary of the history marked with
* {@code have}. Shallow commits have parents but those parents
* are considered not to be already available. Parents of
* {@code shallow} commits and earlier generations will be
* included in the pack if requested by {@code want}. Must not be
* {@code null}.
* @param noBitmaps
* collection of objects to be excluded from bitmap commit
* selection.
* @throws IOException
* an I/O problem occurred while reading objects.
*/
public void preparePack(ProgressMonitor countingMonitor,
@NonNull Set<? extends ObjectId> want,
@NonNull Set<? extends ObjectId> have,
@NonNull Set<? extends ObjectId> shallow,
@NonNull Set<? extends ObjectId> noBitmaps) throws IOException {
try (ObjectWalk ow = getObjectWalk()) {
ow.assumeShallow(shallow);
preparePack(countingMonitor, ow, want, have);
preparePack(countingMonitor, ow, want, have, noBitmaps);
}
}
@@ -784,13 +825,17 @@ private ObjectWalk getObjectWalk() {
* points of graph traversal). Pass {@link #NONE} if all objects
* reachable from {@code want} are desired, such as when serving
* a clone.
* @param noBitmaps
* collection of objects to be excluded from bitmap commit
* selection.
* @throws IOException
* when some I/O problem occur during reading objects.
*/
public void preparePack(ProgressMonitor countingMonitor,
@NonNull ObjectWalk walk,
@NonNull Set<? extends ObjectId> interestingObjects,
@NonNull Set<? extends ObjectId> uninterestingObjects)
@NonNull Set<? extends ObjectId> uninterestingObjects,
@NonNull Set<? extends ObjectId> noBitmaps)
throws IOException {
if (countingMonitor == null)
countingMonitor = NullProgressMonitor.INSTANCE;
@@ -798,7 +843,7 @@ public void preparePack(ProgressMonitor countingMonitor,
throw new IllegalArgumentException(
JGitText.get().shallowPacksRequireDepthWalk);
findObjectsToPack(countingMonitor, walk, interestingObjects,
uninterestingObjects);
uninterestingObjects, noBitmaps);
}
/**
@@ -965,8 +1010,9 @@ private void endPhase(ProgressMonitor monitor) {
/**
* Write the prepared pack to the supplied stream.
* <p>
* Called after {@link #preparePack(ProgressMonitor, ObjectWalk, Set, Set)}
* or {@link #preparePack(ProgressMonitor, Set, Set)}.
* Called after
* {@link #preparePack(ProgressMonitor, ObjectWalk, Set, Set, Set)} or
* {@link #preparePack(ProgressMonitor, Set, Set)}.
* <p>
* Performs delta search if enabled and writes the pack stream.
* <p>
@@ -1652,12 +1698,14 @@ private void writeChecksum(PackOutputStream out) throws IOException {
private void findObjectsToPack(@NonNull ProgressMonitor countingMonitor,
@NonNull ObjectWalk walker, @NonNull Set<? extends ObjectId> want,
@NonNull Set<? extends ObjectId> have) throws IOException {
@NonNull Set<? extends ObjectId> have,
@NonNull Set<? extends ObjectId> noBitmaps) throws IOException {
final long countingStart = System.currentTimeMillis();
beginPhase(PackingPhase.COUNTING, countingMonitor, ProgressMonitor.UNKNOWN);
stats.interestingObjects = Collections.unmodifiableSet(new HashSet<ObjectId>(want));
stats.uninterestingObjects = Collections.unmodifiableSet(new HashSet<ObjectId>(have));
excludeFromBitmapSelection = noBitmaps;
canBuildBitmaps = config.isBuildBitmaps()
&& !shallowPack
@@ -2070,8 +2118,8 @@ public boolean prepareBitmapIndex(ProgressMonitor pm) throws IOException {
PackWriterBitmapPreparer bitmapPreparer = new PackWriterBitmapPreparer(
reader, writeBitmaps, pm, stats.interestingObjects, config);
Collection<PackWriterBitmapPreparer.BitmapCommit> selectedCommits =
bitmapPreparer.selectCommits(numCommits);
Collection<PackWriterBitmapPreparer.BitmapCommit> selectedCommits = bitmapPreparer
.selectCommits(numCommits, excludeFromBitmapSelection);
beginPhase(PackingPhase.BUILDING_BITMAPS, pm, selectedCommits.size());

View File

@@ -141,6 +141,8 @@ public int compare(BitmapBuilderEntry a, BitmapBuilderEntry b) {
*
* @param expectedCommitCount
* count of commits in the pack
* @param excludeFromBitmapSelection
* commits that should be excluded from bitmap selection
* @return commit objects for which bitmap indices should be built
* @throws IncorrectObjectTypeException
* if any of the processed objects is not a commit
@@ -149,7 +151,8 @@ public int compare(BitmapBuilderEntry a, BitmapBuilderEntry b) {
* @throws MissingObjectException
* if an expected object is missing
*/
Collection<BitmapCommit> selectCommits(int expectedCommitCount)
Collection<BitmapCommit> selectCommits(int expectedCommitCount,
Set<? extends ObjectId> excludeFromBitmapSelection)
throws IncorrectObjectTypeException, IOException,
MissingObjectException {
/*
@@ -164,7 +167,7 @@ Collection<BitmapCommit> selectCommits(int expectedCommitCount)
RevWalk rw = new RevWalk(reader);
rw.setRetainBody(false);
CommitSelectionHelper selectionHelper = setupTipCommitBitmaps(rw,
expectedCommitCount);
expectedCommitCount, excludeFromBitmapSelection);
pm.endTask();
int totCommits = selectionHelper.getCommitCount();
@@ -363,6 +366,8 @@ public final boolean requiresCommitBody() {
* @param expectedCommitCount
* expected count of commits. The actual count may be less due to
* unreachable garbage.
* @param excludeFromBitmapSelection
* commits that should be excluded from bitmap selection
* @return a {@link CommitSelectionHelper} containing bitmaps for the tip
* commits
* @throws IncorrectObjectTypeException
@@ -373,8 +378,10 @@ public final boolean requiresCommitBody() {
* if an expected object is missing
*/
private CommitSelectionHelper setupTipCommitBitmaps(RevWalk rw,
int expectedCommitCount) throws IncorrectObjectTypeException,
IOException, MissingObjectException {
int expectedCommitCount,
Set<? extends ObjectId> excludeFromBitmapSelection)
throws IncorrectObjectTypeException, IOException,
MissingObjectException {
BitmapBuilder reuse = commitBitmapIndex.newBitmapBuilder();
List<BitmapCommit> reuseCommits = new ArrayList<>();
for (PackBitmapIndexRemapper.Entry entry : bitmapRemapper) {
@@ -403,7 +410,8 @@ private CommitSelectionHelper setupTipCommitBitmaps(RevWalk rw,
Set<RevCommit> peeledWant = new HashSet<>(want.size());
for (AnyObjectId objectId : want) {
RevObject ro = rw.peel(rw.parseAny(objectId));
if (!(ro instanceof RevCommit) || reuse.contains(ro)) {
if (!(ro instanceof RevCommit) || reuse.contains(ro)
|| excludeFromBitmapSelection.contains(ro)) {
continue;
}

View File

@@ -1523,7 +1523,7 @@ else if (ref.getName().startsWith(Constants.R_HEADS))
walk.reset();
ObjectWalk ow = rw.toObjectWalkWithSameObjects();
pw.preparePack(pm, ow, wantAll, commonBase);
pw.preparePack(pm, ow, wantAll, commonBase, PackWriter.NONE);
rw = ow;
}