Exclude refs/tags from bitmap commit selection

Commit db77610 ensured that all refs/tags commits are added to the
primary GC pack. It did that by adding those commits to the
"interesting" object set of the PackWriter that builds that pack.

Unfortunately, all commit objects in the "interesting" set are
selected as commits for which bitmap indices will be built. In a
repository like chromium with lots of tags, this changed the number of
bitmaps created from <700 to >10000. That puts huge memory pressure on
the GC task.

This change restores the original behavior of ignoring tags when
selecting commits for bitmaps.

Within the PackWriter's object sets, commits for refs/heads and
commits for unannotated refs/tags cannot be differentiated. We
instead identify refs/tags commits by passing their ObjectIds as a
new "noBitmaps" parameter to the PackWriter.preparePack() methods.
PackWriterBitmapPreparer.setupTipCommitBitmaps() can then use that
"noBitmaps" parameter to exclude those commits.

Change-Id: Icd287c6b04fc1e48de773033fe432a9b0e904ac5
Signed-off-by: Terry Parker <tparker@google.com>
Author: Terry Parker, 2017-05-18 01:30:14 -07:00
commit c46c720e99 (parent 69d5e89e99)
7 changed files with 159 additions and 59 deletions
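For illustration, a minimal caller-side sketch of the new API (the class, method, and
variable names below are hypothetical, not code from this change): a GC-style caller
packs branch and tag tips together, but hands the tag-only tips to the new noBitmaps
parameter so they are written to the pack without becoming bitmap selection candidates.

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.eclipse.jgit.internal.storage.pack.PackWriter;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefDatabase;
import org.eclipse.jgit.lib.Repository;

class HeadsAndTagsPackSketch {
    static void prepareHeadsAndTagsPack(Repository repo) throws IOException {
        Set<ObjectId> heads = new HashSet<>();
        Set<ObjectId> tags = new HashSet<>();
        for (Ref ref : repo.getRefDatabase().getRefs(RefDatabase.ALL).values()) {
            if (ref.isSymbolic() || ref.getObjectId() == null) {
                continue;
            }
            if (ref.getName().startsWith(Constants.R_HEADS)) {
                heads.add(ref.getObjectId());
            } else if (ref.getName().startsWith(Constants.R_TAGS)) {
                tags.add(ref.getObjectId());
            }
        }
        // A tag tip that is also a branch tip must stay eligible for a bitmap.
        tags.removeAll(heads);

        // Pack heads and tags together, as the primary GC pack does.
        Set<ObjectId> want = new HashSet<>(heads);
        want.addAll(tags);

        try (PackWriter pw = new PackWriter(repo)) {
            // want = heads + tags, have = NONE, shallow = NONE,
            // noBitmaps = tag-only tips excluded from bitmap commit selection.
            pw.preparePack(NullProgressMonitor.INSTANCE, want, PackWriter.NONE,
                    PackWriter.NONE, tags);
            // writePack()/writeIndex()/writeBitmapIndex() would follow here.
        }
    }
}

Only the final tags argument changes which commits the bitmap preparer may select;
the objects reachable from the tags are still packed exactly as before.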

View File

@@ -711,7 +711,7 @@ private static PackIndex writePack(FileRepository repo, RevWalk walk,
         }
         ObjectWalk ow = walk.toObjectWalkWithSameObjects();
-        pw.preparePack(NullProgressMonitor.INSTANCE, ow, want, have);
+        pw.preparePack(NullProgressMonitor.INSTANCE, ow, want, have, NONE);
         String id = pw.computeName().getName();
         File packdir = new File(repo.getObjectsDirectory(), "pack");
         File packFile = new File(packdir, "pack-" + id + ".pack");

View File

@@ -69,6 +69,15 @@ public class GcCommitSelectionTest extends GcTestCase {
 
     @Test
     public void testBitmapSpansNoMerges() throws Exception {
+        testBitmapSpansNoMerges(false);
+    }
+
+    @Test
+    public void testBitmapSpansNoMergesWithTags() throws Exception {
+        testBitmapSpansNoMerges(true);
+    }
+
+    private void testBitmapSpansNoMerges(boolean withTags) throws Exception {
         /*
          * Commit counts -> expected bitmap counts for history without merges.
          * The top 100 contiguous commits should always have bitmaps, and the
@@ -89,7 +98,10 @@ public void testBitmapSpansNoMerges() throws Exception {
             assertTrue(nextCommitCount > currentCommits); // programming error
             for (int i = currentCommits; i < nextCommitCount; i++) {
                 String str = "A" + i;
-                bb.commit().message(str).add(str, str).create();
+                RevCommit rc = bb.commit().message(str).add(str, str).create();
+                if (withTags) {
+                    tr.lightweightTag(str, rc);
+                }
             }
 
             currentCommits = nextCommitCount;
@@ -233,7 +245,7 @@ public void testSelectionOrderingWithChains() throws Exception {
                 m8, m9);
 
         PackWriterBitmapPreparer preparer = newPeparer(m9, commits);
         List<BitmapCommit> selection = new ArrayList<>(
-                preparer.selectCommits(commits.size()));
+                preparer.selectCommits(commits.size(), PackWriter.NONE));
 
         // Verify that the output is ordered by the separate "chains"
         String[] expected = { m0.name(), m1.name(), m2.name(), m4.name(),

View File

@@ -53,6 +53,7 @@
 import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
 import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
 import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
+import static org.eclipse.jgit.internal.storage.pack.PackWriter.NONE;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -111,7 +112,8 @@ public class DfsGarbageCollector {
     private List<DfsPackFile> packsBefore;
     private List<DfsPackFile> expiredGarbagePacks;
 
-    private Set<ObjectId> allHeads;
+    private Set<ObjectId> allHeadsAndTags;
+    private Set<ObjectId> allTags;
     private Set<ObjectId> nonHeads;
     private Set<ObjectId> txnHeads;
     private Set<ObjectId> tagTargets;
@@ -241,23 +243,36 @@ public boolean pack(ProgressMonitor pm) throws IOException {
         Collection<Ref> refsBefore = getAllRefs();
         readPacksBefore();
 
-        allHeads = new HashSet<>();
+        Set<ObjectId> allHeads = new HashSet<>();
+        allHeadsAndTags = new HashSet<>();
+        allTags = new HashSet<>();
         nonHeads = new HashSet<>();
         txnHeads = new HashSet<>();
         tagTargets = new HashSet<>();
         for (Ref ref : refsBefore) {
-            if (ref.isSymbolic() || ref.getObjectId() == null)
+            if (ref.isSymbolic() || ref.getObjectId() == null) {
                 continue;
-            if (isHead(ref) || isTag(ref))
+            }
+            if (isHead(ref)) {
                 allHeads.add(ref.getObjectId());
-            else if (RefTreeNames.isRefTree(refdb, ref.getName()))
+            } else if (isTag(ref)) {
+                allTags.add(ref.getObjectId());
+            } else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
                 txnHeads.add(ref.getObjectId());
-            else
+            } else {
                 nonHeads.add(ref.getObjectId());
-            if (ref.getPeeledObjectId() != null)
+            }
+            if (ref.getPeeledObjectId() != null) {
                 tagTargets.add(ref.getPeeledObjectId());
+            }
         }
-        tagTargets.addAll(allHeads);
+        // Don't exclude tags that are also branch tips.
+        allTags.removeAll(allHeads);
+        allHeadsAndTags.addAll(allHeads);
+        allHeadsAndTags.addAll(allTags);
+
+        // Hoist all branch tips and tags earlier in the pack file
+        tagTargets.addAll(allHeadsAndTags);
 
         boolean rollback = true;
         try {
@@ -413,12 +428,12 @@ private List<DfsPackDescription> toPrune() {
     }
 
     private void packHeads(ProgressMonitor pm) throws IOException {
-        if (allHeads.isEmpty())
+        if (allHeadsAndTags.isEmpty())
             return;
 
         try (PackWriter pw = newPackWriter()) {
             pw.setTagTargets(tagTargets);
-            pw.preparePack(pm, allHeads, PackWriter.NONE);
+            pw.preparePack(pm, allHeadsAndTags, NONE, NONE, allTags);
             if (0 < pw.getObjectCount())
                 writePack(GC, pw, pm,
                         estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC));
@@ -432,7 +447,7 @@ private void packRest(ProgressMonitor pm) throws IOException {
         try (PackWriter pw = newPackWriter()) {
             for (ObjectIdSet packedObjs : newPackObj)
                 pw.excludeObjects(packedObjs);
-            pw.preparePack(pm, nonHeads, allHeads);
+            pw.preparePack(pm, nonHeads, allHeadsAndTags);
             if (0 < pw.getObjectCount())
                 writePack(GC_REST, pw, pm,
                         estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC_REST));
@@ -446,7 +461,7 @@ private void packRefTreeGraph(ProgressMonitor pm) throws IOException {
         try (PackWriter pw = newPackWriter()) {
             for (ObjectIdSet packedObjs : newPackObj)
                 pw.excludeObjects(packedObjs);
-            pw.preparePack(pm, txnHeads, PackWriter.NONE);
+            pw.preparePack(pm, txnHeads, NONE);
             if (0 < pw.getObjectCount())
                 writePack(GC_TXN, pw, pm, 0 /* unknown pack size */);
         }

View File

@@ -729,7 +729,9 @@ public Collection<PackFile> repack() throws IOException {
         long time = System.currentTimeMillis();
         Collection<Ref> refsBefore = getAllRefs();
 
+        Set<ObjectId> allHeadsAndTags = new HashSet<>();
         Set<ObjectId> allHeads = new HashSet<>();
+        Set<ObjectId> allTags = new HashSet<>();
         Set<ObjectId> nonHeads = new HashSet<>();
         Set<ObjectId> txnHeads = new HashSet<>();
         Set<ObjectId> tagTargets = new HashSet<>();
@@ -739,17 +741,22 @@ public Collection<PackFile> repack() throws IOException {
         for (Ref ref : refsBefore) {
             checkCancelled();
             nonHeads.addAll(listRefLogObjects(ref, 0));
-            if (ref.isSymbolic() || ref.getObjectId() == null)
+            if (ref.isSymbolic() || ref.getObjectId() == null) {
                 continue;
-            if (isHead(ref) || isTag(ref))
+            }
+            if (isHead(ref)) {
                 allHeads.add(ref.getObjectId());
-            else if (RefTreeNames.isRefTree(refdb, ref.getName()))
+            } else if (isTag(ref)) {
+                allTags.add(ref.getObjectId());
+            } else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
                 txnHeads.add(ref.getObjectId());
-            else
+            } else {
                 nonHeads.add(ref.getObjectId());
-            if (ref.getPeeledObjectId() != null)
+            }
+            if (ref.getPeeledObjectId() != null) {
                 tagTargets.add(ref.getPeeledObjectId());
+            }
         }
 
         List<ObjectIdSet> excluded = new LinkedList<>();
         for (final PackFile f : repo.getObjectDatabase().getPacks()) {
@@ -758,13 +765,19 @@ else if (RefTreeNames.isRefTree(refdb, ref.getName()))
                 excluded.add(f.getIndex());
         }
 
-        tagTargets.addAll(allHeads);
+        // Don't exclude tags that are also branch tips
+        allTags.removeAll(allHeads);
+        allHeadsAndTags.addAll(allHeads);
+        allHeadsAndTags.addAll(allTags);
+
+        // Hoist all branch tips and tags earlier in the pack file
+        tagTargets.addAll(allHeadsAndTags);
         nonHeads.addAll(indexObjects);
 
         List<PackFile> ret = new ArrayList<>(2);
         PackFile heads = null;
-        if (!allHeads.isEmpty()) {
-            heads = writePack(allHeads, Collections.<ObjectId> emptySet(),
+        if (!allHeadsAndTags.isEmpty()) {
+            heads = writePack(allHeadsAndTags, PackWriter.NONE, allTags,
                     tagTargets, excluded);
             if (heads != null) {
@@ -772,12 +785,14 @@ else if (RefTreeNames.isRefTree(refdb, ref.getName()))
             }
         }
         if (!nonHeads.isEmpty()) {
-            PackFile rest = writePack(nonHeads, allHeads, tagTargets, excluded);
+            PackFile rest = writePack(nonHeads, allHeadsAndTags, PackWriter.NONE,
+                    tagTargets, excluded);
             if (rest != null)
                 ret.add(rest);
         }
         if (!txnHeads.isEmpty()) {
-            PackFile txn = writePack(txnHeads, PackWriter.NONE, null, excluded);
+            PackFile txn = writePack(txnHeads, PackWriter.NONE, PackWriter.NONE,
+                    null, excluded);
             if (txn != null)
                 ret.add(txn);
         }
@@ -961,8 +976,9 @@ private Set<ObjectId> listNonHEADIndexObjects()
     }
 
     private PackFile writePack(@NonNull Set<? extends ObjectId> want,
-            @NonNull Set<? extends ObjectId> have, Set<ObjectId> tagTargets,
-            List<ObjectIdSet> excludeObjects) throws IOException {
+            @NonNull Set<? extends ObjectId> have, @NonNull Set<ObjectId> tags,
+            Set<ObjectId> tagTargets, List<ObjectIdSet> excludeObjects)
+            throws IOException {
         checkCancelled();
         File tmpPack = null;
         Map<PackExt, File> tmpExts = new TreeMap<>(
@@ -988,12 +1004,13 @@ public int compare(PackExt o1, PackExt o2) {
             // prepare the PackWriter
             pw.setDeltaBaseAsOffset(true);
             pw.setReuseDeltaCommits(false);
-            if (tagTargets != null)
+            if (tagTargets != null) {
                 pw.setTagTargets(tagTargets);
+            }
             if (excludeObjects != null)
                 for (ObjectIdSet idx : excludeObjects)
                     pw.excludeObjects(idx);
-            pw.preparePack(pm, want, have);
+            pw.preparePack(pm, want, have, PackWriter.NONE, tags);
             if (pw.getObjectCount() == 0)
                 return null;
             checkCancelled();

View File

@@ -233,7 +233,9 @@ public static Iterable<PackWriter> getInstances() {
     private List<CachedPack> cachedPacks = new ArrayList<>(2);
 
-    private Set<ObjectId> tagTargets = Collections.emptySet();
+    private Set<ObjectId> tagTargets = NONE;
+
+    private Set<? extends ObjectId> excludeFromBitmapSelection = NONE;
 
     private ObjectIdSet[] excludeInPacks;
@@ -712,8 +714,7 @@ public void preparePack(@NonNull Iterator<RevObject> objectsSource)
     public void preparePack(ProgressMonitor countingMonitor,
             @NonNull Set<? extends ObjectId> want,
             @NonNull Set<? extends ObjectId> have) throws IOException {
-        preparePack(countingMonitor,
-                want, have, Collections.<ObjectId> emptySet());
+        preparePack(countingMonitor, want, have, NONE, NONE);
     }
 
     /**
@@ -721,9 +722,9 @@ public void preparePack(ProgressMonitor countingMonitor,
      * <p>
     * Like {@link #preparePack(ProgressMonitor, Set, Set)} but also allows
     * specifying commits that should not be walked past ("shallow" commits).
-     * The caller is responsible for filtering out commits that should not
-     * be shallow any more ("unshallow" commits as in {@link #setShallowPack})
-     * from the shallow set.
+     * The caller is responsible for filtering out commits that should not be
+     * shallow any more ("unshallow" commits as in {@link #setShallowPack}) from
+     * the shallow set.
      *
      * @param countingMonitor
      *            progress during object enumeration.
@@ -731,27 +732,67 @@ public void preparePack(ProgressMonitor countingMonitor,
      *            objects of interest, ancestors of which will be included in
      *            the pack. Must not be {@code null}.
      * @param have
-     *            objects whose ancestors (up to and including
-     *            {@code shallow} commits) do not need to be included in the
-     *            pack because they are already available from elsewhere.
-     *            Must not be {@code null}.
+     *            objects whose ancestors (up to and including {@code shallow}
+     *            commits) do not need to be included in the pack because they
+     *            are already available from elsewhere. Must not be
+     *            {@code null}.
      * @param shallow
      *            commits indicating the boundary of the history marked with
-     *            {@code have}. Shallow commits have parents but those
-     *            parents are considered not to be already available.
-     *            Parents of {@code shallow} commits and earlier generations
-     *            will be included in the pack if requested by {@code want}.
-     *            Must not be {@code null}.
+     *            {@code have}. Shallow commits have parents but those parents
+     *            are considered not to be already available. Parents of
+     *            {@code shallow} commits and earlier generations will be
+     *            included in the pack if requested by {@code want}. Must not be
+     *            {@code null}.
      * @throws IOException
-     *            an I/O problem occured while reading objects.
+     *            an I/O problem occurred while reading objects.
      */
     public void preparePack(ProgressMonitor countingMonitor,
             @NonNull Set<? extends ObjectId> want,
             @NonNull Set<? extends ObjectId> have,
             @NonNull Set<? extends ObjectId> shallow) throws IOException {
+        preparePack(countingMonitor, want, have, shallow, NONE);
+    }
+
+    /**
+     * Prepare the list of objects to be written to the pack stream.
+     * <p>
+     * Like {@link #preparePack(ProgressMonitor, Set, Set)} but also allows
+     * specifying commits that should not be walked past ("shallow" commits).
+     * The caller is responsible for filtering out commits that should not be
+     * shallow any more ("unshallow" commits as in {@link #setShallowPack}) from
+     * the shallow set.
+     *
+     * @param countingMonitor
+     *            progress during object enumeration.
+     * @param want
+     *            objects of interest, ancestors of which will be included in
+     *            the pack. Must not be {@code null}.
+     * @param have
+     *            objects whose ancestors (up to and including {@code shallow}
+     *            commits) do not need to be included in the pack because they
+     *            are already available from elsewhere. Must not be
+     *            {@code null}.
+     * @param shallow
+     *            commits indicating the boundary of the history marked with
+     *            {@code have}. Shallow commits have parents but those parents
+     *            are considered not to be already available. Parents of
+     *            {@code shallow} commits and earlier generations will be
+     *            included in the pack if requested by {@code want}. Must not be
+     *            {@code null}.
+     * @param noBitmaps
+     *            collection of objects to be excluded from bitmap commit
+     *            selection.
+     * @throws IOException
+     *            an I/O problem occurred while reading objects.
+     */
+    public void preparePack(ProgressMonitor countingMonitor,
+            @NonNull Set<? extends ObjectId> want,
+            @NonNull Set<? extends ObjectId> have,
+            @NonNull Set<? extends ObjectId> shallow,
+            @NonNull Set<? extends ObjectId> noBitmaps) throws IOException {
         try (ObjectWalk ow = getObjectWalk()) {
             ow.assumeShallow(shallow);
-            preparePack(countingMonitor, ow, want, have);
+            preparePack(countingMonitor, ow, want, have, noBitmaps);
         }
     }
@@ -784,13 +825,17 @@ private ObjectWalk getObjectWalk() {
      *            points of graph traversal). Pass {@link #NONE} if all objects
      *            reachable from {@code want} are desired, such as when serving
      *            a clone.
+     * @param noBitmaps
+     *            collection of objects to be excluded from bitmap commit
+     *            selection.
      * @throws IOException
      *            when some I/O problem occur during reading objects.
      */
     public void preparePack(ProgressMonitor countingMonitor,
             @NonNull ObjectWalk walk,
             @NonNull Set<? extends ObjectId> interestingObjects,
-            @NonNull Set<? extends ObjectId> uninterestingObjects)
+            @NonNull Set<? extends ObjectId> uninterestingObjects,
+            @NonNull Set<? extends ObjectId> noBitmaps)
             throws IOException {
         if (countingMonitor == null)
             countingMonitor = NullProgressMonitor.INSTANCE;
@@ -798,7 +843,7 @@ public void preparePack(ProgressMonitor countingMonitor,
             throw new IllegalArgumentException(
                     JGitText.get().shallowPacksRequireDepthWalk);
         findObjectsToPack(countingMonitor, walk, interestingObjects,
-                uninterestingObjects);
+                uninterestingObjects, noBitmaps);
     }
 
     /**
@@ -965,8 +1010,9 @@ private void endPhase(ProgressMonitor monitor) {
     /**
     * Write the prepared pack to the supplied stream.
     * <p>
-     * Called after {@link #preparePack(ProgressMonitor, ObjectWalk, Set, Set)}
-     * or {@link #preparePack(ProgressMonitor, Set, Set)}.
+     * Called after
+     * {@link #preparePack(ProgressMonitor, ObjectWalk, Set, Set, Set)} or
+     * {@link #preparePack(ProgressMonitor, Set, Set)}.
     * <p>
     * Performs delta search if enabled and writes the pack stream.
     * <p>
@@ -1652,12 +1698,14 @@ private void writeChecksum(PackOutputStream out) throws IOException {
     private void findObjectsToPack(@NonNull ProgressMonitor countingMonitor,
             @NonNull ObjectWalk walker, @NonNull Set<? extends ObjectId> want,
-            @NonNull Set<? extends ObjectId> have) throws IOException {
+            @NonNull Set<? extends ObjectId> have,
+            @NonNull Set<? extends ObjectId> noBitmaps) throws IOException {
         final long countingStart = System.currentTimeMillis();
         beginPhase(PackingPhase.COUNTING, countingMonitor, ProgressMonitor.UNKNOWN);
 
         stats.interestingObjects = Collections.unmodifiableSet(new HashSet<ObjectId>(want));
         stats.uninterestingObjects = Collections.unmodifiableSet(new HashSet<ObjectId>(have));
+        excludeFromBitmapSelection = noBitmaps;
 
         canBuildBitmaps = config.isBuildBitmaps()
                 && !shallowPack
@@ -2070,8 +2118,8 @@ public boolean prepareBitmapIndex(ProgressMonitor pm) throws IOException {
         PackWriterBitmapPreparer bitmapPreparer = new PackWriterBitmapPreparer(
                 reader, writeBitmaps, pm, stats.interestingObjects, config);
 
-        Collection<PackWriterBitmapPreparer.BitmapCommit> selectedCommits =
-                bitmapPreparer.selectCommits(numCommits);
+        Collection<PackWriterBitmapPreparer.BitmapCommit> selectedCommits = bitmapPreparer
+                .selectCommits(numCommits, excludeFromBitmapSelection);
 
         beginPhase(PackingPhase.BUILDING_BITMAPS, pm, selectedCommits.size());

View File

@@ -141,6 +141,8 @@ public int compare(BitmapBuilderEntry a, BitmapBuilderEntry b) {
     *
     * @param expectedCommitCount
     *            count of commits in the pack
+     * @param excludeFromBitmapSelection
+     *            commits that should be excluded from bitmap selection
     * @return commit objects for which bitmap indices should be built
     * @throws IncorrectObjectTypeException
     *             if any of the processed objects is not a commit
@@ -149,7 +151,8 @@ public int compare(BitmapBuilderEntry a, BitmapBuilderEntry b) {
     * @throws MissingObjectException
     *             if an expected object is missing
     */
-    Collection<BitmapCommit> selectCommits(int expectedCommitCount)
+    Collection<BitmapCommit> selectCommits(int expectedCommitCount,
+            Set<? extends ObjectId> excludeFromBitmapSelection)
             throws IncorrectObjectTypeException, IOException,
             MissingObjectException {
         /*
@@ -164,7 +167,7 @@ Collection<BitmapCommit> selectCommits(int expectedCommitCount)
         RevWalk rw = new RevWalk(reader);
         rw.setRetainBody(false);
         CommitSelectionHelper selectionHelper = setupTipCommitBitmaps(rw,
-                expectedCommitCount);
+                expectedCommitCount, excludeFromBitmapSelection);
         pm.endTask();
 
         int totCommits = selectionHelper.getCommitCount();
@@ -363,6 +366,8 @@ public final boolean requiresCommitBody() {
     * @param expectedCommitCount
     *            expected count of commits. The actual count may be less due to
     *            unreachable garbage.
+     * @param excludeFromBitmapSelection
+     *            commits that should be excluded from bitmap selection
     * @return a {@link CommitSelectionHelper} containing bitmaps for the tip
     *         commits
     * @throws IncorrectObjectTypeException
@@ -373,8 +378,10 @@ public final boolean requiresCommitBody() {
     *             if an expected object is missing
     */
     private CommitSelectionHelper setupTipCommitBitmaps(RevWalk rw,
-            int expectedCommitCount) throws IncorrectObjectTypeException,
-            IOException, MissingObjectException {
+            int expectedCommitCount,
+            Set<? extends ObjectId> excludeFromBitmapSelection)
+            throws IncorrectObjectTypeException, IOException,
+            MissingObjectException {
         BitmapBuilder reuse = commitBitmapIndex.newBitmapBuilder();
         List<BitmapCommit> reuseCommits = new ArrayList<>();
         for (PackBitmapIndexRemapper.Entry entry : bitmapRemapper) {
@@ -403,7 +410,8 @@ private CommitSelectionHelper setupTipCommitBitmaps(RevWalk rw,
         Set<RevCommit> peeledWant = new HashSet<>(want.size());
         for (AnyObjectId objectId : want) {
             RevObject ro = rw.peel(rw.parseAny(objectId));
-            if (!(ro instanceof RevCommit) || reuse.contains(ro)) {
+            if (!(ro instanceof RevCommit) || reuse.contains(ro)
+                    || excludeFromBitmapSelection.contains(ro)) {
                 continue;
             }

View File

@@ -1523,7 +1523,7 @@ else if (ref.getName().startsWith(Constants.R_HEADS))
             walk.reset();
 
             ObjectWalk ow = rw.toObjectWalkWithSameObjects();
-            pw.preparePack(pm, ow, wantAll, commonBase);
+            pw.preparePack(pm, ow, wantAll, commonBase, PackWriter.NONE);
             rw = ow;
         }