Merge changes I159e9154,I06c722b2

* changes:
  DfsObjectDatabase: Expose PackList and move markDirty there
  Invalidate DfsObjDatabase pack list when refs are updated
Dave Borowitz authored 2016-07-19 10:51:51 -04:00; committed by Gerrit Code Review @ Eclipse.org
commit e790ec3fb1
3 changed files with 190 additions and 45 deletions
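
The two changes above give DfsObjDatabase a public PackList snapshot API and teach DfsReader to rescan a snapshot that has been marked dirty before reporting an object as missing. Below is a minimal usage sketch of the new public surface (getPackList, getCurrentPackList, markDirty); the PackListUsageSketch class is hypothetical and uses JGit's existing in-memory DFS backend only to make the example runnable, it is not part of this commit.

import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase;
import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackList;
import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;

public class PackListUsageSketch {
    public static void main(String[] args) throws Exception {
        InMemoryRepository repo = new InMemoryRepository(
                new DfsRepositoryDescription("example"));
        DfsObjDatabase odb = repo.getObjectDatabase();

        // getPackList() scans if necessary and returns the snapshot,
        // exposing the pack array plus the new dirty-tracking metadata.
        PackList scanned = odb.getPackList();
        System.out.println("known packs: " + scanned.packs.length);

        // getCurrentPackList() returns whatever is cached without scanning;
        // markDirty() tells readers holding that snapshot to rescan on a miss.
        odb.getCurrentPackList().markDirty();
    }
}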

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java

@@ -61,7 +61,17 @@
/** Manages objects stored in {@link DfsPackFile} on a storage system. */
public abstract class DfsObjDatabase extends ObjectDatabase {
private static final PackList NO_PACKS = new PackList(new DfsPackFile[0]);
private static final PackList NO_PACKS = new PackList(new DfsPackFile[0]) {
@Override
boolean dirty() {
return true;
}
@Override
public void markDirty() {
// Always dirty.
}
};
/** Sources for a pack file. */
public static enum PackSource {
@@ -173,7 +183,20 @@ public ObjectInserter newInserter() {
* the pack list cannot be initialized.
*/
public DfsPackFile[] getPacks() throws IOException {
return scanPacks(NO_PACKS).packs;
return getPackList().packs;
}
/**
* Scan and list all available pack files in the repository.
*
* @return list of available packs, with some additional metadata. The
* returned array is shared with the implementation and must not be
* modified by the caller.
* @throws IOException
* the pack list cannot be initialized.
*/
public PackList getPackList() throws IOException {
return scanPacks(NO_PACKS);
}
/** @return repository owning this object database. */
@@ -188,7 +211,18 @@ protected DfsRepository getRepository() {
* implementation and must not be modified by the caller.
*/
public DfsPackFile[] getCurrentPacks() {
return packList.get().packs;
return getCurrentPackList().packs;
}
/**
* List currently known pack files in the repository, without scanning.
*
* @return list of available packs, with some additional metadata. The
* returned array is shared with the implementation and must not be
* modified by the caller.
*/
public PackList getCurrentPackList() {
return packList.get();
}
/**
@@ -363,11 +397,11 @@ void addPack(DfsPackFile newPack) throws IOException {
DfsPackFile[] packs = new DfsPackFile[1 + o.packs.length];
packs[0] = newPack;
System.arraycopy(o.packs, 0, packs, 1, o.packs.length);
n = new PackList(packs);
n = new PackListImpl(packs);
} while (!packList.compareAndSet(o, n));
}
private PackList scanPacks(final PackList original) throws IOException {
PackList scanPacks(final PackList original) throws IOException {
PackList o, n;
synchronized (packList) {
do {
@@ -408,10 +442,10 @@ private PackList scanPacksImpl(PackList old) throws IOException {
for (DfsPackFile p : forReuse.values())
p.close();
if (list.isEmpty())
return new PackList(NO_PACKS.packs);
return new PackListImpl(NO_PACKS.packs);
if (!foundNew)
return old;
return new PackList(list.toArray(new DfsPackFile[list.size()]));
return new PackListImpl(list.toArray(new DfsPackFile[list.size()]));
}
private static Map<DfsPackDescription, DfsPackFile> reuseMap(PackList old) {
@@ -456,12 +490,42 @@ public void close() {
// p.close();
}
private static final class PackList {
/** Snapshot of packs scanned in a single pass. */
public static abstract class PackList {
/** All known packs, sorted. */
final DfsPackFile[] packs;
public final DfsPackFile[] packs;
PackList(final DfsPackFile[] packs) {
PackList(DfsPackFile[] packs) {
this.packs = packs;
}
abstract boolean dirty();
/**
* Mark pack list as dirty.
* <p>
* Used when the caller knows that new data might have been written to the
* repository that could invalidate open readers depending on this pack list,
* for example if refs are newly scanned.
*/
public abstract void markDirty();
}
private static final class PackListImpl extends PackList {
private volatile boolean dirty;
PackListImpl(DfsPackFile[] packs) {
super(packs);
}
@Override
boolean dirty() {
return dirty;
}
@Override
public void markDirty() {
dirty = true;
}
}
}
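
A same-package, test-style sketch of the contract these classes establish: markDirty() only flags a snapshot and never rescans by itself, while readers that miss on a dirty snapshot trigger the rescan lazily. The PackListContractSketch class is hypothetical, not part of this commit, and would have to live in org.eclipse.jgit.internal.storage.dfs because dirty() is package-private.

package org.eclipse.jgit.internal.storage.dfs;

import java.io.IOException;

class PackListContractSketch {
    static void demo(DfsObjDatabase odb) throws IOException {
        DfsObjDatabase.PackList list = odb.getPackList();
        // Flag the snapshot; nothing is scanned or replaced here.
        list.markDirty();
        if (!list.dirty())
            throw new AssertionError("a marked list must report dirty()");
        // A reader that misses on this snapshot calls scanPacks(list) to
        // pick up any packs written since the last scan.
    }
}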

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java

@@ -53,6 +53,7 @@
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.zip.DataFormatException;
@@ -62,6 +63,7 @@
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackList;
import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl;
import org.eclipse.jgit.internal.storage.file.PackBitmapIndex;
import org.eclipse.jgit.internal.storage.file.PackIndex;
@@ -91,6 +93,8 @@
* reader is not thread safe.
*/
public final class DfsReader extends ObjectReader implements ObjectReuseAsIs {
private static final int MAX_RESOLVE_MATCHES = 256;
/** Temporary buffer large enough for at least one raw object id. */
final byte[] tempId = new byte[OBJECT_ID_LENGTH];
@@ -163,22 +167,44 @@ public Collection<ObjectId> resolve(AbbreviatedObjectId id)
return Collections.singleton(id.toObjectId());
boolean noGarbage = avoidUnreachable;
HashSet<ObjectId> matches = new HashSet<ObjectId>(4);
for (DfsPackFile pack : db.getPacks()) {
if (noGarbage && pack.isGarbage())
continue;
pack.resolve(this, matches, id, 256);
if (256 <= matches.size())
break;
PackList packList = db.getPackList();
resolveImpl(packList, id, noGarbage, matches);
if (matches.size() < MAX_RESOLVE_MATCHES && packList.dirty()) {
resolveImpl(db.scanPacks(packList), id, noGarbage, matches);
}
return matches;
}
private void resolveImpl(PackList packList, AbbreviatedObjectId id,
boolean noGarbage, HashSet<ObjectId> matches) throws IOException {
for (DfsPackFile pack : packList.packs) {
if (noGarbage && pack.isGarbage()) {
continue;
}
pack.resolve(this, matches, id, MAX_RESOLVE_MATCHES);
if (matches.size() >= MAX_RESOLVE_MATCHES) {
break;
}
}
}
@Override
public boolean has(AnyObjectId objectId) throws IOException {
if (last != null && last.hasObject(this, objectId))
return true;
boolean noGarbage = avoidUnreachable;
for (DfsPackFile pack : db.getPacks()) {
PackList packList = db.getPackList();
if (hasImpl(packList, objectId, noGarbage)) {
return true;
} else if (packList.dirty()) {
return hasImpl(db.scanPacks(packList), objectId, noGarbage);
}
return false;
}
private boolean hasImpl(PackList packList, AnyObjectId objectId,
boolean noGarbage) throws IOException {
for (DfsPackFile pack : packList.packs) {
if (pack == last || (noGarbage && pack.isGarbage()))
continue;
if (pack.hasObject(this, objectId)) {
@@ -193,19 +219,22 @@ public boolean has(AnyObjectId objectId) throws IOException {
public ObjectLoader open(AnyObjectId objectId, int typeHint)
throws MissingObjectException, IncorrectObjectTypeException,
IOException {
ObjectLoader ldr;
if (last != null) {
ObjectLoader ldr = last.get(this, objectId);
ldr = last.get(this, objectId);
if (ldr != null)
return ldr;
}
PackList packList = db.getPackList();
boolean noGarbage = avoidUnreachable;
for (DfsPackFile pack : db.getPacks()) {
if (pack == last || (noGarbage && pack.isGarbage()))
continue;
ObjectLoader ldr = pack.get(this, objectId);
ldr = openImpl(packList, objectId, noGarbage);
if (ldr != null) {
return ldr;
}
if (packList.dirty()) {
ldr = openImpl(db.scanPacks(packList), objectId, noGarbage);
if (ldr != null) {
last = pack;
return ldr;
}
}
@@ -216,6 +245,21 @@ public ObjectLoader open(AnyObjectId objectId, int typeHint)
throw new MissingObjectException(objectId.copy(), typeHint);
}
private ObjectLoader openImpl(PackList packList, AnyObjectId objectId,
boolean noGarbage) throws IOException {
for (DfsPackFile pack : packList.packs) {
if (pack == last || (noGarbage && pack.isGarbage())) {
continue;
}
ObjectLoader ldr = pack.get(this, objectId);
if (ldr != null) {
last = pack;
return ldr;
}
}
return null;
}
@Override
public Set<ObjectId> getShallowCommits() {
return Collections.emptySet();
@@ -253,39 +297,58 @@ private static class FoundObject<T extends ObjectId> {
private <T extends ObjectId> Iterable<FoundObject<T>> findAll(
Iterable<T> objectIds) throws IOException {
ArrayList<FoundObject<T>> r = new ArrayList<FoundObject<T>>();
DfsPackFile[] packList = db.getPacks();
if (packList.length == 0) {
for (T t : objectIds)
r.add(new FoundObject<T>(t));
return r;
Collection<T> pending = new LinkedList<>();
for (T id : objectIds) {
pending.add(id);
}
PackList packList = db.getPackList();
List<FoundObject<T>> r = new ArrayList<>();
findAllImpl(packList, pending, r);
if (!pending.isEmpty() && packList.dirty()) {
findAllImpl(db.scanPacks(packList), pending, r);
}
for (T t : pending) {
r.add(new FoundObject<T>(t));
}
Collections.sort(r, FOUND_OBJECT_SORT);
return r;
}
private <T extends ObjectId> void findAllImpl(PackList packList,
Collection<T> pending, List<FoundObject<T>> r) {
DfsPackFile[] packs = packList.packs;
if (packs.length == 0) {
return;
}
int lastIdx = 0;
DfsPackFile lastPack = packList[lastIdx];
DfsPackFile lastPack = packs[lastIdx];
boolean noGarbage = avoidUnreachable;
OBJECT_SCAN: for (T t : objectIds) {
OBJECT_SCAN: for (Iterator<T> it = pending.iterator(); it.hasNext();) {
T t = it.next();
try {
long p = lastPack.findOffset(this, t);
if (0 < p) {
r.add(new FoundObject<T>(t, lastIdx, lastPack, p));
it.remove();
continue;
}
} catch (IOException e) {
// Fall though and try to examine other packs.
}
for (int i = 0; i < packList.length; i++) {
for (int i = 0; i < packs.length; i++) {
if (i == lastIdx)
continue;
DfsPackFile pack = packList[i];
DfsPackFile pack = packs[i];
if (noGarbage && pack.isGarbage())
continue;
try {
long p = pack.findOffset(this, t);
if (0 < p) {
r.add(new FoundObject<T>(t, i, pack, p));
it.remove();
lastIdx = i;
lastPack = pack;
continue OBJECT_SCAN;
@@ -294,13 +357,9 @@ private <T extends ObjectId> Iterable<FoundObject<T>> findAll(
// Examine other packs.
}
}
r.add(new FoundObject<T>(t));
}
Collections.sort(r, FOUND_OBJECT_SORT);
last = lastPack;
return r;
}
@Override
@@ -418,24 +477,43 @@ public long getObjectSize(AnyObjectId objectId, int typeHint)
IOException {
if (last != null) {
long sz = last.getObjectSize(this, objectId);
if (0 <= sz)
if (0 <= sz) {
return sz;
}
}
for (DfsPackFile pack : db.getPacks()) {
if (pack == last)
PackList packList = db.getPackList();
long sz = getObjectSizeImpl(packList, objectId);
if (0 <= sz) {
return sz;
}
if (packList.dirty()) {
sz = getObjectSizeImpl(db.scanPacks(packList), objectId);
if (0 <= sz) {
return sz;
}
}
if (typeHint == OBJ_ANY) {
throw new MissingObjectException(objectId.copy(),
JGitText.get().unknownObjectType2);
}
throw new MissingObjectException(objectId.copy(), typeHint);
}
private long getObjectSizeImpl(PackList packList, AnyObjectId objectId)
throws IOException {
for (DfsPackFile pack : packList.packs) {
if (pack == last) {
continue;
}
long sz = pack.getObjectSize(this, objectId);
if (0 <= sz) {
last = pack;
return sz;
}
}
if (typeHint == OBJ_ANY)
throw new MissingObjectException(objectId.copy(),
JGitText.get().unknownObjectType2);
throw new MissingObjectException(objectId.copy(), typeHint);
return -1;
}
public DfsObjectToPack newObjectToPack(AnyObjectId objectId, int type) {
@@ -451,6 +529,8 @@ public int compare(DfsObjectToPack a, DfsObjectToPack b) {
public void selectObjectRepresentation(PackWriter packer,
ProgressMonitor monitor, Iterable<ObjectToPack> objects)
throws IOException, MissingObjectException {
// Don't check dirty bit on PackList; assume ObjectToPacks all came from the
// current list.
for (DfsPackFile pack : db.getPacks()) {
List<DfsObjectToPack> tmp = findAllFromPack(pack, objects);
if (tmp.isEmpty())

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java

@@ -310,6 +310,7 @@ protected RefCache scanAllRefs() throws IOException {
}
ids.sort();
sym.sort();
objdb.getCurrentPackList().markDirty();
return new RefCache(ids.toRefList(), sym.toRefList());
}
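
This one-line change ties the two halves together: when this scanAllRefs() implementation rebuilds its RefCache, it marks the object database's current pack list dirty, so a DfsReader opened before the update rescans packs instead of missing objects referenced by the freshly scanned refs. A hedged end-to-end sketch using only public API follows; StaleReaderSketch and its flow are hypothetical, not part of this commit.

import org.eclipse.jgit.internal.storage.dfs.DfsRepository;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Ref;

class StaleReaderSketch {
    static void readAfterRefUpdate(DfsRepository repo) throws Exception {
        try (ObjectReader rd = repo.getObjectDatabase().newReader()) {
            // Suppose another thread flushes a new pack and updates a ref
            // while rd is open and still holds the older pack list snapshot.
            Ref head = repo.exactRef(Constants.HEAD);
            ObjectId tip = head != null ? head.getObjectId() : null;
            if (tip == null)
                return;
            // If resolving HEAD forced a ref rescan, scanAllRefs() marked
            // the pack list dirty, so open() below rescans packs before it
            // would throw MissingObjectException.
            rd.open(tip, Constants.OBJ_COMMIT);
        }
    }
}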