Rename DfsPackKey to DfsStreamKey

This rename supports reusing DfsStreamKey in a future commit
to index streams of other PackExt types inside the DfsBlockCache.

Change-Id: Ib52d374e47724ccb837f4fbab1fc85c486c5b408
Shawn Pearce 2017-07-03 17:22:52 -07:00
parent dfb9884dbc
commit e924de5295
8 changed files with 36 additions and 39 deletions
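The commit message anticipates keying other PackExt streams (pack index, bitmap index, and so on) in DfsBlockCache, not just pack data. Below is a purely illustrative sketch of that direction: the holder class and its streamKey() method are hypothetical names, not JGit API, and the code assumes it lives in the same package as DfsStreamKey so the package-private constructor is visible.

import java.util.HashMap;
import java.util.Map;

import org.eclipse.jgit.internal.storage.pack.PackExt;

// Hypothetical sketch only: one interned DfsStreamKey per (pack, extension)
// stream, so DfsBlockCache could index pack-index or bitmap bytes the same
// way it already indexes pack bytes. Not the design any later commit uses.
class PackStreamKeys {
	private final Map<PackExt, DfsStreamKey> keys = new HashMap<>();

	DfsStreamKey streamKey(PackExt ext) {
		// computeIfAbsent keeps exactly one key instance per extension,
		// preserving the interned-key identity contract the cache relies on.
		return keys.computeIfAbsent(ext, e -> new DfsStreamKey());
	}
}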

DeltaBaseCacheTest.java

@@ -57,13 +57,13 @@
 public class DeltaBaseCacheTest {
 	private static final int SZ = 512;
-	private DfsPackKey key;
+	private DfsStreamKey key;
 	private DeltaBaseCache cache;
 	private TestRng rng;
 	@Before
 	public void setUp() {
-		key = new DfsPackKey();
+		key = new DfsStreamKey();
		cache = new DeltaBaseCache(SZ);
		rng = new TestRng(getClass().getSimpleName());
 	}

DeltaBaseCache.java

@@ -75,7 +75,7 @@ private static int hash(long position) {
		table = new Entry[1 << TABLE_BITS];
 	}
-	Entry get(DfsPackKey key, long position) {
+	Entry get(DfsStreamKey key, long position) {
		Entry e = table[hash(position)];
		for (; e != null; e = e.tableNext) {
			if (e.offset == position && key.equals(e.pack)) {
@@ -86,7 +86,7 @@ Entry get(DfsPackKey key, long position) {
		return null;
 	}
-	void put(DfsPackKey key, long offset, int objectType, byte[] data) {
+	void put(DfsStreamKey key, long offset, int objectType, byte[] data) {
		if (data.length > maxByteCount)
			return; // Too large to cache.
@@ -189,7 +189,7 @@ int getMemoryUsedByTableForTest() {
 	}
 	static class Entry {
-		final DfsPackKey pack;
+		final DfsStreamKey pack;
		final long offset;
		final int type;
		final byte[] data;
@@ -198,7 +198,7 @@ static class Entry {
		Entry lruPrev;
		Entry lruNext;
-		Entry(DfsPackKey key, long offset, int type, byte[] data) {
+		Entry(DfsStreamKey key, long offset, int type, byte[] data) {
			this.pack = key;
			this.offset = offset;
			this.type = type;
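For orientation, here is a hedged usage sketch of the delta base cache touched above, built only from the package-private signatures visible in these hunks and in the test file; it assumes code living in the same org.eclipse.jgit.internal.storage.dfs package, and OBJ_BLOB comes from org.eclipse.jgit.lib.Constants.

import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;

class DeltaBaseCacheSketch {
	static void demo() {
		// One interned key per stream; the cache is keyed by (key, offset).
		DfsStreamKey key = new DfsStreamKey();
		DeltaBaseCache cache = new DeltaBaseCache(512);

		byte[] inflatedBase = { 1, 2, 3 };
		cache.put(key, 40L, OBJ_BLOB, inflatedBase);

		// Hit: same key instance and same offset; e.data holds the cached bytes.
		DeltaBaseCache.Entry e = cache.get(key, 40L);
		assert e != null;

		// Miss: a different DfsStreamKey never matches, even at the same offset,
		// because get() compares keys with key.equals(e.pack) and DfsStreamKey
		// does not override equals().
		assert cache.get(new DfsStreamKey(), 40L) == null;
	}
}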

DfsBlock.java

@@ -54,7 +54,7 @@
 /** A cached slice of a {@link DfsPackFile}. */
 final class DfsBlock {
-	final DfsPackKey pack;
+	final DfsStreamKey stream;
 	final long start;
@@ -62,8 +62,8 @@ final class DfsBlock {
 	private final byte[] block;
-	DfsBlock(DfsPackKey p, long pos, byte[] buf) {
-		pack = p;
+	DfsBlock(DfsStreamKey p, long pos, byte[] buf) {
+		stream = p;
		start = pos;
		end = pos + buf.length;
		block = buf;
@@ -73,8 +73,8 @@ int size() {
		return block.length;
 	}
-	boolean contains(DfsPackKey want, long pos) {
-		return pack == want && start <= pos && pos < end;
+	boolean contains(DfsStreamKey want, long pos) {
+		return stream == want && start <= pos && pos < end;
 	}
 	int copy(long pos, byte[] dstbuf, int dstoff, int cnt) {

DfsBlockCache.java

@@ -195,7 +195,7 @@ private DfsBlockCache(final DfsBlockCacheConfig cfg) {
		blockSizeShift = Integer.numberOfTrailingZeros(blockSize);
		clockLock = new ReentrantLock(true /* fair */);
-		clockHand = new Ref<>(new DfsPackKey(), -1, 0, null);
+		clockHand = new Ref<>(new DfsStreamKey(), -1, 0, null);
		clockHand.next = clockHand;
		packCache = new ConcurrentHashMap<>(
@@ -260,7 +260,7 @@ public Collection<DfsPackFile> getPackFiles() {
		return packFiles;
 	}
-	DfsPackFile getOrCreate(DfsPackDescription dsc, DfsPackKey key) {
+	DfsPackFile getOrCreate(DfsPackDescription dsc, DfsStreamKey key) {
		// TODO This table grows without bound. It needs to clean up
		// entries that aren't in cache anymore, and aren't being used
		// by a live DfsObjDatabase reference.
@@ -277,7 +277,7 @@ DfsPackFile getOrCreate(DfsPackDescription dsc, DfsPackKey key) {
				return v; // another thread
			} else {
				return new DfsPackFile(
-						this, dsc, key != null ? key : new DfsPackKey());
+						this, dsc, key != null ? key : new DfsStreamKey());
			}
		});
 	}
@@ -320,7 +320,7 @@ DfsBlock getOrLoad(DfsPackFile pack, long position, DfsReader ctx,
		final long requestedPosition = position;
		position = pack.alignToBlock(position);
-		DfsPackKey key = pack.key;
+		DfsStreamKey key = pack.key;
		int slot = slot(key, position);
		HashEntry e1 = table.get(slot);
		DfsBlock v = scan(e1, key, position);
@@ -442,10 +442,10 @@ private void addToClock(Ref ref, int credit) {
 	}
 	void put(DfsBlock v) {
-		put(v.pack, v.start, v.size(), v);
+		put(v.stream, v.start, v.size(), v);
 	}
-	<T> Ref<T> put(DfsPackKey key, long pos, int size, T v) {
+	<T> Ref<T> put(DfsStreamKey key, long pos, int size, T v) {
		int slot = slot(key, pos);
		HashEntry e1 = table.get(slot);
		Ref<T> ref = scanRef(e1, key, pos);
@@ -481,12 +481,12 @@ <T> Ref<T> put(DfsPackKey key, long pos, int size, T v) {
		return ref;
 	}
-	boolean contains(DfsPackKey key, long position) {
+	boolean contains(DfsStreamKey key, long position) {
		return scan(table.get(slot(key, position)), key, position) != null;
 	}
 	@SuppressWarnings("unchecked")
-	<T> T get(DfsPackKey key, long position) {
+	<T> T get(DfsStreamKey key, long position) {
		T val = (T) scan(table.get(slot(key, position)), key, position);
		if (val == null)
			statMiss.incrementAndGet();
@@ -495,13 +495,13 @@ <T> T get(DfsPackKey key, long position) {
		return val;
 	}
-	private <T> T scan(HashEntry n, DfsPackKey pack, long position) {
+	private <T> T scan(HashEntry n, DfsStreamKey pack, long position) {
		Ref<T> r = scanRef(n, pack, position);
		return r != null ? r.get() : null;
 	}
 	@SuppressWarnings("unchecked")
-	private <T> Ref<T> scanRef(HashEntry n, DfsPackKey pack, long position) {
+	private <T> Ref<T> scanRef(HashEntry n, DfsStreamKey pack, long position) {
		for (; n != null; n = n.next) {
			Ref<T> r = n.ref;
			if (r.pack == pack && r.position == position)
@@ -514,11 +514,11 @@ void remove(DfsPackFile pack) {
		packCache.remove(pack.getPackDescription());
 	}
-	private int slot(DfsPackKey pack, long position) {
+	private int slot(DfsStreamKey pack, long position) {
		return (hash(pack.hash, position) >>> 1) % tableSize;
 	}
-	private ReentrantLock lockFor(DfsPackKey pack, long position) {
+	private ReentrantLock lockFor(DfsStreamKey pack, long position) {
		return loadLocks[(hash(pack.hash, position) >>> 1) % loadLocks.length];
 	}
@@ -545,14 +545,14 @@ private static final class HashEntry {
 	}
 	static final class Ref<T> {
-		final DfsPackKey pack;
+		final DfsStreamKey pack;
		final long position;
		final int size;
		volatile T value;
		Ref next;
		volatile boolean hot;
-		Ref(DfsPackKey pack, long position, int size, T v) {
+		Ref(DfsStreamKey pack, long position, int size, T v) {
			this.pack = pack;
			this.position = position;
			this.size = size;

DfsInserter.java

@@ -104,7 +104,7 @@ public class DfsInserter extends ObjectInserter {
 	ObjectIdOwnerMap<PackedObjectInfo> objectMap;
 	DfsBlockCache cache;
-	DfsPackKey packKey;
+	DfsStreamKey packKey;
 	DfsPackDescription packDsc;
 	PackStream packOut;
 	private boolean rollback;
@@ -282,7 +282,7 @@ private void beginPack() throws IOException {
		rollback = true;
		packDsc = db.newPack(DfsObjDatabase.PackSource.INSERT);
		packOut = new PackStream(db.writeFile(packDsc, PACK));
-		packKey = new DfsPackKey();
+		packKey = new DfsStreamKey();
		// Write the header as though it were a single object pack.
		byte[] buf = packOut.hdrBuf;
@@ -633,11 +633,11 @@ private class StreamLoader extends ObjectLoader {
		private final int type;
		private final long size;
-		private final DfsPackKey srcPack;
+		private final DfsStreamKey srcPack;
		private final long pos;
		StreamLoader(ObjectId id, int type, long sz,
-				DfsPackKey key, long pos) {
+				DfsStreamKey key, long pos) {
			this.id = id;
			this.type = type;
			this.size = sz;

DfsPackFile.java

@@ -114,7 +114,7 @@ public final class DfsPackFile {
 	private final DfsPackDescription packDesc;
 	/** Unique identity of this pack while in-memory. */
-	final DfsPackKey key;
+	final DfsStreamKey key;
 	/**
	 * Total number of bytes in this pack file.
@@ -171,7 +171,7 @@ public final class DfsPackFile {
	 * @param key
	 *            interned key used to identify blocks in the block cache.
	 */
-	DfsPackFile(DfsBlockCache cache, DfsPackDescription desc, DfsPackKey key) {
+	DfsPackFile(DfsBlockCache cache, DfsPackDescription desc, DfsStreamKey key) {
		this.cache = cache;
		this.packDesc = desc;
		this.key = key;

DfsPackParser.java

@@ -94,7 +94,7 @@ public class DfsPackParser extends PackParser {
 	private DfsPackDescription packDsc;
 	/** Key used during delta resolution reading delta chains. */
-	private DfsPackKey packKey;
+	private DfsStreamKey packKey;
 	/** If the index was small enough, the entire index after writing. */
 	private PackIndex packIndex;
@@ -206,7 +206,7 @@ protected void onPackHeader(long objectCount) throws IOException {
		}
		packDsc = objdb.newPack(DfsObjDatabase.PackSource.RECEIVE);
-		packKey = new DfsPackKey();
+		packKey = new DfsStreamKey();
		out = objdb.writeFile(packDsc, PACK);
		int size = out.blockSize();

DfsPackKey.java → DfsStreamKey.java

@@ -45,16 +45,13 @@
 import java.util.concurrent.atomic.AtomicLong;
-final class DfsPackKey {
+final class DfsStreamKey {
 	final int hash;
-	final AtomicLong cachedSize;
+	final AtomicLong cachedSize = new AtomicLong();
-	DfsPackKey() {
+	DfsStreamKey() {
		// Multiply by 31 here so we can more directly combine with another
		// value without doing the multiply there.
-		//
		hash = System.identityHashCode(this) * 31;
-		cachedSize = new AtomicLong();
 	}
 }
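A minimal, self-contained sketch of why the key pre-multiplies its identity hash by 31: DfsBlockCache.slot() and lockFor() (see the hunks above) fold the block position into pack.hash, and scaling the identity hash once at key creation keeps that combining step a cheap add on every lookup. The hash(int, long) helper below is an assumption about the combining step, not JGit's exact code.

// Sketch, not JGit's implementation: how a pre-scaled stream hash combines
// with a block position when picking a cache slot.
final class StreamKeyHashDemo {
	// Mirrors DfsStreamKey(): identity hash scaled by 31 once, at key creation.
	static int newKeyHash(Object key) {
		return System.identityHashCode(key) * 31;
	}

	// Assumed shape of the combining step used by slot()/lockFor(); the real
	// DfsBlockCache.hash(int, long) may differ.
	static int hash(int keyHash, long position) {
		return keyHash + (int) (position >>> 12); // 12 = example blockSizeShift
	}

	public static void main(String[] args) {
		Object key = new Object();
		int keyHash = newKeyHash(key);
		int tableSize = 1024;
		// Same pattern as slot(): (hash(pack.hash, position) >>> 1) % tableSize.
		int slot = (hash(keyHash, 8192L) >>> 1) % tableSize;
		System.out.println("slot=" + slot);
	}
}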