DfsReader: Expose when indices are loaded

We want to measure the data used to serve a request. As a first step,
we want to know how many indices are accessed during the request and
their sizes.

Expose an interface in DfsReader to announce when an index is loaded
into the reader, i.e. when its reference is set.

The interface gives implementors more flexibility (what and how to collect)
than the existing DfsReaderIoStats object.

Change-Id: I56f7658fde1758efaf869fa779d11b533a81a0a7
This commit is contained in:
Ivan Frade 2023-07-28 11:41:26 +02:00
parent ab6540c66b
commit b4b8f05eea
5 changed files with 321 additions and 25 deletions

View File

@ -18,8 +18,12 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.Deflater;
import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource;
import org.eclipse.jgit.internal.storage.dfs.DfsReader.PackLoadListener;
import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
import org.eclipse.jgit.internal.storage.pack.PackWriter;
@ -130,6 +134,93 @@ public void testLoadObjectSizeIndex_noIndex() throws IOException {
assertFalse(pack.hasObjectSizeIndex(reader));
}
/**
 * Test listener that counts index loads per pack extension and the total
 * number of block loads.
 */
private static class TestPackLoadListener implements PackLoadListener {
	final Map<PackExt, Integer> indexLoadCount = new HashMap<>();

	int blockLoadCount;

	@Override
	public void onIndexLoad(String packName, PackSource src, PackExt ext,
			long size, Object loadedIdx) {
		// One counter per index extension (IDX, RIDX, ...)
		Integer soFar = indexLoadCount.get(ext);
		indexLoadCount.put(ext, soFar == null ? 1 : soFar + 1);
	}

	@Override
	public void onBlockLoad(String packName, PackSource src, PackExt ext,
			long position, DfsBlockData dfsBlockData) {
		blockLoadCount++;
	}
}
@Test
public void testIndexLoadCallback_indexNotInCache() throws IOException {
	bypassCache = false;
	clearCache = true;
	setObjectSizeIndexMinBytes(-1);
	setupPack(512, 800);

	TestPackLoadListener listener = new TestPackLoadListener();
	DfsReader reader = db.getObjectDatabase().newReader();
	reader.addPackLoadListener(listener);

	DfsPackFile pack = db.getObjectDatabase().getPacks()[0];
	pack.getPackIndex(reader);

	// Cache was cleared, so the primary index is loaded exactly once
	assertEquals(1, listener.indexLoadCount.get(PackExt.INDEX).intValue());
}
@Test
public void testIndexLoadCallback_indexInCache() throws IOException {
	bypassCache = false;
	clearCache = false;
	setObjectSizeIndexMinBytes(-1);
	setupPack(512, 800);

	TestPackLoadListener listener = new TestPackLoadListener();
	DfsReader reader = db.getObjectDatabase().newReader();
	reader.addPackLoadListener(listener);

	DfsPackFile pack = db.getObjectDatabase().getPacks()[0];
	pack.getPackIndex(reader);
	pack.getPackIndex(reader);
	pack.getPackIndex(reader);

	// The reference is set once in the reader; repeated calls reuse it
	assertEquals(1, listener.indexLoadCount.get(PackExt.INDEX).intValue());
}
@Test
public void testIndexLoadCallback_multipleReads() throws IOException {
	bypassCache = false;
	clearCache = true;
	setObjectSizeIndexMinBytes(-1);
	setupPack(512, 800);

	TestPackLoadListener listener = new TestPackLoadListener();
	DfsReader reader = db.getObjectDatabase().newReader();
	reader.addPackLoadListener(listener);

	DfsPackFile pack = db.getObjectDatabase().getPacks()[0];
	pack.getPackIndex(reader);
	pack.getPackIndex(reader);
	pack.getPackIndex(reader);

	// Even loading from scratch, the callback fires only on the first read
	assertEquals(1, listener.indexLoadCount.get(PackExt.INDEX).intValue());
}
@Test
public void testBlockLoadCallback_loadInCache() throws IOException {
	bypassCache = false;
	clearCache = true;
	setObjectSizeIndexMinBytes(-1);
	setupPack(512, 800);

	TestPackLoadListener listener = new TestPackLoadListener();
	DfsReader reader = db.getObjectDatabase().newReader();
	reader.addPackLoadListener(listener);

	DfsPackFile pack = db.getObjectDatabase().getPacks()[0];
	ObjectId anObject = pack.getPackIndex(reader).getObjectId(0);
	pack.get(reader, anObject).getBytes();

	// An 800-byte object with a 512-byte block size spans two blocks
	assertEquals(2, listener.blockLoadCount);
}
private ObjectId setupPack(int bs, int ps) throws IOException {
DfsBlockCacheConfig cfg = new DfsBlockCacheConfig().setBlockSize(bs)
.setBlockLimit(bs * 100).setStreamRatio(bypassCache ? 0F : 1F);

View File

@ -11,15 +11,20 @@
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_MIN_BYTES_OBJ_SIZE_INDEX;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_PACK_SECTION;
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource;
import org.eclipse.jgit.internal.storage.dfs.DfsReader.PackLoadListener;
import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.junit.JGitTestUtil;
import org.eclipse.jgit.junit.TestRng;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.junit.Before;
@ -40,31 +45,31 @@ public void isNotLargerThan_objAboveThreshold()
ObjectId obj = insertBlobWithSize(200);
try (DfsReader ctx = db.getObjectDatabase().newReader()) {
assertFalse("limit < threshold < obj",
ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 50));
ctx.isNotLargerThan(obj, OBJ_BLOB, 50));
assertEquals(1, ctx.stats.isNotLargerThanCallCount);
assertEquals(1, ctx.stats.objectSizeIndexHit);
assertEquals(0, ctx.stats.objectSizeIndexMiss);
assertFalse("limit = threshold < obj",
ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 100));
ctx.isNotLargerThan(obj, OBJ_BLOB, 100));
assertEquals(2, ctx.stats.isNotLargerThanCallCount);
assertEquals(2, ctx.stats.objectSizeIndexHit);
assertEquals(0, ctx.stats.objectSizeIndexMiss);
assertFalse("threshold < limit < obj",
ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 150));
ctx.isNotLargerThan(obj, OBJ_BLOB, 150));
assertEquals(3, ctx.stats.isNotLargerThanCallCount);
assertEquals(3, ctx.stats.objectSizeIndexHit);
assertEquals(0, ctx.stats.objectSizeIndexMiss);
assertTrue("threshold < limit = obj",
ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 200));
ctx.isNotLargerThan(obj, OBJ_BLOB, 200));
assertEquals(4, ctx.stats.isNotLargerThanCallCount);
assertEquals(4, ctx.stats.objectSizeIndexHit);
assertEquals(0, ctx.stats.objectSizeIndexMiss);
assertTrue("threshold < obj < limit",
ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 250));
ctx.isNotLargerThan(obj, OBJ_BLOB, 250));
assertEquals(5, ctx.stats.isNotLargerThanCallCount);
assertEquals(5, ctx.stats.objectSizeIndexHit);
assertEquals(0, ctx.stats.objectSizeIndexMiss);
@ -80,31 +85,31 @@ public void isNotLargerThan_objBelowThreshold()
ObjectId obj = insertBlobWithSize(50);
try (DfsReader ctx = db.getObjectDatabase().newReader()) {
assertFalse("limit < obj < threshold",
ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 10));
ctx.isNotLargerThan(obj, OBJ_BLOB, 10));
assertEquals(1, ctx.stats.isNotLargerThanCallCount);
assertEquals(0, ctx.stats.objectSizeIndexHit);
assertEquals(1, ctx.stats.objectSizeIndexMiss);
assertTrue("limit = obj < threshold",
ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 50));
ctx.isNotLargerThan(obj, OBJ_BLOB, 50));
assertEquals(2, ctx.stats.isNotLargerThanCallCount);
assertEquals(0, ctx.stats.objectSizeIndexHit);
assertEquals(2, ctx.stats.objectSizeIndexMiss);
assertTrue("obj < limit < threshold",
ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 80));
ctx.isNotLargerThan(obj, OBJ_BLOB, 80));
assertEquals(3, ctx.stats.isNotLargerThanCallCount);
assertEquals(0, ctx.stats.objectSizeIndexHit);
assertEquals(3, ctx.stats.objectSizeIndexMiss);
assertTrue("obj < limit = threshold",
ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 100));
ctx.isNotLargerThan(obj, OBJ_BLOB, 100));
assertEquals(4, ctx.stats.isNotLargerThanCallCount);
assertEquals(0, ctx.stats.objectSizeIndexHit);
assertEquals(4, ctx.stats.objectSizeIndexMiss);
assertTrue("obj < threshold < limit",
ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 120));
ctx.isNotLargerThan(obj, OBJ_BLOB, 120));
assertEquals(5, ctx.stats.isNotLargerThanCallCount);
assertEquals(0, ctx.stats.objectSizeIndexHit);
assertEquals(5, ctx.stats.objectSizeIndexMiss);
@ -116,11 +121,11 @@ public void isNotLargerThan_emptyIdx() throws IOException {
setObjectSizeIndexMinBytes(100);
ObjectId obj = insertBlobWithSize(10);
try (DfsReader ctx = db.getObjectDatabase().newReader()) {
assertFalse(ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 0));
assertTrue(ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 10));
assertTrue(ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 40));
assertTrue(ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 50));
assertTrue(ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 100));
assertFalse(ctx.isNotLargerThan(obj, OBJ_BLOB, 0));
assertTrue(ctx.isNotLargerThan(obj, OBJ_BLOB, 10));
assertTrue(ctx.isNotLargerThan(obj, OBJ_BLOB, 40));
assertTrue(ctx.isNotLargerThan(obj, OBJ_BLOB, 50));
assertTrue(ctx.isNotLargerThan(obj, OBJ_BLOB, 100));
assertEquals(5, ctx.stats.isNotLargerThanCallCount);
assertEquals(5, ctx.stats.objectSizeIndexMiss);
@ -133,11 +138,11 @@ public void isNotLargerThan_noObjectSizeIndex() throws IOException {
setObjectSizeIndexMinBytes(-1);
ObjectId obj = insertBlobWithSize(10);
try (DfsReader ctx = db.getObjectDatabase().newReader()) {
assertFalse(ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 0));
assertTrue(ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 10));
assertTrue(ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 40));
assertTrue(ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 50));
assertTrue(ctx.isNotLargerThan(obj, Constants.OBJ_BLOB, 100));
assertFalse(ctx.isNotLargerThan(obj, OBJ_BLOB, 0));
assertTrue(ctx.isNotLargerThan(obj, OBJ_BLOB, 10));
assertTrue(ctx.isNotLargerThan(obj, OBJ_BLOB, 40));
assertTrue(ctx.isNotLargerThan(obj, OBJ_BLOB, 50));
assertTrue(ctx.isNotLargerThan(obj, OBJ_BLOB, 100));
assertEquals(5, ctx.stats.isNotLargerThanCallCount);
assertEquals(0, ctx.stats.objectSizeIndexMiss);
@ -145,12 +150,103 @@ public void isNotLargerThan_noObjectSizeIndex() throws IOException {
}
}
@Test
public void packLoadListener_noInvocations() throws IOException {
	insertBlobWithSize(100);
	try (DfsReader ctx = db.getObjectDatabase().newReader()) {
		CounterPackLoadListener listener = new CounterPackLoadListener();
		ctx.addPackLoadListener(listener);
		// Reader did nothing yet: no index of ANY extension should have
		// been loaded. Checking the whole map (instead of only INDEX)
		// matches the test name and avoids the assertEquals(null, ...)
		// anti-idiom.
		assertTrue(listener.callsPerExt.isEmpty());
	}
}
@Test
public void packLoadListener_has_openIdx() throws IOException {
	ObjectId obj = insertBlobWithSize(100);
	try (DfsReader ctx = db.getObjectDatabase().newReader()) {
		CounterPackLoadListener listener = new CounterPackLoadListener();
		ctx.addPackLoadListener(listener);

		// has() must open the primary index to find the object
		assertTrue(ctx.has(obj));
		assertEquals(Integer.valueOf(1),
				listener.callsPerExt.get(PackExt.INDEX));
	}
}
@Test
public void packLoadListener_notLargerThan_openMultipleIndices() throws IOException {
	setObjectSizeIndexMinBytes(100);
	ObjectId obj = insertBlobWithSize(200);
	try (DfsReader ctx = db.getObjectDatabase().newReader()) {
		CounterPackLoadListener listener = new CounterPackLoadListener();
		ctx.addPackLoadListener(listener);

		// Size query needs the primary index (to locate the object)
		// and the object size index (to read its size)
		assertTrue(ctx.isNotLargerThan(obj, OBJ_BLOB, 1000));
		assertEquals(Integer.valueOf(1),
				listener.callsPerExt.get(PackExt.INDEX));
		assertEquals(Integer.valueOf(1),
				listener.callsPerExt.get(PackExt.OBJECT_SIZE_INDEX));
	}
}
@Test
public void packLoadListener_has_openMultipleIndices() throws IOException {
	setObjectSizeIndexMinBytes(100);
	insertBlobWithSize(200);
	insertBlobWithSize(230);
	insertBlobWithSize(100);
	try (DfsReader ctx = db.getObjectDatabase().newReader()) {
		CounterPackLoadListener listener = new CounterPackLoadListener();
		ctx.addPackLoadListener(listener);
		ObjectId notInRepo = ObjectId
				.fromString("aa48de2aa61d9dffa8a05439dc115fe82f10f129");

		assertFalse(ctx.has(notInRepo));
		// The miss forces opening the primary index of all 3 packs
		assertEquals(Integer.valueOf(3),
				listener.callsPerExt.get(PackExt.INDEX));
	}
}
@Test
public void packLoadListener_has_repeatedCalls_openMultipleIndices() throws IOException {
	// Two objects NOT in the repo
	ObjectId missing1 = ObjectId
			.fromString("aa48de2aa61d9dffa8a05439dc115fe82f10f129");
	ObjectId missing2 = ObjectId
			.fromString("aa48de2aa61d9dffa8a05439dc115fe82f10f130");

	setObjectSizeIndexMinBytes(100);
	insertBlobWithSize(200);
	insertBlobWithSize(230);
	insertBlobWithSize(100);

	CounterPackLoadListener listener = new CounterPackLoadListener();
	try (DfsReader ctx = db.getObjectDatabase().newReader()) {
		ctx.addPackLoadListener(listener);
		assertFalse(ctx.has(missing1));
		ctx.has(missing1);
		ctx.has(missing2);
		// The 3 indices were loaded only once each, despite 3 lookups
		assertEquals(Integer.valueOf(3),
				listener.callsPerExt.get(PackExt.INDEX));
	}
}
/**
 * Test listener counting index loads per pack extension; block loads are
 * ignored.
 */
private static class CounterPackLoadListener implements PackLoadListener {
	final Map<PackExt, Integer> callsPerExt = new HashMap<>();

	@Override
	public void onIndexLoad(String packName, PackSource src, PackExt ext,
			long size, Object loadedIdx) {
		int soFar = callsPerExt.getOrDefault(ext, Integer.valueOf(0))
				.intValue();
		callsPerExt.put(ext, Integer.valueOf(soFar + 1));
	}

	@Override
	public void onBlockLoad(String packName, PackSource src, PackExt ext,
			long size, DfsBlockData dfsBlockData) {
		// Block loads are irrelevant to these tests
	}
}
private ObjectId insertBlobWithSize(int size)
throws IOException {
TestRng testRng = new TestRng(JGitTestUtil.getName());
ObjectId oid;
try (ObjectInserter ins = db.newObjectInserter()) {
oid = ins.insert(Constants.OBJ_BLOB,
oid = ins.insert(OBJ_BLOB,
testRng.nextBytes(size));
ins.flush();
}

View File

@ -100,7 +100,9 @@ else if (size < cache.getBlockSize())
DfsBlock getOrLoadBlock(long pos, DfsReader ctx) throws IOException {
try (LazyChannel c = new LazyChannel(ctx, desc, ext)) {
return cache.getOrLoad(this, pos, ctx, c);
DfsBlock block = cache.getOrLoad(this, pos, ctx, c);
ctx.emitBlockLoad(this, pos, block);
return block;
}
}

View File

@ -181,6 +181,7 @@ private PackIndex idx(DfsReader ctx) throws IOException {
PackIndex idx = idxref.get();
if (index == null && idx != null) {
index = idx;
ctx.emitIndexLoad(desc, INDEX, System.identityHashCode(idx));
}
return index;
} catch (IOException e) {
@ -226,6 +227,7 @@ public PackBitmapIndex getBitmapIndex(DfsReader ctx) throws IOException {
PackBitmapIndex bmidx = idxref.get();
if (bitmapIndex == null && bmidx != null) {
bitmapIndex = bmidx;
ctx.emitIndexLoad(desc, BITMAP_INDEX, System.identityHashCode(bmidx));
}
return bitmapIndex;
}
@ -263,6 +265,7 @@ public CommitGraph getCommitGraph(DfsReader ctx) throws IOException {
CommitGraph cg = cgref.get();
if (commitGraph == null && cg != null) {
commitGraph = cg;
ctx.emitIndexLoad(desc, COMMIT_GRAPH, System.identityHashCode(cg));
}
return commitGraph;
}
@ -296,6 +299,7 @@ public PackReverseIndex getReverseIdx(DfsReader ctx) throws IOException {
PackReverseIndex revidx = revref.get();
if (reverseIndex == null && revidx != null) {
reverseIndex = revidx;
ctx.emitIndexLoad(desc, REVERSE_INDEX, System.identityHashCode(revidx));
}
return reverseIndex;
}
@ -323,8 +327,9 @@ private PackObjectSizeIndex getObjectSizeIndex(DfsReader ctx)
ctx.stats.objectSizeIndexCacheHit++;
}
PackObjectSizeIndex sizeIdx = sizeIdxRef.get();
if (sizeIdx != null) {
if (objectSizeIndex == null && sizeIdx != null) {
objectSizeIndex = sizeIdx;
ctx.emitIndexLoad(desc, OBJECT_SIZE_INDEX, System.identityHashCode(sizeIdx));
}
} finally {
objectSizeIndexLoadAttempted = true;
@ -426,6 +431,7 @@ void copyPackAsIs(PackOutputStream out, DfsReader ctx) throws IOException {
if (sz > 0) {
rc.setReadAheadBytes(sz);
}
//TODO(ifrade): report ctx.emitBlockLoaded for this copy
if (cache.shouldCopyThroughCache(length)) {
copyPackThroughCache(out, ctx, rc);
} else {
@ -1171,6 +1177,7 @@ private DfsBlockCache.Ref<PackIndex> loadPackIndex(
try (ReadableChannel rc = ctx.db.openFile(desc, INDEX)) {
PackIndex idx = PackIndex.read(alignTo8kBlocks(rc));
ctx.stats.readIdxBytes += rc.position();
ctx.emitIndexLoad(desc, INDEX, System.identityHashCode(idx));
index = idx;
return new DfsBlockCache.Ref<>(
idxKey,
@ -1197,6 +1204,7 @@ private DfsBlockCache.Ref<PackReverseIndex> loadReverseIdx(
long start = System.nanoTime();
PackReverseIndex revidx = PackReverseIndexFactory.computeFromIndex(idx);
reverseIndex = revidx;
ctx.emitIndexLoad(desc, REVERSE_INDEX, System.identityHashCode(revidx));
ctx.stats.readReverseIdxMicros += elapsedMicros(start);
return new DfsBlockCache.Ref<>(
revKey,
@ -1216,6 +1224,7 @@ private DfsBlockCache.Ref<PackObjectSizeIndex> loadObjectSizeIndex(
objectSizeIndex = PackObjectSizeIndexLoader
.load(Channels.newInputStream(rc));
size = rc.position();
ctx.emitIndexLoad(desc, OBJECT_SIZE_INDEX, System.identityHashCode(objectSizeIndex));
} catch (IOException e) {
parsingError = e;
}

View File

@ -34,6 +34,8 @@
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.commitgraph.CommitGraph;
import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackList;
import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource;
import org.eclipse.jgit.internal.storage.dfs.DfsReader.PackLoadListener.DfsBlockData;
import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl;
import org.eclipse.jgit.internal.storage.file.PackBitmapIndex;
import org.eclipse.jgit.internal.storage.file.PackIndex;
@ -41,6 +43,7 @@
import org.eclipse.jgit.internal.storage.pack.CachedPack;
import org.eclipse.jgit.internal.storage.pack.ObjectReuseAsIs;
import org.eclipse.jgit.internal.storage.pack.ObjectToPack;
import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
import org.eclipse.jgit.internal.storage.pack.PackWriter;
import org.eclipse.jgit.lib.AbbreviatedObjectId;
@ -79,6 +82,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
private DeltaBaseCache baseCache;
private DfsPackFile last;
private boolean avoidUnreachable;
private List<PackLoadListener> packLoadListeners = new ArrayList<>();
/**
* Initialize a new DfsReader
@ -834,6 +838,100 @@ public DfsReaderIoStats getIoStats() {
return new DfsReaderIoStats(stats);
}
/**
 * Announces when data (indices or blocks) is loaded by this reader.
 *
 * Implementors decide what to collect and how (e.g. counts, sizes), which
 * makes this more flexible than the fixed DfsReaderIoStats counters.
 */
interface PackLoadListener {
	/**
	 * Immutable copy of a DFS block's metadata: its identity hash and size.
	 * Exposed instead of the block itself so listeners cannot retain or
	 * mutate cached data.
	 */
	class DfsBlockData {
		private final int identityHash;
		private final int size;
		static DfsBlockData of(DfsBlock src) {
			return new DfsBlockData(src);
		}
		private DfsBlockData(DfsBlock src) {
			this.identityHash = System.identityHashCode(src);
			this.size = src.size();
		}
		int getIdentityHash() {
			return identityHash;
		}
		int getSize() {
			return size;
		}
	}
	/**
	 * This is called when an index reference (e.g. primary index, reverse
	 * index, ...) is set in the reader, regardless of whether it was loaded
	 * from scratch or copied from cache.
	 *
	 * During the lifetime of the reader, the reference for an index should
	 * be set only once.
	 *
	 * @param packName
	 *            Name of the pack
	 * @param src
	 *            Source of the pack (e.g. GC, COMPACT, ...)
	 * @param ext
	 *            Extension in the pack (e.g. IDX, RIDX, ...)
	 * @param size
	 *            Size of the data loaded (usually as bytes in disk)
	 * @param loadedIdx
	 *            reference to the loaded index; intended for identity
	 *            bookkeeping (e.g. System.identityHashCode), not for use
	 */
	void onIndexLoad(String packName, PackSource src, PackExt ext, long size,
			Object loadedIdx);
	/**
	 * This is called when a dfs block is loaded into the reader.
	 *
	 * The reader keeps only one block at a time in memory, so during a
	 * request the same block could be loaded multiple times.
	 *
	 * @param packName
	 *            Name of the pack this block belongs to
	 * @param src
	 *            Source of the pack (e.g. GC, COMPACT, ...)
	 * @param ext
	 *            Extension in the pack (e.g. PACK or REFTABLE)
	 * @param position
	 *            Block offset being loaded
	 * @param dfsBlockData
	 *            Metadata of the block
	 */
	void onBlockLoad(String packName, PackSource src, PackExt ext,
			long position, DfsBlockData dfsBlockData);
}
/**
 * Notify all registered listeners that an index reference was just set in
 * this reader.
 *
 * @param packDescription
 *            description of the pack whose index was loaded
 * @param ext
 *            extension of the loaded index (e.g. IDX, RIDX)
 * @param loadedIdx
 *            reference to the loaded index, passed through to listeners
 */
void emitIndexLoad(DfsPackDescription packDescription, PackExt ext,
		Object loadedIdx) {
	for (PackLoadListener listener : packLoadListeners) {
		listener.onIndexLoad(packDescription.getFileName(ext),
				packDescription.getPackSource(), ext,
				packDescription.getFileSize(ext), loadedIdx);
	}
}
/**
 * Notify all registered listeners that a DFS block was loaded into this
 * reader.
 *
 * @param file
 *            pack/reftable file the block belongs to
 * @param position
 *            offset of the block inside the file
 * @param dfsBlock
 *            the loaded block; listeners receive only an immutable
 *            metadata copy (DfsBlockData), never the block itself
 */
void emitBlockLoad(BlockBasedFile file, long position, DfsBlock dfsBlock) {
	for (PackLoadListener listener : packLoadListeners) {
		listener.onBlockLoad(file.getFileName(), file.desc.getPackSource(),
				file.ext, position, DfsBlockData.of(dfsBlock));
	}
}
/**
 * Add a listener to be notified of index and block loads performed by this
 * reader (see {@link PackLoadListener}).
 *
 * Listeners are invoked synchronously from emitIndexLoad/emitBlockLoad on
 * the reader's thread; implementations should be fast.
 *
 * @param listener
 *            receives index- and block-load events for the lifetime of
 *            this reader
 */
protected void addPackLoadListener(PackLoadListener listener) {
	packLoadListeners.add(listener);
}
/**
* {@inheritDoc}
* <p>