dfs: Switch InMemoryRepository to DfsReftableDatabase

This ensures DfsReftableDatabase is tested by the same test suites that
use or test InMemoryRepository. It also simplifies the logic of
InMemoryRepository and brings its compatibility story closer to that of
any other DFS repository that uses reftables for its reference storage.

Change-Id: I881469fd77ed11a9239b477633510b8c482a19ca
Signed-off-by: Minh Thai <mthai@google.com>
Signed-off-by: Terry Parker <tparker@google.com>
Authored by Shawn Pearce on 2017-08-12 14:31:16 -07:00; committed by Terry Parker
parent 4b75d5223a
commit 7cd5d77ae3
7 changed files with 48 additions and 220 deletions
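For orientation, the sketch below is not part of this commit; the test class, method, and ref names are illustrative. It shows how a test typically drives InMemoryRepository. After this change the ref update is persisted as a reftable inside an in-memory pack, served by the reworked MemRefDatabase, instead of being stored in a ConcurrentHashMap.

    import static org.junit.Assert.assertEquals;

    import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
    import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;
    import org.eclipse.jgit.junit.TestRepository;
    import org.eclipse.jgit.lib.Ref;
    import org.eclipse.jgit.revwalk.RevCommit;
    import org.junit.Test;

    public class InMemoryReftableSketchTest {
        @Test
        public void refUpdateIsServedByReftable() throws Exception {
            InMemoryRepository repo = new InMemoryRepository(
                    new DfsRepositoryDescription("sketch"));
            TestRepository<InMemoryRepository> git = new TestRepository<>(repo);

            RevCommit c = git.commit().message("first").create();
            // After this commit, the update below is written as a reftable
            // into an in-memory pack rather than into a map of Ref objects.
            git.update("refs/heads/master", c);

            Ref head = repo.exactRef("refs/heads/master");
            assertEquals(c, head.getObjectId());
        }
    }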

File 1 of 7: test bundle manifest (Import-Package)

@@ -31,6 +31,7 @@ Import-Package: javax.servlet;version="[2.5.0,3.2.0)",
  org.eclipse.jgit.internal;version="[4.10.0,4.11.0)",
  org.eclipse.jgit.internal.storage.dfs;version="[4.10.0,4.11.0)",
  org.eclipse.jgit.internal.storage.file;version="[4.10.0,4.11.0)",
+ org.eclipse.jgit.internal.storage.reftable;version="[4.9.0,4.11.0)",
  org.eclipse.jgit.junit;version="[4.10.0,4.11.0)",
  org.eclipse.jgit.junit.http;version="[4.10.0,4.11.0)",
  org.eclipse.jgit.lib;version="[4.10.0,4.11.0)",

File 2 of 7: refs-unreadable InMemoryRepository test helper

@@ -46,6 +46,7 @@
 import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
 import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;
+import org.eclipse.jgit.internal.storage.reftable.Reftable;
 import org.eclipse.jgit.lib.RefDatabase;

 /**
@@ -80,14 +81,12 @@ void startFailing() {
     }

     private class RefsUnreadableRefDatabase extends MemRefDatabase {
         @Override
-        protected RefCache scanAllRefs() throws IOException {
+        protected Reftable reader() throws IOException {
             if (failing) {
                 throw new IOException("disk failed, no refs found");
-            } else {
-                return super.scanAllRefs();
             }
+            return super.reader();
         }
     }
 }

File 3 of 7: DFS garbage collector test

@@ -14,7 +14,6 @@
 import static org.junit.Assert.fail;

 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.concurrent.TimeUnit;
@@ -722,7 +721,7 @@ public void leavesNonGcReftablesIfNotConfigured() throws Exception {
         DfsPackDescription t1 = odb.newPack(INSERT);
         try (DfsOutputStream out = odb.writeFile(t1, REFTABLE)) {
-            out.write("ignored".getBytes(StandardCharsets.UTF_8));
+            new ReftableWriter().begin(out).finish();
             t1.addFileExt(REFTABLE);
         }
         odb.commitPack(Collections.singleton(t1), null);
@@ -739,9 +738,9 @@ public void leavesNonGcReftablesIfNotConfigured() throws Exception {
         assertTrue("commit0 in pack", isObjectInPack(commit0, pack));
         assertTrue("commit1 in pack", isObjectInPack(commit1, pack));

-        // Only INSERT REFTABLE above is present.
+        // A GC and the older INSERT REFTABLE above is present.
         DfsReftable[] tables = odb.getReftables();
-        assertEquals(1, tables.length);
+        assertEquals(2, tables.length);
         assertEquals(t1, tables[0].getPackDescription());
     }

@@ -754,7 +753,7 @@ public void prunesNonGcReftables() throws Exception {
         DfsPackDescription t1 = odb.newPack(INSERT);
         try (DfsOutputStream out = odb.writeFile(t1, REFTABLE)) {
-            out.write("ignored".getBytes(StandardCharsets.UTF_8));
+            new ReftableWriter().begin(out).finish();
             t1.addFileExt(REFTABLE);
         }
         odb.commitPack(Collections.singleton(t1), null);
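The stub REFTABLE payload changes from arbitrary bytes to a real, empty table written by ReftableWriter, presumably because reftable contents are now actually parsed once refs are read from reftables. If a test ever needed a non-empty stand-in, a hedged sketch could look like the following; the ref name and object id are illustrative, and "out", "odb", "t1", and REFTABLE come from the surrounding test shown above. It assumes imports of ReftableWriter, ObjectId, ObjectIdRef, and Ref.

    try (DfsOutputStream out = odb.writeFile(t1, REFTABLE)) {
        // Illustrative ref; reftables require refs to be written in name order.
        Ref master = new ObjectIdRef.PeeledNonTag(Ref.Storage.PACKED,
                "refs/heads/master",
                ObjectId.fromString("0123456789012345678901234567890123456789"));
        ReftableWriter writer = new ReftableWriter()
                .setMinUpdateIndex(1)
                .setMaxUpdateIndex(1)
                .begin(out);
        writer.writeRef(master);
        writer.finish();
        t1.addFileExt(REFTABLE);
    }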

File 4 of 7: DFS object database, addPack()

@@ -446,8 +446,9 @@ void addPack(DfsPackFile newPack) throws IOException {
             // add, as the pack was already committed via commitPack().
             // If this is the case return without changing the list.
             for (DfsPackFile p : o.packs) {
-                if (p == newPack)
+                if (p.key.equals(newPack.key)) {
                     return;
+                }
             }
         }

File 5 of 7: DfsReftableDatabase

@@ -53,6 +53,7 @@
 import org.eclipse.jgit.internal.storage.reftable.MergedReftable;
 import org.eclipse.jgit.internal.storage.reftable.RefCursor;
 import org.eclipse.jgit.internal.storage.reftable.Reftable;
+import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;
 import org.eclipse.jgit.lib.BatchRefUpdate;
 import org.eclipse.jgit.lib.NullProgressMonitor;
 import org.eclipse.jgit.lib.ObjectId;
@@ -102,6 +103,11 @@ public BatchRefUpdate newBatchUpdate() {
         return new ReftableBatchRefUpdate(this, odb);
     }

+    /** @return configuration to write new reftables with. */
+    public ReftableConfig getReftableConfig() {
+        return new ReftableConfig(getRepository().getConfig());
+    }
+
     /** @return the lock protecting this instance's state. */
     protected ReentrantLock getLock() {
         return lock;
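getReftableConfig() becomes the single point where the reftable write configuration is derived, and subclasses can override it; the in-memory MemRefDatabase later in this commit does exactly that. A hedged sketch of such a subclass (the class name is illustrative, not from this change):

    import org.eclipse.jgit.internal.storage.dfs.DfsReftableDatabase;
    import org.eclipse.jgit.internal.storage.dfs.DfsRepository;
    import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;

    public class SmallReftableDatabase extends DfsReftableDatabase {
        protected SmallReftableDatabase(DfsRepository repo) {
            super(repo);
        }

        @Override
        public ReftableConfig getReftableConfig() {
            // Start from the repository configuration, then tune write options.
            ReftableConfig cfg = new ReftableConfig(getRepository().getConfig());
            cfg.setAlignBlocks(false);  // do not pad blocks to the block size
            cfg.setIndexObjects(false); // omit the object -> ref index
            return cfg;
        }
    }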

File 6 of 7: InMemoryRepository

@@ -6,30 +6,13 @@
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;

 import org.eclipse.jgit.annotations.Nullable;
 import org.eclipse.jgit.internal.storage.pack.PackExt;
+import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;
-import org.eclipse.jgit.lib.BatchRefUpdate;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectIdRef;
-import org.eclipse.jgit.lib.ProgressMonitor;
-import org.eclipse.jgit.lib.Ref;
-import org.eclipse.jgit.lib.Ref.Storage;
 import org.eclipse.jgit.lib.RefDatabase;
-import org.eclipse.jgit.revwalk.RevObject;
-import org.eclipse.jgit.revwalk.RevTag;
-import org.eclipse.jgit.revwalk.RevWalk;
-import org.eclipse.jgit.transport.ReceiveCommand;
-import org.eclipse.jgit.util.RefList;

 /**
  * Git repository stored entirely in the local process memory.
@@ -54,9 +37,8 @@ public InMemoryRepository build() throws IOException {
     static final AtomicInteger packId = new AtomicInteger();

     private final MemObjDatabase objdb;
-    private final RefDatabase refdb;
+    private final MemRefDatabase refdb;
     private String gitwebDescription;
-    private boolean performsAtomicTransactions = true;

     /**
      * Initialize a new in-memory repository.
@@ -92,7 +74,7 @@ public RefDatabase getRefDatabase() {
      * @param atomic
      */
     public void setPerformsAtomicTransactions(boolean atomic) {
-        performsAtomicTransactions = atomic;
+        refdb.performsAtomicTransactions = atomic;
     }

     @Override
@@ -148,6 +130,7 @@ protected synchronized void commitPackImpl(
             if (replace != null)
                 n.removeAll(replace);
             packs = n;
+            clearCache();
         }

         @Override
@@ -159,37 +142,43 @@ protected void rollbackPack(Collection<DfsPackDescription> desc) {
         protected ReadableChannel openFile(DfsPackDescription desc, PackExt ext)
                 throws FileNotFoundException, IOException {
             MemPack memPack = (MemPack) desc;
-            byte[] file = memPack.fileMap.get(ext);
+            byte[] file = memPack.get(ext);
             if (file == null)
                 throw new FileNotFoundException(desc.getFileName(ext));
             return new ByteArrayReadableChannel(file, blockSize);
         }

         @Override
-        protected DfsOutputStream writeFile(
-                DfsPackDescription desc, final PackExt ext) throws IOException {
-            final MemPack memPack = (MemPack) desc;
+        protected DfsOutputStream writeFile(DfsPackDescription desc,
+                PackExt ext) throws IOException {
+            MemPack memPack = (MemPack) desc;
             return new Out() {
                 @Override
                 public void flush() {
-                    memPack.fileMap.put(ext, getData());
+                    memPack.put(ext, getData());
                 }
             };
         }
     }

     private static class MemPack extends DfsPackDescription {
-        final Map<PackExt, byte[]>
-                fileMap = new HashMap<>();
+        final byte[][] fileMap = new byte[PackExt.values().length][];

         MemPack(String name, DfsRepositoryDescription repoDesc) {
             super(repoDesc, name);
         }
+
+        void put(PackExt ext, byte[] data) {
+            fileMap[ext.getPosition()] = data;
+        }
+
+        byte[] get(PackExt ext) {
+            return fileMap[ext.getPosition()];
+        }
     }

@@ -221,7 +210,6 @@ byte[] getData() {
         public void close() {
             flush();
         }
-
     }

     private static class ByteArrayReadableChannel implements ReadableChannel {
@@ -281,193 +269,27 @@ public void setReadAheadBytes(int b) {
         }
     }

-    /**
-     * A ref database storing all refs in-memory.
-     * <p>
-     * This class is protected (and not private) to facilitate testing using
-     * subclasses of InMemoryRepository.
-     */
-    protected class MemRefDatabase extends DfsRefDatabase {
-        private final ConcurrentMap<String, Ref> refs = new ConcurrentHashMap<>();
-        private final ReadWriteLock lock = new ReentrantReadWriteLock(true /* fair */);
+    /** DfsRefDatabase used by InMemoryRepository. */
+    protected class MemRefDatabase extends DfsReftableDatabase {
+        boolean performsAtomicTransactions = true;

-        /**
-         * Initialize a new in-memory ref database.
-         */
+        /** Initialize a new in-memory ref database. */
         protected MemRefDatabase() {
             super(InMemoryRepository.this);
         }

+        @Override
+        public ReftableConfig getReftableConfig() {
+            ReftableConfig cfg = new ReftableConfig();
+            cfg.setAlignBlocks(false);
+            cfg.setIndexObjects(false);
+            cfg.fromConfig(getRepository().getConfig());
+            return cfg;
+        }
+
         @Override
         public boolean performsAtomicTransactions() {
             return performsAtomicTransactions;
         }
-
-        @Override
-        public BatchRefUpdate newBatchUpdate() {
-            return new BatchRefUpdate(this) {
-                @Override
-                public void execute(RevWalk walk, ProgressMonitor monitor)
-                        throws IOException {
-                    if (performsAtomicTransactions() && isAtomic()) {
-                        try {
-                            lock.writeLock().lock();
-                            batch(getCommands());
-                        } finally {
-                            lock.writeLock().unlock();
-                        }
-                    } else {
-                        super.execute(walk, monitor);
-                    }
-                }
-            };
-        }
-
-        @Override
-        protected RefCache scanAllRefs() throws IOException {
-            RefList.Builder<Ref> ids = new RefList.Builder<>();
-            RefList.Builder<Ref> sym = new RefList.Builder<>();
-            try {
-                lock.readLock().lock();
-                for (Ref ref : refs.values()) {
-                    if (ref.isSymbolic())
-                        sym.add(ref);
-                    ids.add(ref);
-                }
-            } finally {
-                lock.readLock().unlock();
-            }
-            ids.sort();
-            sym.sort();
-            objdb.getCurrentPackList().markDirty();
-            return new RefCache(ids.toRefList(), sym.toRefList());
-        }
-
-        private void batch(List<ReceiveCommand> cmds) {
-            // Validate that the target exists in a new RevWalk, as the RevWalk
-            // from the RefUpdate might be reading back unflushed objects.
-            Map<ObjectId, ObjectId> peeled = new HashMap<>();
-            try (RevWalk rw = new RevWalk(getRepository())) {
-                for (ReceiveCommand c : cmds) {
-                    if (c.getResult() != ReceiveCommand.Result.NOT_ATTEMPTED) {
-                        ReceiveCommand.abort(cmds);
-                        return;
-                    }
-
-                    if (!ObjectId.zeroId().equals(c.getNewId())) {
-                        try {
-                            RevObject o = rw.parseAny(c.getNewId());
-                            if (o instanceof RevTag) {
-                                peeled.put(o, rw.peel(o).copy());
-                            }
-                        } catch (IOException e) {
-                            c.setResult(ReceiveCommand.Result.REJECTED_MISSING_OBJECT);
-                            ReceiveCommand.abort(cmds);
-                            return;
-                        }
-                    }
-                }
-            }
-
-            // Check all references conform to expected old value.
-            for (ReceiveCommand c : cmds) {
-                Ref r = refs.get(c.getRefName());
-                if (r == null) {
-                    if (c.getType() != ReceiveCommand.Type.CREATE) {
-                        c.setResult(ReceiveCommand.Result.LOCK_FAILURE);
-                        ReceiveCommand.abort(cmds);
-                        return;
-                    }
-                } else {
-                    ObjectId objectId = r.getObjectId();
-                    if (r.isSymbolic() || objectId == null
-                            || !objectId.equals(c.getOldId())) {
-                        c.setResult(ReceiveCommand.Result.LOCK_FAILURE);
-                        ReceiveCommand.abort(cmds);
-                        return;
-                    }
-                }
-            }
-
-            // Write references.
-            for (ReceiveCommand c : cmds) {
-                if (c.getType() == ReceiveCommand.Type.DELETE) {
-                    refs.remove(c.getRefName());
-                    c.setResult(ReceiveCommand.Result.OK);
-                    continue;
-                }
-
-                ObjectId p = peeled.get(c.getNewId());
-                Ref r;
-                if (p != null) {
-                    r = new ObjectIdRef.PeeledTag(Storage.PACKED,
-                            c.getRefName(), c.getNewId(), p);
-                } else {
-                    r = new ObjectIdRef.PeeledNonTag(Storage.PACKED,
-                            c.getRefName(), c.getNewId());
-                }
-                refs.put(r.getName(), r);
-                c.setResult(ReceiveCommand.Result.OK);
-            }
-            clearCache();
-        }
-
-        @Override
-        protected boolean compareAndPut(Ref oldRef, Ref newRef)
-                throws IOException {
-            try {
-                lock.writeLock().lock();
-                ObjectId id = newRef.getObjectId();
-                if (id != null) {
-                    try (RevWalk rw = new RevWalk(getRepository())) {
-                        // Validate that the target exists in a new RevWalk, as the RevWalk
-                        // from the RefUpdate might be reading back unflushed objects.
-                        rw.parseAny(id);
-                    }
-                }
-                String name = newRef.getName();
-                if (oldRef == null)
-                    return refs.putIfAbsent(name, newRef) == null;
-                Ref cur = refs.get(name);
-                if (cur != null) {
-                    if (eq(cur, oldRef))
-                        return refs.replace(name, cur, newRef);
-                }
-                if (oldRef.getStorage() == Storage.NEW)
-                    return refs.putIfAbsent(name, newRef) == null;
-                return false;
-            } finally {
-                lock.writeLock().unlock();
-            }
-        }
-
-        @Override
-        protected boolean compareAndRemove(Ref oldRef) throws IOException {
-            try {
-                lock.writeLock().lock();
-                String name = oldRef.getName();
-                Ref cur = refs.get(name);
-                if (cur != null && eq(cur, oldRef))
-                    return refs.remove(name, cur);
-                else
-                    return false;
-            } finally {
-                lock.writeLock().unlock();
-            }
-        }
-
-        private boolean eq(Ref a, Ref b) {
-            if (!Objects.equals(a.getName(), b.getName()))
-                return false;
-            if (a.isSymbolic() != b.isSymbolic())
-                return false;
-            if (a.isSymbolic())
-                return Objects.equals(a.getTarget().getName(), b.getTarget().getName());
-            else
-                return Objects.equals(a.getObjectId(), b.getObjectId());
-        }
     }
 }
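With MemRefDatabase now extending DfsReftableDatabase, atomic batch updates are handled by ReftableBatchRefUpdate instead of the hand-rolled batch() method removed above. A hedged sketch of issuing an atomic batch against an InMemoryRepository; the repository name and ref names are illustrative, and commitId is assumed to identify a commit already written to the repository's object database (for example via org.eclipse.jgit.junit.TestRepository):

    import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
    import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;
    import org.eclipse.jgit.lib.BatchRefUpdate;
    import org.eclipse.jgit.lib.NullProgressMonitor;
    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.revwalk.RevWalk;
    import org.eclipse.jgit.transport.ReceiveCommand;

    InMemoryRepository repo = new InMemoryRepository(
            new DfsRepositoryDescription("batch-demo"));
    repo.setPerformsAtomicTransactions(true); // now simply routed to MemRefDatabase

    // commitId: ObjectId of a commit already inserted into repo (assumption).
    BatchRefUpdate bru = repo.getRefDatabase().newBatchUpdate();
    bru.setAtomic(true);
    bru.addCommand(new ReceiveCommand(ObjectId.zeroId(), commitId, "refs/heads/a"),
            new ReceiveCommand(ObjectId.zeroId(), commitId, "refs/heads/b"));
    try (RevWalk rw = new RevWalk(repo)) {
        // Either both refs are created by one new reftable, or neither is.
        bru.execute(rw, NullProgressMonitor.INSTANCE);
    }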

File 7 of 7: ReftableBatchRefUpdate

@@ -116,7 +116,7 @@ protected ReftableBatchRefUpdate(DfsReftableDatabase refdb,
         this.refdb = refdb;
         this.odb = odb;
         lock = refdb.getLock();
-        reftableConfig = new ReftableConfig(refdb.getRepository().getConfig());
+        reftableConfig = refdb.getReftableConfig();
     }

     @Override