From 1bf0c3cdb1aecac41284e63b2daed2d92b908d3d Mon Sep 17 00:00:00 2001 From: "Shawn O. Pearce" Date: Fri, 14 Jan 2011 14:17:55 -0800 Subject: [PATCH] Refactor IndexPack to not require local filesystem By moving the logic that parses a pack stream from the network (or a bundle) into a type that can be constructed by an ObjectInserter, repository implementations have a chance to inject their own logic for storing object data received into the destination repository. The API isn't completely generic yet, there are still quite a few assumptions that the PackParser subclass is storing the data onto the local filesystem as a single file. But its about the simplest split of IndexPack I can come up with without completely ripping the code apart. Change-Id: I5b167c9cc6d7a7c56d0197c62c0fd0036a83ec6c Signed-off-by: Shawn O. Pearce Signed-off-by: Chris Aniszczyk --- .../src/org/eclipse/jgit/pgm/IndexPack.java | 35 +- .../jgit/storage/file/PackFileTest.java | 32 +- .../jgit/storage/file/PackWriterTest.java | 65 +- ...IndexPackTest.java => PackParserTest.java} | 50 +- .../transport/ReceivePackRefFilterTest.java | 19 +- .../src/org/eclipse/jgit/lib/CoreConfig.java | 1 - .../org/eclipse/jgit/lib/ObjectInserter.java | 20 + .../storage/file/CachedObjectDirectory.java | 11 + .../jgit/storage/file/FileObjectDatabase.java | 5 + .../jgit/storage/file/ObjectDirectory.java | 13 +- .../storage/file/ObjectDirectoryInserter.java | 6 + .../file/ObjectDirectoryPackParser.java | 480 ++++++ .../transport/BasePackFetchConnection.java | 19 +- .../jgit/transport/BundleFetchConnection.java | 25 +- .../org/eclipse/jgit/transport/IndexPack.java | 1384 --------------- .../org/eclipse/jgit/transport/LongMap.java | 2 +- .../eclipse/jgit/transport/PackParser.java | 1503 +++++++++++++++++ .../eclipse/jgit/transport/ReceivePack.java | 39 +- .../jgit/transport/WalkFetchConnection.java | 21 +- 19 files changed, 2224 insertions(+), 1506 deletions(-) rename org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/{IndexPackTest.java => PackParserTest.java} (87%) create mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectoryPackParser.java delete mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/transport/IndexPack.java create mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/IndexPack.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/IndexPack.java index 640c8ef34..458565039 100644 --- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/IndexPack.java +++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/IndexPack.java @@ -45,12 +45,12 @@ package org.eclipse.jgit.pgm; import java.io.BufferedInputStream; -import java.io.File; -import org.kohsuke.args4j.Argument; -import org.kohsuke.args4j.Option; -import org.eclipse.jgit.lib.CoreConfig; +import org.eclipse.jgit.lib.ObjectInserter; import org.eclipse.jgit.lib.TextProgressMonitor; +import org.eclipse.jgit.storage.file.ObjectDirectoryPackParser; +import org.eclipse.jgit.transport.PackParser; +import org.kohsuke.args4j.Option; class IndexPack extends TextBuiltin { @Option(name = "--fix-thin", usage = "usage_fixAThinPackToBeComplete") @@ -59,20 +59,21 @@ class IndexPack extends TextBuiltin { @Option(name = "--index-version", usage = "usage_indexFileFormatToCreate") private int indexVersion = -1; - @Argument(index = 0, required = true, metaVar = "metaVar_base") - private File base; - @Override protected void run() throws Exception { - if (indexVersion == -1) - indexVersion = 
db.getConfig().get(CoreConfig.KEY) - .getPackIndexVersion(); - final BufferedInputStream in; - final org.eclipse.jgit.transport.IndexPack ip; - in = new BufferedInputStream(System.in); - ip = new org.eclipse.jgit.transport.IndexPack(db, in, base); - ip.setFixThin(fixThin); - ip.setIndexVersion(indexVersion); - ip.index(new TextProgressMonitor()); + BufferedInputStream in = new BufferedInputStream(System.in); + ObjectInserter inserter = db.newObjectInserter(); + try { + PackParser p = inserter.newPackParser(in); + p.setAllowThin(fixThin); + if (indexVersion != -1 && p instanceof ObjectDirectoryPackParser) { + ObjectDirectoryPackParser imp = (ObjectDirectoryPackParser) p; + imp.setIndexVersion(indexVersion); + } + p.parse(new TextProgressMonitor()); + inserter.flush(); + } finally { + inserter.release(); + } } } diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/storage/file/PackFileTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/storage/file/PackFileTest.java index 8cb8e4336..f95dccfae 100644 --- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/storage/file/PackFileTest.java +++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/storage/file/PackFileTest.java @@ -71,7 +71,7 @@ import org.eclipse.jgit.lib.ObjectStream; import org.eclipse.jgit.revwalk.RevBlob; import org.eclipse.jgit.storage.pack.DeltaEncoder; -import org.eclipse.jgit.transport.IndexPack; +import org.eclipse.jgit.transport.PackParser; import org.eclipse.jgit.util.IO; import org.eclipse.jgit.util.NB; import org.eclipse.jgit.util.TemporaryBuffer; @@ -212,11 +212,9 @@ public void testDelta_SmallObjectChain() throws Exception { deflate(pack, delta3); digest(pack); - final byte[] raw = pack.toByteArray(); - IndexPack ip = IndexPack.create(repo, new ByteArrayInputStream(raw)); - ip.setFixThin(true); - ip.index(NullProgressMonitor.INSTANCE); - ip.renameAndOpenPack(); + PackParser ip = index(pack.toByteArray()); + ip.setAllowThin(true); + ip.parse(NullProgressMonitor.INSTANCE); assertTrue("has blob", wc.has(id3)); @@ -273,11 +271,9 @@ public void testDelta_LargeObjectChain() throws Exception { deflate(pack, delta3); digest(pack); - final byte[] raw = pack.toByteArray(); - IndexPack ip = IndexPack.create(repo, new ByteArrayInputStream(raw)); - ip.setFixThin(true); - ip.index(NullProgressMonitor.INSTANCE); - ip.renameAndOpenPack(); + PackParser ip = index(pack.toByteArray()); + ip.setAllowThin(true); + ip.parse(NullProgressMonitor.INSTANCE); assertTrue("has blob", wc.has(id3)); @@ -364,4 +360,18 @@ private void digest(TemporaryBuffer.Heap buf) throws IOException { md.update(buf.toByteArray()); buf.write(md.digest()); } + + private ObjectInserter inserter; + + @After + public void release() { + if (inserter != null) + inserter.release(); + } + + private PackParser index(byte[] raw) throws IOException { + if (inserter == null) + inserter = repo.newObjectInserter(); + return inserter.newPackParser(new ByteArrayInputStream(raw)); + } } diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/storage/file/PackWriterTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/storage/file/PackWriterTest.java index 0751f8b5c..7a829dee0 100644 --- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/storage/file/PackWriterTest.java +++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/storage/file/PackWriterTest.java @@ -44,6 +44,7 @@ package org.eclipse.jgit.storage.file; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -52,7 +53,6 @@ 
import java.io.File; import java.io.FileOutputStream; import java.io.IOException; -import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -66,14 +66,14 @@ import org.eclipse.jgit.junit.JGitTestUtil; import org.eclipse.jgit.lib.NullProgressMonitor; import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.ObjectInserter; import org.eclipse.jgit.lib.SampleDataRepositoryTestCase; -import org.eclipse.jgit.lib.TextProgressMonitor; import org.eclipse.jgit.revwalk.RevObject; import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.storage.file.PackIndex.MutableEntry; import org.eclipse.jgit.storage.pack.PackConfig; import org.eclipse.jgit.storage.pack.PackWriter; -import org.eclipse.jgit.transport.IndexPack; +import org.eclipse.jgit.transport.PackParser; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -92,28 +92,34 @@ public class PackWriterTest extends SampleDataRepositoryTestCase { private ByteArrayOutputStream os; - private File packBase; - - private File packFile; - - private File indexFile; - private PackFile pack; + private ObjectInserter inserter; + + private FileRepository dst; + @Before public void setUp() throws Exception { super.setUp(); os = new ByteArrayOutputStream(); - packBase = new File(trash, "tmp_pack"); - packFile = new File(trash, "tmp_pack.pack"); - indexFile = new File(trash, "tmp_pack.idx"); config = new PackConfig(db); + + dst = createBareRepository(); + File alt = new File(dst.getObjectDatabase().getDirectory(), "info/alternates"); + alt.getParentFile().mkdirs(); + write(alt, db.getObjectDatabase().getDirectory().getAbsolutePath() + "\n"); } @After public void tearDown() throws Exception { - if (writer != null) + if (writer != null) { writer.release(); + writer = null; + } + if (inserter != null) { + inserter.release(); + inserter = null; + } super.tearDown(); } @@ -408,6 +414,11 @@ public void testWriteIndex() throws Exception { config.setIndexVersion(2); writeVerifyPack4(false); + File packFile = pack.getPackFile(); + String name = packFile.getName(); + String base = name.substring(0, name.lastIndexOf('.')); + File indexFile = new File(packFile.getParentFile(), base + ".idx"); + // Validate that IndexPack came up with the right CRC32 value. 
final PackIndex idx1 = PackIndex.open(indexFile); assertTrue(idx1 instanceof PackIndexV2); @@ -544,23 +555,31 @@ private void createVerifyOpenPack(final Iterator objectSource) } private void verifyOpenPack(final boolean thin) throws IOException { + final byte[] packData = os.toByteArray(); + if (thin) { - final InputStream is = new ByteArrayInputStream(os.toByteArray()); - final IndexPack indexer = new IndexPack(db, is, packBase); + PackParser p = index(packData); try { - indexer.index(new TextProgressMonitor()); + p.parse(NullProgressMonitor.INSTANCE); fail("indexer should grumble about missing object"); } catch (IOException x) { // expected } } - final InputStream is = new ByteArrayInputStream(os.toByteArray()); - final IndexPack indexer = new IndexPack(db, is, packBase); - indexer.setKeepEmpty(true); - indexer.setFixThin(thin); - indexer.setIndexVersion(2); - indexer.index(new TextProgressMonitor()); - pack = new PackFile(indexFile, packFile); + + ObjectDirectoryPackParser p = (ObjectDirectoryPackParser) index(packData); + p.setKeepEmpty(true); + p.setAllowThin(thin); + p.setIndexVersion(2); + p.parse(NullProgressMonitor.INSTANCE); + pack = p.getPackFile(); + assertNotNull("have PackFile after parsing", pack); + } + + private PackParser index(final byte[] packData) throws IOException { + if (inserter == null) + inserter = dst.newObjectInserter(); + return inserter.newPackParser(new ByteArrayInputStream(packData)); } private void verifyObjectsOrder(final ObjectId objectsOrder[]) { diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/IndexPackTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/PackParserTest.java similarity index 87% rename from org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/IndexPackTest.java rename to org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/PackParserTest.java index c75474550..df89674e6 100644 --- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/IndexPackTest.java +++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/PackParserTest.java @@ -61,13 +61,15 @@ import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.NullProgressMonitor; import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.ObjectInserter; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.lib.RepositoryTestCase; -import org.eclipse.jgit.lib.TextProgressMonitor; import org.eclipse.jgit.revwalk.RevBlob; +import org.eclipse.jgit.storage.file.ObjectDirectoryPackParser; import org.eclipse.jgit.storage.file.PackFile; import org.eclipse.jgit.util.NB; import org.eclipse.jgit.util.TemporaryBuffer; +import org.junit.After; import org.junit.Test; /** @@ -76,8 +78,7 @@ * to make sure they contain the expected objects (well we don't test * for all of them unless the packs are very small). */ -public class IndexPackTest extends RepositoryTestCase { - +public class PackParserTest extends RepositoryTestCase { /** * Test indexing one of the test packs in the egit repo. It has deltas. 
* @@ -88,9 +89,10 @@ public void test1() throws IOException { File packFile = JGitTestUtil.getTestResourceFile("pack-34be9032ac282b11fa9babdc2b2a93ca996c9c2f.pack"); final InputStream is = new FileInputStream(packFile); try { - IndexPack pack = new IndexPack(db, is, new File(trash, "tmp_pack1")); - pack.index(new TextProgressMonitor()); - PackFile file = new PackFile(new File(trash, "tmp_pack1.idx"), new File(trash, "tmp_pack1.pack")); + ObjectDirectoryPackParser p = (ObjectDirectoryPackParser) index(is); + p.parse(NullProgressMonitor.INSTANCE); + PackFile file = p.getPackFile(); + assertTrue(file.hasObject(ObjectId.fromString("4b825dc642cb6eb9a060e54bf8d69288fbee4904"))); assertTrue(file.hasObject(ObjectId.fromString("540a36d136cf413e4b064c2b0e0a4db60f77feab"))); assertTrue(file.hasObject(ObjectId.fromString("5b6e7c66c276e7610d4a73c70ec1a1f7c1003259"))); @@ -115,9 +117,10 @@ public void test2() throws IOException { File packFile = JGitTestUtil.getTestResourceFile("pack-df2982f284bbabb6bdb59ee3fcc6eb0983e20371.pack"); final InputStream is = new FileInputStream(packFile); try { - IndexPack pack = new IndexPack(db, is, new File(trash, "tmp_pack2")); - pack.index(new TextProgressMonitor()); - PackFile file = new PackFile(new File(trash, "tmp_pack2.idx"), new File(trash, "tmp_pack2.pack")); + ObjectDirectoryPackParser p = (ObjectDirectoryPackParser) index(is); + p.parse(NullProgressMonitor.INSTANCE); + PackFile file = p.getPackFile(); + assertTrue(file.hasObject(ObjectId.fromString("02ba32d3649e510002c21651936b7077aa75ffa9"))); assertTrue(file.hasObject(ObjectId.fromString("0966a434eb1a025db6b71485ab63a3bfbea520b6"))); assertTrue(file.hasObject(ObjectId.fromString("09efc7e59a839528ac7bda9fa020dc9101278680"))); @@ -151,11 +154,9 @@ public void testTinyThinPack() throws Exception { digest(pack); - final byte[] raw = pack.toByteArray(); - IndexPack ip = IndexPack.create(db, new ByteArrayInputStream(raw)); - ip.setFixThin(true); - ip.index(NullProgressMonitor.INSTANCE); - ip.renameAndOpenPack(); + PackParser p = index(new ByteArrayInputStream(pack.toByteArray())); + p.setAllowThin(true); + p.parse(NullProgressMonitor.INSTANCE); } @Test @@ -171,10 +172,9 @@ public void testPackWithDuplicateBlob() throws Exception { deflate(pack, data); digest(pack); - final byte[] raw = pack.toByteArray(); - IndexPack ip = IndexPack.create(db, new ByteArrayInputStream(raw)); - ip.index(NullProgressMonitor.INSTANCE); - ip.renameAndOpenPack(); + PackParser p = index(new ByteArrayInputStream(pack.toByteArray())); + p.setAllowThin(false); + p.parse(NullProgressMonitor.INSTANCE); } private void packHeader(TemporaryBuffer.Heap tinyPack, int cnt) @@ -205,4 +205,18 @@ private void digest(TemporaryBuffer.Heap buf) throws IOException { md.update(buf.toByteArray()); buf.write(md.digest()); } + + private ObjectInserter inserter; + + @After + public void release() { + if (inserter != null) + inserter.release(); + } + + private PackParser index(InputStream in) throws IOException { + if (inserter == null) + inserter = db.newObjectInserter(); + return inserter.newPackParser(in); + } } diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/ReceivePackRefFilterTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/ReceivePackRefFilterTest.java index 4aea1be02..5fd76d4ad 100644 --- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/ReceivePackRefFilterTest.java +++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/ReceivePackRefFilterTest.java @@ -67,6 +67,7 @@ import org.eclipse.jgit.lib.Constants; 
import org.eclipse.jgit.lib.NullProgressMonitor; import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.ObjectInserter; import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.Repository; @@ -536,12 +537,22 @@ private void digest(TemporaryBuffer.Heap buf) throws IOException { buf.write(md.digest()); } + private ObjectInserter inserter; + + @After + public void release() { + if (inserter != null) + inserter.release(); + } + private void openPack(TemporaryBuffer.Heap buf) throws IOException { + if (inserter == null) + inserter = src.newObjectInserter(); + final byte[] raw = buf.toByteArray(); - IndexPack ip = IndexPack.create(src, new ByteArrayInputStream(raw)); - ip.setFixThin(true); - ip.index(PM); - ip.renameAndOpenPack(); + PackParser p = inserter.newPackParser(new ByteArrayInputStream(raw)); + p.setAllowThin(true); + p.parse(PM); } private static PacketLineIn asPacketLineIn(TemporaryBuffer.Heap buf) diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java index 712f6e465..a6da60ffd 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java @@ -95,7 +95,6 @@ public int getCompression() { /** * @return the preferred pack index file format; 0 for oldest possible. - * @see org.eclipse.jgit.transport.IndexPack */ public int getPackIndexVersion() { return packIndexVersion; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectInserter.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectInserter.java index 48fc39b4f..0355e56f5 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectInserter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectInserter.java @@ -52,6 +52,8 @@ import java.io.InputStream; import java.security.MessageDigest; +import org.eclipse.jgit.transport.PackParser; + /** * Inserts objects into an existing {@code ObjectDatabase}. *
<p>
@@ -73,6 +75,11 @@ public ObjectId insert(int objectType, long length, InputStream in) throw new UnsupportedOperationException(); } + @Override + public PackParser newPackParser(InputStream in) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public void flush() throws IOException { // Do nothing. @@ -282,6 +289,19 @@ public ObjectId insert(int type, byte[] data, int off, int len) public abstract ObjectId insert(int objectType, long length, InputStream in) throws IOException; + /** + * Initialize a parser to read from a pack formatted stream. + * + * @param in + * the input stream. The stream is not closed by the parser, and + * must instead be closed by the caller once parsing is complete. + * @return the parser instance, which can be configured and then used to + * parse objects into the ObjectDatabase. + * @throws IOException + * the pack parser could not be created. + */ + public abstract PackParser newPackParser(InputStream in) throws IOException; + /** * Make all inserted objects visible. *
<p>
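A minimal sketch of how callers drive the new ObjectInserter.newPackParser() entry point, following the same pattern this change applies in BasePackFetchConnection.receivePack() and the pgm IndexPack command; the ReceiveExample and receive names are illustrative only, and the Repository and pack InputStream are assumed to be supplied by the caller.

import java.io.IOException;
import java.io.InputStream;

import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.storage.file.PackLock;
import org.eclipse.jgit.transport.PackParser;

class ReceiveExample {
	/** Parse a pack stream into the repository's object database. */
	static PackLock receive(Repository repo, InputStream in) throws IOException {
		ObjectInserter ins = repo.newObjectInserter();
		try {
			PackParser parser = ins.newPackParser(in);
			parser.setAllowThin(true); // append missing delta bases, as a fetch would
			parser.setObjectChecking(true); // validate objects from an untrusted source
			PackLock lock = parser.parse(NullProgressMonitor.INSTANCE);
			ins.flush(); // make the parsed objects visible to readers
			return lock;
		} finally {
			ins.release();
		}
	}
}

Note that parse() only returns a PackLock when setLockMessage() has been called; otherwise it may return null, as in the ObjectDirectoryPackParser implementation below.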
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/CachedObjectDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/CachedObjectDirectory.java index 0f7f7b8d9..001ae288a 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/CachedObjectDirectory.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/CachedObjectDirectory.java @@ -58,6 +58,7 @@ import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.storage.pack.ObjectToPack; import org.eclipse.jgit.storage.pack.PackWriter; +import org.eclipse.jgit.util.FS; /** * The cached instance of an {@link ObjectDirectory}. @@ -132,6 +133,11 @@ Config getConfig() { return wrapped.getConfig(); } + @Override + FS getFS() { + return wrapped.getFS(); + } + @Override AlternateHandle[] myAlternates() { if (alts == null) { @@ -233,6 +239,11 @@ InsertLooseObjectResult insertUnpackedObject(File tmp, ObjectId objectId, return result; } + @Override + PackFile openPack(File pack, File idx) throws IOException { + return wrapped.openPack(pack, idx); + } + @Override void selectObjectRepresentation(PackWriter packer, ObjectToPack otp, WindowCursor curs) throws IOException { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/FileObjectDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/FileObjectDatabase.java index 418c3c0ad..22e5412ef 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/FileObjectDatabase.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/FileObjectDatabase.java @@ -56,6 +56,7 @@ import org.eclipse.jgit.lib.ObjectReader; import org.eclipse.jgit.storage.pack.ObjectToPack; import org.eclipse.jgit.storage.pack.PackWriter; +import org.eclipse.jgit.util.FS; abstract class FileObjectDatabase extends ObjectDatabase { static enum InsertLooseObjectResult { @@ -132,6 +133,8 @@ abstract void resolve(Set matches, AbbreviatedObjectId id) abstract Config getConfig(); + abstract FS getFS(); + /** * Open an object from this database. *
<p>
@@ -278,6 +281,8 @@ abstract long getObjectSize2(WindowCursor curs, String objectName, abstract InsertLooseObjectResult insertUnpackedObject(File tmp, ObjectId id, boolean createDuplicate) throws IOException; + abstract PackFile openPack(File pack, File idx) throws IOException; + abstract FileObjectDatabase newCachedFileObjectDatabase(); static class AlternateHandle { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectory.java index 4620357fb..afad321cb 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectory.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectory.java @@ -233,11 +233,13 @@ public Collection getPacks() { * path of the pack file to open. * @param idx * path of the corresponding index file. + * @return the pack that was opened and added to the database. * @throws IOException * index file could not be opened, read, or is not recognized as * a Git pack file index. */ - public void openPack(final File pack, final File idx) throws IOException { + public PackFile openPack(final File pack, final File idx) + throws IOException { final String p = pack.getName(); final String i = idx.getName(); @@ -250,7 +252,9 @@ public void openPack(final File pack, final File idx) throws IOException { if (!p.substring(0, 45).equals(i.substring(0, 45))) throw new IOException(MessageFormat.format(JGitText.get().packDoesNotMatchIndex, pack)); - insertPack(new PackFile(idx, pack)); + PackFile res = new PackFile(idx, pack); + insertPack(res); + return res; } @Override @@ -519,6 +523,11 @@ Config getConfig() { return config; } + @Override + FS getFS() { + return fs; + } + private void insertPack(final PackFile pf) { PackList o, n; do { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectoryInserter.java b/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectoryInserter.java index 5569ff5a7..0e46b6e14 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectoryInserter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectoryInserter.java @@ -63,6 +63,7 @@ import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectInserter; +import org.eclipse.jgit.transport.PackParser; import org.eclipse.jgit.util.FileUtils; /** Creates loose objects in a {@link ObjectDirectory}. */ @@ -100,6 +101,11 @@ public ObjectId insert(final int type, long len, final InputStream is) throw new ObjectWritingException("Unable to create new object: " + dst); } + @Override + public PackParser newPackParser(InputStream in) throws IOException { + return new ObjectDirectoryPackParser(db, in); + } + @Override public void flush() throws IOException { // Do nothing. Objects are immediately visible. diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectoryPackParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectoryPackParser.java new file mode 100644 index 000000000..b13df8108 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/storage/file/ObjectDirectoryPackParser.java @@ -0,0 +1,480 @@ +/* + * Copyright (C) 2008-2011, Google Inc. + * Copyright (C) 2007-2008, Robin Rosenberg + * Copyright (C) 2008, Shawn O. Pearce + * and other copyright owners as documented in the project's IP log. 
+ * + * This program and the accompanying materials are made available + * under the terms of the Eclipse Distribution License v1.0 which + * accompanies this distribution, is reproduced below, and is + * available at http://www.eclipse.org/org/documents/edl-v10.php + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or + * without modification, are permitted provided that the following + * conditions are met: + * + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * + * - Neither the name of the Eclipse Foundation, Inc. nor the + * names of its contributors may be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.eclipse.jgit.storage.file; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.RandomAccessFile; +import java.security.MessageDigest; +import java.text.MessageFormat; +import java.util.Arrays; +import java.util.List; +import java.util.zip.CRC32; +import java.util.zip.Deflater; + +import org.eclipse.jgit.JGitText; +import org.eclipse.jgit.lib.AnyObjectId; +import org.eclipse.jgit.lib.Constants; +import org.eclipse.jgit.lib.CoreConfig; +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.ObjectInserter; +import org.eclipse.jgit.lib.ProgressMonitor; +import org.eclipse.jgit.transport.PackParser; +import org.eclipse.jgit.transport.PackedObjectInfo; +import org.eclipse.jgit.util.FileUtils; +import org.eclipse.jgit.util.NB; + +/** + * Consumes a pack stream and stores as a pack file in {@link ObjectDirectory}. + *
<p>
+ * To obtain an instance of a parser, applications should use + * {@link ObjectInserter#newPackParser(InputStream)}. + */ +public class ObjectDirectoryPackParser extends PackParser { + private final FileObjectDatabase db; + + /** CRC-32 computation for objects that are appended onto the pack. */ + private final CRC32 crc; + + /** Running SHA-1 of any base objects appended after {@link #origEnd}. */ + private final MessageDigest tailDigest; + + /** Preferred format version of the pack-*.idx file to generate. */ + private int indexVersion; + + /** If true, pack with 0 objects will be stored. Usually these are deleted. */ + private boolean keepEmpty; + + /** Path of the temporary file holding the pack data. */ + private File tmpPack; + + /** + * Path of the index created for the pack, to find objects quickly at read + * time. + */ + private File tmpIdx; + + /** Read/write handle to {@link #tmpPack} while it is being parsed. */ + private RandomAccessFile out; + + /** Length of the original pack stream, before missing bases were appended. */ + private long origEnd; + + /** The original checksum of data up to {@link #origEnd}. */ + private byte[] origHash; + + /** Current end of the pack file. */ + private long packEnd; + + /** Checksum of the entire pack file. */ + private byte[] packHash; + + /** Compresses delta bases when completing a thin pack. */ + private Deflater def; + + /** The pack that was created, if parsing was successful. */ + private PackFile newPack; + + ObjectDirectoryPackParser(FileObjectDatabase odb, InputStream src) { + super(odb, src); + this.db = odb; + this.crc = new CRC32(); + this.tailDigest = Constants.newMessageDigest(); + + indexVersion = db.getConfig().get(CoreConfig.KEY).getPackIndexVersion(); + } + + /** + * Set the pack index file format version this instance will create. + * + * @param version + * the version to write. The special version 0 designates the + * oldest (most compatible) format available for the objects. + * @see PackIndexWriter + */ + public void setIndexVersion(int version) { + indexVersion = version; + } + + /** + * Configure this index pack instance to keep an empty pack. + *
<p>
+ * By default an empty pack (a pack with no objects) is not kept, as doing so + * is completely pointless. With no objects in the pack there is no data stored + * by it, so the pack is unnecessary. + * + * @param empty + * true to enable keeping an empty pack. + */ + public void setKeepEmpty(final boolean empty) { + keepEmpty = empty; + } + + /** + * Get the imported {@link PackFile}. + *
<p>
+ * This method is supplied only to support testing; applications shouldn't + * be using it directly to access the imported data. + * + * @return the imported PackFile, if parsing was successful. + */ + public PackFile getPackFile() { + return newPack; + } + + @Override + public PackLock parse(ProgressMonitor progress) throws IOException { + tmpPack = File.createTempFile("incoming_", ".pack", db.getDirectory()); + tmpIdx = new File(db.getDirectory(), baseName(tmpPack) + ".idx"); + try { + out = new RandomAccessFile(tmpPack, "rw"); + + super.parse(progress); + + out.seek(packEnd); + out.write(packHash); + out.getChannel().force(true); + out.close(); + + writeIdx(); + + tmpPack.setReadOnly(); + tmpIdx.setReadOnly(); + + return renameAndOpenPack(getLockMessage()); + } finally { + if (def != null) + def.end(); + try { + if (out != null && out.getChannel().isOpen()) + out.close(); + } catch (IOException closeError) { + // Ignored. We want to delete the file. + } + cleanupTemporaryFiles(); + } + } + + @Override + protected void onBeginWholeObject(long streamPosition, int type, + long inflatedSize) throws IOException { + crc.reset(); + } + + @Override + protected void onEndWholeObject(PackedObjectInfo info) throws IOException { + info.setCRC((int) crc.getValue()); + } + + @Override + protected void onBeginOfsDelta(long streamPosition, + long baseStreamPosition, long inflatedSize) throws IOException { + crc.reset(); + } + + @Override + protected void onBeginRefDelta(long streamPosition, AnyObjectId baseId, + long inflatedSize) throws IOException { + crc.reset(); + } + + @Override + protected UnresolvedDelta onEndDelta() throws IOException { + UnresolvedDelta delta = new UnresolvedDelta(); + delta.setCRC((int) crc.getValue()); + return delta; + } + + @Override + protected void onObjectHeader(Source src, byte[] raw, int pos, int len) + throws IOException { + crc.update(raw, pos, len); + } + + @Override + protected void onObjectData(Source src, byte[] raw, int pos, int len) + throws IOException { + crc.update(raw, pos, len); + } + + @Override + protected void onStoreStream(byte[] raw, int pos, int len) + throws IOException { + out.write(raw, pos, len); + } + + @Override + protected void onPackFooter(byte[] hash) throws IOException { + packEnd = out.getFilePointer(); + origEnd = packEnd; + origHash = hash; + packHash = hash; + } + + @Override + protected ObjectTypeAndSize seekDatabase(UnresolvedDelta delta, + ObjectTypeAndSize info) throws IOException { + out.seek(delta.getOffset()); + crc.reset(); + return readObjectHeader(info); + } + + @Override + protected ObjectTypeAndSize seekDatabase(PackedObjectInfo obj, + ObjectTypeAndSize info) throws IOException { + out.seek(obj.getOffset()); + crc.reset(); + return readObjectHeader(info); + } + + @Override + protected int readDatabase(byte[] dst, int pos, int cnt) throws IOException { + return out.read(dst, pos, cnt); + } + + @Override + protected boolean checkCRC(int oldCRC) { + return oldCRC == (int) crc.getValue(); + } + + private static String baseName(File tmpPack) { + String name = tmpPack.getName(); + return name.substring(0, name.lastIndexOf('.')); + } + + private void cleanupTemporaryFiles() { + if (tmpIdx != null && !tmpIdx.delete() && tmpIdx.exists()) + tmpIdx.deleteOnExit(); + if (tmpPack != null && !tmpPack.delete() && tmpPack.exists()) + tmpPack.deleteOnExit(); + } + + @Override + protected boolean onAppendBase(final int typeCode, final byte[] data, + final PackedObjectInfo info) throws IOException { + info.setOffset(packEnd); + + final 
byte[] buf = buffer(); + int sz = data.length; + int len = 0; + buf[len++] = (byte) ((typeCode << 4) | sz & 15); + sz >>>= 4; + while (sz > 0) { + buf[len - 1] |= 0x80; + buf[len++] = (byte) (sz & 0x7f); + sz >>>= 7; + } + + tailDigest.update(buf, 0, len); + crc.reset(); + crc.update(buf, 0, len); + out.seek(packEnd); + out.write(buf, 0, len); + packEnd += len; + + if (def == null) + def = new Deflater(Deflater.DEFAULT_COMPRESSION, false); + else + def.reset(); + def.setInput(data); + def.finish(); + + while (!def.finished()) { + len = def.deflate(buf); + tailDigest.update(buf, 0, len); + crc.update(buf, 0, len); + out.write(buf, 0, len); + packEnd += len; + } + + info.setCRC((int) crc.getValue()); + return true; + } + + @Override + protected void onEndThinPack() throws IOException { + final byte[] tailHash = this.tailDigest.digest(); + final byte[] buf = buffer(); + + final MessageDigest origDigest = Constants.newMessageDigest(); + final MessageDigest tailDigest = Constants.newMessageDigest(); + final MessageDigest packDigest = Constants.newMessageDigest(); + + long origRemaining = origEnd; + out.seek(0); + out.readFully(buf, 0, 12); + origDigest.update(buf, 0, 12); + origRemaining -= 12; + + NB.encodeInt32(buf, 8, getObjectCount()); + out.seek(0); + out.write(buf, 0, 12); + packDigest.update(buf, 0, 12); + + for (;;) { + final int n = out.read(buf); + if (n < 0) + break; + if (origRemaining != 0) { + final int origCnt = (int) Math.min(n, origRemaining); + origDigest.update(buf, 0, origCnt); + origRemaining -= origCnt; + if (origRemaining == 0) + tailDigest.update(buf, origCnt, n - origCnt); + } else + tailDigest.update(buf, 0, n); + + packDigest.update(buf, 0, n); + } + + if (!Arrays.equals(origDigest.digest(), origHash) + || !Arrays.equals(tailDigest.digest(), tailHash)) + throw new IOException( + JGitText.get().packCorruptedWhileWritingToFilesystem); + + packHash = packDigest.digest(); + } + + private void writeIdx() throws IOException { + List list = getSortedObjectList(null /* by ObjectId */); + final FileOutputStream os = new FileOutputStream(tmpIdx); + try { + final PackIndexWriter iw; + if (indexVersion <= 0) + iw = PackIndexWriter.createOldestPossible(os, list); + else + iw = PackIndexWriter.createVersion(os, indexVersion); + iw.write(list, packHash); + os.getChannel().force(true); + } finally { + os.close(); + } + } + + private PackLock renameAndOpenPack(final String lockMessage) + throws IOException { + if (!keepEmpty && getObjectCount() == 0) { + cleanupTemporaryFiles(); + return null; + } + + final MessageDigest d = Constants.newMessageDigest(); + final byte[] oeBytes = new byte[Constants.OBJECT_ID_LENGTH]; + for (int i = 0; i < getObjectCount(); i++) { + final PackedObjectInfo oe = getObject(i); + oe.copyRawTo(oeBytes, 0); + d.update(oeBytes); + } + + final String name = ObjectId.fromRaw(d.digest()).name(); + final File packDir = new File(db.getDirectory(), "pack"); + final File finalPack = new File(packDir, "pack-" + name + ".pack"); + final File finalIdx = new File(packDir, "pack-" + name + ".idx"); + final PackLock keep = new PackLock(finalPack, db.getFS()); + + if (!packDir.exists() && !packDir.mkdir() && !packDir.exists()) { + // The objects/pack directory isn't present, and we are unable + // to create it. There is no way to move this pack in. 
+ // + cleanupTemporaryFiles(); + throw new IOException(MessageFormat.format( + JGitText.get().cannotCreateDirectory, packDir + .getAbsolutePath())); + } + + if (finalPack.exists()) { + // If the pack is already present we should never replace it. + // + cleanupTemporaryFiles(); + return null; + } + + if (lockMessage != null) { + // If we have a reason to create a keep file for this pack, do + // so, or fail fast and don't put the pack in place. + // + try { + if (!keep.lock(lockMessage)) + throw new IOException(MessageFormat.format( + JGitText.get().cannotLockPackIn, finalPack)); + } catch (IOException e) { + cleanupTemporaryFiles(); + throw e; + } + } + + if (!tmpPack.renameTo(finalPack)) { + cleanupTemporaryFiles(); + keep.unlock(); + throw new IOException(MessageFormat.format( + JGitText.get().cannotMovePackTo, finalPack)); + } + + if (!tmpIdx.renameTo(finalIdx)) { + cleanupTemporaryFiles(); + keep.unlock(); + if (!finalPack.delete()) + finalPack.deleteOnExit(); + throw new IOException(MessageFormat.format( + JGitText.get().cannotMoveIndexTo, finalIdx)); + } + + try { + newPack = db.openPack(finalPack, finalIdx); + } catch (IOException err) { + keep.unlock(); + if (finalPack.exists()) + FileUtils.delete(finalPack); + if (finalIdx.exists()) + FileUtils.delete(finalIdx); + throw err; + } + + return lockMessage != null ? keep : null; + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java index 19f5748a2..2653bad55 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java @@ -61,6 +61,7 @@ import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.MutableObjectId; import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.ObjectInserter; import org.eclipse.jgit.lib.ProgressMonitor; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.Config.SectionParser; @@ -635,17 +636,21 @@ private void markCommon(final RevObject obj, final AckNackResult anr) } private void receivePack(final ProgressMonitor monitor) throws IOException { - final IndexPack ip; - InputStream input = in; if (sideband) input = new SideBandInputStream(input, monitor, getMessageWriter()); - ip = IndexPack.create(local, input); - ip.setFixThin(thinPack); - ip.setObjectChecking(transport.isCheckFetchedObjects()); - ip.index(monitor); - packLock = ip.renameAndOpenPack(lockMessage); + ObjectInserter ins = local.newObjectInserter(); + try { + PackParser parser = ins.newPackParser(input); + parser.setAllowThin(thinPack); + parser.setObjectChecking(transport.isCheckFetchedObjects()); + parser.setLockMessage(lockMessage); + packLock = parser.parse(monitor); + ins.flush(); + } finally { + ins.release(); + } } private static class CancelledException extends Exception { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BundleFetchConnection.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BundleFetchConnection.java index 126acab48..fddc89dfa 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BundleFetchConnection.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BundleFetchConnection.java @@ -66,8 +66,10 @@ import org.eclipse.jgit.errors.PackProtocolException; import org.eclipse.jgit.errors.TransportException; import org.eclipse.jgit.lib.Constants; +import org.eclipse.jgit.lib.NullProgressMonitor; import org.eclipse.jgit.lib.ObjectId; import 
org.eclipse.jgit.lib.ObjectIdRef; +import org.eclipse.jgit.lib.ObjectInserter; import org.eclipse.jgit.lib.ProgressMonitor; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.revwalk.RevCommit; @@ -96,7 +98,7 @@ class BundleFetchConnection extends BaseFetchConnection { BundleFetchConnection(Transport transportBundle, final InputStream src) throws TransportException { transport = transportBundle; - bin = new BufferedInputStream(src, IndexPack.BUFFER_SIZE); + bin = new BufferedInputStream(src); try { switch (readSignature()) { case 2: @@ -179,9 +181,17 @@ protected void doFetch(final ProgressMonitor monitor, throws TransportException { verifyPrerequisites(); try { - final IndexPack ip = newIndexPack(); - ip.index(monitor); - packLock = ip.renameAndOpenPack(lockMessage); + ObjectInserter ins = transport.local.newObjectInserter(); + try { + PackParser parser = ins.newPackParser(bin); + parser.setAllowThin(true); + parser.setObjectChecking(transport.isCheckFetchedObjects()); + parser.setLockMessage(lockMessage); + packLock = parser.parse(NullProgressMonitor.INSTANCE); + ins.flush(); + } finally { + ins.release(); + } } catch (IOException err) { close(); throw new TransportException(transport.uri, err.getMessage(), err); @@ -201,13 +211,6 @@ public Collection getPackLocks() { return Collections. emptyList(); } - private IndexPack newIndexPack() throws IOException { - final IndexPack ip = IndexPack.create(transport.local, bin); - ip.setFixThin(true); - ip.setObjectChecking(transport.isCheckFetchedObjects()); - return ip; - } - private void verifyPrerequisites() throws TransportException { if (prereqs.isEmpty()) return; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/IndexPack.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/IndexPack.java deleted file mode 100644 index f8dc391d1..000000000 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/IndexPack.java +++ /dev/null @@ -1,1384 +0,0 @@ -/* - * Copyright (C) 2008-2010, Google Inc. - * Copyright (C) 2007-2008, Robin Rosenberg - * Copyright (C) 2008, Shawn O. Pearce - * and other copyright owners as documented in the project's IP log. - * - * This program and the accompanying materials are made available - * under the terms of the Eclipse Distribution License v1.0 which - * accompanies this distribution, is reproduced below, and is - * available at http://www.eclipse.org/org/documents/edl-v10.php - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials provided - * with the distribution. - * - * - Neither the name of the Eclipse Foundation, Inc. nor the - * names of its contributors may be used to endorse or promote - * products derived from this software without specific prior - * written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND - * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, - * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR - * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.eclipse.jgit.transport; - -import java.io.EOFException; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.RandomAccessFile; -import java.security.MessageDigest; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.zip.CRC32; -import java.util.zip.DataFormatException; -import java.util.zip.Deflater; -import java.util.zip.Inflater; - -import org.eclipse.jgit.JGitText; -import org.eclipse.jgit.errors.CorruptObjectException; -import org.eclipse.jgit.errors.MissingObjectException; -import org.eclipse.jgit.lib.AnyObjectId; -import org.eclipse.jgit.lib.Constants; -import org.eclipse.jgit.lib.CoreConfig; -import org.eclipse.jgit.lib.InflaterCache; -import org.eclipse.jgit.lib.MutableObjectId; -import org.eclipse.jgit.lib.ObjectChecker; -import org.eclipse.jgit.lib.ObjectDatabase; -import org.eclipse.jgit.lib.ObjectId; -import org.eclipse.jgit.lib.ObjectIdSubclassMap; -import org.eclipse.jgit.lib.ObjectLoader; -import org.eclipse.jgit.lib.ObjectReader; -import org.eclipse.jgit.lib.ObjectStream; -import org.eclipse.jgit.lib.ProgressMonitor; -import org.eclipse.jgit.lib.Repository; -import org.eclipse.jgit.storage.file.PackIndexWriter; -import org.eclipse.jgit.storage.file.PackLock; -import org.eclipse.jgit.storage.pack.BinaryDelta; -import org.eclipse.jgit.util.FileUtils; -import org.eclipse.jgit.util.IO; -import org.eclipse.jgit.util.NB; - -/** Indexes Git pack files for local use. */ -public class IndexPack { - /** - * Size of the internal stream buffer. - *
<p>
- * If callers are going to be supplying IndexPack a BufferedInputStream they - * should use this buffer size as the size of the buffer for that - * BufferedInputStream, and any other its may be wrapping. This way the - * buffers will cascade efficiently and only the IndexPack buffer will be - * receiving the bulk of the data stream. - */ - public static final int BUFFER_SIZE = 8192; - - /** - * Create an index pack instance to load a new pack into a repository. - *
<p>
- * The received pack data and generated index will be saved to temporary - * files within the repository's objects directory. To use the - * data contained within them call {@link #renameAndOpenPack()} once the - * indexing is complete. - * - * @param db - * the repository that will receive the new pack. - * @param is - * stream to read the pack data from. If the stream is buffered - * use {@link #BUFFER_SIZE} as the buffer size for the stream. - * @return a new index pack instance. - * @throws IOException - * a temporary file could not be created. - */ - public static IndexPack create(final Repository db, final InputStream is) - throws IOException { - final String suffix = ".pack"; - final File objdir = db.getObjectsDirectory(); - final File tmp = File.createTempFile("incoming_", suffix, objdir); - final String n = tmp.getName(); - final File base; - - base = new File(objdir, n.substring(0, n.length() - suffix.length())); - final IndexPack ip = new IndexPack(db, is, base); - ip.setIndexVersion(db.getConfig().get(CoreConfig.KEY) - .getPackIndexVersion()); - return ip; - } - - private static enum Source { - /** Data is read from the incoming stream. */ - INPUT, - - /** - * Data is read from the spooled pack file. - *
<p>
- * During streaming, some (or all) data might be saved into the spooled - * pack file so it can be randomly accessed later. - */ - FILE; - } - - private final Repository repo; - - /** - * Object database used for loading existing objects - */ - private final ObjectDatabase objectDatabase; - - private InflaterStream inflater; - - private byte[] readBuffer; - - private final MessageDigest objectDigest; - - private final MutableObjectId tempObjectId; - - private InputStream in; - - private byte[] buf; - - private long bBase; - - private int bOffset; - - private int bAvail; - - private ObjectChecker objCheck; - - private boolean fixThin; - - private boolean keepEmpty; - - private boolean needBaseObjectIds; - - private int outputVersion; - - private final File dstPack; - - private final File dstIdx; - - private long objectCount; - - private PackedObjectInfo[] entries; - - /** - * Every object contained within the incoming pack. - *
<p>
- * This is a subset of {@link #entries}, as thin packs can add additional - * objects to {@code entries} by copying already existing objects from the - * repository onto the end of the thin pack to make it self-contained. - */ - private ObjectIdSubclassMap newObjectIds; - - private int deltaCount; - - private int entryCount; - - private final CRC32 crc = new CRC32(); - - private ObjectIdSubclassMap baseById; - - /** - * Objects referenced by their name from deltas, that aren't in this pack. - *
<p>
- * This is the set of objects that were copied onto the end of this pack to - * make it complete. These objects were not transmitted by the remote peer, - * but instead were assumed to already exist in the local repository. - */ - private ObjectIdSubclassMap baseObjectIds; - - private LongMap baseByPos; - - /** Blobs whose contents need to be double-checked after indexing. */ - private List deferredCheckBlobs; - - private MessageDigest packDigest; - - private RandomAccessFile packOut; - - private byte[] packcsum; - - /** If {@link #fixThin} this is the last byte of the original checksum. */ - private long originalEOF; - - private ObjectReader readCurs; - - /** - * Create a new pack indexer utility. - * - * @param db - * @param src - * stream to read the pack data from. If the stream is buffered - * use {@link #BUFFER_SIZE} as the buffer size for the stream. - * @param dstBase - * @throws IOException - * the output packfile could not be created. - */ - public IndexPack(final Repository db, final InputStream src, - final File dstBase) throws IOException { - repo = db; - objectDatabase = db.getObjectDatabase().newCachedDatabase(); - in = src; - inflater = new InflaterStream(); - readCurs = objectDatabase.newReader(); - buf = new byte[BUFFER_SIZE]; - readBuffer = new byte[BUFFER_SIZE]; - objectDigest = Constants.newMessageDigest(); - tempObjectId = new MutableObjectId(); - packDigest = Constants.newMessageDigest(); - - if (dstBase != null) { - final File dir = dstBase.getParentFile(); - final String nam = dstBase.getName(); - dstPack = new File(dir, nam + ".pack"); - dstIdx = new File(dir, nam + ".idx"); - packOut = new RandomAccessFile(dstPack, "rw"); - packOut.setLength(0); - } else { - dstPack = null; - dstIdx = null; - } - } - - /** - * Set the pack index file format version this instance will create. - * - * @param version - * the version to write. The special version 0 designates the - * oldest (most compatible) format available for the objects. - * @see PackIndexWriter - */ - public void setIndexVersion(final int version) { - outputVersion = version; - } - - /** - * Configure this index pack instance to make a thin pack complete. - *
<p>
- * Thin packs are sometimes used during network transfers to allow a delta - * to be sent without a base object. Such packs are not permitted on disk. - * They can be fixed by copying the base object onto the end of the pack. - * - * @param fix - * true to enable fixing a thin pack. - */ - public void setFixThin(final boolean fix) { - fixThin = fix; - } - - /** - * Configure this index pack instance to keep an empty pack. - *
<p>
- * By default an empty pack (a pack with no objects) is not kept, as doing - * so is completely pointless. With no objects in the pack there is no data - * stored by it, so the pack is unnecessary. - * - * @param empty true to enable keeping an empty pack. - */ - public void setKeepEmpty(final boolean empty) { - keepEmpty = empty; - } - - /** - * Configure this index pack instance to keep track of new objects. - *
<p>
- * By default an index pack doesn't save the new objects that were created - * when it was instantiated. Setting this flag to {@code true} allows the - * caller to use {@link #getNewObjectIds()} to retrieve that list. - * - * @param b {@code true} to enable keeping track of new objects. - */ - public void setNeedNewObjectIds(boolean b) { - if (b) - newObjectIds = new ObjectIdSubclassMap(); - else - newObjectIds = null; - } - - private boolean needNewObjectIds() { - return newObjectIds != null; - } - - /** - * Configure this index pack instance to keep track of the objects assumed - * for delta bases. - *
<p>
- * By default an index pack doesn't save the objects that were used as delta - * bases. Setting this flag to {@code true} will allow the caller to - * use {@link #getBaseObjectIds()} to retrieve that list. - * - * @param b {@code true} to enable keeping track of delta bases. - */ - public void setNeedBaseObjectIds(boolean b) { - this.needBaseObjectIds = b; - } - - /** @return the new objects that were sent by the user */ - public ObjectIdSubclassMap getNewObjectIds() { - if (newObjectIds != null) - return newObjectIds; - return new ObjectIdSubclassMap(); - } - - /** @return set of objects the incoming pack assumed for delta purposes */ - public ObjectIdSubclassMap getBaseObjectIds() { - if (baseObjectIds != null) - return baseObjectIds; - return new ObjectIdSubclassMap(); - } - - /** - * Configure the checker used to validate received objects. - *
<p>
- * Usually object checking isn't necessary, as Git implementations only - * create valid objects in pack files. However, additional checking may be - * useful if processing data from an untrusted source. - * - * @param oc - * the checker instance; null to disable object checking. - */ - public void setObjectChecker(final ObjectChecker oc) { - objCheck = oc; - } - - /** - * Configure the checker used to validate received objects. - *
<p>
- * Usually object checking isn't necessary, as Git implementations only - * create valid objects in pack files. However, additional checking may be - * useful if processing data from an untrusted source. - *
<p>
- * This is shorthand for: - * - *
<pre>
-	 * setObjectChecker(on ? new ObjectChecker() : null);
- * </pre>
- * - * @param on - * true to enable the default checker; false to disable it. - */ - public void setObjectChecking(final boolean on) { - setObjectChecker(on ? new ObjectChecker() : null); - } - - /** - * Consume data from the input stream until the packfile is indexed. - * - * @param progress - * progress feedback - * - * @throws IOException - */ - public void index(final ProgressMonitor progress) throws IOException { - progress.start(2 /* tasks */); - try { - try { - readPackHeader(); - - entries = new PackedObjectInfo[(int) objectCount]; - baseById = new ObjectIdSubclassMap(); - baseByPos = new LongMap(); - deferredCheckBlobs = new ArrayList(); - - progress.beginTask(JGitText.get().receivingObjects, - (int) objectCount); - for (int done = 0; done < objectCount; done++) { - indexOneObject(); - progress.update(1); - if (progress.isCancelled()) - throw new IOException(JGitText.get().downloadCancelled); - } - readPackFooter(); - endInput(); - if (!deferredCheckBlobs.isEmpty()) - doDeferredCheckBlobs(); - progress.endTask(); - if (deltaCount > 0) { - if (packOut == null) - throw new IOException(JGitText.get().needPackOut); - resolveDeltas(progress); - if (entryCount < objectCount) { - if (!fixThin) { - throw new IOException(MessageFormat.format( - JGitText.get().packHasUnresolvedDeltas, (objectCount - entryCount))); - } - fixThinPack(progress); - } - } - if (packOut != null && (keepEmpty || entryCount > 0)) - packOut.getChannel().force(true); - - packDigest = null; - baseById = null; - baseByPos = null; - - if (dstIdx != null && (keepEmpty || entryCount > 0)) - writeIdx(); - - } finally { - try { - if (readCurs != null) - readCurs.release(); - } finally { - readCurs = null; - } - - try { - inflater.release(); - } finally { - inflater = null; - objectDatabase.close(); - } - - progress.endTask(); - if (packOut != null) - packOut.close(); - } - - if (keepEmpty || entryCount > 0) { - if (dstPack != null) - dstPack.setReadOnly(); - if (dstIdx != null) - dstIdx.setReadOnly(); - } - } catch (IOException err) { - if (dstPack != null) - FileUtils.delete(dstPack); - if (dstIdx != null) - FileUtils.delete(dstIdx); - throw err; - } - } - - private void resolveDeltas(final ProgressMonitor progress) - throws IOException { - progress.beginTask(JGitText.get().resolvingDeltas, deltaCount); - final int last = entryCount; - for (int i = 0; i < last; i++) { - final int before = entryCount; - resolveDeltas(entries[i]); - progress.update(entryCount - before); - if (progress.isCancelled()) - throw new IOException(JGitText.get().downloadCancelledDuringIndexing); - } - progress.endTask(); - } - - private void resolveDeltas(final PackedObjectInfo oe) throws IOException { - UnresolvedDelta children = firstChildOf(oe); - if (children == null) - return; - - DeltaVisit visit = new DeltaVisit(); - visit.nextChild = children; - - crc.reset(); - position(oe.getOffset()); - int c = readFrom(Source.FILE); - final int typeCode = (c >> 4) & 7; - long sz = c & 15; - int shift = 4; - while ((c & 0x80) != 0) { - c = readFrom(Source.FILE); - sz += (c & 0x7f) << shift; - shift += 7; - } - - switch (typeCode) { - case Constants.OBJ_COMMIT: - case Constants.OBJ_TREE: - case Constants.OBJ_BLOB: - case Constants.OBJ_TAG: - visit.data = inflateAndReturn(Source.FILE, sz); - break; - default: - throw new IOException(MessageFormat.format( - JGitText.get().unknownObjectType, typeCode)); - } - - if (oe.getCRC() != (int) crc.getValue()) { - throw new IOException(MessageFormat.format( - JGitText.get().corruptionDetectedReReadingAt, - 
oe.getOffset())); - } - - resolveDeltas(visit.next(), typeCode); - } - - private void resolveDeltas(DeltaVisit visit, final int type) - throws IOException { - do { - final long pos = visit.delta.position; - crc.reset(); - position(pos); - int c = readFrom(Source.FILE); - final int typeCode = (c >> 4) & 7; - long sz = c & 15; - int shift = 4; - while ((c & 0x80) != 0) { - c = readFrom(Source.FILE); - sz += (c & 0x7f) << shift; - shift += 7; - } - - switch (typeCode) { - case Constants.OBJ_OFS_DELTA: { - c = readFrom(Source.FILE) & 0xff; - while ((c & 128) != 0) - c = readFrom(Source.FILE) & 0xff; - visit.data = BinaryDelta.apply(visit.parent.data, inflateAndReturn(Source.FILE, sz)); - break; - } - case Constants.OBJ_REF_DELTA: { - crc.update(buf, fill(Source.FILE, 20), 20); - use(20); - visit.data = BinaryDelta.apply(visit.parent.data, inflateAndReturn(Source.FILE, sz)); - break; - } - default: - throw new IOException(MessageFormat.format(JGitText.get().unknownObjectType, typeCode)); - } - - final int crc32 = (int) crc.getValue(); - if (visit.delta.crc != crc32) - throw new IOException(MessageFormat.format(JGitText.get().corruptionDetectedReReadingAt, pos)); - - objectDigest.update(Constants.encodedTypeString(type)); - objectDigest.update((byte) ' '); - objectDigest.update(Constants.encodeASCII(visit.data.length)); - objectDigest.update((byte) 0); - objectDigest.update(visit.data); - tempObjectId.fromRaw(objectDigest.digest(), 0); - - verifySafeObject(tempObjectId, type, visit.data); - - PackedObjectInfo oe; - oe = new PackedObjectInfo(pos, crc32, tempObjectId); - addObjectAndTrack(oe); - - visit.nextChild = firstChildOf(oe); - visit = visit.next(); - } while (visit != null); - } - - private UnresolvedDelta removeBaseById(final AnyObjectId id){ - final DeltaChain d = baseById.get(id); - return d != null ? 
d.remove() : null; - } - - private static UnresolvedDelta reverse(UnresolvedDelta c) { - UnresolvedDelta tail = null; - while (c != null) { - final UnresolvedDelta n = c.next; - c.next = tail; - tail = c; - c = n; - } - return tail; - } - - private UnresolvedDelta firstChildOf(PackedObjectInfo oe) { - UnresolvedDelta a = reverse(removeBaseById(oe)); - UnresolvedDelta b = reverse(baseByPos.remove(oe.getOffset())); - - if (a == null) - return b; - if (b == null) - return a; - - UnresolvedDelta first = null; - UnresolvedDelta last = null; - while (a != null || b != null) { - UnresolvedDelta curr; - if (b == null || (a != null && a.position < b.position)) { - curr = a; - a = a.next; - } else { - curr = b; - b = b.next; - } - if (last != null) - last.next = curr; - else - first = curr; - last = curr; - curr.next = null; - } - return first; - } - - private void fixThinPack(final ProgressMonitor progress) throws IOException { - growEntries(); - - if (needBaseObjectIds) - baseObjectIds = new ObjectIdSubclassMap(); - - packDigest.reset(); - originalEOF = packOut.length() - 20; - final Deflater def = new Deflater(Deflater.DEFAULT_COMPRESSION, false); - final List missing = new ArrayList(64); - long end = originalEOF; - for (final DeltaChain baseId : baseById) { - if (baseId.head == null) - continue; - if (needBaseObjectIds) - baseObjectIds.add(baseId); - final ObjectLoader ldr; - try { - ldr = readCurs.open(baseId); - } catch (MissingObjectException notFound) { - missing.add(baseId); - continue; - } - - final DeltaVisit visit = new DeltaVisit(); - visit.data = ldr.getCachedBytes(Integer.MAX_VALUE); - final int typeCode = ldr.getType(); - final PackedObjectInfo oe; - - crc.reset(); - packOut.seek(end); - writeWhole(def, typeCode, visit.data); - oe = new PackedObjectInfo(end, (int) crc.getValue(), baseId); - entries[entryCount++] = oe; - end = packOut.getFilePointer(); - - visit.nextChild = firstChildOf(oe); - resolveDeltas(visit.next(), typeCode); - - if (progress.isCancelled()) - throw new IOException(JGitText.get().downloadCancelledDuringIndexing); - } - def.end(); - - for (final DeltaChain base : missing) { - if (base.head != null) - throw new MissingObjectException(base, "delta base"); - } - - if (end - originalEOF < 20) { - // Ugly corner case; if what we appended on to complete deltas - // doesn't completely cover the SHA-1 we have to truncate off - // we need to shorten the file, otherwise we will include part - // of the old footer as object content. 
- packOut.setLength(end); - } - - fixHeaderFooter(packcsum, packDigest.digest()); - } - - private void writeWhole(final Deflater def, final int typeCode, - final byte[] data) throws IOException { - int sz = data.length; - int hdrlen = 0; - buf[hdrlen++] = (byte) ((typeCode << 4) | sz & 15); - sz >>>= 4; - while (sz > 0) { - buf[hdrlen - 1] |= 0x80; - buf[hdrlen++] = (byte) (sz & 0x7f); - sz >>>= 7; - } - packDigest.update(buf, 0, hdrlen); - crc.update(buf, 0, hdrlen); - packOut.write(buf, 0, hdrlen); - def.reset(); - def.setInput(data); - def.finish(); - while (!def.finished()) { - final int datlen = def.deflate(buf); - packDigest.update(buf, 0, datlen); - crc.update(buf, 0, datlen); - packOut.write(buf, 0, datlen); - } - } - - private void fixHeaderFooter(final byte[] origcsum, final byte[] tailcsum) - throws IOException { - final MessageDigest origDigest = Constants.newMessageDigest(); - final MessageDigest tailDigest = Constants.newMessageDigest(); - long origRemaining = originalEOF; - - packOut.seek(0); - bAvail = 0; - bOffset = 0; - fill(Source.FILE, 12); - - { - final int origCnt = (int) Math.min(bAvail, origRemaining); - origDigest.update(buf, 0, origCnt); - origRemaining -= origCnt; - if (origRemaining == 0) - tailDigest.update(buf, origCnt, bAvail - origCnt); - } - - NB.encodeInt32(buf, 8, entryCount); - packOut.seek(0); - packOut.write(buf, 0, 12); - packOut.seek(bAvail); - - packDigest.reset(); - packDigest.update(buf, 0, bAvail); - for (;;) { - final int n = packOut.read(buf); - if (n < 0) - break; - if (origRemaining != 0) { - final int origCnt = (int) Math.min(n, origRemaining); - origDigest.update(buf, 0, origCnt); - origRemaining -= origCnt; - if (origRemaining == 0) - tailDigest.update(buf, origCnt, n - origCnt); - } else - tailDigest.update(buf, 0, n); - - packDigest.update(buf, 0, n); - } - - if (!Arrays.equals(origDigest.digest(), origcsum) - || !Arrays.equals(tailDigest.digest(), tailcsum)) - throw new IOException(JGitText.get().packCorruptedWhileWritingToFilesystem); - - packcsum = packDigest.digest(); - packOut.write(packcsum); - } - - private void growEntries() { - final PackedObjectInfo[] ne; - - ne = new PackedObjectInfo[(int) objectCount + baseById.size()]; - System.arraycopy(entries, 0, ne, 0, entryCount); - entries = ne; - } - - private void writeIdx() throws IOException { - Arrays.sort(entries, 0, entryCount); - List list = Arrays.asList(entries); - if (entryCount < entries.length) - list = list.subList(0, entryCount); - - final FileOutputStream os = new FileOutputStream(dstIdx); - try { - final PackIndexWriter iw; - if (outputVersion <= 0) - iw = PackIndexWriter.createOldestPossible(os, list); - else - iw = PackIndexWriter.createVersion(os, outputVersion); - iw.write(list, packcsum); - os.getChannel().force(true); - } finally { - os.close(); - } - } - - private void readPackHeader() throws IOException { - final int hdrln = Constants.PACK_SIGNATURE.length + 4 + 4; - final int p = fill(Source.INPUT, hdrln); - for (int k = 0; k < Constants.PACK_SIGNATURE.length; k++) - if (buf[p + k] != Constants.PACK_SIGNATURE[k]) - throw new IOException(JGitText.get().notAPACKFile); - - final long vers = NB.decodeUInt32(buf, p + 4); - if (vers != 2 && vers != 3) - throw new IOException(MessageFormat.format(JGitText.get().unsupportedPackVersion, vers)); - objectCount = NB.decodeUInt32(buf, p + 8); - use(hdrln); - } - - private void readPackFooter() throws IOException { - sync(); - final byte[] cmpcsum = packDigest.digest(); - final int c = fill(Source.INPUT, 20); - packcsum = 
new byte[20]; - System.arraycopy(buf, c, packcsum, 0, 20); - use(20); - if (packOut != null) - packOut.write(packcsum); - - if (!Arrays.equals(cmpcsum, packcsum)) - throw new CorruptObjectException(JGitText.get().corruptObjectPackfileChecksumIncorrect); - } - - // Cleanup all resources associated with our input parsing. - private void endInput() { - in = null; - } - - // Read one entire object or delta from the input. - private void indexOneObject() throws IOException { - final long pos = position(); - - crc.reset(); - int c = readFrom(Source.INPUT); - final int typeCode = (c >> 4) & 7; - long sz = c & 15; - int shift = 4; - while ((c & 0x80) != 0) { - c = readFrom(Source.INPUT); - sz += (c & 0x7f) << shift; - shift += 7; - } - - switch (typeCode) { - case Constants.OBJ_COMMIT: - case Constants.OBJ_TREE: - case Constants.OBJ_BLOB: - case Constants.OBJ_TAG: - whole(typeCode, pos, sz); - break; - case Constants.OBJ_OFS_DELTA: { - c = readFrom(Source.INPUT); - long ofs = c & 127; - while ((c & 128) != 0) { - ofs += 1; - c = readFrom(Source.INPUT); - ofs <<= 7; - ofs += (c & 127); - } - final long base = pos - ofs; - final UnresolvedDelta n; - inflateAndSkip(Source.INPUT, sz); - n = new UnresolvedDelta(pos, (int) crc.getValue()); - n.next = baseByPos.put(base, n); - deltaCount++; - break; - } - case Constants.OBJ_REF_DELTA: { - c = fill(Source.INPUT, 20); - crc.update(buf, c, 20); - final ObjectId base = ObjectId.fromRaw(buf, c); - use(20); - DeltaChain r = baseById.get(base); - if (r == null) { - r = new DeltaChain(base); - baseById.add(r); - } - inflateAndSkip(Source.INPUT, sz); - r.add(new UnresolvedDelta(pos, (int) crc.getValue())); - deltaCount++; - break; - } - default: - throw new IOException(MessageFormat.format(JGitText.get().unknownObjectType, typeCode)); - } - } - - private void whole(final int type, final long pos, final long sz) - throws IOException { - objectDigest.update(Constants.encodedTypeString(type)); - objectDigest.update((byte) ' '); - objectDigest.update(Constants.encodeASCII(sz)); - objectDigest.update((byte) 0); - - boolean checkContentLater = false; - if (type == Constants.OBJ_BLOB) { - InputStream inf = inflate(Source.INPUT, sz); - long cnt = 0; - while (cnt < sz) { - int r = inf.read(readBuffer); - if (r <= 0) - break; - objectDigest.update(readBuffer, 0, r); - cnt += r; - } - inf.close(); - tempObjectId.fromRaw(objectDigest.digest(), 0); - checkContentLater = readCurs.has(tempObjectId); - - } else { - final byte[] data = inflateAndReturn(Source.INPUT, sz); - objectDigest.update(data); - tempObjectId.fromRaw(objectDigest.digest(), 0); - verifySafeObject(tempObjectId, type, data); - } - - final int crc32 = (int) crc.getValue(); - PackedObjectInfo obj = new PackedObjectInfo(pos, crc32, tempObjectId); - addObjectAndTrack(obj); - if (checkContentLater) - deferredCheckBlobs.add(obj); - } - - private void verifySafeObject(final AnyObjectId id, final int type, - final byte[] data) throws IOException { - if (objCheck != null) { - try { - objCheck.check(type, data); - } catch (CorruptObjectException e) { - throw new IOException(MessageFormat.format(JGitText.get().invalidObject - , Constants.typeString(type) , id.name() , e.getMessage())); - } - } - - try { - final ObjectLoader ldr = readCurs.open(id, type); - final byte[] existingData = ldr.getCachedBytes(data.length); - if (!Arrays.equals(data, existingData)) { - throw new IOException(MessageFormat.format(JGitText.get().collisionOn, id.name())); - } - } catch (MissingObjectException notLocal) { - // This is OK, we don't 
have a copy of the object locally - // but the API throws when we try to read it as usually its - // an error to read something that doesn't exist. - } - } - - private void doDeferredCheckBlobs() throws IOException { - final byte[] curBuffer = new byte[readBuffer.length]; - for (PackedObjectInfo obj : deferredCheckBlobs) { - position(obj.getOffset()); - - int c = readFrom(Source.FILE); - final int type = (c >> 4) & 7; - long sz = c & 15; - int shift = 4; - while ((c & 0x80) != 0) { - c = readFrom(Source.FILE); - sz += (c & 0x7f) << shift; - shift += 7; - } - - if (type != Constants.OBJ_BLOB) - throw new IOException(MessageFormat.format( - JGitText.get().unknownObjectType, type)); - - ObjectStream cur = readCurs.open(obj, type).openStream(); - try { - if (cur.getSize() != sz) - throw new IOException(MessageFormat.format( - JGitText.get().collisionOn, obj.name())); - InputStream pck = inflate(Source.FILE, sz); - while (0 < sz) { - int n = (int) Math.min(readBuffer.length, sz); - IO.readFully(cur, curBuffer, 0, n); - IO.readFully(pck, readBuffer, 0, n); - for (int i = 0; i < n; i++) { - if (curBuffer[i] != readBuffer[i]) - throw new IOException(MessageFormat.format(JGitText - .get().collisionOn, obj.name())); - } - sz -= n; - } - pck.close(); - } finally { - cur.close(); - } - } - } - - // Current position of {@link #bOffset} within the entire file. - private long position() { - return bBase + bOffset; - } - - private void position(final long pos) throws IOException { - packOut.seek(pos); - bBase = pos; - bOffset = 0; - bAvail = 0; - } - - // Consume exactly one byte from the buffer and return it. - private int readFrom(final Source src) throws IOException { - if (bAvail == 0) - fill(src, 1); - bAvail--; - final int b = buf[bOffset++] & 0xff; - crc.update(b); - return b; - } - - // Consume cnt bytes from the buffer. - private void use(final int cnt) { - bOffset += cnt; - bAvail -= cnt; - } - - // Ensure at least need bytes are available in in {@link #buf}. - private int fill(final Source src, final int need) throws IOException { - while (bAvail < need) { - int next = bOffset + bAvail; - int free = buf.length - next; - if (free + bAvail < need) { - switch(src){ - case INPUT: - sync(); - break; - case FILE: - if (bAvail > 0) - System.arraycopy(buf, bOffset, buf, 0, bAvail); - bOffset = 0; - break; - } - next = bAvail; - free = buf.length - next; - } - switch(src){ - case INPUT: - next = in.read(buf, next, free); - break; - case FILE: - next = packOut.read(buf, next, free); - break; - } - if (next <= 0) - throw new EOFException(JGitText.get().packfileIsTruncated); - bAvail += next; - } - return bOffset; - } - - // Store consumed bytes in {@link #buf} up to {@link #bOffset}. 
- private void sync() throws IOException { - packDigest.update(buf, 0, bOffset); - if (packOut != null) - packOut.write(buf, 0, bOffset); - if (bAvail > 0) - System.arraycopy(buf, bOffset, buf, 0, bAvail); - bBase += bOffset; - bOffset = 0; - } - - private void inflateAndSkip(final Source src, final long inflatedSize) - throws IOException { - final InputStream inf = inflate(src, inflatedSize); - IO.skipFully(inf, inflatedSize); - inf.close(); - } - - private byte[] inflateAndReturn(final Source src, final long inflatedSize) - throws IOException { - final byte[] dst = new byte[(int) inflatedSize]; - final InputStream inf = inflate(src, inflatedSize); - IO.readFully(inf, dst, 0, dst.length); - inf.close(); - return dst; - } - - private InputStream inflate(final Source src, final long inflatedSize) - throws IOException { - inflater.open(src, inflatedSize); - return inflater; - } - - private static class DeltaChain extends ObjectId { - UnresolvedDelta head; - - DeltaChain(final AnyObjectId id) { - super(id); - } - - UnresolvedDelta remove() { - final UnresolvedDelta r = head; - if (r != null) - head = null; - return r; - } - - void add(final UnresolvedDelta d) { - d.next = head; - head = d; - } - } - - private static class UnresolvedDelta { - final long position; - - final int crc; - - UnresolvedDelta next; - - UnresolvedDelta(final long headerOffset, final int crc32) { - position = headerOffset; - crc = crc32; - } - } - - private static class DeltaVisit { - final UnresolvedDelta delta; - - byte[] data; - - DeltaVisit parent; - - UnresolvedDelta nextChild; - - DeltaVisit() { - this.delta = null; // At the root of the stack we have a base. - } - - DeltaVisit(DeltaVisit parent) { - this.parent = parent; - this.delta = parent.nextChild; - parent.nextChild = delta.next; - } - - DeltaVisit next() { - // If our parent has no more children, discard it. - if (parent != null && parent.nextChild == null) { - parent.data = null; - parent = parent.parent; - } - - if (nextChild != null) - return new DeltaVisit(this); - - // If we have no child ourselves, our parent must (if it exists), - // due to the discard rule above. With no parent, we are done. - if (parent != null) - return new DeltaVisit(parent); - return null; - } - } - - /** - * Rename the pack to it's final name and location and open it. - *

- * If the call completes successfully the repository this IndexPack instance - * was created with will have the objects in the pack available for reading - * and use, without needing to scan for packs. - * - * @throws IOException - * The pack could not be inserted into the repository's objects - * directory. The pack no longer exists on disk, as it was - * removed prior to throwing the exception to the caller. - */ - public void renameAndOpenPack() throws IOException { - renameAndOpenPack(null); - } - - /** - * Rename the pack to it's final name and location and open it. - *

- * If the call completes successfully the repository this IndexPack instance - * was created with will have the objects in the pack available for reading - * and use, without needing to scan for packs. - * - * @param lockMessage - * message to place in the pack-*.keep file. If null, no lock - * will be created, and this method returns null. - * @return the pack lock object, if lockMessage is not null. - * @throws IOException - * The pack could not be inserted into the repository's objects - * directory. The pack no longer exists on disk, as it was - * removed prior to throwing the exception to the caller. - */ - public PackLock renameAndOpenPack(final String lockMessage) - throws IOException { - if (!keepEmpty && entryCount == 0) { - cleanupTemporaryFiles(); - return null; - } - - final MessageDigest d = Constants.newMessageDigest(); - final byte[] oeBytes = new byte[Constants.OBJECT_ID_LENGTH]; - for (int i = 0; i < entryCount; i++) { - final PackedObjectInfo oe = entries[i]; - oe.copyRawTo(oeBytes, 0); - d.update(oeBytes); - } - - final String name = ObjectId.fromRaw(d.digest()).name(); - final File packDir = new File(repo.getObjectsDirectory(), "pack"); - final File finalPack = new File(packDir, "pack-" + name + ".pack"); - final File finalIdx = new File(packDir, "pack-" + name + ".idx"); - final PackLock keep = new PackLock(finalPack, repo.getFS()); - - if (!packDir.exists() && !packDir.mkdir() && !packDir.exists()) { - // The objects/pack directory isn't present, and we are unable - // to create it. There is no way to move this pack in. - // - cleanupTemporaryFiles(); - throw new IOException(MessageFormat.format(JGitText.get().cannotCreateDirectory, packDir.getAbsolutePath())); - } - - if (finalPack.exists()) { - // If the pack is already present we should never replace it. - // - cleanupTemporaryFiles(); - return null; - } - - if (lockMessage != null) { - // If we have a reason to create a keep file for this pack, do - // so, or fail fast and don't put the pack in place. - // - try { - if (!keep.lock(lockMessage)) - throw new IOException(MessageFormat.format(JGitText.get().cannotLockPackIn, finalPack)); - } catch (IOException e) { - cleanupTemporaryFiles(); - throw e; - } - } - - if (!dstPack.renameTo(finalPack)) { - cleanupTemporaryFiles(); - keep.unlock(); - throw new IOException(MessageFormat.format(JGitText.get().cannotMovePackTo, finalPack)); - } - - if (!dstIdx.renameTo(finalIdx)) { - cleanupTemporaryFiles(); - keep.unlock(); - if (!finalPack.delete()) - finalPack.deleteOnExit(); - throw new IOException(MessageFormat.format(JGitText.get().cannotMoveIndexTo, finalIdx)); - } - - try { - repo.openPack(finalPack, finalIdx); - } catch (IOException err) { - keep.unlock(); - FileUtils.delete(finalPack); - FileUtils.delete(finalIdx); - throw err; - } - - return lockMessage != null ? 
keep : null; - } - - private void cleanupTemporaryFiles() { - if (!dstIdx.delete()) - dstIdx.deleteOnExit(); - if (!dstPack.delete()) - dstPack.deleteOnExit(); - } - - private void addObjectAndTrack(PackedObjectInfo oe) { - entries[entryCount++] = oe; - if (needNewObjectIds()) - newObjectIds.add(oe); - } - - private class InflaterStream extends InputStream { - private final Inflater inf; - - private final byte[] skipBuffer; - - private Source src; - - private long expectedSize; - - private long actualSize; - - private int p; - - InflaterStream() { - inf = InflaterCache.get(); - skipBuffer = new byte[512]; - } - - void release() { - inf.reset(); - InflaterCache.release(inf); - } - - void open(Source source, long inflatedSize) throws IOException { - src = source; - expectedSize = inflatedSize; - actualSize = 0; - - p = fill(src, 24); - inf.setInput(buf, p, bAvail); - } - - @Override - public long skip(long toSkip) throws IOException { - long n = 0; - while (n < toSkip) { - final int cnt = (int) Math.min(skipBuffer.length, toSkip - n); - final int r = read(skipBuffer, 0, cnt); - if (r <= 0) - break; - n += r; - } - return n; - } - - @Override - public int read() throws IOException { - int n = read(skipBuffer, 0, 1); - return n == 1 ? skipBuffer[0] & 0xff : -1; - } - - @Override - public int read(byte[] dst, int pos, int cnt) throws IOException { - try { - int n = 0; - while (n < cnt) { - int r = inf.inflate(dst, pos + n, cnt - n); - if (r == 0) { - if (inf.finished()) - break; - if (inf.needsInput()) { - crc.update(buf, p, bAvail); - use(bAvail); - - p = fill(src, 24); - inf.setInput(buf, p, bAvail); - } else { - throw new CorruptObjectException( - MessageFormat - .format( - JGitText.get().packfileCorruptionDetected, - JGitText.get().unknownZlibError)); - } - } else { - n += r; - } - } - actualSize += n; - return 0 < n ? n : -1; - } catch (DataFormatException dfe) { - throw new CorruptObjectException(MessageFormat.format(JGitText - .get().packfileCorruptionDetected, dfe.getMessage())); - } - } - - @Override - public void close() throws IOException { - // We need to read here to enter the loop above and pump the - // trailing checksum into the Inflater. It should return -1 as the - // caller was supposed to consume all content. - // - if (read(skipBuffer) != -1 || actualSize != expectedSize) { - throw new CorruptObjectException(MessageFormat.format(JGitText - .get().packfileCorruptionDetected, - JGitText.get().wrongDecompressedLength)); - } - - int used = bAvail - inf.getRemaining(); - if (0 < used) { - crc.update(buf, p, used); - use(used); - } - - inf.reset(); - } - } -} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/LongMap.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/LongMap.java index 6381c24dc..88b4b074a 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/LongMap.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/LongMap.java @@ -44,7 +44,7 @@ package org.eclipse.jgit.transport; /** - * Simple Map helper for {@link IndexPack}. + * Simple Map helper for {@link PackParser}. * * @param * type of the value instance. diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java new file mode 100644 index 000000000..54e7dd989 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java @@ -0,0 +1,1503 @@ +/* + * Copyright (C) 2008-2011, Google Inc. + * Copyright (C) 2007-2008, Robin Rosenberg + * Copyright (C) 2008, Shawn O. 
Pearce + * and other copyright owners as documented in the project's IP log. + * + * This program and the accompanying materials are made available + * under the terms of the Eclipse Distribution License v1.0 which + * accompanies this distribution, is reproduced below, and is + * available at http://www.eclipse.org/org/documents/edl-v10.php + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or + * without modification, are permitted provided that the following + * conditions are met: + * + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * + * - Neither the name of the Eclipse Foundation, Inc. nor the + * names of its contributors may be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.eclipse.jgit.transport; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.security.MessageDigest; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.zip.DataFormatException; +import java.util.zip.Inflater; + +import org.eclipse.jgit.JGitText; +import org.eclipse.jgit.errors.CorruptObjectException; +import org.eclipse.jgit.errors.MissingObjectException; +import org.eclipse.jgit.lib.AnyObjectId; +import org.eclipse.jgit.lib.Constants; +import org.eclipse.jgit.lib.InflaterCache; +import org.eclipse.jgit.lib.MutableObjectId; +import org.eclipse.jgit.lib.NullProgressMonitor; +import org.eclipse.jgit.lib.ObjectChecker; +import org.eclipse.jgit.lib.ObjectDatabase; +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.ObjectIdSubclassMap; +import org.eclipse.jgit.lib.ObjectInserter; +import org.eclipse.jgit.lib.ObjectLoader; +import org.eclipse.jgit.lib.ObjectReader; +import org.eclipse.jgit.lib.ObjectStream; +import org.eclipse.jgit.lib.ProgressMonitor; +import org.eclipse.jgit.storage.file.PackLock; +import org.eclipse.jgit.storage.pack.BinaryDelta; +import org.eclipse.jgit.util.IO; +import org.eclipse.jgit.util.NB; + +/** + * Parses a pack stream and imports it for an {@link ObjectInserter}. + *

+ * Applications can acquire an instance of a parser from ObjectInserter's + * {@link ObjectInserter#newPackParser(InputStream)} method. + *
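// A minimal usage sketch of the API described in the class comment above:
// acquire a parser from an ObjectInserter, stream the pack into it, then
// flush the inserter so the objects become visible to readers. The method
// name and its Repository/InputStream parameters are placeholders for
// illustration; only newPackParser(), setAllowThin(), parse(), flush() and
// release() come from this change or existing JGit API.
import java.io.InputStream;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.transport.PackParser;

class PackImportExample {
	static void importPack(Repository repo, InputStream rawPack) throws Exception {
		ObjectInserter ins = repo.newObjectInserter();
		try {
			PackParser p = ins.newPackParser(rawPack);
			p.setAllowThin(false); // refuse deltas whose bases are not in the stream
			p.parse(NullProgressMonitor.INSTANCE);
			ins.flush(); // publish the inserted objects
		} finally {
			ins.release();
		}
	}
}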

+ * Implementations of {@link ObjectInserter} should subclass this type and + * provide their own logic for the various {@code on*()} event methods declared + * to be abstract. + */ +public abstract class PackParser { + /** Size of the internal stream buffer. */ + private static final int BUFFER_SIZE = 8192; + + /** Location data is being obtained from. */ + public static enum Source { + /** Data is read from the incoming stream. */ + INPUT, + + /** Data is read back from the database's buffers. */ + DATABASE; + } + + /** Object database used for loading existing objects. */ + private final ObjectDatabase objectDatabase; + + private InflaterStream inflater; + + private byte[] tempBuffer; + + private byte[] hdrBuf; + + private final MessageDigest objectDigest; + + private final MutableObjectId tempObjectId; + + private InputStream in; + + private byte[] buf; + + /** Position in the input stream of {@code buf[0]}. */ + private long bBase; + + private int bOffset; + + private int bAvail; + + private ObjectChecker objCheck; + + private boolean allowThin; + + private boolean needBaseObjectIds; + + private long objectCount; + + private PackedObjectInfo[] entries; + + /** + * Every object contained within the incoming pack. + *

+ * This is a subset of {@link #entries}, as thin packs can add additional + * objects to {@code entries} by copying already existing objects from the + * repository onto the end of the thin pack to make it self-contained. + */ + private ObjectIdSubclassMap newObjectIds; + + private int deltaCount; + + private int entryCount; + + private ObjectIdSubclassMap baseById; + + /** + * Objects referenced by their name from deltas, that aren't in this pack. + *

+ * This is the set of objects that were copied onto the end of this pack to + * make it complete. These objects were not transmitted by the remote peer, + * but instead were assumed to already exist in the local repository. + */ + private ObjectIdSubclassMap baseObjectIds; + + private LongMap baseByPos; + + /** Blobs whose contents need to be double-checked after indexing. */ + private List deferredCheckBlobs; + + private MessageDigest packDigest; + + private ObjectReader readCurs; + + /** Message to protect the pack data from garbage collection. */ + private String lockMessage; + + /** + * Initialize a pack parser. + * + * @param odb + * database the parser will write its objects into. + * @param src + * the stream the parser will read. + */ + protected PackParser(final ObjectDatabase odb, final InputStream src) { + objectDatabase = odb.newCachedDatabase(); + in = src; + + inflater = new InflaterStream(); + readCurs = objectDatabase.newReader(); + buf = new byte[BUFFER_SIZE]; + tempBuffer = new byte[BUFFER_SIZE]; + hdrBuf = new byte[64]; + objectDigest = Constants.newMessageDigest(); + tempObjectId = new MutableObjectId(); + packDigest = Constants.newMessageDigest(); + } + + /** @return true if a thin pack (missing base objects) is permitted. */ + public boolean isAllowThin() { + return allowThin; + } + + /** + * Configure this index pack instance to allow a thin pack. + *

+ * Thin packs are sometimes used during network transfers to allow a delta + * to be sent without a base object. Such packs are not permitted on disk. + * + * @param allow + * true to enable a thin pack. + */ + public void setAllowThin(final boolean allow) { + allowThin = allow; + } + + /** + * Configure this index pack instance to keep track of new objects. + *

+ * By default an index pack doesn't save the new objects that it creates + * while parsing the stream. Setting this flag to {@code true} allows the + * caller to use {@link #getNewObjectIds()} to retrieve that list. + * + * @param b + * {@code true} to enable keeping track of new objects. + */ + public void setNeedNewObjectIds(boolean b) { + if (b) + newObjectIds = new ObjectIdSubclassMap(); + else + newObjectIds = null; + } + + private boolean needNewObjectIds() { + return newObjectIds != null; + } + + /** + * Configure this index pack instance to keep track of the objects assumed + * for delta bases. + *

+ * By default an index pack doesn't save the objects that were used as delta + * bases. Setting this flag to {@code true} will allow the caller to use + * {@link #getBaseObjectIds()} to retrieve that list. + * + * @param b + * {@code true} to enable keeping track of delta bases. + */ + public void setNeedBaseObjectIds(boolean b) { + this.needBaseObjectIds = b; + } + + /** @return the new objects that were sent by the user */ + public ObjectIdSubclassMap getNewObjectIds() { + if (newObjectIds != null) + return newObjectIds; + return new ObjectIdSubclassMap(); + } + + /** @return set of objects the incoming pack assumed for delta purposes */ + public ObjectIdSubclassMap getBaseObjectIds() { + if (baseObjectIds != null) + return baseObjectIds; + return new ObjectIdSubclassMap(); + } + + /** + * Configure the checker used to validate received objects. + *
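// A short sketch of the two tracking flags documented above; both must be
// set before parse() is called, and the result sets are queried afterwards.
// The parser "p" is assumed to come from ObjectInserter.newPackParser(), as
// in the earlier usage sketch.
p.setNeedNewObjectIds(true);
p.setNeedBaseObjectIds(true);
p.parse(NullProgressMonitor.INSTANCE);
int received = p.getNewObjectIds().size(); // objects carried by the stream itself
int borrowed = p.getBaseObjectIds().size(); // local bases a thin pack relied on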

+ * Usually object checking isn't necessary, as Git implementations only + * create valid objects in pack files. However, additional checking may be + * useful if processing data from an untrusted source. + * + * @param oc + * the checker instance; null to disable object checking. + */ + public void setObjectChecker(final ObjectChecker oc) { + objCheck = oc; + } + + /** + * Configure the checker used to validate received objects. + *

+ * Usually object checking isn't necessary, as Git implementations only + * create valid objects in pack files. However, additional checking may be + * useful if processing data from an untrusted source. + *

+ * This is shorthand for: + * + *

+	 * setObjectChecker(on ? new ObjectChecker() : null);
+	 * 
+ * + * @param on + * true to enable the default checker; false to disable it. + */ + public void setObjectChecking(final boolean on) { + setObjectChecker(on ? new ObjectChecker() : null); + } + + /** @return the message to record with the pack lock. */ + public String getLockMessage() { + return lockMessage; + } + + /** + * Set the lock message for the incoming pack data. + * + * @param msg + * if not null, the message to associate with the incoming data + * while it is locked to prevent garbage collection. + */ + public void setLockMessage(String msg) { + lockMessage = msg; + } + + /** + * Get the number of objects in the stream. + *
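// Continuing the usage sketch: enable the default checker when the stream
// comes from an untrusted peer, and set a lock message so the stored pack
// can be protected from garbage collection until the caller releases it.
// Whether a lock is actually taken is up to the PackParser subclass; the
// base implementation of parse() returns null. The lock message text here
// is a hypothetical example.
p.setObjectChecking(true); // same as p.setObjectChecker(new ObjectChecker())
p.setLockMessage("jgit receive-pack");
PackLock lock = p.parse(NullProgressMonitor.INSTANCE);
if (lock != null)
	lock.unlock(); // release once the new objects are safely referenced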

+ * The object count is only available after {@link #parse(ProgressMonitor)} + * has returned. The count may have been increased if the stream was a thin + * pack, and missing base objects were appended onto it by the subclass. + * + * @return number of objects parsed out of the stream. + */ + public int getObjectCount() { + return entryCount; + } + + /** + * Get the information about the requested object. + *

+ * The object information is only available after + * {@link #parse(ProgressMonitor)} has returned. + * + * @param nth + * index of the object in the stream. Must be between 0 and + * {@link #getObjectCount()}-1. + * @return the object information. + */ + public PackedObjectInfo getObject(int nth) { + return entries[nth]; + } + + /** + * Get all of the objects, sorted by their name. + *

+ * The object information is only available after + * {@link #parse(ProgressMonitor)} has returned. + *

+ * To maintain lower memory usage and good runtime performance, this method + * sorts the objects in-place and therefore impacts the ordering presented + * by {@link #getObject(int)}. + * + * @param cmp + * comparison function, if null objects are stored by ObjectId. + * @return sorted list of objects in this pack stream. + */ + public List getSortedObjectList( + Comparator cmp) { + Arrays.sort(entries, 0, entryCount, cmp); + List list = Arrays.asList(entries); + if (entryCount < entries.length) + list = list.subList(0, entryCount); + return list; + } + + /** + * Parse the pack stream. + * + * @param progress + * callback to provide progress feedback during parsing. If null, + * {@link NullProgressMonitor} will be used. + * @return the pack lock, if one was requested by setting + * {@link #setLockMessage(String)}. + * @throws IOException + * the stream is malformed, or contains corrupt objects. + */ + public PackLock parse(ProgressMonitor progress) throws IOException { + if (progress == null) + progress = NullProgressMonitor.INSTANCE; + progress.start(2 /* tasks */); + try { + readPackHeader(); + + entries = new PackedObjectInfo[(int) objectCount]; + baseById = new ObjectIdSubclassMap(); + baseByPos = new LongMap(); + deferredCheckBlobs = new ArrayList(); + + progress.beginTask(JGitText.get().receivingObjects, + (int) objectCount); + for (int done = 0; done < objectCount; done++) { + indexOneObject(); + progress.update(1); + if (progress.isCancelled()) + throw new IOException(JGitText.get().downloadCancelled); + } + readPackFooter(); + endInput(); + if (!deferredCheckBlobs.isEmpty()) + doDeferredCheckBlobs(); + progress.endTask(); + if (deltaCount > 0) { + resolveDeltas(progress); + if (entryCount < objectCount) { + if (!isAllowThin()) { + throw new IOException(MessageFormat.format(JGitText + .get().packHasUnresolvedDeltas, + (objectCount - entryCount))); + } + + resolveDeltasWithExternalBases(progress); + } + } + + packDigest = null; + baseById = null; + baseByPos = null; + } finally { + try { + if (readCurs != null) + readCurs.release(); + } finally { + readCurs = null; + } + + try { + inflater.release(); + } finally { + inflater = null; + objectDatabase.close(); + } + + progress.endTask(); + } + return null; // By default there is no locking. 
+ } + + private void resolveDeltas(final ProgressMonitor progress) + throws IOException { + progress.beginTask(JGitText.get().resolvingDeltas, deltaCount); + final int last = entryCount; + for (int i = 0; i < last; i++) { + final int before = entryCount; + resolveDeltas(entries[i]); + progress.update(entryCount - before); + if (progress.isCancelled()) + throw new IOException( + JGitText.get().downloadCancelledDuringIndexing); + } + progress.endTask(); + } + + private void resolveDeltas(final PackedObjectInfo oe) throws IOException { + UnresolvedDelta children = firstChildOf(oe); + if (children == null) + return; + + DeltaVisit visit = new DeltaVisit(); + visit.nextChild = children; + + ObjectTypeAndSize info = openDatabase(oe, new ObjectTypeAndSize()); + switch (info.type) { + case Constants.OBJ_COMMIT: + case Constants.OBJ_TREE: + case Constants.OBJ_BLOB: + case Constants.OBJ_TAG: + visit.data = inflateAndReturn(Source.DATABASE, info.size); + visit.id = oe; + break; + default: + throw new IOException(MessageFormat.format( + JGitText.get().unknownObjectType, info.type)); + } + + if (!checkCRC(oe.getCRC())) { + throw new IOException(MessageFormat.format( + JGitText.get().corruptionDetectedReReadingAt, oe + .getOffset())); + } + + resolveDeltas(visit.next(), info.type, info); + } + + private void resolveDeltas(DeltaVisit visit, final int type, + ObjectTypeAndSize info) throws IOException { + do { + info = openDatabase(visit.delta, info); + switch (info.type) { + case Constants.OBJ_OFS_DELTA: + case Constants.OBJ_REF_DELTA: + break; + + default: + throw new IOException(MessageFormat.format( + JGitText.get().unknownObjectType, info.type)); + } + + visit.data = BinaryDelta.apply(visit.parent.data, // + inflateAndReturn(Source.DATABASE, info.size)); + + if (!checkCRC(visit.delta.crc)) + throw new IOException(MessageFormat.format( + JGitText.get().corruptionDetectedReReadingAt, + visit.delta.position)); + + objectDigest.update(Constants.encodedTypeString(type)); + objectDigest.update((byte) ' '); + objectDigest.update(Constants.encodeASCII(visit.data.length)); + objectDigest.update((byte) 0); + objectDigest.update(visit.data); + tempObjectId.fromRaw(objectDigest.digest(), 0); + + verifySafeObject(tempObjectId, type, visit.data); + + PackedObjectInfo oe; + oe = newInfo(tempObjectId, visit.delta, visit.parent.id); + oe.setOffset(visit.delta.position); + addObjectAndTrack(oe); + visit.id = oe; + + visit.nextChild = firstChildOf(oe); + visit = visit.next(); + } while (visit != null); + } + + /** + * Read the header of the current object. + *
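// Continuing the sketch: once parse() has returned, the accessors above can
// be used to inspect what arrived. Note the caveat in getSortedObjectList():
// it sorts the internal array in place, which also changes the order that
// getObject(int) reports afterwards. List is java.util.List and
// PackedObjectInfo is org.eclipse.jgit.transport.PackedObjectInfo.
int cnt = p.getObjectCount();
for (int i = 0; i < cnt; i++)
	System.out.println(p.getObject(i).name());
List<PackedObjectInfo> byId = p.getSortedObjectList(null); // null: sort by ObjectId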

+ * After the header has been parsed, this method automatically invokes + * {@link #onObjectHeader(Source, byte[], int, int)} to allow the + * implementation to update its internal checksums for the bytes read. + *
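// A standalone sketch of the entry header format this method consumes: the
// low four bits of the first byte seed the inflated size, bits 4-6 carry the
// object type, and a set high bit pulls in another byte contributing seven
// more size bits. Delta entries are followed by a base reference, which the
// parser decodes separately. The helper name is illustrative only; the loop
// mirrors the parser's own code.
static void decodeEntryHeader(byte[] raw) {
	int p = 0;
	int c = raw[p++] & 0xff;
	int type = (c >> 4) & 7; // OBJ_COMMIT=1, OBJ_TREE=2, OBJ_BLOB=3, OBJ_TAG=4, deltas=6,7
	long size = c & 15;
	int shift = 4;
	while ((c & 0x80) != 0) {
		c = raw[p++] & 0xff;
		size += ((long) (c & 0x7f)) << shift;
		shift += 7;
	}
	// Example: bytes { 0x95, 0x0b } decode to type 1 (commit) and
	// size 5 + (11 << 4) = 181.
	System.out.println("type=" + type + " inflated size=" + size);
}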

+ * When this method returns the database will be positioned on the first + * byte of the deflated data stream. + * + * @param info + * the info object to populate. + * @return {@code info}, after populating. + * @throws IOException + * the size cannot be read. + */ + protected ObjectTypeAndSize readObjectHeader(ObjectTypeAndSize info) + throws IOException { + int hdrPtr = 0; + int c = readFrom(Source.DATABASE); + hdrBuf[hdrPtr++] = (byte) c; + + info.type = (c >> 4) & 7; + long sz = c & 15; + int shift = 4; + while ((c & 0x80) != 0) { + c = readFrom(Source.DATABASE); + hdrBuf[hdrPtr++] = (byte) c; + sz += (c & 0x7f) << shift; + shift += 7; + } + info.size = sz; + + switch (info.type) { + case Constants.OBJ_COMMIT: + case Constants.OBJ_TREE: + case Constants.OBJ_BLOB: + case Constants.OBJ_TAG: + onObjectHeader(Source.DATABASE, hdrBuf, 0, hdrPtr); + break; + + case Constants.OBJ_OFS_DELTA: + c = readFrom(Source.DATABASE); + hdrBuf[hdrPtr++] = (byte) c; + while ((c & 128) != 0) { + c = readFrom(Source.DATABASE); + hdrBuf[hdrPtr++] = (byte) c; + } + onObjectHeader(Source.DATABASE, hdrBuf, 0, hdrPtr); + break; + + case Constants.OBJ_REF_DELTA: + System.arraycopy(buf, fill(Source.DATABASE, 20), hdrBuf, hdrPtr, 20); + hdrPtr += 20; + use(20); + onObjectHeader(Source.DATABASE, hdrBuf, 0, hdrPtr); + break; + + default: + throw new IOException(MessageFormat.format( + JGitText.get().unknownObjectType, info.type)); + } + return info; + } + + private UnresolvedDelta removeBaseById(final AnyObjectId id) { + final DeltaChain d = baseById.get(id); + return d != null ? d.remove() : null; + } + + private static UnresolvedDelta reverse(UnresolvedDelta c) { + UnresolvedDelta tail = null; + while (c != null) { + final UnresolvedDelta n = c.next; + c.next = tail; + tail = c; + c = n; + } + return tail; + } + + private UnresolvedDelta firstChildOf(PackedObjectInfo oe) { + UnresolvedDelta a = reverse(removeBaseById(oe)); + UnresolvedDelta b = reverse(baseByPos.remove(oe.getOffset())); + + if (a == null) + return b; + if (b == null) + return a; + + UnresolvedDelta first = null; + UnresolvedDelta last = null; + while (a != null || b != null) { + UnresolvedDelta curr; + if (b == null || (a != null && a.position < b.position)) { + curr = a; + a = a.next; + } else { + curr = b; + b = b.next; + } + if (last != null) + last.next = curr; + else + first = curr; + last = curr; + curr.next = null; + } + return first; + } + + private void resolveDeltasWithExternalBases(final ProgressMonitor progress) + throws IOException { + growEntries(baseById.size()); + + if (needBaseObjectIds) + baseObjectIds = new ObjectIdSubclassMap(); + + final List missing = new ArrayList(64); + for (final DeltaChain baseId : baseById) { + if (baseId.head == null) + continue; + + if (needBaseObjectIds) + baseObjectIds.add(baseId); + + final ObjectLoader ldr; + try { + ldr = readCurs.open(baseId); + } catch (MissingObjectException notFound) { + missing.add(baseId); + continue; + } + + final DeltaVisit visit = new DeltaVisit(); + visit.data = ldr.getCachedBytes(Integer.MAX_VALUE); + visit.id = baseId; + final int typeCode = ldr.getType(); + final PackedObjectInfo oe = newInfo(baseId, null, null); + + if (onAppendBase(typeCode, visit.data, oe)) + entries[entryCount++] = oe; + + visit.nextChild = firstChildOf(oe); + resolveDeltas(visit.next(), typeCode, new ObjectTypeAndSize()); + + if (progress.isCancelled()) + throw new IOException( + JGitText.get().downloadCancelledDuringIndexing); + } + + for (final DeltaChain base : missing) { + if (base.head != 
null) + throw new MissingObjectException(base, "delta base"); + } + + onEndThinPack(); + } + + private void growEntries(int extraObjects) { + final PackedObjectInfo[] ne; + + ne = new PackedObjectInfo[(int) objectCount + extraObjects]; + System.arraycopy(entries, 0, ne, 0, entryCount); + entries = ne; + } + + private void readPackHeader() throws IOException { + final int hdrln = Constants.PACK_SIGNATURE.length + 4 + 4; + final int p = fill(Source.INPUT, hdrln); + for (int k = 0; k < Constants.PACK_SIGNATURE.length; k++) + if (buf[p + k] != Constants.PACK_SIGNATURE[k]) + throw new IOException(JGitText.get().notAPACKFile); + + final long vers = NB.decodeUInt32(buf, p + 4); + if (vers != 2 && vers != 3) + throw new IOException(MessageFormat.format( + JGitText.get().unsupportedPackVersion, vers)); + objectCount = NB.decodeUInt32(buf, p + 8); + use(hdrln); + } + + private void readPackFooter() throws IOException { + sync(); + final byte[] actHash = packDigest.digest(); + + final int c = fill(Source.INPUT, 20); + final byte[] srcHash = new byte[20]; + System.arraycopy(buf, c, srcHash, 0, 20); + use(20); + + if (!Arrays.equals(actHash, srcHash)) + throw new CorruptObjectException( + JGitText.get().corruptObjectPackfileChecksumIncorrect); + + onPackFooter(srcHash); + } + + // Cleanup all resources associated with our input parsing. + private void endInput() { + in = null; + } + + // Read one entire object or delta from the input. + private void indexOneObject() throws IOException { + final long streamPosition = streamPosition(); + + int hdrPtr = 0; + int c = readFrom(Source.INPUT); + hdrBuf[hdrPtr++] = (byte) c; + + final int typeCode = (c >> 4) & 7; + long sz = c & 15; + int shift = 4; + while ((c & 0x80) != 0) { + c = readFrom(Source.INPUT); + hdrBuf[hdrPtr++] = (byte) c; + sz += (c & 0x7f) << shift; + shift += 7; + } + + switch (typeCode) { + case Constants.OBJ_COMMIT: + case Constants.OBJ_TREE: + case Constants.OBJ_BLOB: + case Constants.OBJ_TAG: + onBeginWholeObject(streamPosition, typeCode, sz); + onObjectHeader(Source.INPUT, hdrBuf, 0, hdrPtr); + whole(streamPosition, typeCode, sz); + break; + + case Constants.OBJ_OFS_DELTA: { + c = readFrom(Source.INPUT); + hdrBuf[hdrPtr++] = (byte) c; + long ofs = c & 127; + while ((c & 128) != 0) { + ofs += 1; + c = readFrom(Source.INPUT); + hdrBuf[hdrPtr++] = (byte) c; + ofs <<= 7; + ofs += (c & 127); + } + final long base = streamPosition - ofs; + onBeginOfsDelta(streamPosition, base, sz); + onObjectHeader(Source.INPUT, hdrBuf, 0, hdrPtr); + inflateAndSkip(Source.INPUT, sz); + UnresolvedDelta n = onEndDelta(); + n.position = streamPosition; + n.next = baseByPos.put(base, n); + deltaCount++; + break; + } + + case Constants.OBJ_REF_DELTA: { + c = fill(Source.INPUT, 20); + final ObjectId base = ObjectId.fromRaw(buf, c); + System.arraycopy(buf, c, hdrBuf, hdrPtr, 20); + hdrPtr += 20; + use(20); + DeltaChain r = baseById.get(base); + if (r == null) { + r = new DeltaChain(base); + baseById.add(r); + } + onBeginRefDelta(streamPosition, base, sz); + onObjectHeader(Source.INPUT, hdrBuf, 0, hdrPtr); + inflateAndSkip(Source.INPUT, sz); + UnresolvedDelta n = onEndDelta(); + n.position = streamPosition; + r.add(n); + deltaCount++; + break; + } + + default: + throw new IOException(MessageFormat.format( + JGitText.get().unknownObjectType, typeCode)); + } + } + + private void whole(final long pos, final int type, final long sz) + throws IOException { + objectDigest.update(Constants.encodedTypeString(type)); + objectDigest.update((byte) ' '); + 
objectDigest.update(Constants.encodeASCII(sz)); + objectDigest.update((byte) 0); + + boolean checkContentLater = false; + if (type == Constants.OBJ_BLOB) { + byte[] readBuffer = buffer(); + InputStream inf = inflate(Source.INPUT, sz); + long cnt = 0; + while (cnt < sz) { + int r = inf.read(readBuffer); + if (r <= 0) + break; + objectDigest.update(readBuffer, 0, r); + cnt += r; + } + inf.close(); + tempObjectId.fromRaw(objectDigest.digest(), 0); + checkContentLater = readCurs.has(tempObjectId); + + } else { + final byte[] data = inflateAndReturn(Source.INPUT, sz); + objectDigest.update(data); + tempObjectId.fromRaw(objectDigest.digest(), 0); + verifySafeObject(tempObjectId, type, data); + } + + PackedObjectInfo obj = newInfo(tempObjectId, null, null); + obj.setOffset(pos); + onEndWholeObject(obj); + addObjectAndTrack(obj); + if (checkContentLater) + deferredCheckBlobs.add(obj); + } + + private void verifySafeObject(final AnyObjectId id, final int type, + final byte[] data) throws IOException { + if (objCheck != null) { + try { + objCheck.check(type, data); + } catch (CorruptObjectException e) { + throw new IOException(MessageFormat.format( + JGitText.get().invalidObject, Constants + .typeString(type), id.name(), e.getMessage())); + } + } + + try { + final ObjectLoader ldr = readCurs.open(id, type); + final byte[] existingData = ldr.getCachedBytes(data.length); + if (!Arrays.equals(data, existingData)) { + throw new IOException(MessageFormat.format( + JGitText.get().collisionOn, id.name())); + } + } catch (MissingObjectException notLocal) { + // This is OK, we don't have a copy of the object locally + // but the API throws when we try to read it as usually its + // an error to read something that doesn't exist. + } + } + + private void doDeferredCheckBlobs() throws IOException { + final byte[] readBuffer = buffer(); + final byte[] curBuffer = new byte[readBuffer.length]; + ObjectTypeAndSize info = new ObjectTypeAndSize(); + + for (PackedObjectInfo obj : deferredCheckBlobs) { + info = openDatabase(obj, info); + + if (info.type != Constants.OBJ_BLOB) + throw new IOException(MessageFormat.format( + JGitText.get().unknownObjectType, info.type)); + + ObjectStream cur = readCurs.open(obj, info.type).openStream(); + try { + long sz = info.size; + if (cur.getSize() != sz) + throw new IOException(MessageFormat.format( + JGitText.get().collisionOn, obj.name())); + InputStream pck = inflate(Source.DATABASE, sz); + while (0 < sz) { + int n = (int) Math.min(readBuffer.length, sz); + IO.readFully(cur, curBuffer, 0, n); + IO.readFully(pck, readBuffer, 0, n); + for (int i = 0; i < n; i++) { + if (curBuffer[i] != readBuffer[i]) + throw new IOException(MessageFormat.format(JGitText + .get().collisionOn, obj.name())); + } + sz -= n; + } + pck.close(); + } finally { + cur.close(); + } + } + } + + /** @return current position of the input stream being parsed. */ + private long streamPosition() { + return bBase + bOffset; + } + + private ObjectTypeAndSize openDatabase(PackedObjectInfo obj, + ObjectTypeAndSize info) throws IOException { + bOffset = 0; + bAvail = 0; + return seekDatabase(obj, info); + } + + private ObjectTypeAndSize openDatabase(UnresolvedDelta delta, + ObjectTypeAndSize info) throws IOException { + bOffset = 0; + bAvail = 0; + return seekDatabase(delta, info); + } + + // Consume exactly one byte from the buffer and return it. 
+ private int readFrom(final Source src) throws IOException { + if (bAvail == 0) + fill(src, 1); + bAvail--; + return buf[bOffset++] & 0xff; + } + + // Consume cnt bytes from the buffer. + private void use(final int cnt) { + bOffset += cnt; + bAvail -= cnt; + } + + // Ensure at least need bytes are available in in {@link #buf}. + private int fill(final Source src, final int need) throws IOException { + while (bAvail < need) { + int next = bOffset + bAvail; + int free = buf.length - next; + if (free + bAvail < need) { + switch (src) { + case INPUT: + sync(); + break; + case DATABASE: + if (bAvail > 0) + System.arraycopy(buf, bOffset, buf, 0, bAvail); + bOffset = 0; + break; + } + next = bAvail; + free = buf.length - next; + } + switch (src) { + case INPUT: + next = in.read(buf, next, free); + break; + case DATABASE: + next = readDatabase(buf, next, free); + break; + } + if (next <= 0) + throw new EOFException(JGitText.get().packfileIsTruncated); + bAvail += next; + } + return bOffset; + } + + // Store consumed bytes in {@link #buf} up to {@link #bOffset}. + private void sync() throws IOException { + packDigest.update(buf, 0, bOffset); + onStoreStream(buf, 0, bOffset); + if (bAvail > 0) + System.arraycopy(buf, bOffset, buf, 0, bAvail); + bBase += bOffset; + bOffset = 0; + } + + /** @return a temporary byte array for use by the caller. */ + protected byte[] buffer() { + return tempBuffer; + } + + /** + * Construct a PackedObjectInfo instance for this parser. + * + * @param id + * identity of the object to be tracked. + * @param delta + * if the object was previously an unresolved delta, this is the + * delta object that was tracking it. Otherwise null. + * @param deltaBase + * if the object was previously an unresolved delta, this is the + * ObjectId of the base of the delta. The base may be outside of + * the pack stream if the stream was a thin-pack. + * @return info object containing this object's data. + */ + protected PackedObjectInfo newInfo(AnyObjectId id, UnresolvedDelta delta, + ObjectId deltaBase) { + PackedObjectInfo oe = new PackedObjectInfo(id); + if (delta != null) + oe.setCRC(delta.crc); + return oe; + } + + /** + * Store bytes received from the raw stream. + *

+ * This method is invoked during {@link #parse(ProgressMonitor)} as data is + * consumed from the incoming stream. Implementors may use this event to + * archive the raw incoming stream to the destination repository in large + * chunks, without paying attention to object boundaries. + *

+ * The only component of the pack not supplied to this method is the last 20 + * bytes of the pack that comprise the trailing SHA-1 checksum. Those are + * passed to {@link #onPackFooter(byte[])}. + * + * @param raw + * buffer to copy data out of. + * @param pos + * first offset within the buffer that is valid. + * @param len + * number of bytes in the buffer that are valid. + * @throws IOException + * the stream cannot be archived. + */ + protected abstract void onStoreStream(byte[] raw, int pos, int len) + throws IOException; + + /** + * Store (and/or checksum) an object header. + *
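// Hypothetical fragment of a subclass that spools the raw stream to a local
// pack file, much as the removed IndexPack wrote to its packOut file. The
// RandomAccessFile field "out" is an assumption of this sketch, not part of
// the PackParser API.
@Override
protected void onStoreStream(byte[] raw, int pos, int len) throws IOException {
	out.write(raw, pos, len);
}

@Override
protected void onPackFooter(byte[] hash) throws IOException {
	out.write(hash); // the trailing SHA-1 arrives here, not via onStoreStream()
}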

+ * Invoked after any of the {@code onBegin()} events. The entire header is + * supplied in a single invocation, before any object data is supplied. + * + * @param src + * where the data came from + * @param raw + * buffer to read data from. + * @param pos + * first offset within buffer that is valid. + * @param len + * number of bytes in buffer that are valid. + * @throws IOException + * the stream cannot be archived. + */ + protected abstract void onObjectHeader(Source src, byte[] raw, int pos, + int len) throws IOException; + + /** + * Store (and/or checksum) a portion of an object's data. + *

+ * This method may be invoked multiple times per object, depending on the + * size of the object, the size of the parser's internal read buffer, and + * the alignment of the object relative to the read buffer. + *

+ * Invoked after {@link #onObjectHeader(Source, byte[], int, int)}. + * + * @param src + * where the data came from + * @param raw + * buffer to read data from. + * @param pos + * first offset within buffer that is valid. + * @param len + * number of bytes in buffer that are valid. + * @throws IOException + * the stream cannot be archived. + */ + protected abstract void onObjectData(Source src, byte[] raw, int pos, + int len) throws IOException; + + /** + * Provide the implementation with the original stream's pack footer. + * + * @param hash + * the trailing 20 bytes of the pack, this is a SHA-1 checksum of + * all of the pack data. + * @throws IOException + * the stream cannot be archived. + */ + protected abstract void onPackFooter(byte[] hash) throws IOException; + + /** + * Provide the implementation with a base that was outside of the pack. + *

+ * This event only occurs on a thin pack for base objects that were outside + * of the pack and came from the local repository. Usually an implementation + * uses this event to compress the base and append it onto the end of the + * pack, so the pack stays self-contained. + * + * @param typeCode + * type of the base object. + * @param data + * complete content of the base object. + * @param info + * packed object information for this base. Implementors must + * populate the CRC and offset members if returning true. + * @return true if the {@code info} should be included in the object list + * returned by {@link #getSortedObjectList(Comparator)}, false if it + * should not be included. + * @throws IOException + * the base could not be included into the pack. + */ + protected abstract boolean onAppendBase(int typeCode, byte[] data, + PackedObjectInfo info) throws IOException; + + /** + * Event indicating a thin pack has been completely processed. + *
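// Hypothetical sketch of an onAppendBase() implementation: re-encode the
// missing base as a whole object at the end of a local pack file, in the
// spirit of the writeWhole() helper removed from IndexPack above. The fields
// "out" (a java.io.RandomAccessFile) and "crc" (a java.util.zip.CRC32) are
// assumptions of this sketch; Deflater is java.util.zip.Deflater.
@Override
protected boolean onAppendBase(int typeCode, byte[] data, PackedObjectInfo info)
		throws IOException {
	crc.reset();
	info.setOffset(out.getFilePointer());

	// Whole-object header: the type and the low four size bits share the
	// first byte; remaining size bits follow in seven-bit groups.
	byte[] hdr = new byte[10];
	int sz = data.length;
	int n = 0;
	hdr[n++] = (byte) ((typeCode << 4) | (sz & 15));
	sz >>>= 4;
	while (sz > 0) {
		hdr[n - 1] |= 0x80;
		hdr[n++] = (byte) (sz & 0x7f);
		sz >>>= 7;
	}
	crc.update(hdr, 0, n);
	out.write(hdr, 0, n);

	// Deflated content follows the header.
	Deflater def = new Deflater(Deflater.DEFAULT_COMPRESSION);
	def.setInput(data);
	def.finish();
	byte[] tmp = new byte[8192];
	while (!def.finished()) {
		int r = def.deflate(tmp);
		crc.update(tmp, 0, r);
		out.write(tmp, 0, r);
	}
	def.end();

	info.setCRC((int) crc.getValue());
	return true; // include the appended base in the object list
}
// A full implementation would also fold the appended bytes into its running
// pack checksum and later rewrite the pack header and trailer, as the removed
// fixThinPack()/fixHeaderFooter() code did.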

+ * This event is invoked only if a thin pack has delta references to objects + * external from the pack. The event is called after all of those deltas + * have been resolved. + * + * @throws IOException + * the pack cannot be archived. + */ + protected abstract void onEndThinPack() throws IOException; + + /** + * Reposition the database to re-read a previously stored object. + *

+ * If the database is computing CRC-32 checksums for object data, it should + * reset its internal CRC instance during this method call. + * + * @param obj + * the object position to begin reading from. This is from + * {@link #newInfo(AnyObjectId, UnresolvedDelta, ObjectId)}. + * @param info + * object to populate with type and size. + * @return the {@code info} object. + * @throws IOException + * the database cannot reposition to this location. + */ + protected abstract ObjectTypeAndSize seekDatabase(PackedObjectInfo obj, + ObjectTypeAndSize info) throws IOException; + + /** + * Reposition the database to re-read a previously stored object. + *

+ * If the database is computing CRC-32 checksums for object data, it should + * reset its internal CRC instance during this method call. + * + * @param delta + * the object position to begin reading from. This is an instance + * previously returned by {@link #onEndDelta()}. + * @param info + * object to populate with type and size. + * @return the {@code info} object. + * @throws IOException + * the database cannot reposition to this location. + */ + protected abstract ObjectTypeAndSize seekDatabase(UnresolvedDelta delta, + ObjectTypeAndSize info) throws IOException; + + /** + * Read from the database's current position into the buffer. + * + * @param dst + * the buffer to copy read data into. + * @param pos + * position within {@code dst} to start copying data into. + * @param cnt + * ideal target number of bytes to read. Actual read length may + * be shorter. + * @return number of bytes stored. + * @throws IOException + * the database cannot be accessed. + */ + protected abstract int readDatabase(byte[] dst, int pos, int cnt) + throws IOException; + + /** + * Check the current CRC matches the expected value. + *
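// Hypothetical file-backed versions of the two seekDatabase() hooks and of
// readDatabase(), mirroring how the removed IndexPack re-read its temporary
// pack file. "out" is the same assumed RandomAccessFile as in the earlier
// fragments, and "crc" the assumed CRC32 that is reset on reposition as the
// javadoc above requires; readObjectHeader() is the protected helper declared
// earlier in this class.
@Override
protected ObjectTypeAndSize seekDatabase(PackedObjectInfo obj,
		ObjectTypeAndSize info) throws IOException {
	crc.reset();
	out.seek(obj.getOffset());
	return readObjectHeader(info);
}

@Override
protected ObjectTypeAndSize seekDatabase(UnresolvedDelta delta,
		ObjectTypeAndSize info) throws IOException {
	crc.reset();
	out.seek(delta.getOffset());
	return readObjectHeader(info);
}

@Override
protected int readDatabase(byte[] dst, int pos, int cnt) throws IOException {
	return out.read(dst, pos, cnt);
}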
+ * This method is invoked when an object is read back in from the database + * and its data is used during delta resolution. The CRC is validated after + * the object has been fully read, allowing the parser to verify there was + * no silent data corruption. + *
+ * Implementations are free to ignore this check by always returning true if + * they are performing other data integrity validations at a lower level. + * + * @param oldCRC + * the prior CRC that was recorded during the first scan of the + * object from the pack stream. + * @return true if the CRC matches; false if it does not. + */ + protected abstract boolean checkCRC(int oldCRC); + + /** + * Event notifying the start of an object stored whole (not as a delta). + * + * @param streamPosition + * position of this object in the incoming stream. + * @param type + * type of the object; one of {@link Constants#OBJ_COMMIT}, + * {@link Constants#OBJ_TREE}, {@link Constants#OBJ_BLOB}, or + * {@link Constants#OBJ_TAG}. + * @param inflatedSize + * size of the object when fully inflated. The size stored within + * the pack may be larger or smaller, and is not yet known. + * @throws IOException + * the object cannot be recorded. + */ + protected abstract void onBeginWholeObject(long streamPosition, int type, + long inflatedSize) throws IOException; + + /** + * Event notifying the completion of the current whole object. + * + * @param info + * object information. + * @throws IOException + * the object cannot be recorded. + */ + protected abstract void onEndWholeObject(PackedObjectInfo info) + throws IOException; + + /** + * Event notifying start of a delta referencing its base by offset. + * + * @param deltaStreamPosition + * position of this object in the incoming stream. + * @param baseStreamPosition + * position of the base object in the incoming stream. The base + * must be before the delta, therefore {@code baseStreamPosition + * < deltaStreamPosition}. This is not the position + * returned by a prior end object event. + * @param inflatedSize + * size of the delta when fully inflated. The size stored within + * the pack may be larger or smaller, and is not yet known. + * @throws IOException + * the object cannot be recorded. + */ + protected abstract void onBeginOfsDelta(long deltaStreamPosition, + long baseStreamPosition, long inflatedSize) throws IOException; + + /** + * Event notifying start of a delta referencing its base by ObjectId. + * + * @param deltaStreamPosition + * position of this object in the incoming stream. + * @param baseId + * name of the base object. This object may be later in the + * stream, or might not appear at all in the stream (in the case + * of a thin-pack). + * @param inflatedSize + * size of the delta when fully inflated. The size stored within + * the pack may be larger or smaller, and is not yet known. + * @throws IOException + * the object cannot be recorded. + */ + protected abstract void onBeginRefDelta(long deltaStreamPosition, + AnyObjectId baseId, long inflatedSize) throws IOException; + + /** + * Event notifying the completion of the current delta. + * + * @return object information that must be populated with at least the + * offset. + * @throws IOException + * the object cannot be recorded. + */ + protected UnresolvedDelta onEndDelta() throws IOException { + return new UnresolvedDelta(); + } + + /** Type and size information about an object in the database buffer. */ + public static class ObjectTypeAndSize { + /** The type of the object. */ + public int type; + + /** The inflated size of the object.
*/ + public long size; + } + + private void inflateAndSkip(final Source src, final long inflatedSize) + throws IOException { + final InputStream inf = inflate(src, inflatedSize); + IO.skipFully(inf, inflatedSize); + inf.close(); + } + + private byte[] inflateAndReturn(final Source src, final long inflatedSize) + throws IOException { + final byte[] dst = new byte[(int) inflatedSize]; + final InputStream inf = inflate(src, inflatedSize); + IO.readFully(inf, dst, 0, dst.length); + inf.close(); + return dst; + } + + private InputStream inflate(final Source src, final long inflatedSize) + throws IOException { + inflater.open(src, inflatedSize); + return inflater; + } + + private static class DeltaChain extends ObjectId { + UnresolvedDelta head; + + DeltaChain(final AnyObjectId id) { + super(id); + } + + UnresolvedDelta remove() { + final UnresolvedDelta r = head; + if (r != null) + head = null; + return r; + } + + void add(final UnresolvedDelta d) { + d.next = head; + head = d; + } + } + + /** Information about an unresolved delta in this pack stream. */ + public static class UnresolvedDelta { + long position; + + int crc; + + UnresolvedDelta next; + + /** @return offset within the input stream. */ + public long getOffset() { + return position; + } + + /** @return the CRC-32 checksum of the stored delta data. */ + public int getCRC() { + return crc; + } + + /** + * @param crc32 + * the CRC-32 checksum of the stored delta data. + */ + public void setCRC(int crc32) { + crc = crc32; + } + } + + private static class DeltaVisit { + final UnresolvedDelta delta; + + ObjectId id; + + byte[] data; + + DeltaVisit parent; + + UnresolvedDelta nextChild; + + DeltaVisit() { + this.delta = null; // At the root of the stack we have a base. + } + + DeltaVisit(DeltaVisit parent) { + this.parent = parent; + this.delta = parent.nextChild; + parent.nextChild = delta.next; + } + + DeltaVisit next() { + // If our parent has no more children, discard it. + if (parent != null && parent.nextChild == null) { + parent.data = null; + parent = parent.parent; + } + + if (nextChild != null) + return new DeltaVisit(this); + + // If we have no child ourselves, our parent must (if it exists), + // due to the discard rule above. With no parent, we are done. + if (parent != null) + return new DeltaVisit(parent); + return null; + } + } + + private void addObjectAndTrack(PackedObjectInfo oe) { + entries[entryCount++] = oe; + if (needNewObjectIds()) + newObjectIds.add(oe); + } + + private class InflaterStream extends InputStream { + private final Inflater inf; + + private final byte[] skipBuffer; + + private Source src; + + private long expectedSize; + + private long actualSize; + + private int p; + + InflaterStream() { + inf = InflaterCache.get(); + skipBuffer = new byte[512]; + } + + void release() { + inf.reset(); + InflaterCache.release(inf); + } + + void open(Source source, long inflatedSize) throws IOException { + src = source; + expectedSize = inflatedSize; + actualSize = 0; + + p = fill(src, 1); + inf.setInput(buf, p, bAvail); + } + + @Override + public long skip(long toSkip) throws IOException { + long n = 0; + while (n < toSkip) { + final int cnt = (int) Math.min(skipBuffer.length, toSkip - n); + final int r = read(skipBuffer, 0, cnt); + if (r <= 0) + break; + n += r; + } + return n; + } + + @Override + public int read() throws IOException { + int n = read(skipBuffer, 0, 1); + return n == 1 ?
skipBuffer[0] & 0xff : -1; + } + + @Override + public int read(byte[] dst, int pos, int cnt) throws IOException { + try { + int n = 0; + while (n < cnt) { + int r = inf.inflate(dst, pos + n, cnt - n); + if (r == 0) { + if (inf.finished()) + break; + if (inf.needsInput()) { + onObjectData(src, buf, p, bAvail); + use(bAvail); + + p = fill(src, 1); + inf.setInput(buf, p, bAvail); + } else { + throw new CorruptObjectException( + MessageFormat + .format( + JGitText.get().packfileCorruptionDetected, + JGitText.get().unknownZlibError)); + } + } else { + n += r; + } + } + actualSize += n; + return 0 < n ? n : -1; + } catch (DataFormatException dfe) { + throw new CorruptObjectException(MessageFormat.format(JGitText + .get().packfileCorruptionDetected, dfe.getMessage())); + } + } + + @Override + public void close() throws IOException { + // We need to read here to enter the loop above and pump the + // trailing checksum into the Inflater. It should return -1 as the + // caller was supposed to consume all content. + // + if (read(skipBuffer) != -1 || actualSize != expectedSize) { + throw new CorruptObjectException(MessageFormat.format(JGitText + .get().packfileCorruptionDetected, + JGitText.get().wrongDecompressedLength)); + } + + int used = bAvail - inf.getRemaining(); + if (0 < used) { + onObjectData(src, buf, p, used); + use(used); + } + + inf.reset(); + } + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceivePack.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceivePack.java index 0b98f6c4a..8c50604f1 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceivePack.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceivePack.java @@ -70,15 +70,16 @@ import org.eclipse.jgit.errors.PackProtocolException; import org.eclipse.jgit.errors.UnpackException; import org.eclipse.jgit.lib.Config; +import org.eclipse.jgit.lib.Config.SectionParser; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.NullProgressMonitor; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectIdSubclassMap; +import org.eclipse.jgit.lib.ObjectInserter; import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.RefUpdate; import org.eclipse.jgit.lib.Repository; -import org.eclipse.jgit.lib.Config.SectionParser; import org.eclipse.jgit.revwalk.ObjectWalk; import org.eclipse.jgit.revwalk.RevBlob; import org.eclipse.jgit.revwalk.RevCommit; @@ -161,7 +162,7 @@ public class ReceivePack { private Writer msgs; - private IndexPack ip; + private PackParser parser; /** The refs we advertised as existing at the start of the connection.
*/ private Map<String, Ref> refs; @@ -630,7 +631,7 @@ private void service() throws IOException { receivePack(); if (needCheckConnectivity()) checkConnectivity(); - ip = null; + parser = null; unpackError = null; } catch (IOException err) { unpackError = err; @@ -779,17 +780,23 @@ private void receivePack() throws IOException { if (timeoutIn != null) timeoutIn.setTimeout(10 * timeout * 1000); - ip = IndexPack.create(db, rawIn); - ip.setFixThin(true); - ip.setNeedNewObjectIds(checkReferencedIsReachable); - ip.setNeedBaseObjectIds(checkReferencedIsReachable); - ip.setObjectChecking(isCheckReceivedObjects()); - ip.index(NullProgressMonitor.INSTANCE); + ObjectInserter ins = db.newObjectInserter(); + try { + String lockMsg = "jgit receive-pack"; + if (getRefLogIdent() != null) + lockMsg += " from " + getRefLogIdent().toExternalString(); - String lockMsg = "jgit receive-pack"; - if (getRefLogIdent() != null) - lockMsg += " from " + getRefLogIdent().toExternalString(); - packLock = ip.renameAndOpenPack(lockMsg); + parser = ins.newPackParser(rawIn); + parser.setAllowThin(true); + parser.setNeedNewObjectIds(checkReferencedIsReachable); + parser.setNeedBaseObjectIds(checkReferencedIsReachable); + parser.setObjectChecking(isCheckReceivedObjects()); + parser.setLockMessage(lockMsg); + packLock = parser.parse(NullProgressMonitor.INSTANCE); + ins.flush(); + } finally { + ins.release(); + } if (timeoutIn != null) timeoutIn.setTimeout(timeout * 1000); @@ -805,10 +812,10 @@ private void checkConnectivity() throws IOException { ObjectIdSubclassMap<ObjectId> providedObjects = null; if (checkReferencedIsReachable) { - baseObjects = ip.getBaseObjectIds(); - providedObjects = ip.getNewObjectIds(); + baseObjects = parser.getBaseObjectIds(); + providedObjects = parser.getNewObjectIds(); } - ip = null; + parser = null; final ObjectWalk ow = new ObjectWalk(db); ow.setRetainBody(false); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkFetchConnection.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkFetchConnection.java index e789e6dcb..67637f4e6 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkFetchConnection.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkFetchConnection.java @@ -856,17 +856,16 @@ else if (tmpIdx.isFile()) { } void downloadPack(final ProgressMonitor monitor) throws IOException { - final WalkRemoteObjectDatabase.FileStream s; - final IndexPack ip; - - s = connection.open("pack/" + packName); - ip = IndexPack.create(local, s.in); - ip.setFixThin(false); - ip.setObjectChecker(objCheck); - ip.index(monitor); - final PackLock keep = ip.renameAndOpenPack(lockMessage); - if (keep != null) - packLocks.add(keep); + String name = "pack/" + packName; + WalkRemoteObjectDatabase.FileStream s = connection.open(name); + PackParser parser = inserter.newPackParser(s.in); + parser.setAllowThin(false); + parser.setObjectChecker(objCheck); + parser.setLockMessage(lockMessage); + PackLock lock = parser.parse(monitor); + if (lock != null) + packLocks.add(lock); + inserter.flush(); } } }
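
For readers tracing the API change, the sketch below shows how a caller might drive the new ObjectInserter/PackParser pair end to end, following the same call order this patch uses in ReceivePack.receivePack() and WalkFetchConnection.downloadPack(): obtain an inserter, create a parser over the raw pack stream, configure it, parse, flush, and release. This is an illustration only, not part of the patch; the class name ReceiveExample, the repo and packStream parameters, and the "example receive" lock message are placeholders, and only methods that appear in the diffs above are called.

import java.io.IOException;
import java.io.InputStream;

import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.transport.PackLock;
import org.eclipse.jgit.transport.PackParser;

class ReceiveExample {
    /** Parse a pack from packStream into repo; returns the pack lock, if any. */
    static PackLock receive(Repository repo, InputStream packStream)
            throws IOException {
        ObjectInserter ins = repo.newObjectInserter();
        try {
            PackParser parser = ins.newPackParser(packStream);
            parser.setAllowThin(true); // accept thin packs, as ReceivePack does
            parser.setObjectChecking(true); // validate each received object
            parser.setLockMessage("example receive"); // placeholder lock message
            PackLock lock = parser.parse(NullProgressMonitor.INSTANCE);
            ins.flush(); // make the parsed objects visible in the repository
            return lock; // may be null; callers unlock once refs are updated
        } finally {
            ins.release(); // always return the inserter, even on error
        }
    }
}

As in the ReceivePack hunk, flush() runs only after parse() succeeds, and release() sits in a finally block so the inserter is returned on both the success and failure paths.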