Use limited getCachedBytes code to reduce duplication

Rather than duplicating this block everywhere, reuse the size-limited
form of getCachedBytes to acquire the content of an object.

Change-Id: I2e26a823e6fd0964d8f8dbfaa0fc2e8834c179c1
Signed-off-by: Shawn O. Pearce <spearce@spearce.org>
Signed-off-by: Chris Aniszczyk <caniszczyk@gmail.com>
Author:    Shawn O. Pearce <spearce@spearce.org>, 2010-08-24 14:01:55 -07:00
Committer: Chris Aniszczyk <caniszczyk@gmail.com>
Parent:    2292655e9e
Commit:    c11711f98e
4 changed files with 12 additions and 83 deletions
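
To make the change concrete, here is a minimal sketch of the duplicated
pattern being deleted next to the consolidated call being adopted. The
CachedBytesSketch class and its method names are illustrative only; the
ObjectLoader and ObjectReader calls are the ones visible in the diffs below.

	import java.io.IOException;
	import java.io.InputStream;

	import org.eclipse.jgit.errors.LargeObjectException;
	import org.eclipse.jgit.lib.AnyObjectId;
	import org.eclipse.jgit.lib.ObjectLoader;
	import org.eclipse.jgit.lib.ObjectReader;
	import org.eclipse.jgit.util.IO;

	class CachedBytesSketch {
		// Before: each call site open-coded the large-object handling.
		static byte[] before(ObjectReader reader, AnyObjectId id, int limit)
				throws IOException {
			ObjectLoader ldr = reader.open(id);
			if (!ldr.isLarge())
				return ldr.getCachedBytes();

			long sz = ldr.getSize();
			if (limit <= sz || Integer.MAX_VALUE < sz)
				throw new LargeObjectException(id.copy());

			byte[] buf = new byte[(int) sz]; // may still fail with OutOfMemoryError
			InputStream in = ldr.openStream();
			try {
				IO.readFully(in, buf, 0, buf.length);
			} finally {
				in.close();
			}
			return buf;
		}

		// After: one call; ObjectLoader applies the same limit internally
		// and throws LargeObjectException when the object exceeds it.
		static byte[] after(ObjectReader reader, AnyObjectId id, int limit)
				throws IOException {
			return reader.open(id).getCachedBytes(limit);
		}
	}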


@@ -50,13 +50,11 @@
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.List;

 import org.eclipse.jgit.JGitText;
 import org.eclipse.jgit.errors.CorruptObjectException;
-import org.eclipse.jgit.errors.LargeObjectException;
 import org.eclipse.jgit.errors.MissingObjectException;
 import org.eclipse.jgit.lib.AbbreviatedObjectId;
 import org.eclipse.jgit.lib.Constants;
@@ -68,7 +66,6 @@
 import org.eclipse.jgit.patch.FileHeader;
 import org.eclipse.jgit.patch.HunkHeader;
 import org.eclipse.jgit.patch.FileHeader.PatchType;
-import org.eclipse.jgit.util.IO;
 import org.eclipse.jgit.util.QuotedString;
 import org.eclipse.jgit.util.io.DisabledOutputStream;
@@ -88,7 +85,7 @@ public class DiffFormatter {
 	private RawText.Factory rawTextFactory = RawText.FACTORY;

-	private long bigFileThreshold = 50 * 1024 * 1024;
+	private int bigFileThreshold = 50 * 1024 * 1024;

 	/**
 	 * Create a new formatter with a default level of context.
@@ -176,7 +173,7 @@ public void setRawTextFactory(RawText.Factory type) {
 	 * @param bigFileThreshold
 	 *            the limit, in bytes.
 	 */
-	public void setBigFileThreshold(long bigFileThreshold) {
+	public void setBigFileThreshold(int bigFileThreshold) {
 		this.bigFileThreshold = bigFileThreshold;
 	}
@@ -358,34 +355,8 @@ private byte[] open(FileMode mode, AbbreviatedObjectId id)
 		if (db == null)
 			throw new IllegalStateException(JGitText.get().repositoryIsRequired);

 		if (id.isComplete()) {
-			ObjectLoader ldr = db.open(id.toObjectId());
-			if (!ldr.isLarge())
-				return ldr.getCachedBytes();
-
-			long sz = ldr.getSize();
-			if (sz < bigFileThreshold && sz < Integer.MAX_VALUE) {
-				byte[] buf;
-				try {
-					buf = new byte[(int) sz];
-				} catch (OutOfMemoryError noMemory) {
-					LargeObjectException e;
-					e = new LargeObjectException(id.toObjectId());
-					e.initCause(noMemory);
-					throw e;
-				}
-
-				InputStream in = ldr.openStream();
-				try {
-					IO.readFully(in, buf, 0, buf.length);
-				} finally {
-					in.close();
-				}
-				return buf;
-			}
+			ObjectLoader ldr = db.open(id.toObjectId());
+			return ldr.getCachedBytes(bigFileThreshold);
 		}
 		return new byte[] {};
 	}

 	/**


@@ -57,7 +57,6 @@
 import org.eclipse.jgit.revwalk.RevTree;
 import org.eclipse.jgit.revwalk.RevWalk;
 import org.eclipse.jgit.treewalk.TreeWalk;
-import org.eclipse.jgit.util.IO;
 import org.eclipse.jgit.util.RawParseUtils;

 /**
@@ -117,17 +116,7 @@ private static byte[] read(ObjectReader or, AnyObjectId blobId)
 			throws MissingObjectException, IncorrectObjectTypeException,
 			IOException {
 		ObjectLoader loader = or.open(blobId, Constants.OBJ_BLOB);
-		if (loader.isLarge()) {
-			ObjectStream in = loader.openStream();
-			try {
-				byte[] buf = new byte[(int) in.getSize()];
-				IO.readFully(in, buf, 0, buf.length);
-				return buf;
-			} finally {
-				in.close();
-			}
-		}
-		return loader.getCachedBytes();
+		return loader.getCachedBytes(Integer.MAX_VALUE);
 	}

 	/**

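A note on the Integer.MAX_VALUE argument above: it acts as an effectively
unlimited bound, since a Java byte array can never hold more than
Integer.MAX_VALUE bytes. A caller sketch (the loader variable is the one
from the hunk above):

	// Any blob that fits in a single byte[] comes back whole; an object
	// that cannot fit raises LargeObjectException instead.
	byte[] raw = loader.getCachedBytes(Integer.MAX_VALUE);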

@@ -105,9 +105,9 @@ public class PackConfig {
 	/**
 	 * Default big file threshold: {@value}
 	 *
-	 * @see #setBigFileThreshold(long)
+	 * @see #setBigFileThreshold(int)
 	 */
-	public static final long DEFAULT_BIG_FILE_THRESHOLD = 50 * 1024 * 1024;
+	public static final int DEFAULT_BIG_FILE_THRESHOLD = 50 * 1024 * 1024;

 	/**
 	 * Default delta cache size: {@value}
@@ -151,7 +151,7 @@ public class PackConfig {
 	private int deltaCacheLimit = DEFAULT_DELTA_CACHE_LIMIT;

-	private long bigFileThreshold = DEFAULT_BIG_FILE_THRESHOLD;
+	private int bigFileThreshold = DEFAULT_BIG_FILE_THRESHOLD;

 	private int threads;
@@ -470,7 +470,7 @@ public void setDeltaCacheLimit(int size) {
 	 *
 	 * @return the configured big file threshold.
 	 */
-	public long getBigFileThreshold() {
+	public int getBigFileThreshold() {
 		return bigFileThreshold;
 	}
@@ -482,7 +482,7 @@ public long getBigFileThreshold() {
 	 * @param bigFileThreshold
 	 *            the limit, in bytes.
 	 */
-	public void setBigFileThreshold(long bigFileThreshold) {
+	public void setBigFileThreshold(int bigFileThreshold) {
 		this.bigFileThreshold = bigFileThreshold;
 	}
@@ -609,7 +609,7 @@ public void fromConfig(final Config rc) {
 		setCompressionLevel(rc.getInt("pack", "compression",
 				rc.getInt("core", "compression", getCompressionLevel())));
 		setIndexVersion(rc.getInt("pack", "indexversion", getIndexVersion()));
-		setBigFileThreshold(rc.getLong("core", "bigfilethreshold", getBigFileThreshold()));
+		setBigFileThreshold(rc.getInt("core", "bigfilethreshold", getBigFileThreshold()));
 		setThreads(rc.getInt("pack", "threads", getThreads()));

 		// These variables aren't standardized
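
Since fromConfig now reads core.bigfilethreshold with rc.getInt, the
threshold is capped at Integer.MAX_VALUE bytes (about 2 GiB). A small
round-trip sketch, assuming only the PackConfig API visible in this hunk;
the ThresholdSketch name is illustrative:

	import org.eclipse.jgit.lib.Config;
	import org.eclipse.jgit.storage.pack.PackConfig;

	class ThresholdSketch {
		static int thresholdOf(Config rc) {
			// fromConfig pulls core.bigfilethreshold via rc.getInt(...),
			// so the resulting threshold always fits in an int.
			PackConfig pc = new PackConfig();
			pc.fromConfig(rc);
			return pc.getBigFileThreshold();
		}
	}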


@@ -48,7 +48,6 @@
 import static org.eclipse.jgit.storage.pack.StoredObjectRepresentation.PACK_WHOLE;

 import java.io.IOException;
-import java.io.InputStream;
 import java.io.OutputStream;
 import java.security.MessageDigest;
 import java.util.ArrayList;
@@ -93,7 +92,6 @@
 import org.eclipse.jgit.revwalk.RevObject;
 import org.eclipse.jgit.revwalk.RevSort;
 import org.eclipse.jgit.storage.file.PackIndexWriter;
-import org.eclipse.jgit.util.IO;
 import org.eclipse.jgit.util.TemporaryBuffer;

 /**
@@ -957,41 +955,12 @@ private byte[] buffer(AnyObjectId objId) throws IOException {
 	static byte[] buffer(PackConfig config, ObjectReader or, AnyObjectId objId)
 			throws IOException {
-		ObjectLoader ldr = or.open(objId);
-		if (!ldr.isLarge())
-			return ldr.getCachedBytes();
-
 		// PackWriter should have already pruned objects that
 		// are above the big file threshold, so our chances of
 		// the object being below it are very good. We really
 		// shouldn't be here, unless the implementation is odd.

 		// If it really is too big to work with, abort out now.
 		//
-		long sz = ldr.getSize();
-		if (config.getBigFileThreshold() <= sz || Integer.MAX_VALUE < sz)
-			throw new LargeObjectException(objId.copy());
-
-		// Its considered to be large by the loader, but we really
-		// want it in byte array format. Try to make it happen.
-		//
-		byte[] buf;
-		try {
-			buf = new byte[(int) sz];
-		} catch (OutOfMemoryError noMemory) {
-			LargeObjectException e;
-			e = new LargeObjectException(objId.copy());
-			e.initCause(noMemory);
-			throw e;
-		}
-
-		InputStream in = ldr.openStream();
-		try {
-			IO.readFully(in, buf, 0, buf.length);
-		} finally {
-			in.close();
-		}
-		return buf;
+		return or.open(objId).getCachedBytes(config.getBigFileThreshold());
 	}

 	private Deflater deflater() {
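
With the explicit guard gone, the threshold comparison now happens inside
getCachedBytes(int), which throws LargeObjectException for objects over
the limit. A hypothetical caller that prefers to fall back to streaming
rather than fail could look like this (CopySketch and copyTo are
illustrative, not part of the commit):

	import java.io.IOException;
	import java.io.OutputStream;

	import org.eclipse.jgit.errors.LargeObjectException;
	import org.eclipse.jgit.lib.AnyObjectId;
	import org.eclipse.jgit.lib.ObjectReader;
	import org.eclipse.jgit.storage.pack.PackConfig;

	class CopySketch {
		static void copyTo(PackConfig config, ObjectReader or, AnyObjectId id,
				OutputStream out) throws IOException {
			try {
				// Small enough: buffer the whole object in memory.
				out.write(or.open(id).getCachedBytes(config.getBigFileThreshold()));
			} catch (LargeObjectException tooBig) {
				// Over the threshold: stream it instead of buffering.
				or.open(id).copyTo(out);
			}
		}
	}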