diff --git a/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/RepositoryTestCase.java b/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/RepositoryTestCase.java index 95fe18b83..987f923b0 100644 --- a/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/RepositoryTestCase.java +++ b/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/RepositoryTestCase.java @@ -349,7 +349,8 @@ public static String slashify(String str) { * younger modification timestamp than the modification timestamp of the * given file. This is done by touching a temporary file, reading the * lastmodified attribute and, if needed, sleeping. After sleeping this loop - * starts again until the filesystem timer has advanced enough. + * starts again until the filesystem timer has advanced enough. The + * temporary file will be created as a sibling of lastFile. * * @param lastFile * the file on which we want to wait until the filesystem timer @@ -362,21 +363,26 @@ public static String slashify(String str) { */ public static long fsTick(File lastFile) throws InterruptedException, IOException { - long sleepTime = 64; + File tmp; FS fs = FS.DETECTED; - if (lastFile != null && !fs.exists(lastFile)) - throw new FileNotFoundException(lastFile.getPath()); - File tmp = File.createTempFile("FileTreeIteratorWithTimeControl", null); + if (lastFile == null) { + lastFile = tmp = File + .createTempFile("fsTickTmpFile", null); + } else { + if (!fs.exists(lastFile)) { + throw new FileNotFoundException(lastFile.getPath()); + } + tmp = File.createTempFile("fsTickTmpFile", null, + lastFile.getParentFile()); + } + long res = FS.getFsTimerResolution(tmp.toPath()).toMillis(); + long sleepTime = res / 10; try { - long startTime = (lastFile == null) ? fs.lastModified(tmp) : fs - .lastModified(lastFile); + long startTime = fs.lastModified(lastFile); long actTime = fs.lastModified(tmp); while (actTime <= startTime) { Thread.sleep(sleepTime); - sleepTime *= 2; - try (FileOutputStream fos = new FileOutputStream(tmp)) { - // Do nothing - } + FileUtils.touch(tmp.toPath()); actTime = fs.lastModified(tmp); } return actTime; diff --git a/org.eclipse.jgit.lfs.server.test/BUILD b/org.eclipse.jgit.lfs.server.test/BUILD index 1341dd601..fb0d6918f 100644 --- a/org.eclipse.jgit.lfs.server.test/BUILD +++ b/org.eclipse.jgit.lfs.server.test/BUILD @@ -32,7 +32,7 @@ junit_tests( exclude = TEST_BASE, ), jvm_flags = [ - "-Xmx256m", + "-Xmx512m", "-Dfile.encoding=UTF-8", ], tags = ["lfs-server"], diff --git a/org.eclipse.jgit.pgm.test/BUILD b/org.eclipse.jgit.pgm.test/BUILD index 5bedf9ade..f32fa12de 100644 --- a/org.eclipse.jgit.pgm.test/BUILD +++ b/org.eclipse.jgit.pgm.test/BUILD @@ -7,7 +7,7 @@ junit_tests( name = "pgm", srcs = glob(["tst/**/*.java"]), jvm_flags = [ - "-Xmx256m", + "-Xmx512m", "-Dfile.encoding=UTF-8", ], tags = ["pgm"], diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/FileSnapshotTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/FileSnapshotTest.java index 9ceaa345d..5ebdeb6e8 100644 --- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/FileSnapshotTest.java +++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/FileSnapshotTest.java @@ -42,16 +42,22 @@ */ package org.eclipse.jgit.internal.storage.file; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.StandardCopyOption; +import 
java.nio.file.attribute.FileTime; import java.util.ArrayList; import java.util.List; import org.eclipse.jgit.util.FileUtils; +import org.eclipse.jgit.util.SystemReader; import org.junit.After; +import org.junit.Assume; import org.junit.Before; import org.junit.Test; @@ -121,12 +127,59 @@ public void testNewFileWithWait() throws Exception { @Test public void testNewFileNoWait() throws Exception { File f1 = createFile("newfile"); - waitNextSec(f1); FileSnapshot save = FileSnapshot.save(f1); - Thread.sleep(1500); assertTrue(save.isModified(f1)); } + /** + * Simulate packfile replacement in same file which may occur if set of + * objects in the pack is the same but pack config was different. On Posix + * filesystems this should change the inode (filekey in java.nio + * terminology). + * + * @throws Exception + */ + @Test + public void testSimulatePackfileReplacement() throws Exception { + Assume.assumeFalse(SystemReader.getInstance().isWindows()); + File f1 = createFile("file"); // inode y + File f2 = createFile("fool"); // Guarantees new inode x + // wait on f2 since this method resets lastModified of the file + // and leaves lastModified of f1 untouched + waitNextSec(f2); + waitNextSec(f2); + FileTime timestamp = Files.getLastModifiedTime(f1.toPath()); + FileSnapshot save = FileSnapshot.save(f1); + Files.move(f2.toPath(), f1.toPath(), // Now "file" is inode x + StandardCopyOption.REPLACE_EXISTING, + StandardCopyOption.ATOMIC_MOVE); + Files.setLastModifiedTime(f1.toPath(), timestamp); + assertTrue(save.isModified(f1)); + assertTrue("unexpected change of fileKey", save.wasFileKeyChanged()); + assertFalse("unexpected size change", save.wasSizeChanged()); + assertFalse("unexpected lastModified change", + save.wasLastModifiedChanged()); + assertFalse("lastModified was unexpectedly racily clean", + save.wasLastModifiedRacilyClean()); + } + + /** + * Append a character to a file to change its size and set original + * lastModified + * + * @throws Exception + */ + @Test + public void testFileSizeChanged() throws Exception { + File f = createFile("file"); + FileTime timestamp = Files.getLastModifiedTime(f.toPath()); + FileSnapshot save = FileSnapshot.save(f); + append(f, (byte) 'x'); + Files.setLastModifiedTime(f.toPath(), timestamp); + assertTrue(save.isModified(f)); + assertTrue(save.wasSizeChanged()); + } + private File createFile(String string) throws IOException { trash.mkdirs(); File f = File.createTempFile(string, "tdat", trash); diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackFileSnapshotTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackFileSnapshotTest.java new file mode 100644 index 000000000..a1433e9fe --- /dev/null +++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackFileSnapshotTest.java @@ -0,0 +1,354 @@ +/* + * Copyright (C) 2019, Matthias Sohn + * and other copyright owners as documented in the project's IP log. + * + * This program and the accompanying materials are made available + * under the terms of the Eclipse Distribution License v1.0 which + * accompanies this distribution, is reproduced below, and is + * available at http://www.eclipse.org/org/documents/edl-v10.php + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or + * without modification, are permitted provided that the following + * conditions are met: + * + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. 
+ * + * - Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * + * - Neither the name of the Eclipse Foundation, Inc. nor the + * names of its contributors may be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +package org.eclipse.jgit.internal.storage.file; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeFalse; +import static org.junit.Assume.assumeTrue; + +import java.io.File; +import java.io.IOException; +import java.io.OutputStream; +import java.io.Writer; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.nio.file.StandardOpenOption; +//import java.nio.file.attribute.BasicFileAttributes; +import java.text.ParseException; +import java.util.Collection; +import java.util.Iterator; +import java.util.Random; +import java.util.zip.Deflater; + +import org.eclipse.jgit.api.GarbageCollectCommand; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.errors.AbortedByHookException; +import org.eclipse.jgit.api.errors.ConcurrentRefUpdateException; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.eclipse.jgit.api.errors.NoFilepatternException; +import org.eclipse.jgit.api.errors.NoHeadException; +import org.eclipse.jgit.api.errors.NoMessageException; +import org.eclipse.jgit.api.errors.UnmergedPathsException; +import org.eclipse.jgit.api.errors.WrongRepositoryStateException; +import org.eclipse.jgit.junit.RepositoryTestCase; +import org.eclipse.jgit.lib.AnyObjectId; +import org.eclipse.jgit.lib.ConfigConstants; +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.storage.file.FileBasedConfig; +import org.eclipse.jgit.storage.pack.PackConfig; +import org.junit.Test; + +public class PackFileSnapshotTest extends RepositoryTestCase { + + private static ObjectId unknownID = ObjectId + .fromString("1234567890123456789012345678901234567890"); + + @Test + public void testSamePackDifferentCompressionDetectChecksumChanged() + throws Exception { + Git git = Git.wrap(db); + File f = writeTrashFile("file", "foobar "); + for (int i = 0; i < 10; i++) { + appendRandomLine(f); + git.add().addFilepattern("file").call(); + git.commit().setMessage("message" + i).call(); + } + + FileBasedConfig c = db.getConfig(); + c.setInt(ConfigConstants.CONFIG_GC_SECTION, null, + ConfigConstants.CONFIG_KEY_AUTOPACKLIMIT, 1); + c.save(); + Collection 
packs = gc(Deflater.NO_COMPRESSION); + assertEquals("expected 1 packfile after gc", 1, packs.size()); + PackFile p1 = packs.iterator().next(); + PackFileSnapshot snapshot = p1.getFileSnapshot(); + + packs = gc(Deflater.BEST_COMPRESSION); + assertEquals("expected 1 packfile after gc", 1, packs.size()); + PackFile p2 = packs.iterator().next(); + File pf = p2.getPackFile(); + + // changing compression level with aggressive gc may change size, + // fileKey (on *nix) and checksum. Hence FileSnapshot.isModified can + // return true already based on size or fileKey. + // So the only thing we can test here is that we ensure that checksum + // also changed when we read it here in this test + assertTrue("expected snapshot to detect modified pack", + snapshot.isModified(pf)); + assertTrue("expected checksum changed", snapshot.isChecksumChanged(pf)); + } + + private void appendRandomLine(File f, int length, Random r) + throws IOException { + try (Writer w = Files.newBufferedWriter(f.toPath(), + StandardOpenOption.APPEND)) { + appendRandomLine(w, length, r); + } + } + + private void appendRandomLine(File f) throws IOException { + appendRandomLine(f, 5, new Random()); + } + + private void appendRandomLine(Writer w, int len, Random r) + throws IOException { + final int c1 = 32; // ' ' + int c2 = 126; // '~' + for (int i = 0; i < len; i++) { + w.append((char) (c1 + r.nextInt(1 + c2 - c1))); + } + } + + private ObjectId createTestRepo(int testDataSeed, int testDataLength) + throws IOException, GitAPIException, NoFilepatternException, + NoHeadException, NoMessageException, UnmergedPathsException, + ConcurrentRefUpdateException, WrongRepositoryStateException, + AbortedByHookException { + // Create a repo with two commits and one file. Each commit adds + // testDataLength number of bytes. Data are random bytes. Since the + // seed for the random number generator is specified we will get + // the same set of bytes for every run and for every platform + Random r = new Random(testDataSeed); + Git git = Git.wrap(db); + File f = writeTrashFile("file", "foobar "); + appendRandomLine(f, testDataLength, r); + git.add().addFilepattern("file").call(); + git.commit().setMessage("message1").call(); + appendRandomLine(f, testDataLength, r); + git.add().addFilepattern("file").call(); + return git.commit().setMessage("message2").call().getId(); + } + + // Try repacking so fast that you get two new packs which differ only in + // content/chksum but have same name, size and lastmodified. + // Since this is done with standard gc (which creates new tmp files and + // renames them) the filekeys of the new packfiles differ helping jgit + // to detect the fast modification + @Test + public void testDetectModificationAlthoughSameSizeAndModificationtime() + throws Exception { + int testDataSeed = 1; + int testDataLength = 100; + FileBasedConfig config = db.getConfig(); + // don't use mtime of the parent folder to detect pack file + // modification. 
+ config.setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, + ConfigConstants.CONFIG_KEY_TRUSTFOLDERSTAT, false); + config.save(); + + createTestRepo(testDataSeed, testDataLength); + + // repack to create initial packfile + PackFile pf = repackAndCheck(5, null, null, null); + Path packFilePath = pf.getPackFile().toPath(); + AnyObjectId chk1 = pf.getPackChecksum(); + String name = pf.getPackName(); + Long length = Long.valueOf(pf.getPackFile().length()); + long m1 = packFilePath.toFile().lastModified(); + + // Wait for a filesystem timer tick to enhance probability the rest of + // this test is done before the filesystem timer ticks again. + fsTick(packFilePath.toFile()); + + // Repack to create packfile with same name, length. Lastmodified and + // content and checksum are different since compression level differs + AnyObjectId chk2 = repackAndCheck(6, name, length, chk1) + .getPackChecksum(); + long m2 = packFilePath.toFile().lastModified(); + assumeFalse(m2 == m1); + + // Repack to create packfile with same name, length. Lastmodified is + // equal to the previous one because we are in the same filesystem timer + // slot. Content and its checksum are different + AnyObjectId chk3 = repackAndCheck(7, name, length, chk2) + .getPackChecksum(); + long m3 = packFilePath.toFile().lastModified(); + + // ask for an unknown git object to force jgit to rescan the list of + // available packs. If we would ask for a known objectid then JGit would + // skip searching for new/modified packfiles + db.getObjectDatabase().has(unknownID); + assertEquals(chk3, getSinglePack(db.getObjectDatabase().getPacks()) + .getPackChecksum()); + assumeTrue(m3 == m2); + } + + // Try repacking so fast that we get two new packs which differ only in + // content and checksum but have same name, size and lastmodified. + // To avoid that JGit detects modification by checking the filekey create + // two new packfiles upfront and create copies of them. Then modify the + // packfiles in-place by opening them for write and then copying the + // content. + @Test + public void testDetectModificationAlthoughSameSizeAndModificationtimeAndFileKey() + throws Exception { + int testDataSeed = 1; + int testDataLength = 100; + FileBasedConfig config = db.getConfig(); + config.setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, + ConfigConstants.CONFIG_KEY_TRUSTFOLDERSTAT, false); + config.save(); + + createTestRepo(testDataSeed, testDataLength); + + // Repack to create initial packfile. Make a copy of it + PackFile pf = repackAndCheck(5, null, null, null); + Path packFilePath = pf.getPackFile().toPath(); + Path packFileBasePath = packFilePath.resolveSibling( + packFilePath.getFileName().toString().replaceAll(".pack", "")); + AnyObjectId chk1 = pf.getPackChecksum(); + String name = pf.getPackName(); + Long length = Long.valueOf(pf.getPackFile().length()); + copyPack(packFileBasePath, "", ".copy1"); + + // Repack to create second packfile. Make a copy of it + AnyObjectId chk2 = repackAndCheck(6, name, length, chk1) + .getPackChecksum(); + copyPack(packFileBasePath, "", ".copy2"); + + // Repack to create third packfile + AnyObjectId chk3 = repackAndCheck(7, name, length, chk2) + .getPackChecksum(); + long m3 = packFilePath.toFile().lastModified(); + db.getObjectDatabase().has(unknownID); + assertEquals(chk3, getSinglePack(db.getObjectDatabase().getPacks()) + .getPackChecksum()); + + // Wait for a filesystem timer tick to enhance probability the rest of + // this test is done before the filesystem timer ticks. 
+ fsTick(packFilePath.toFile()); + + // Copy copy2 to packfile data to force modification of packfile without + // changing the packfile's filekey. + copyPack(packFileBasePath, ".copy2", ""); + long m2 = packFilePath.toFile().lastModified(); + assumeFalse(m3 == m2); + + db.getObjectDatabase().has(unknownID); + assertEquals(chk2, getSinglePack(db.getObjectDatabase().getPacks()) + .getPackChecksum()); + + // Copy copy1 to packfile data to force modification of packfile without + // changing the packfile's filekey. + copyPack(packFileBasePath, ".copy1", ""); + long m1 = packFilePath.toFile().lastModified(); + assumeTrue(m2 == m1); + db.getObjectDatabase().has(unknownID); + assertEquals(chk1, getSinglePack(db.getObjectDatabase().getPacks()) + .getPackChecksum()); + } + + // Copy file from src to dst but avoid creating a new File (with new + // FileKey) if dst already exists + private Path copyFile(Path src, Path dst) throws IOException { + if (Files.exists(dst)) { + dst.toFile().setWritable(true); + try (OutputStream dstOut = Files.newOutputStream(dst)) { + Files.copy(src, dstOut); + return dst; + } + } else { + return Files.copy(src, dst, StandardCopyOption.REPLACE_EXISTING); + } + } + + private Path copyPack(Path base, String srcSuffix, String dstSuffix) + throws IOException { + copyFile(Paths.get(base + ".idx" + srcSuffix), + Paths.get(base + ".idx" + dstSuffix)); + copyFile(Paths.get(base + ".bitmap" + srcSuffix), + Paths.get(base + ".bitmap" + dstSuffix)); + return copyFile(Paths.get(base + ".pack" + srcSuffix), + Paths.get(base + ".pack" + dstSuffix)); + } + + private PackFile repackAndCheck(int compressionLevel, String oldName, + Long oldLength, AnyObjectId oldChkSum) + throws IOException, ParseException { + PackFile p = getSinglePack(gc(compressionLevel)); + File pf = p.getPackFile(); + // The following two assumptions should not cause the test to fail. If + // on a certain platform we get packfiles (containing the same git + // objects) where the lengths differ or the checksums don't differ we + // just skip this test. A reason for that could be that compression + // works differently or random number generator works differently. 
Then + // we have to search for more consistent test data or checkin these + // packfiles as test resources + assumeTrue(oldLength == null || pf.length() == oldLength.longValue()); + assumeTrue(oldChkSum == null || !p.getPackChecksum().equals(oldChkSum)); + assertTrue(oldName == null || p.getPackName().equals(oldName)); + return p; + } + + private PackFile getSinglePack(Collection packs) { + Iterator pIt = packs.iterator(); + PackFile p = pIt.next(); + assertFalse(pIt.hasNext()); + return p; + } + + private Collection gc(int compressionLevel) + throws IOException, ParseException { + GC gc = new GC(db); + PackConfig pc = new PackConfig(db.getConfig()); + pc.setCompressionLevel(compressionLevel); + + pc.setSinglePack(true); + + // --aggressive + pc.setDeltaSearchWindowSize( + GarbageCollectCommand.DEFAULT_GC_AGGRESSIVE_WINDOW); + pc.setMaxDeltaDepth(GarbageCollectCommand.DEFAULT_GC_AGGRESSIVE_DEPTH); + pc.setReuseObjects(false); + + gc.setPackConfig(pc); + gc.setExpireAgeMillis(0); + gc.setPackExpireAgeMillis(0); + return gc.gc(); + } + +} diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/FSTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/FSTest.java index 2c8273d03..59c8e31c0 100644 --- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/FSTest.java +++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/FSTest.java @@ -52,9 +52,16 @@ import java.io.IOException; import java.nio.charset.Charset; import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.FileTime; import java.nio.file.attribute.PosixFileAttributeView; import java.nio.file.attribute.PosixFilePermission; +import java.time.Duration; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.util.Locale; import java.util.Set; +import java.util.concurrent.TimeUnit; import org.eclipse.jgit.errors.CommandFailedException; import org.eclipse.jgit.junit.RepositoryTestCase; @@ -186,4 +193,34 @@ public void testReadPipeCommandStartFailure() new String[] { "this-command-does-not-exist" }, Charset.defaultCharset().name()); } + + @Test + public void testFsTimestampResolution() throws Exception { + DateTimeFormatter formatter = DateTimeFormatter + .ofPattern("uuuu-MMM-dd HH:mm:ss.nnnnnnnnn", Locale.ENGLISH) + .withZone(ZoneId.systemDefault()); + Path dir = Files.createTempDirectory("probe-filesystem"); + Duration resolution = FS.getFsTimerResolution(dir); + long resolutionNs = resolution.toNanos(); + assertTrue(resolutionNs > 0); + for (int i = 0; i < 10; i++) { + Path f = null; + try { + f = dir.resolve("testTimestampResolution" + i); + Files.createFile(f); + FileUtils.touch(f); + FileTime t1 = Files.getLastModifiedTime(f); + TimeUnit.NANOSECONDS.sleep(resolutionNs); + FileUtils.touch(f); + FileTime t2 = Files.getLastModifiedTime(f); + assertTrue(String.format( + "expected t2=%s to be larger than t1=%s\nsince file timestamp resolution was measured to be %,d ns", + formatter.format(t2.toInstant()), + formatter.format(t1.toInstant()), + Long.valueOf(resolutionNs)), t2.compareTo(t1) > 0); + } finally { + Files.delete(f); + } + } + } } diff --git a/org.eclipse.jgit/.settings/.api_filters b/org.eclipse.jgit/.settings/.api_filters index 0e7a71b67..4131901d7 100644 --- a/org.eclipse.jgit/.settings/.api_filters +++ b/org.eclipse.jgit/.settings/.api_filters @@ -22,6 +22,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -43,5 +99,19 @@ + + + + + + + + + + + + + + diff --git 
a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileSnapshot.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileSnapshot.java index f26eba336..1de313500 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileSnapshot.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileSnapshot.java @@ -48,9 +48,13 @@ import java.nio.file.attribute.BasicFileAttributes; import java.text.DateFormat; import java.text.SimpleDateFormat; +import java.time.Duration; import java.util.Date; import java.util.Locale; +import java.util.Objects; +import java.util.concurrent.TimeUnit; +import org.eclipse.jgit.annotations.NonNull; import org.eclipse.jgit.util.FS; /** @@ -77,6 +81,8 @@ public class FileSnapshot { */ public static final long UNKNOWN_SIZE = -1; + private static final Object MISSING_FILEKEY = new Object(); + /** * A FileSnapshot that is considered to always be modified. *

@@ -84,7 +90,8 @@ public class FileSnapshot { * file, but only after {@link #isModified(File)} gets invoked. The returned * snapshot contains only invalid status information. */ - public static final FileSnapshot DIRTY = new FileSnapshot(-1, -1, UNKNOWN_SIZE); + public static final FileSnapshot DIRTY = new FileSnapshot(-1, -1, + UNKNOWN_SIZE, Duration.ZERO, MISSING_FILEKEY); /** * A FileSnapshot that is clean if the file does not exist. @@ -93,7 +100,8 @@ public class FileSnapshot { * file to be clean. {@link #isModified(File)} will return false if the file * path does not exist. */ - public static final FileSnapshot MISSING_FILE = new FileSnapshot(0, 0, 0) { + public static final FileSnapshot MISSING_FILE = new FileSnapshot(0, 0, 0, + Duration.ZERO, MISSING_FILEKEY) { @Override public boolean isModified(File path) { return FS.DETECTED.exists(path); @@ -111,18 +119,12 @@ public boolean isModified(File path) { * @return the snapshot. */ public static FileSnapshot save(File path) { - long read = System.currentTimeMillis(); - long modified; - long size; - try { - BasicFileAttributes fileAttributes = FS.DETECTED.fileAttributes(path); - modified = fileAttributes.lastModifiedTime().toMillis(); - size = fileAttributes.size(); - } catch (IOException e) { - modified = path.lastModified(); - size = path.length(); - } - return new FileSnapshot(read, modified, size); + return new FileSnapshot(path); + } + + private static Object getFileKey(BasicFileAttributes fileAttributes) { + Object fileKey = fileAttributes.fileKey(); + return fileKey == null ? MISSING_FILEKEY : fileKey; } /** @@ -130,6 +132,11 @@ public static FileSnapshot save(File path) { * already known. *

* This method should be invoked before the file is accessed. + *

+ * Note that this method cannot rely on measuring file timestamp resolution + * to avoid racy git issues caused by finite file timestamp resolution, + * since it is unknown on which filesystem the file is located. Hence the + * worst-case fallback for timestamp resolution is used. * * @param modified * the last modification time of the file @@ -137,7 +144,8 @@ */ public static FileSnapshot save(long modified) { final long read = System.currentTimeMillis(); - return new FileSnapshot(read, modified, -1); + return new FileSnapshot(read, modified, -1, Duration.ZERO, + MISSING_FILEKEY); } /** Last observed modification time of the path. */ @@ -154,11 +162,57 @@ public static FileSnapshot save(long modified) { * When set to {@link #UNKNOWN_SIZE} the size is not considered for modification checks. */ private final long size; - private FileSnapshot(long read, long modified, long size) { + /** measured filesystem timestamp resolution */ + private Duration fsTimestampResolution; + + /** + * Object that uniquely identifies the given file, or {@code + * null} if a file key is not available + */ + private final Object fileKey; + + /** + * Record a snapshot for a specific file path. + *

+ * This method should be invoked before the file is accessed. + * + * @param path + * the path to later remember. The path's current status + * information is saved. + */ + protected FileSnapshot(File path) { + this.lastRead = System.currentTimeMillis(); + this.fsTimestampResolution = FS + .getFsTimerResolution(path.toPath().getParent()); + BasicFileAttributes fileAttributes = null; + try { + fileAttributes = FS.DETECTED.fileAttributes(path); + } catch (IOException e) { + this.lastModified = path.lastModified(); + this.size = path.length(); + this.fileKey = MISSING_FILEKEY; + return; + } + this.lastModified = fileAttributes.lastModifiedTime().toMillis(); + this.size = fileAttributes.size(); + this.fileKey = getFileKey(fileAttributes); + } + + private boolean sizeChanged; + + private boolean fileKeyChanged; + + private boolean lastModifiedChanged; + + private boolean wasRacyClean; + + private FileSnapshot(long read, long modified, long size, + @NonNull Duration fsTimestampResolution, @NonNull Object fileKey) { this.lastRead = read; this.lastModified = modified; - this.cannotBeRacilyClean = notRacyClean(read); + this.fsTimestampResolution = fsTimestampResolution; this.size = size; + this.fileKey = fileKey; } /** @@ -187,15 +241,30 @@ public long size() { public boolean isModified(File path) { long currLastModified; long currSize; + Object currFileKey; try { BasicFileAttributes fileAttributes = FS.DETECTED.fileAttributes(path); currLastModified = fileAttributes.lastModifiedTime().toMillis(); currSize = fileAttributes.size(); + currFileKey = getFileKey(fileAttributes); } catch (IOException e) { currLastModified = path.lastModified(); currSize = path.length(); + currFileKey = MISSING_FILEKEY; } - return (currSize != UNKNOWN_SIZE && currSize != size) || isModified(currLastModified); + sizeChanged = isSizeChanged(currSize); + if (sizeChanged) { + return true; + } + fileKeyChanged = isFileKeyChanged(currFileKey); + if (fileKeyChanged) { + return true; + } + lastModifiedChanged = isModified(currLastModified); + if (lastModifiedChanged) { + return true; + } + return false; } /** @@ -222,11 +291,25 @@ public boolean isModified(File path) { */ public void setClean(FileSnapshot other) { final long now = other.lastRead; - if (notRacyClean(now)) + if (!isRacyClean(now)) { cannotBeRacilyClean = true; + } lastRead = now; } + /** + * Wait until this snapshot's file can't be racy anymore + * + * @throws InterruptedException + * if sleep was interrupted + */ + public void waitUntilNotRacy() throws InterruptedException { + while (isRacyClean(System.currentTimeMillis())) { + TimeUnit.NANOSECONDS + .sleep((fsTimestampResolution.toNanos() + 1) * 11 / 10); + } + } + /** * Compare two snapshots to see if they cache the same information. * @@ -235,72 +318,120 @@ public void setClean(FileSnapshot other) { * @return true if the two snapshots share the same information. 
*/ public boolean equals(FileSnapshot other) { - return lastModified == other.lastModified; + return lastModified == other.lastModified && size == other.size + && Objects.equals(fileKey, other.fileKey); } /** {@inheritDoc} */ @Override - public boolean equals(Object other) { - if (other instanceof FileSnapshot) - return equals((FileSnapshot) other); - return false; + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof FileSnapshot)) { + return false; + } + FileSnapshot other = (FileSnapshot) obj; + return equals(other); } /** {@inheritDoc} */ @Override public int hashCode() { - // This is pretty pointless, but override hashCode to ensure that - // x.hashCode() == y.hashCode() when x.equals(y) is true. - // - return (int) lastModified; + return Objects.hash(Long.valueOf(lastModified), Long.valueOf(size), + fileKey); + } + + /** + * @return {@code true} if FileSnapshot.isModified(File) found the file size + * changed + */ + boolean wasSizeChanged() { + return sizeChanged; + } + + /** + * @return {@code true} if FileSnapshot.isModified(File) found the file key + * changed + */ + boolean wasFileKeyChanged() { + return fileKeyChanged; + } + + /** + * @return {@code true} if FileSnapshot.isModified(File) found the file's + * lastModified changed + */ + boolean wasLastModifiedChanged() { + return lastModifiedChanged; + } + + /** + * @return {@code true} if FileSnapshot.isModified(File) detected that + * lastModified is racily clean + */ + boolean wasLastModifiedRacilyClean() { + return wasRacyClean; } /** {@inheritDoc} */ + @SuppressWarnings("nls") @Override public String toString() { - if (this == DIRTY) - return "DIRTY"; //$NON-NLS-1$ - if (this == MISSING_FILE) - return "MISSING_FILE"; //$NON-NLS-1$ - DateFormat f = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", //$NON-NLS-1$ + if (this == DIRTY) { + return "DIRTY"; + } + if (this == MISSING_FILE) { + return "MISSING_FILE"; + } + DateFormat f = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", Locale.US); - return "FileSnapshot[modified: " + f.format(new Date(lastModified)) //$NON-NLS-1$ - + ", read: " + f.format(new Date(lastRead)) + "]"; //$NON-NLS-1$ //$NON-NLS-2$ + return "FileSnapshot[modified: " + f.format(new Date(lastModified)) + + ", read: " + f.format(new Date(lastRead)) + ", size:" + size + + ", fileKey: " + fileKey + "]"; } - private boolean notRacyClean(long read) { - // The last modified time granularity of FAT filesystems is 2 seconds. - // Using 2.5 seconds here provides a reasonably high assurance that - // a modification was not missed. - // - return read - lastModified > 2500; + private boolean isRacyClean(long read) { + // add a 10% safety margin + long racyNanos = (fsTimestampResolution.toNanos() + 1) * 11 / 10; + return wasRacyClean = (read - lastModified) * 1_000_000 <= racyNanos; } private boolean isModified(long currLastModified) { // Any difference indicates the path was modified. - // - if (lastModified != currLastModified) + + lastModifiedChanged = lastModified != currLastModified; + if (lastModifiedChanged) { return true; + } // We have already determined the last read was far enough // after the last modification that any new modifications // are certain to change the last modified time. 
- // - if (cannotBeRacilyClean) + if (cannotBeRacilyClean) { return false; - - if (notRacyClean(lastRead)) { + } + if (!isRacyClean(lastRead)) { // Our last read should have marked cannotBeRacilyClean, // but this thread may not have seen the change. The read // of the volatile field lastRead should have fixed that. - // return false; } // We last read this path too close to its last observed // modification time. We may have missed a modification. // Scan again, to ensure we still see the same state. - // return true; } + + private boolean isFileKeyChanged(Object currFileKey) { + return currFileKey != MISSING_FILEKEY && !currFileKey.equals(fileKey); + } + + private boolean isSizeChanged(long currSize) { + return currSize != UNKNOWN_SIZE && currSize != size; + } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java index e1ba13004..3c830e88c 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java @@ -178,7 +178,7 @@ public static void setExecutor(ExecutorService e) { private Date packExpire; - private PackConfig pconfig = null; + private PackConfig pconfig; /** * the refs which existed during the last call to {@link #repack()}. This is @@ -214,6 +214,7 @@ public static void setExecutor(ExecutorService e) { */ public GC(FileRepository repo) { this.repo = repo; + this.pconfig = new PackConfig(repo); this.pm = NullProgressMonitor.INSTANCE; } @@ -398,7 +399,7 @@ private void deleteOldPacks(Collection oldPacks, */ private void removeOldPack(File packFile, String packName, PackExt ext, int deleteOptions) throws IOException { - if (pconfig != null && pconfig.isPreserveOldPacks()) { + if (pconfig.isPreserveOldPacks()) { File oldPackDir = repo.getObjectDatabase().getPreservedDirectory(); FileUtils.mkdir(oldPackDir, true); @@ -414,7 +415,7 @@ private void removeOldPack(File packFile, String packName, PackExt ext, * Delete the preserved directory including all pack files within */ private void prunePreserved() { - if (pconfig != null && pconfig.isPrunePreserved()) { + if (pconfig.isPrunePreserved()) { try { FileUtils.delete(repo.getObjectDatabase().getPreservedDirectory(), FileUtils.RECURSIVE | FileUtils.RETRY | FileUtils.SKIP_MISSING); @@ -856,7 +857,7 @@ public Collection repack() throws IOException { nonHeads.addAll(indexObjects); // Combine the GC_REST objects into the GC pack if requested - if (pconfig != null && pconfig.getSinglePack()) { + if (pconfig.getSinglePack()) { allHeadsAndTags.addAll(nonHeads); nonHeads.clear(); } @@ -1159,7 +1160,7 @@ private PackFile writePack(@NonNull Set want, return Integer.signum(o1.hashCode() - o2.hashCode()); }); try (PackWriter pw = new PackWriter( - (pconfig == null) ? 
new PackConfig(repo) : pconfig, + pconfig, repo.newObjectReader())) { // prepare the PackWriter pw.setDeltaBaseAsOffset(true); @@ -1255,8 +1256,23 @@ private PackFile writePack(@NonNull Set want, realExt), e); } } - - return repo.getObjectDatabase().openPack(realPack); + boolean interrupted = false; + try { + FileSnapshot snapshot = FileSnapshot.save(realPack); + if (pconfig.doWaitPreventRacyPack(snapshot.size())) { + snapshot.waitUntilNotRacy(); + } + } catch (InterruptedException e) { + interrupted = true; + } + try { + return repo.getObjectDatabase().openPack(realPack); + } finally { + if (interrupted) { + // Re-set interrupted flag + Thread.currentThread().interrupt(); + } + } } finally { if (tmpPack != null && tmpPack.exists()) tmpPack.delete(); @@ -1434,7 +1450,7 @@ public void setPackExpireAgeMillis(long packExpireAgeMillis) { * the {@link org.eclipse.jgit.storage.pack.PackConfig} used when * writing packs */ - public void setPackConfig(PackConfig pconfig) { + public void setPackConfig(@NonNull PackConfig pconfig) { this.pconfig = pconfig; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java index b3af54581..35522667e 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java @@ -911,9 +911,10 @@ private PackList scanPacksImpl(PackList old) { final String packName = base + PACK.getExtension(); final File packFile = new File(packDirectory, packName); - final PackFile oldPack = forReuse.remove(packName); + final PackFile oldPack = forReuse.get(packName); if (oldPack != null && !oldPack.getFileSnapshot().isModified(packFile)) { + forReuse.remove(packName); list.add(oldPack); continue; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectoryPackParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectoryPackParser.java index 0cec2d5a8..6e8a15e86 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectoryPackParser.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectoryPackParser.java @@ -65,6 +65,7 @@ import org.eclipse.jgit.lib.CoreConfig; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ProgressMonitor; +import org.eclipse.jgit.storage.pack.PackConfig; import org.eclipse.jgit.transport.PackParser; import org.eclipse.jgit.transport.PackedObjectInfo; import org.eclipse.jgit.util.FileUtils; @@ -122,9 +123,12 @@ public class ObjectDirectoryPackParser extends PackParser { /** The pack that was created, if parsing was successful. 
*/ private PackFile newPack; + private PackConfig pconfig; + ObjectDirectoryPackParser(FileObjectDatabase odb, InputStream src) { super(odb, src); this.db = odb; + this.pconfig = new PackConfig(odb.getConfig()); this.crc = new CRC32(); this.tailDigest = Constants.newMessageDigest(); @@ -514,6 +518,15 @@ private PackLock renameAndOpenPack(String lockMessage) JGitText.get().cannotMoveIndexTo, finalIdx), e); } + boolean interrupted = false; + try { + FileSnapshot snapshot = FileSnapshot.save(finalPack); + if (pconfig.doWaitPreventRacyPack(snapshot.size())) { + snapshot.waitUntilNotRacy(); + } + } catch (InterruptedException e) { + interrupted = true; + } try { newPack = db.openPack(finalPack); } catch (IOException err) { @@ -523,6 +536,11 @@ private PackLock renameAndOpenPack(String lockMessage) if (finalIdx.exists()) FileUtils.delete(finalIdx); throw err; + } finally { + if (interrupted) { + // Re-set interrupted flag + Thread.currentThread().interrupt(); + } } return lockMessage != null ? keep : null; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java index d834b1a72..73ad38c95 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java @@ -93,6 +93,8 @@ import org.eclipse.jgit.util.LongList; import org.eclipse.jgit.util.NB; import org.eclipse.jgit.util.RawParseUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A Git version 2 pack file representation. A pack file contains Git objects in @@ -100,6 +102,7 @@ * objects are similar. */ public class PackFile implements Iterable { + private final static Logger LOG = LoggerFactory.getLogger(PackFile.class); /** Sorts PackFiles to be most recently created to least recently created. */ public static final Comparator SORT = new Comparator() { @Override @@ -131,7 +134,7 @@ public int compare(PackFile a, PackFile b) { int packLastModified; - private FileSnapshot fileSnapshot; + private PackFileSnapshot fileSnapshot; private volatile boolean invalid; @@ -168,7 +171,7 @@ public int compare(PackFile a, PackFile b) { */ public PackFile(File packFile, int extensions) { this.packFile = packFile; - this.fileSnapshot = FileSnapshot.save(packFile); + this.fileSnapshot = PackFileSnapshot.save(packFile); this.packLastModified = (int) (fileSnapshot.lastModified() >> 10); this.extensions = extensions; @@ -189,10 +192,22 @@ private PackIndex idx() throws IOException { throw new PackInvalidException(packFile, invalidatingCause); } try { + long start = System.currentTimeMillis(); idx = PackIndex.open(extFile(INDEX)); + if (LOG.isDebugEnabled()) { + LOG.debug(String.format( + "Opening pack index %s, size %.3f MB took %d ms", //$NON-NLS-1$ + extFile(INDEX).getAbsolutePath(), + Float.valueOf(extFile(INDEX).length() + / (1024f * 1024)), + Long.valueOf(System.currentTimeMillis() + - start))); + } if (packChecksum == null) { packChecksum = idx.packChecksum; + fileSnapshot.setChecksum( + ObjectId.fromRaw(packChecksum)); } else if (!Arrays.equals(packChecksum, idx.packChecksum)) { throw new PackMismatchException(MessageFormat @@ -371,10 +386,14 @@ ObjectId findObjectForOffset(long offset) throws IOException { * * @return the packfile @{@link FileSnapshot} that the object is loaded from. 
*/ - FileSnapshot getFileSnapshot() { + PackFileSnapshot getFileSnapshot() { return fileSnapshot; } + AnyObjectId getPackChecksum() { + return ObjectId.fromRaw(packChecksum); + } + private final byte[] decompress(final long position, final int sz, final WindowCursor curs) throws IOException, DataFormatException { byte[] dstbuf; @@ -1209,4 +1228,12 @@ private File extFile(PackExt ext) { private boolean hasExt(PackExt ext) { return (extensions & ext.getBit()) != 0; } + + @SuppressWarnings("nls") + @Override + public String toString() { + return "PackFile [packFileName=" + packFile.getName() + ", length=" + + packFile.length() + ", packChecksum=" + + ObjectId.fromRaw(packChecksum).name() + "]"; + } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFileSnapshot.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFileSnapshot.java new file mode 100644 index 000000000..19ec3af49 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFileSnapshot.java @@ -0,0 +1,123 @@ +/* + * Copyright (C) 2019, Matthias Sohn + * and other copyright owners as documented in the project's IP log. + * + * This program and the accompanying materials are made available + * under the terms of the Eclipse Distribution License v1.0 which + * accompanies this distribution, is reproduced below, and is + * available at http://www.eclipse.org/org/documents/edl-v10.php + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or + * without modification, are permitted provided that the following + * conditions are met: + * + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * + * - Neither the name of the Eclipse Foundation, Inc. nor the + * names of its contributors may be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +package org.eclipse.jgit.internal.storage.file; + +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; + +import org.eclipse.jgit.lib.AnyObjectId; +import org.eclipse.jgit.lib.ObjectId; + +class PackFileSnapshot extends FileSnapshot { + + private static final ObjectId MISSING_CHECKSUM = ObjectId.zeroId(); + + /** + * Record a snapshot for a specific packfile path. + *

+ * This method should be invoked before the packfile is accessed. + * + * @param path + * the path to later remember. The path's current status + * information is saved. + * @return the snapshot. + */ + public static PackFileSnapshot save(File path) { + return new PackFileSnapshot(path); + } + + private AnyObjectId checksum = MISSING_CHECKSUM; + + private boolean wasChecksumChanged; + + + PackFileSnapshot(File packFile) { + super(packFile); + } + + void setChecksum(AnyObjectId checksum) { + this.checksum = checksum; + } + + /** {@inheritDoc} */ + @Override + public boolean isModified(File packFile) { + if (!super.isModified(packFile)) { + return false; + } + if (wasSizeChanged() || wasFileKeyChanged() + || !wasLastModifiedRacilyClean()) { + return true; + } + return isChecksumChanged(packFile); + } + + boolean isChecksumChanged(File packFile) { + return wasChecksumChanged = checksum != MISSING_CHECKSUM + && !checksum.equals(readChecksum(packFile)); + } + + private AnyObjectId readChecksum(File packFile) { + try (RandomAccessFile fd = new RandomAccessFile(packFile, "r")) { //$NON-NLS-1$ + fd.seek(fd.length() - 20); + final byte[] buf = new byte[20]; + fd.readFully(buf, 0, 20); + return ObjectId.fromRaw(buf); + } catch (IOException e) { + return MISSING_CHECKSUM; + } + } + + boolean wasChecksumChanged() { + return wasChecksumChanged; + } + + @SuppressWarnings("nls") + @Override + public String toString() { + return "PackFileSnapshot [checksum=" + checksum + ", " + + super.toString() + "]"; + } + +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java index 0ce3cc93c..a27a2b00c 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java @@ -86,6 +86,7 @@ import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.ObjectReader; import org.eclipse.jgit.lib.ObjectStream; +import org.eclipse.jgit.storage.pack.PackConfig; import org.eclipse.jgit.transport.PackParser; import org.eclipse.jgit.transport.PackedObjectInfo; import org.eclipse.jgit.util.BlockList; @@ -115,8 +116,11 @@ public class PackInserter extends ObjectInserter { private PackStream packOut; private Inflater cachedInflater; + private PackConfig pconfig; + PackInserter(ObjectDirectory db) { this.db = db; + this.pconfig = new PackConfig(db.getConfig()); } /** @@ -296,9 +300,25 @@ public void flush() throws IOException { realIdx), e); } - db.openPack(realPack); - rollback = false; - clear(); + boolean interrupted = false; + try { + FileSnapshot snapshot = FileSnapshot.save(realPack); + if (pconfig.doWaitPreventRacyPack(snapshot.size())) { + snapshot.waitUntilNotRacy(); + } + } catch (InterruptedException e) { + interrupted = true; + } + try { + db.openPack(realPack); + rollback = false; + } finally { + clear(); + if (interrupted) { + // Re-set interrupted flag + Thread.currentThread().interrupt(); + } + } } private static void writePackIndex(File idx, byte[] packHash, diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/storage/pack/PackConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/storage/pack/PackConfig.java index 256e41d22..6bd32dd87 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/storage/pack/PackConfig.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/storage/pack/PackConfig.java @@ -116,12 +116,30 @@ public class PackConfig { */ public static final int DEFAULT_DELTA_SEARCH_WINDOW_SIZE 
= 10; + private static final int MB = 1 << 20; + /** * Default big file threshold: {@value} * * @see #setBigFileThreshold(int) */ - public static final int DEFAULT_BIG_FILE_THRESHOLD = 50 * 1024 * 1024; + public static final int DEFAULT_BIG_FILE_THRESHOLD = 50 * MB; + + /** + * Default if we wait before opening a newly written pack to prevent its + * lastModified timestamp could be racy + * + * @since 5.1.8 + */ + public static final boolean DEFAULT_WAIT_PREVENT_RACY_PACK = false; + + /** + * Default if we wait before opening a newly written pack to prevent its + * lastModified timestamp could be racy + * + * @since 5.1.8 + */ + public static final long DEFAULT_MINSIZE_PREVENT_RACY_PACK = 100 * MB; /** * Default delta cache size: {@value} @@ -238,6 +256,10 @@ public class PackConfig { private int bigFileThreshold = DEFAULT_BIG_FILE_THRESHOLD; + private boolean waitPreventRacyPack = DEFAULT_WAIT_PREVENT_RACY_PACK; + + private long minSizePreventRacyPack = DEFAULT_MINSIZE_PREVENT_RACY_PACK; + private int threads; private Executor executor; @@ -314,6 +336,8 @@ public PackConfig(PackConfig cfg) { this.deltaCacheSize = cfg.deltaCacheSize; this.deltaCacheLimit = cfg.deltaCacheLimit; this.bigFileThreshold = cfg.bigFileThreshold; + this.waitPreventRacyPack = cfg.waitPreventRacyPack; + this.minSizePreventRacyPack = cfg.minSizePreventRacyPack; this.threads = cfg.threads; this.executor = cfg.executor; this.indexVersion = cfg.indexVersion; @@ -736,6 +760,76 @@ public void setBigFileThreshold(int bigFileThreshold) { this.bigFileThreshold = bigFileThreshold; } + /** + * Get whether we wait before opening a newly written pack to prevent its + * lastModified timestamp could be racy + * + * @return whether we wait before opening a newly written pack to prevent + * its lastModified timestamp could be racy + * @since 5.1.8 + */ + public boolean isWaitPreventRacyPack() { + return waitPreventRacyPack; + } + + /** + * Get whether we wait before opening a newly written pack to prevent its + * lastModified timestamp could be racy. Returns {@code true} if + * {@code waitToPreventRacyPack = true} and + * {@code packSize > minSizePreventRacyPack}, {@code false} otherwise. + * + * @param packSize + * size of the pack file + * + * @return whether we wait before opening a newly written pack to prevent + * its lastModified timestamp could be racy + * @since 5.1.8 + */ + public boolean doWaitPreventRacyPack(long packSize) { + return isWaitPreventRacyPack() + && packSize > getMinSizePreventRacyPack(); + } + + /** + * Set whether we wait before opening a newly written pack to prevent its + * lastModified timestamp could be racy + * + * @param waitPreventRacyPack + * whether we wait before opening a newly written pack to prevent + * its lastModified timestamp could be racy + * @since 5.1.8 + */ + public void setWaitPreventRacyPack(boolean waitPreventRacyPack) { + this.waitPreventRacyPack = waitPreventRacyPack; + } + + /** + * Get minimum packfile size for which we wait before opening a newly + * written pack to prevent its lastModified timestamp could be racy if + * {@code isWaitToPreventRacyPack} is {@code true}. + * + * @return minimum packfile size, default is 100 MiB + * + * @since 5.1.8 + */ + public long getMinSizePreventRacyPack() { + return minSizePreventRacyPack; + } + + /** + * Set minimum packfile size for which we wait before opening a newly + * written pack to prevent its lastModified timestamp could be racy if + * {@code isWaitToPreventRacyPack} is {@code true}. 
+ * + * @param minSizePreventRacyPack + * minimum packfile size, default is 100 MiB + * + * @since 5.1.8 + */ + public void setMinSizePreventRacyPack(long minSizePreventRacyPack) { + this.minSizePreventRacyPack = minSizePreventRacyPack; + } + /** * Get the compression level applied to objects in the pack. * @@ -1083,6 +1177,10 @@ public void fromConfig(Config rc) { setBitmapInactiveBranchAgeInDays( rc.getInt("pack", "bitmapinactivebranchageindays", //$NON-NLS-1$ //$NON-NLS-2$ getBitmapInactiveBranchAgeInDays())); + setWaitPreventRacyPack(rc.getBoolean("pack", "waitpreventracypack", //$NON-NLS-1$ //$NON-NLS-2$ + isWaitPreventRacyPack())); + setMinSizePreventRacyPack(rc.getLong("pack", "minsizepreventracypack", //$NON-NLS-1$//$NON-NLS-2$ + getMinSizePreventRacyPack())); } /** {@inheritDoc} */ diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java index 536e4f1df..fb958872a 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java @@ -55,23 +55,31 @@ import java.io.OutputStream; import java.io.PrintStream; import java.nio.charset.Charset; +import java.nio.file.AccessDeniedException; +import java.nio.file.FileStore; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.FileTime; import java.security.AccessController; import java.security.PrivilegedAction; import java.text.MessageFormat; +import java.time.Duration; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import org.eclipse.jgit.annotations.NonNull; import org.eclipse.jgit.annotations.Nullable; import org.eclipse.jgit.api.errors.JGitInternalException; import org.eclipse.jgit.errors.CommandFailedException; @@ -180,6 +188,83 @@ public int getRc() { } } + private static final class FileStoreAttributeCache { + /** + * The last modified time granularity of FAT filesystems is 2 seconds. + */ + private static final Duration FALLBACK_TIMESTAMP_RESOLUTION = Duration + .ofMillis(2000); + + private static final Map attributeCache = new ConcurrentHashMap<>(); + + static Duration getFsTimestampResolution(Path file) { + try { + Path dir = Files.isDirectory(file) ? 
file : file.getParent(); + if (!dir.toFile().canWrite()) { + // can not determine FileStore of an unborn directory or in + // a read-only directory + return FALLBACK_TIMESTAMP_RESOLUTION; + } + FileStore s = Files.getFileStore(dir); + FileStoreAttributeCache c = attributeCache.get(s); + if (c == null) { + c = new FileStoreAttributeCache(dir); + attributeCache.put(s, c); + if (LOG.isDebugEnabled()) { + LOG.debug(c.toString()); + } + } + return c.getFsTimestampResolution(); + + } catch (IOException | InterruptedException e) { + LOG.warn(e.getMessage(), e); + return FALLBACK_TIMESTAMP_RESOLUTION; + } + } + + private Duration fsTimestampResolution; + + Duration getFsTimestampResolution() { + return fsTimestampResolution; + } + + private FileStoreAttributeCache(Path dir) + throws IOException, InterruptedException { + Path probe = dir.resolve(".probe-" + UUID.randomUUID()); //$NON-NLS-1$ + Files.createFile(probe); + try { + FileTime startTime = Files.getLastModifiedTime(probe); + FileTime actTime = startTime; + long sleepTime = 512; + while (actTime.compareTo(startTime) <= 0) { + TimeUnit.NANOSECONDS.sleep(sleepTime); + FileUtils.touch(probe); + actTime = Files.getLastModifiedTime(probe); + // limit sleep time to max. 100ms + if (sleepTime < 100_000_000L) { + sleepTime = sleepTime * 2; + } + } + fsTimestampResolution = Duration.between(startTime.toInstant(), + actTime.toInstant()); + } catch (AccessDeniedException e) { + LOG.error(e.getLocalizedMessage(), e); + } finally { + Files.delete(probe); + } + } + + @SuppressWarnings("nls") + @Override + public String toString() { + return "FileStoreAttributeCache[" + attributeCache.keySet() + .stream() + .map(key -> "FileStore[" + key + "]: fsTimestampResolution=" + + attributeCache.get(key).getFsTimestampResolution()) + .collect(Collectors.joining(",\n")) + "]"; + } + } + /** The auto-detected implementation selected for this operating system and JRE. */ public static final FS DETECTED = detect(); @@ -221,6 +306,21 @@ public static FS detect(Boolean cygwinUsed) { return factory.detect(cygwinUsed); } + /** + * Get an estimate for the filesystem timestamp resolution from a cache of + * timestamp resolution per FileStore, if not yet available it is measured + * for a probe file under the given directory. + * + * @param dir + * the directory under which the probe file will be created to + * measure the timer resolution. + * @return measured filesystem timestamp resolution + * @since 5.2.3 + */ + public static Duration getFsTimerResolution(@NonNull Path dir) { + return FileStoreAttributeCache.getFsTimestampResolution(dir); + } + private volatile Holder userHome; private volatile Holder gitSystemConfig; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/FileUtils.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/FileUtils.java index 97f480dd3..9bba6ca8a 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/util/FileUtils.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/FileUtils.java @@ -49,6 +49,7 @@ import java.io.File; import java.io.IOException; +import java.io.OutputStream; import java.nio.file.AtomicMoveNotSupportedException; import java.nio.file.CopyOption; import java.nio.file.Files; @@ -908,4 +909,18 @@ public static String pathToString(File file) { } return path; } + + /** + * Touch the given file + * + * @param f + * the file to touch + * @throws IOException + * @since 5.2.3 + */ + public static void touch(Path f) throws IOException { + try (OutputStream fos = Files.newOutputStream(f)) { + // touch the file + } + } }
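The sketch below is not part of the patch; it is an illustrative example of how the pieces introduced above are meant to be combined by a caller: the two new pack config keys read by PackConfig.fromConfig(), the per-FileStore timestamp resolution probe exposed as FS.getFsTimerResolution(), and FileSnapshot.waitUntilNotRacy() as used by GC.writePack() and PackInserter.flush(). The class name RacyPackGuardExample, the variables db and realPack, and the 50 MiB threshold are assumptions made for illustration only.

// Illustrative sketch only -- mirrors the pattern GC and PackInserter follow after this change.
import java.io.File;
import java.time.Duration;

import org.eclipse.jgit.internal.storage.file.FileRepository;
import org.eclipse.jgit.internal.storage.file.FileSnapshot;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.storage.pack.PackConfig;
import org.eclipse.jgit.util.FS;

class RacyPackGuardExample {
	static void openPackSafely(FileRepository db, File realPack)
			throws Exception {
		// Opt in to the new guard; both keys are parsed by PackConfig.fromConfig().
		FileBasedConfig cfg = db.getConfig();
		cfg.setBoolean("pack", null, "waitpreventracypack", true);
		cfg.setLong("pack", null, "minsizepreventracypack", 50L * 1024 * 1024);
		cfg.save();

		// Timestamp resolution is measured once per FileStore and cached.
		Duration res = FS.getFsTimerResolution(realPack.toPath().getParent());
		System.out.println("filesystem timestamp resolution: " + res);

		// Take a snapshot of the freshly written pack and, if it is large
		// enough, sleep until the snapshot can no longer be racily clean
		// before opening the pack.
		FileSnapshot snapshot = FileSnapshot.save(realPack);
		if (new PackConfig(db).doWaitPreventRacyPack(snapshot.size())) {
			snapshot.waitUntilNotRacy();
		}
		db.getObjectDatabase().openPack(realPack);
	}
}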