Merge pull request #17562 from ziglang/fetch-symlink-normalize-sep
Package.Fetch: normalize path separators in symlinks
@@ -1803,7 +1803,7 @@ pub const Dir = struct {
         );
         switch (rc) {
             .SUCCESS => return result,
-            .OBJECT_NAME_INVALID => unreachable,
+            .OBJECT_NAME_INVALID => return error.BadPathName,
             .OBJECT_NAME_NOT_FOUND => return error.FileNotFound,
             .OBJECT_PATH_NOT_FOUND => return error.FileNotFound,
             .NOT_A_DIRECTORY => return error.NotDir,
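A note on the `Dir` change above: an NT status of `OBJECT_NAME_INVALID` previously hit `unreachable`, so a path containing characters that are illegal on Windows could crash the process; it now surfaces as `error.BadPathName`. A minimal caller-side sketch (the file name is an invented example of a name that is invalid on Windows):

```zig
const std = @import("std");

// Hedged sketch: `error.BadPathName` is a member of `std.fs.File.OpenError`,
// so callers can branch on it rather than crashing.
fn tryOpen(dir: std.fs.Dir) !std.fs.File {
    return dir.openFile("bad*name?.txt", .{}) catch |err| switch (err) {
        error.BadPathName => {
            std.log.warn("path contains characters invalid on this OS", .{});
            return err;
        },
        else => return err,
    };
}
```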
||||
@@ -3810,12 +3810,11 @@ test "replace" {
     try testing.expectEqualStrings(expected, output[0..expected.len]);
 }
 
-/// Replace all occurrences of `needle` with `replacement`.
-pub fn replaceScalar(comptime T: type, slice: []T, needle: T, replacement: T) void {
-    for (slice, 0..) |e, i| {
-        if (e == needle) {
-            slice[i] = replacement;
-        }
+/// Replace all occurrences of `match` with `replacement`.
+pub fn replaceScalar(comptime T: type, slice: []T, match: T, replacement: T) void {
+    for (slice) |*e| {
+        if (e.* == match)
+            e.* = replacement;
     }
 }
 
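The `std.mem.replaceScalar` rewrite is behavior-preserving: it drops the index bookkeeping in favor of a pointer capture. A small usage test (buffer contents arbitrary):

```zig
const std = @import("std");

test "replaceScalar rewrites every match in place" {
    var buf = "a\\b\\c".*;
    std.mem.replaceScalar(u8, &buf, '\\', '/');
    try std.testing.expectEqualStrings("a/b/c", &buf);
}
```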
@@ -81,6 +81,10 @@ pub const JobQueue = struct {
     wait_group: WaitGroup = .{},
     global_cache: Cache.Directory,
     recursive: bool,
+    /// Dumps hash information to stdout which can be used to troubleshoot why
+    /// two hashes of the same package do not match.
+    /// If this is true, `recursive` must be false.
+    debug_hash: bool,
     work_around_btrfs_bug: bool,
 
     pub const Table = std.AutoArrayHashMapUnmanaged(Manifest.MultiHashHexDigest, *Fetch);
@@ -1315,7 +1319,7 @@ fn computeHash(
         const kind: HashedFile.Kind = switch (entry.kind) {
             .directory => unreachable,
             .file => .file,
-            .sym_link => .sym_link,
+            .sym_link => .link,
             else => return f.fail(f.location_tok, try eb.printString(
                 "package contains '{s}' which has illegal file type '{s}'",
                 .{ entry.path, @tagName(entry.kind) },
@@ -1329,7 +1333,7 @@ fn computeHash(
         const hashed_file = try arena.create(HashedFile);
         hashed_file.* = .{
             .fs_path = fs_path,
-            .normalized_path = try normalizePath(arena, fs_path),
+            .normalized_path = try normalizePathAlloc(arena, fs_path),
             .kind = kind,
             .hash = undefined, // to be populated by the worker
             .failure = undefined, // to be populated by the worker
@@ -1399,9 +1403,36 @@ fn computeHash(
     }
 
     if (any_failures) return error.FetchFailed;
+
+    if (f.job_queue.debug_hash) {
+        assert(!f.job_queue.recursive);
+        // Print something to stdout that can be text diffed to figure out why
+        // the package hash is different.
+        dumpHashInfo(all_files.items) catch |err| {
+            std.debug.print("unable to write to stdout: {s}\n", .{@errorName(err)});
+            std.process.exit(1);
+        };
+    }
+
     return hasher.finalResult();
 }
 
+fn dumpHashInfo(all_files: []const *const HashedFile) !void {
+    const stdout = std.io.getStdOut();
+    var bw = std.io.bufferedWriter(stdout.writer());
+    const w = bw.writer();
+
+    for (all_files) |hashed_file| {
+        try w.print("{s}: {s}: {s}\n", .{
+            @tagName(hashed_file.kind),
+            std.fmt.fmtSliceHexLower(&hashed_file.hash),
+            hashed_file.normalized_path,
+        });
+    }
+
+    try bw.flush();
+}
+
 fn workerHashFile(dir: fs.Dir, hashed_file: *HashedFile, wg: *WaitGroup) void {
     defer wg.finish();
     hashed_file.failure = hashFileFallible(dir, hashed_file);
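`dumpHashInfo` emits one line per hashed entry in `kind: hex-digest: normalized-path` form (the entries arrive sorted, see `HashedFile.lessThan` below), so two runs of the same package can be compared with a plain text diff. Hypothetical output for a two-entry package, with digests shortened and file names invented for illustration:

```
file: 2f1a...c94d: build.zig
link: 88e0...13b7: docs/latest
```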
@@ -1427,8 +1458,14 @@ fn hashFileFallible(dir: fs.Dir, hashed_file: *HashedFile) HashedFile.Error!void
                 hasher.update(buf[0..bytes_read]);
             }
         },
-        .sym_link => {
+        .link => {
             const link_name = try dir.readLink(hashed_file.fs_path, &buf);
+            if (fs.path.sep != canonical_sep) {
+                // Package hashes are intended to be consistent across
+                // platforms which means we must normalize path separators
+                // inside symlinks.
+                normalizePath(link_name);
+            }
             hasher.update(link_name);
         },
     }
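`Dir.readLink` fills the caller's buffer and returns a slice of it, which is what makes the in-place `normalizePath` call possible. A standalone sketch of the same transformation (link target invented; on POSIX hosts the branch is skipped because `fs.path.sep` is already `'/'`):

```zig
const std = @import("std");

test "symlink targets hash with POSIX separators" {
    // Simulates the Windows case: '\\' separators are rewritten to '/'
    // so the resulting package hash matches the one computed on POSIX.
    var link_target = "sub\\dir\\file.txt".*;
    std.mem.replaceScalar(u8, &link_target, '\\', '/');
    try std.testing.expectEqualStrings("sub/dir/file.txt", &link_target);
}
```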
@@ -1474,7 +1511,7 @@ const HashedFile = struct {
         fs.File.StatError ||
         fs.Dir.ReadLinkError;
 
-    const Kind = enum { file, sym_link };
+    const Kind = enum { file, link };
 
     fn lessThan(context: void, lhs: *const HashedFile, rhs: *const HashedFile) bool {
         _ = context;
@@ -1484,22 +1521,20 @@ const HashedFile = struct {
 
 /// Make a file system path identical independently of operating system path inconsistencies.
 /// This converts backslashes into forward slashes.
-fn normalizePath(arena: Allocator, fs_path: []const u8) ![]const u8 {
-    const canonical_sep = '/';
-
-    if (fs.path.sep == canonical_sep)
-        return fs_path;
-
+fn normalizePathAlloc(arena: Allocator, fs_path: []const u8) ![]const u8 {
+    if (fs.path.sep == canonical_sep) return fs_path;
     const normalized = try arena.dupe(u8, fs_path);
-    for (normalized) |*byte| {
-        switch (byte.*) {
-            fs.path.sep => byte.* = canonical_sep,
-            else => continue,
-        }
-    }
+    normalizePath(normalized);
     return normalized;
 }
 
+const canonical_sep = fs.path.sep_posix;
+
+fn normalizePath(bytes: []u8) void {
+    assert(fs.path.sep != canonical_sep);
+    std.mem.replaceScalar(u8, bytes, fs.path.sep, canonical_sep);
+}
+
 const Filter = struct {
     include_paths: std.StringArrayHashMapUnmanaged(void) = .{},
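Because `fs.path.sep` is comptime-known per target, the early return in `normalizePathAlloc` folds away: POSIX builds hand back the input slice with no allocation, and only Windows builds copy and rewrite. A self-contained sketch of the same split (re-declared here with `std.`-qualified names; not the module's actual code):

```zig
const std = @import("std");

const canonical_sep = std.fs.path.sep_posix; // always '/'

fn normalizePathAlloc(arena: std.mem.Allocator, fs_path: []const u8) ![]const u8 {
    // Comptime-known: on POSIX this returns the input without copying.
    if (std.fs.path.sep == canonical_sep) return fs_path;
    const normalized = try arena.dupe(u8, fs_path);
    std.mem.replaceScalar(u8, normalized, std.fs.path.sep, canonical_sep);
    return normalized;
}

test "already-canonical paths come back unchanged" {
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit();
    const p = try normalizePathAlloc(arena_state.allocator(), "a/b/c");
    try std.testing.expectEqualStrings("a/b/c", p);
}
```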
@@ -5143,6 +5143,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
         .thread_pool = &thread_pool,
         .global_cache = global_cache_directory,
         .recursive = true,
+        .debug_hash = false,
         .work_around_btrfs_bug = work_around_btrfs_bug,
     };
     defer job_queue.deinit();
@@ -6991,6 +6992,7 @@ pub const usage_fetch =
     \\Options:
     \\  -h, --help                    Print this help and exit
     \\  --global-cache-dir [path]     Override path to global Zig cache directory
+    \\  --debug-hash                  Print verbose hash information to stdout
     \\
 ;
 
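With the flag wired up in `cmdFetch` below, the intended troubleshooting flow is to fetch the same package on the two machines that disagree and text-diff the dumps; for example (URL invented):

```
zig fetch --debug-hash https://example.com/pkg.tar.gz > hash-dump.txt
```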
@@ -7004,6 +7006,7 @@ fn cmdFetch(
         std.process.hasEnvVarConstant("ZIG_BTRFS_WORKAROUND");
     var opt_path_or_url: ?[]const u8 = null;
     var override_global_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_GLOBAL_CACHE_DIR");
+    var debug_hash: bool = false;
 
     {
         var i: usize = 0;
@@ -7019,6 +7022,9 @@ fn cmdFetch(
                 i += 1;
                 override_global_cache_dir = args[i];
                 continue;
+            } else if (mem.eql(u8, arg, "--debug-hash")) {
+                debug_hash = true;
+                continue;
             } else {
                 fatal("unrecognized parameter: '{s}'", .{arg});
             }
@@ -7057,6 +7063,7 @@ fn cmdFetch(
         .thread_pool = &thread_pool,
         .global_cache = global_cache_directory,
         .recursive = false,
+        .debug_hash = debug_hash,
         .work_around_btrfs_bug = work_around_btrfs_bug,
     };
     defer job_queue.deinit();