diff --git a/src/Cache.zig b/src/Cache.zig
index 3295ffac7d..856d2c4277 100644
--- a/src/Cache.zig
+++ b/src/Cache.zig
@@ -26,6 +26,7 @@ pub fn obtain(cache: *const Cache) Manifest {
 /// This is 128 bits - Even with 2^54 cache entries, the probably of a collision would be under 10^-6
 pub const bin_digest_len = 16;
 pub const hex_digest_len = bin_digest_len * 2;
+pub const BinDigest = [bin_digest_len]u8;
 
 const manifest_file_size_max = 50 * 1024 * 1024;
 
@@ -41,7 +42,7 @@ pub const File = struct {
     path: ?[]const u8,
     max_file_size: ?usize,
     stat: fs.File.Stat,
-    bin_digest: [bin_digest_len]u8,
+    bin_digest: BinDigest,
     contents: ?[]const u8,
 
     pub fn deinit(self: *File, allocator: *Allocator) void {
@@ -139,16 +140,16 @@ pub const HashHelper = struct {
         return copy.final();
     }
 
-    pub fn peekBin(hh: HashHelper) [bin_digest_len]u8 {
+    pub fn peekBin(hh: HashHelper) BinDigest {
         var copy = hh;
-        var bin_digest: [bin_digest_len]u8 = undefined;
+        var bin_digest: BinDigest = undefined;
         copy.hasher.final(&bin_digest);
         return bin_digest;
     }
 
     /// Returns a hex encoded hash of the inputs, mutating the state of the hasher.
     pub fn final(hh: *HashHelper) [hex_digest_len]u8 {
-        var bin_digest: [bin_digest_len]u8 = undefined;
+        var bin_digest: BinDigest = undefined;
         hh.hasher.final(&bin_digest);
 
         var out_digest: [hex_digest_len]u8 = undefined;
@@ -241,7 +242,7 @@ pub const Manifest = struct {
         const ext = ".txt";
         var manifest_file_path: [self.hex_digest.len + ext.len]u8 = undefined;
 
-        var bin_digest: [bin_digest_len]u8 = undefined;
+        var bin_digest: BinDigest = undefined;
         self.hash.hasher.final(&bin_digest);
 
         _ = std.fmt.bufPrint(&self.hex_digest, "{x}", .{bin_digest}) catch unreachable;
@@ -347,7 +348,7 @@ pub const Manifest = struct {
                 cache_hash_file.stat.inode = 0;
             }
 
-            var actual_digest: [bin_digest_len]u8 = undefined;
+            var actual_digest: BinDigest = undefined;
             try hashFile(this_file, &actual_digest);
 
             if (!mem.eql(u8, &cache_hash_file.bin_digest, &actual_digest)) {
@@ -381,7 +382,7 @@ pub const Manifest = struct {
         return true;
     }
 
-    pub fn unhit(self: *Manifest, bin_digest: [bin_digest_len]u8, input_file_count: usize) void {
+    pub fn unhit(self: *Manifest, bin_digest: BinDigest, input_file_count: usize) void {
         // Reset the hash.
         self.hash.hasher = hasher_init;
         self.hash.hasher.update(&bin_digest);
@@ -530,7 +531,7 @@ pub const Manifest = struct {
         // cache_release is called we still might be working on creating
         // the artifacts to cache.
 
-        var bin_digest: [bin_digest_len]u8 = undefined;
+        var bin_digest: BinDigest = undefined;
         self.hash.hasher.final(&bin_digest);
 
         var out_digest: [hex_digest_len]u8 = undefined;
diff --git a/src/Compilation.zig b/src/Compilation.zig
index d86a401fb7..7c73912159 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -33,6 +33,7 @@ gpa: *Allocator,
 arena_state: std.heap.ArenaAllocator.State,
 bin_file: *link.File,
 c_object_table: std.AutoArrayHashMapUnmanaged(*CObject, void) = .{},
+c_object_cache_digest_set: std.AutoHashMapUnmanaged(Cache.BinDigest, void) = .{},
 stage1_lock: ?Cache.Lock = null,
 stage1_cache_manifest: *Cache.Manifest = undefined,
 
@@ -1110,6 +1111,7 @@ pub fn destroy(self: *Compilation) void {
         entry.key.destroy(gpa);
     }
     self.c_object_table.deinit(gpa);
+    self.c_object_cache_digest_set.deinit(gpa);
 
     for (self.failed_c_objects.items()) |entry| {
         entry.value.destroy(gpa);
@@ -1682,6 +1684,17 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_comp_progress_node: *
         }
     }
 
+    {
+        const gop = try comp.c_object_cache_digest_set.getOrPut(comp.gpa, man.hash.peekBin());
+        if (gop.found_existing) {
+            return comp.failCObj(
+                c_object,
+                "the same source file was already added to the same compilation with the same flags",
+                .{},
+            );
+        }
+    }
+
     var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
     defer arena_allocator.deinit();
     const arena = &arena_allocator.allocator;
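
Taken together, the Compilation.zig hunks key the new c_object_cache_digest_set on the manifest's binary digest: man.hash.peekBin() finalizes a copy of the hasher (leaving the manifest's own state intact for the later cache lookup), and getOrPut reports a duplicate when the same digest, i.e. the same C source path and flags, was already inserted during this compilation. The standalone test below is a minimal sketch of that getOrPut-based deduplication pattern; the Digest alias, the digestOf helper, and the use of Blake3 are illustrative assumptions written against current std naming, not the compiler's actual hasher or API.

const std = @import("std");

// Hypothetical stand-in for Cache.BinDigest: a 16-byte binary digest.
const Digest = [16]u8;

// Hash a C object's identity (source path + flags) down to one digest.
// Blake3 here is only an example hasher; any stable hash of the inputs works.
fn digestOf(path: []const u8, flags: []const []const u8) Digest {
    var hasher = std.crypto.hash.Blake3.init(.{});
    hasher.update(path);
    for (flags) |flag| hasher.update(flag);
    var out: Digest = undefined;
    hasher.final(&out);
    return out;
}

test "duplicate C objects are detected via getOrPut on a digest set" {
    const gpa = std.testing.allocator;

    // Same shape as c_object_cache_digest_set: a set keyed by the digest.
    var seen: std.AutoHashMapUnmanaged(Digest, void) = .{};
    defer seen.deinit(gpa);

    const flags = [_][]const u8{ "-O2", "-DNDEBUG" };

    // First (path, flags) pair: getOrPut inserts the digest, no duplicate.
    const first = try seen.getOrPut(gpa, digestOf("foo.c", &flags));
    try std.testing.expect(!first.found_existing);

    // The same path and flags hash to the same digest, so found_existing is
    // true, which is the point where updateCObject reports the error above.
    const second = try seen.getOrPut(gpa, digestOf("foo.c", &flags));
    try std.testing.expect(second.found_existing);
}

Run the sketch with zig test on any file name; keying the set on the 16-byte binary digest rather than the hex string keeps the entries small while reusing the hash the manifest already computes.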