stage2: detect redundant C/C++ source files #7394

Merged: 1 commit, Dec 11, 2020

src/Cache.zig (9 additions, 8 deletions)
@@ -26,6 +26,7 @@ pub fn obtain(cache: *const Cache) Manifest {
 /// This is 128 bits - Even with 2^54 cache entries, the probably of a collision would be under 10^-6
 pub const bin_digest_len = 16;
 pub const hex_digest_len = bin_digest_len * 2;
+pub const BinDigest = [bin_digest_len]u8;
 
 const manifest_file_size_max = 50 * 1024 * 1024;
 
@@ -41,7 +42,7 @@ pub const File = struct {
 path: ?[]const u8,
 max_file_size: ?usize,
 stat: fs.File.Stat,
-bin_digest: [bin_digest_len]u8,
+bin_digest: BinDigest,
 contents: ?[]const u8,
 
 pub fn deinit(self: *File, allocator: *Allocator) void {
@@ -139,16 +140,16 @@ pub const HashHelper = struct {
 return copy.final();
 }
 
-pub fn peekBin(hh: HashHelper) [bin_digest_len]u8 {
+pub fn peekBin(hh: HashHelper) BinDigest {
 var copy = hh;
-var bin_digest: [bin_digest_len]u8 = undefined;
+var bin_digest: BinDigest = undefined;
 copy.hasher.final(&bin_digest);
 return bin_digest;
 }
 
 /// Returns a hex encoded hash of the inputs, mutating the state of the hasher.
 pub fn final(hh: *HashHelper) [hex_digest_len]u8 {
-var bin_digest: [bin_digest_len]u8 = undefined;
+var bin_digest: BinDigest = undefined;
 hh.hasher.final(&bin_digest);
 
 var out_digest: [hex_digest_len]u8 = undefined;
@@ -241,7 +242,7 @@ pub const Manifest = struct {
 const ext = ".txt";
 var manifest_file_path: [self.hex_digest.len + ext.len]u8 = undefined;
 
-var bin_digest: [bin_digest_len]u8 = undefined;
+var bin_digest: BinDigest = undefined;
 self.hash.hasher.final(&bin_digest);
 
 _ = std.fmt.bufPrint(&self.hex_digest, "{x}", .{bin_digest}) catch unreachable;
@@ -347,7 +348,7 @@ pub const Manifest = struct {
 cache_hash_file.stat.inode = 0;
 }
 
-var actual_digest: [bin_digest_len]u8 = undefined;
+var actual_digest: BinDigest = undefined;
 try hashFile(this_file, &actual_digest);
 
 if (!mem.eql(u8, &cache_hash_file.bin_digest, &actual_digest)) {
@@ -381,7 +382,7 @@ pub const Manifest = struct {
 return true;
 }
 
-pub fn unhit(self: *Manifest, bin_digest: [bin_digest_len]u8, input_file_count: usize) void {
+pub fn unhit(self: *Manifest, bin_digest: BinDigest, input_file_count: usize) void {
 // Reset the hash.
 self.hash.hasher = hasher_init;
 self.hash.hasher.update(&bin_digest);
@@ -530,7 +531,7 @@ pub const Manifest = struct {
 // cache_release is called we still might be working on creating
 // the artifacts to cache.
 
-var bin_digest: [bin_digest_len]u8 = undefined;
+var bin_digest: BinDigest = undefined;
 self.hash.hasher.final(&bin_digest);
 
 var out_digest: [hex_digest_len]u8 = undefined;
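
The Cache.zig half of the change is a naming refactor: every `[bin_digest_len]u8` spelled out at a use site becomes the public `BinDigest` alias, which Compilation.zig below then uses as the key type of a hash map (`Cache.BinDigest`). The duplicate detection also leans on `peekBin` from the HashHelper hunk above: because the helper is taken by value, the copy can be finalized while the manifest's ongoing hash state stays untouched. A minimal sketch of that peek pattern, outside the real Cache.zig and with Blake3 assumed as the hasher (the actual hasher type is not part of this diff):

const std = @import("std");

pub const bin_digest_len = 16;
pub const BinDigest = [bin_digest_len]u8;

// Illustrative stand-in for Cache.HashHelper, assuming a Blake3 hasher.
const HashHelper = struct {
    hasher: std.crypto.hash.Blake3,

    pub fn init() HashHelper {
        return .{ .hasher = std.crypto.hash.Blake3.init(.{}) };
    }

    // Taking `hh` by value copies the whole hash state, so finalizing the
    // copy leaves the original hasher usable for further updates.
    pub fn peekBin(hh: HashHelper) BinDigest {
        var copy = hh;
        var bin_digest: BinDigest = undefined;
        copy.hasher.final(&bin_digest);
        return bin_digest;
    }
};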

src/Compilation.zig (13 additions, 0 deletions)
@@ -33,6 +33,7 @@ gpa: *Allocator,
 arena_state: std.heap.ArenaAllocator.State,
 bin_file: *link.File,
 c_object_table: std.AutoArrayHashMapUnmanaged(*CObject, void) = .{},
+c_object_cache_digest_set: std.AutoHashMapUnmanaged(Cache.BinDigest, void) = .{},
 stage1_lock: ?Cache.Lock = null,
 stage1_cache_manifest: *Cache.Manifest = undefined,
 
@@ -1152,6 +1153,7 @@ pub fn destroy(self: *Compilation) void {
 entry.key.destroy(gpa);
 }
 self.c_object_table.deinit(gpa);
+self.c_object_cache_digest_set.deinit(gpa);
 
 for (self.failed_c_objects.items()) |entry| {
 entry.value.destroy(gpa);
@@ -1730,6 +1732,17 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_comp_progress_node: *
 }
 }
 
+{
+const gop = try comp.c_object_cache_digest_set.getOrPut(comp.gpa, man.hash.peekBin());
+if (gop.found_existing) {
+return comp.failCObj(
+c_object,
+"the same source file was already added to the same compilation with the same flags",
+.{},
+);
+}
+}
+
 var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
 defer arena_allocator.deinit();
 const arena = &arena_allocator.allocator;
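
The detection itself is the block added above: once the cache manifest for a C object has been fed its inputs, `man.hash.peekBin()` is recorded in `c_object_cache_digest_set`, and a `found_existing` result means an earlier CObject in the same Compilation produced an identical digest, i.e. (per the error message) the same source file added with the same flags. Using `peekBin` rather than `final` leaves the manifest's hash state unchanged for the rest of updateCObject. A standalone sketch of that getOrPut pattern, with hypothetical names and the pointer-to-Allocator style used elsewhere in this diff:

const std = @import("std");
const Allocator = std.mem.Allocator;

const BinDigest = [16]u8;

// Hypothetical helper mirroring the check added to updateCObject: getOrPut
// inserts the digest if it is new, and found_existing reports that an equal
// digest was already registered by an earlier C object.
fn isDuplicate(
    gpa: *Allocator,
    seen: *std.AutoHashMapUnmanaged(BinDigest, void),
    digest: BinDigest,
) !bool {
    const gop = try seen.getOrPut(gpa, digest);
    return gop.found_existing;
}

Judging by the error message, two command-line entries for the same .c file only collide here when everything that feeds the digest matches; the same file compiled with different flags hashes differently and is still accepted.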