Skip to content

New command: zig pkg fetch #15331

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
5055cea
save wip
DraagrenKirneh Apr 14, 2023
aa4b716
save
DraagrenKirneh Apr 15, 2023
8383ee3
Merge branch 'master' into feature/build-pkg-fetch
DraagrenKirneh Apr 16, 2023
a500333
revert change to .gitignore
DraagrenKirneh Apr 16, 2023
774db77
cleanup code, add usage_pkg string
DraagrenKirneh Apr 16, 2023
f06d287
cleanup. handle build dir cleanup when found in a parent directory
DraagrenKirneh Apr 16, 2023
5d19bc8
add back newline in .gitignore
DraagrenKirneh Apr 17, 2023
7eef638
remove wip file
DraagrenKirneh Apr 17, 2023
a533921
Merge branch 'master' into feature/build-pkg-fetch
DraagrenKirneh Apr 17, 2023
1c69d3a
Merge branch 'master' into feature/build-pkg-fetch
DraagrenKirneh Apr 20, 2023
b51765f
fix typo in help option, update http client to latest changes
DraagrenKirneh Apr 21, 2023
2833d3a
Merge branch 'master' into feature/build-pkg-fetch
DraagrenKirneh Apr 23, 2023
3330cb8
Merge branch 'master' into feature/build-pkg-fetch
DraagrenKirneh Apr 26, 2023
2449311
Merge branch 'master' into feature/build-pkg-fetch
DraagrenKirneh May 1, 2023
cba7b19
Merge branch 'master' into feature/build-pkg-fetch
DraagrenKirneh May 6, 2023
9218c82
Update to work with latest changes in package.zig
DraagrenKirneh May 8, 2023
da3e105
Merge branch 'master' into feature/build-pkg-fetch
DraagrenKirneh May 8, 2023
e66928f
Merge branch 'master' into feature/build-pkg-fetch
DraagrenKirneh May 9, 2023
8cd3ed2
debug build error windows - adding ws2_32 directly
DraagrenKirneh May 10, 2023
db1b4e9
add ws2_32 for enable_llvm as well
DraagrenKirneh May 10, 2023
9b51c60
dbg: try with omit fetch code
DraagrenKirneh May 13, 2023
15c8ec1
Merge branch 'master' into feature/build-pkg-fetch
DraagrenKirneh May 14, 2023
cab6227
update latest fetching changes in Package.zig. cleanup package command
DraagrenKirneh May 14, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
289 changes: 288 additions & 1 deletion src/Package.zig
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,89 @@ pub fn getName(target: *const Package, gpa: Allocator, mod: Module) ![]const u8

pub const build_zig_basename = "build.zig";

/// Recursively fetch every dependency declared in the `build.zig.zon`
/// manifest found in `directory`.
///
/// For each dependency the package is fetched and unpacked into
/// `global_cache_directory` (via `fetchAndUnpack`), and then this function
/// recurses into the unpacked package's own manifest. `name_prefix` carries
/// the dot-separated fully-qualified name accumulated so far; all transient
/// allocations are made on `arena` and freed by the caller.
///
/// A missing manifest file is treated the same as a manifest with no
/// dependencies. Parse or manifest errors are recorded into `error_bundle`
/// and reported as `error.PackageFetchFailed`.
pub fn fetchDependencies(
    arena: Allocator,
    thread_pool: *ThreadPool,
    http_client: *std.http.Client,
    directory: Compilation.Directory,
    global_cache_directory: Compilation.Directory,
    local_cache_directory: Compilation.Directory,
    name_prefix: []const u8,
    error_bundle: *std.zig.ErrorBundle.Wip,
) !void {
    // Manifest files are small; the cap guards against reading bogus input.
    const max_bytes = 10 * 1024 * 1024;
    const gpa = thread_pool.allocator;
    const build_zig_zon_bytes = directory.handle.readFileAllocOptions(
        arena,
        Manifest.basename,
        max_bytes,
        null,
        1,
        0,
    ) catch |err| switch (err) {
        error.FileNotFound => {
            // Handle the same as no dependencies.
            return;
        },
        else => |e| return e,
    };

    var ast = try std.zig.Ast.parse(gpa, build_zig_zon_bytes, .zon);
    defer ast.deinit(gpa);

    if (ast.errors.len > 0) {
        const file_path = try directory.join(arena, &.{Manifest.basename});
        try main.putAstErrorsIntoBundle(gpa, ast, file_path, error_bundle);
        return error.PackageFetchFailed;
    }

    var manifest = try Manifest.parse(gpa, ast);
    defer manifest.deinit(gpa);

    if (manifest.errors.len > 0) {
        const file_path = try directory.join(arena, &.{Manifest.basename});
        for (manifest.errors) |msg| {
            try Report.addErrorMessage(ast, file_path, error_bundle, 0, msg);
        }
        return error.PackageFetchFailed;
    }

    const report: Report = .{
        .ast = &ast,
        .directory = directory,
        .error_bundle = error_bundle,
    };

    // Keys and values of the dependency map are parallel slices of equal
    // length, so iterate them together rather than indexing by hand.
    for (manifest.dependencies.keys(), manifest.dependencies.values()) |name, dep| {
        // Fully-qualified name prefix for this dependency's own sub-tree.
        const sub_prefix = try std.fmt.allocPrint(arena, "{s}{s}.", .{ name_prefix, name });

        const sub_pkg = try fetchAndUnpack(
            thread_pool,
            http_client,
            global_cache_directory,
            dep,
            report,
        );

        try fetchDependencies(
            arena,
            thread_pool,
            http_client,
            sub_pkg.root_src_directory,
            global_cache_directory,
            local_cache_directory,
            sub_prefix,
            error_bundle,
        );
    }
}

pub fn fetchAndAddDependencies(
pkg: *Package,
root_pkg: *Package,
Expand Down Expand Up @@ -280,7 +363,7 @@ pub fn fetchAndAddDependencies(
const sub_prefix = try std.fmt.allocPrint(arena, "{s}{s}.", .{ name_prefix, name });
const fqn = sub_prefix[0 .. sub_prefix.len - 1];

const sub_pkg = try fetchAndUnpack(
const sub_pkg = try fetchUnpackImport(
thread_pool,
http_client,
global_cache_directory,
Expand Down Expand Up @@ -401,12 +484,216 @@ const MultiHashHexDigest = [hex_multihash_len]u8;
/// This is to avoid creating multiple modules for the same build.zig file.
pub const AllModules = std.AutoHashMapUnmanaged(MultiHashHexDigest, *Package);

/// Look up the dependency `dep` in the global package cache by its declared
/// hash, append its build root to `build_roots_source`, and return a
/// `Package` for it — reusing a previously created module from `all_modules`
/// when one exists (a file must not belong to multiple modules).
///
/// Returns `error.MissingDependencyHash` when the manifest entry carries no
/// hash, and `error.MissingFile` when the hashed package is not present in
/// the cache. On success the returned `Package` owns `build_root` and the
/// opened directory handle.
fn importCachedPackage(
    gpa: Allocator,
    global_cache_directory: Compilation.Directory,
    dep: Manifest.Dependency,
    build_roots_source: *std.ArrayList(u8),
    fqn: []const u8,
    all_modules: *AllModules,
) !*Package {
    const s = fs.path.sep_str;

    // Check if the expected_hash is already present in the global package
    // cache, and thereby avoid both fetching and unpacking.
    if (dep.hash) |h| {
        const hex_digest = h[0..hex_multihash_len];
        const pkg_dir_sub_path = "p" ++ s ++ hex_digest;

        const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path});
        errdefer gpa.free(build_root);

        var pkg_dir = global_cache_directory.handle.openDir(pkg_dir_sub_path, .{}) catch |err| switch (err) {
            error.FileNotFound => return error.MissingFile, //@todo
            else => |e| return e,
        };
        errdefer pkg_dir.close();

        try build_roots_source.writer().print(" pub const {s} = \"{}\";\n", .{
            std.zig.fmtId(fqn), std.zig.fmtEscapes(build_root),
        });

        // The compiler has a rule that a file must not be included in multiple modules,
        // so we must detect if a module has been created for this package and reuse it.
        const gop = try all_modules.getOrPut(gpa, hex_digest.*);
        if (gop.found_existing) {
            // Reuse the existing module. Release the handle and path acquired
            // above: the existing Package already owns its own copies. (The
            // errdefers do not run on this successful early return.)
            pkg_dir.close();
            gpa.free(build_root);
            return gop.value_ptr.*;
        }
        // From here on a failure must not leave a map entry pointing at
        // uninitialized memory.
        errdefer _ = all_modules.remove(hex_digest.*);

        const ptr = try gpa.create(Package);
        errdefer gpa.destroy(ptr);

        const owned_src_path = try gpa.dupe(u8, build_zig_basename);
        errdefer gpa.free(owned_src_path);

        ptr.* = .{
            .root_src_directory = .{
                .path = build_root,
                .handle = pkg_dir,
            },
            .root_src_directory_owned = true,
            .root_src_path = owned_src_path,
        };

        gop.value_ptr.* = ptr;
        return ptr;
    }
    return error.MissingDependencyHash;
}

/// Fetch the dependency `dep` over HTTP, unpack it into the global package
/// cache, verify its content hash, and return a `Package` rooted at the
/// cached directory.
///
/// Fast path: when `dep.hash` is declared and the hashed directory already
/// exists in the cache, no network access happens at all. Otherwise the
/// archive at `dep.url` is downloaded into a random tmp directory, unpacked
/// (gzip or xz tarballs only), hashed, and renamed into place. A declared
/// hash that mismatches the computed one, or a missing hash field, is
/// reported through `report`/`error_bundle` as `error.PackageFetchFailed`.
///
/// NOTE(review): unlike `importCachedPackage`, this path does not consult
/// `all_modules` — presumably deduplication happens in the caller; confirm.
fn fetchAndUnpack(
    thread_pool: *ThreadPool,
    http_client: *std.http.Client,
    global_cache_directory: Compilation.Directory,
    dep: Manifest.Dependency,
    report: Report,
) !*Package {
    const gpa = http_client.allocator;
    const s = fs.path.sep_str;

    // Check if the expected_hash is already present in the global package
    // cache, and thereby avoid both fetching and unpacking.
    if (dep.hash) |h| cached: {
        const hex_digest = h[0..hex_multihash_len];
        const pkg_dir_sub_path = "p" ++ s ++ hex_digest;

        const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path});
        errdefer gpa.free(build_root);

        // Not yet in the cache: fall through to the network path below.
        var pkg_dir = global_cache_directory.handle.openDir(pkg_dir_sub_path, .{}) catch |err| switch (err) {
            error.FileNotFound => break :cached,
            else => |e| return e,
        };
        errdefer pkg_dir.close();

        const ptr = try gpa.create(Package);
        errdefer gpa.destroy(ptr);

        const owned_src_path = try gpa.dupe(u8, build_zig_basename);
        errdefer gpa.free(owned_src_path);

        // The returned Package owns build_root, the dir handle, and the
        // duplicated source path.
        ptr.* = .{
            .root_src_directory = .{
                .path = build_root,
                .handle = pkg_dir,
            },
            .root_src_directory_owned = true,
            .root_src_path = owned_src_path,
        };

        return ptr;
    }

    const uri = try std.Uri.parse(dep.url);

    // Download into a randomly named tmp directory so concurrent fetches
    // cannot collide; it is renamed into the cache only after hashing.
    const rand_int = std.crypto.random.int(u64);
    const tmp_dir_sub_path = "tmp" ++ s ++ Manifest.hex64(rand_int);

    const actual_hash = a: {
        var tmp_directory: Compilation.Directory = d: {
            const path = try global_cache_directory.join(gpa, &.{tmp_dir_sub_path});
            errdefer gpa.free(path);

            const iterable_dir = try global_cache_directory.handle.makeOpenPathIterable(tmp_dir_sub_path, .{});
            errdefer iterable_dir.close();

            break :d .{
                .path = path,
                .handle = iterable_dir.dir,
            };
        };
        defer tmp_directory.closeAndFree(gpa);

        var h = std.http.Headers{ .allocator = gpa };
        defer h.deinit();

        var req = try http_client.request(.GET, uri, h, .{});
        defer req.deinit();

        try req.start();
        try req.wait();

        // Anything other than 200 is surfaced as a manifest error pointing
        // at the url token.
        if (req.response.status != .ok) {
            return report.fail(dep.url_tok, "Expected response status '200 OK' got '{} {s}'", .{
                @enumToInt(req.response.status),
                req.response.status.phrase() orelse "",
            });
        }

        // The Content-Type header selects the decompression scheme; there is
        // no sniffing of the payload itself.
        const content_type = req.response.headers.getFirstValue("Content-Type") orelse
            return report.fail(dep.url_tok, "Missing 'Content-Type' header", .{});

        if (ascii.eqlIgnoreCase(content_type, "application/gzip") or
            ascii.eqlIgnoreCase(content_type, "application/x-gzip") or
            ascii.eqlIgnoreCase(content_type, "application/tar+gzip"))
        {
            // I observed the gzip stream to read 1 byte at a time, so I am using a
            // buffered reader on the front of it.
            try unpackTarball(gpa, &req, tmp_directory.handle, std.compress.gzip);
        } else if (ascii.eqlIgnoreCase(content_type, "application/x-xz")) {
            // I have not checked what buffer sizes the xz decompression implementation uses
            // by default, so the same logic applies for buffering the reader as for gzip.
            try unpackTarball(gpa, &req, tmp_directory.handle, std.compress.xz);
        } else {
            return report.fail(dep.url_tok, "Unsupported 'Content-Type' header value: '{s}'", .{content_type});
        }

        // TODO: delete files not included in the package prior to computing the package hash.
        // for example, if the ini file has directives to include/not include certain files,
        // apply those rules directly to the filesystem right here. This ensures that files
        // not protected by the hash are not present on the file system.

        // TODO: raise an error for files that have illegal paths on some operating systems.
        // For example, on Linux a path with a backslash should raise an error here.
        // Of course, if the ignore rules above omit the file from the package, then everything
        // is fine and no error should be raised.

        break :a try computePackageHash(thread_pool, .{ .dir = tmp_directory.handle });
    };

    // Atomically publish the unpacked tree under its content hash.
    const pkg_dir_sub_path = "p" ++ s ++ Manifest.hexDigest(actual_hash);
    try renameTmpIntoCache(global_cache_directory.handle, tmp_dir_sub_path, pkg_dir_sub_path);

    const actual_hex = Manifest.hexDigest(actual_hash);
    if (dep.hash) |h| {
        if (!mem.eql(u8, h, &actual_hex)) {
            return report.fail(dep.hash_tok, "hash mismatch: expected: {s}, found: {s}", .{
                h, actual_hex,
            });
        }
    } else {
        // No hash declared in the manifest: emit an error that carries the
        // computed hash as a note, so the user can paste it into the zon file.
        const file_path = try report.directory.join(gpa, &.{Manifest.basename});
        defer gpa.free(file_path);

        const eb = report.error_bundle;
        const notes_len = 1;
        try Report.addErrorMessage(report.ast.*, file_path, eb, notes_len, .{
            .tok = dep.url_tok,
            .off = 0,
            .msg = "url field is missing corresponding hash field",
        });
        const notes_start = try eb.reserveNotes(notes_len);
        eb.extra.items[notes_start] = @enumToInt(try eb.addErrorMessage(.{
            .msg = try eb.printString("expected .hash = \"{s}\",", .{&actual_hex}),
        }));
        return error.PackageFetchFailed;
    }

    return createWithDir(
        gpa,
        global_cache_directory,
        pkg_dir_sub_path,
        build_zig_basename,
    );
}

fn fetchUnpackImport(
thread_pool: *ThreadPool,
http_client: *std.http.Client,
global_cache_directory: Compilation.Directory,
dep: Manifest.Dependency,
report: Report,
build_roots_source: *std.ArrayList(u8),
fqn: []const u8,
all_modules: *AllModules,
Expand Down
Loading