diff --git a/src/Package.zig b/src/Package.zig index f4593fee4844..545656a47d16 100644 --- a/src/Package.zig +++ b/src/Package.zig @@ -214,6 +214,89 @@ pub fn getName(target: *const Package, gpa: Allocator, mod: Module) ![]const u8 pub const build_zig_basename = "build.zig"; +pub fn fetchDependencies( + arena: Allocator, + thread_pool: *ThreadPool, + http_client: *std.http.Client, + directory: Compilation.Directory, + global_cache_directory: Compilation.Directory, + local_cache_directory: Compilation.Directory, + name_prefix: []const u8, + error_bundle: *std.zig.ErrorBundle.Wip, +) !void { + const max_bytes = 10 * 1024 * 1024; + const gpa = thread_pool.allocator; + const build_zig_zon_bytes = directory.handle.readFileAllocOptions( + arena, + Manifest.basename, + max_bytes, + null, + 1, + 0, + ) catch |err| switch (err) { + error.FileNotFound => { + // Handle the same as no dependencies. + return; + }, + else => |e| return e, + }; + + var ast = try std.zig.Ast.parse(gpa, build_zig_zon_bytes, .zon); + defer ast.deinit(gpa); + + if (ast.errors.len > 0) { + const file_path = try directory.join(arena, &.{Manifest.basename}); + try main.putAstErrorsIntoBundle(gpa, ast, file_path, error_bundle); + return error.PackageFetchFailed; + } + + var manifest = try Manifest.parse(gpa, ast); + defer manifest.deinit(gpa); + + if (manifest.errors.len > 0) { + const file_path = try directory.join(arena, &.{Manifest.basename}); + for (manifest.errors) |msg| { + try Report.addErrorMessage(ast, file_path, error_bundle, 0, msg); + } + return error.PackageFetchFailed; + } + + const report: Report = .{ + .ast = &ast, + .directory = directory, + .error_bundle = error_bundle, + }; + + var any_error = false; + const deps_list = manifest.dependencies.values(); + for (manifest.dependencies.keys(), 0..) 
|name, i| { + const dep = deps_list[i]; + + const sub_prefix = try std.fmt.allocPrint(arena, "{s}{s}.", .{ name_prefix, name }); + + const sub_pkg = try fetchAndUnpack( + thread_pool, + http_client, + global_cache_directory, + dep, + report, + ); + + try fetchDependencies( + arena, + thread_pool, + http_client, + sub_pkg.root_src_directory, + global_cache_directory, + local_cache_directory, + sub_prefix, + error_bundle, + ); + } + + if (any_error) return error.InvalidBuildManifestFile; +} + pub fn fetchAndAddDependencies( pkg: *Package, root_pkg: *Package, @@ -280,7 +363,7 @@ pub fn fetchAndAddDependencies( const sub_prefix = try std.fmt.allocPrint(arena, "{s}{s}.", .{ name_prefix, name }); const fqn = sub_prefix[0 .. sub_prefix.len - 1]; - const sub_pkg = try fetchAndUnpack( + const sub_pkg = try fetchUnpackImport( thread_pool, http_client, global_cache_directory, @@ -401,12 +484,216 @@ const MultiHashHexDigest = [hex_multihash_len]u8; /// This is to avoid creating multiple modules for the same build.zig file. pub const AllModules = std.AutoHashMapUnmanaged(MultiHashHexDigest, *Package); +fn importCachedPackage( + gpa: Allocator, + global_cache_directory: Compilation.Directory, + dep: Manifest.Dependency, + build_roots_source: *std.ArrayList(u8), + fqn: []const u8, + all_modules: *AllModules, +) !*Package { + const s = fs.path.sep_str; + + // Check if the expected_hash is already present in the global package + // cache, and thereby avoid both fetching and unpacking. 
+ if (dep.hash) |h| { + const hex_digest = h[0..hex_multihash_len]; + const pkg_dir_sub_path = "p" ++ s ++ hex_digest; + + const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path}); + errdefer gpa.free(build_root); + + var pkg_dir = global_cache_directory.handle.openDir(pkg_dir_sub_path, .{}) catch |err| switch (err) { + error.FileNotFound => return error.MissingFile, //@todo + else => |e| return e, + }; + errdefer pkg_dir.close(); + + try build_roots_source.writer().print(" pub const {s} = \"{}\";\n", .{ + std.zig.fmtId(fqn), std.zig.fmtEscapes(build_root), + }); + + // The compiler has a rule that a file must not be included in multiple modules, + // so we must detect if a module has been created for this package and reuse it. + const gop = try all_modules.getOrPut(gpa, hex_digest.*); + if (gop.found_existing) { + gpa.free(build_root); + return gop.value_ptr.*; + } + + const ptr = try gpa.create(Package); + errdefer gpa.destroy(ptr); + + const owned_src_path = try gpa.dupe(u8, build_zig_basename); + errdefer gpa.free(owned_src_path); + + ptr.* = .{ + .root_src_directory = .{ + .path = build_root, + .handle = pkg_dir, + }, + .root_src_directory_owned = true, + .root_src_path = owned_src_path, + }; + + gop.value_ptr.* = ptr; + return ptr; + } + return error.MissingDependencyHash; +} + fn fetchAndUnpack( thread_pool: *ThreadPool, http_client: *std.http.Client, global_cache_directory: Compilation.Directory, dep: Manifest.Dependency, report: Report, +) !*Package { + const gpa = http_client.allocator; + const s = fs.path.sep_str; + + // Check if the expected_hash is already present in the global package + // cache, and thereby avoid both fetching and unpacking. 
+ if (dep.hash) |h| cached: { + const hex_digest = h[0..hex_multihash_len]; + const pkg_dir_sub_path = "p" ++ s ++ hex_digest; + + const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path}); + errdefer gpa.free(build_root); + + var pkg_dir = global_cache_directory.handle.openDir(pkg_dir_sub_path, .{}) catch |err| switch (err) { + error.FileNotFound => break :cached, + else => |e| return e, + }; + errdefer pkg_dir.close(); + + const ptr = try gpa.create(Package); + errdefer gpa.destroy(ptr); + + const owned_src_path = try gpa.dupe(u8, build_zig_basename); + errdefer gpa.free(owned_src_path); + + ptr.* = .{ + .root_src_directory = .{ + .path = build_root, + .handle = pkg_dir, + }, + .root_src_directory_owned = true, + .root_src_path = owned_src_path, + }; + + return ptr; + } + + const uri = try std.Uri.parse(dep.url); + + const rand_int = std.crypto.random.int(u64); + const tmp_dir_sub_path = "tmp" ++ s ++ Manifest.hex64(rand_int); + + const actual_hash = a: { + var tmp_directory: Compilation.Directory = d: { + const path = try global_cache_directory.join(gpa, &.{tmp_dir_sub_path}); + errdefer gpa.free(path); + + const iterable_dir = try global_cache_directory.handle.makeOpenPathIterable(tmp_dir_sub_path, .{}); + errdefer iterable_dir.close(); + + break :d .{ + .path = path, + .handle = iterable_dir.dir, + }; + }; + defer tmp_directory.closeAndFree(gpa); + + var h = std.http.Headers{ .allocator = gpa }; + defer h.deinit(); + + var req = try http_client.request(.GET, uri, h, .{}); + defer req.deinit(); + + try req.start(); + try req.wait(); + + if (req.response.status != .ok) { + return report.fail(dep.url_tok, "Expected response status '200 OK' got '{} {s}'", .{ + @enumToInt(req.response.status), + req.response.status.phrase() orelse "", + }); + } + + const content_type = req.response.headers.getFirstValue("Content-Type") orelse + return report.fail(dep.url_tok, "Missing 'Content-Type' header", .{}); + + if (ascii.eqlIgnoreCase(content_type, 
"application/gzip") or + ascii.eqlIgnoreCase(content_type, "application/x-gzip") or + ascii.eqlIgnoreCase(content_type, "application/tar+gzip")) + { + // I observed the gzip stream to read 1 byte at a time, so I am using a + // buffered reader on the front of it. + try unpackTarball(gpa, &req, tmp_directory.handle, std.compress.gzip); + } else if (ascii.eqlIgnoreCase(content_type, "application/x-xz")) { + // I have not checked what buffer sizes the xz decompression implementation uses + // by default, so the same logic applies for buffering the reader as for gzip. + try unpackTarball(gpa, &req, tmp_directory.handle, std.compress.xz); + } else { + return report.fail(dep.url_tok, "Unsupported 'Content-Type' header value: '{s}'", .{content_type}); + } + + // TODO: delete files not included in the package prior to computing the package hash. + // for example, if the ini file has directives to include/not include certain files, + // apply those rules directly to the filesystem right here. This ensures that files + // not protected by the hash are not present on the file system. + + // TODO: raise an error for files that have illegal paths on some operating systems. + // For example, on Linux a path with a backslash should raise an error here. + // Of course, if the ignore rules above omit the file from the package, then everything + // is fine and no error should be raised. 
+ + break :a try computePackageHash(thread_pool, .{ .dir = tmp_directory.handle }); + }; + + const pkg_dir_sub_path = "p" ++ s ++ Manifest.hexDigest(actual_hash); + try renameTmpIntoCache(global_cache_directory.handle, tmp_dir_sub_path, pkg_dir_sub_path); + + const actual_hex = Manifest.hexDigest(actual_hash); + if (dep.hash) |h| { + if (!mem.eql(u8, h, &actual_hex)) { + return report.fail(dep.hash_tok, "hash mismatch: expected: {s}, found: {s}", .{ + h, actual_hex, + }); + } + } else { + const file_path = try report.directory.join(gpa, &.{Manifest.basename}); + defer gpa.free(file_path); + + const eb = report.error_bundle; + const notes_len = 1; + try Report.addErrorMessage(report.ast.*, file_path, eb, notes_len, .{ + .tok = dep.url_tok, + .off = 0, + .msg = "url field is missing corresponding hash field", + }); + const notes_start = try eb.reserveNotes(notes_len); + eb.extra.items[notes_start] = @enumToInt(try eb.addErrorMessage(.{ + .msg = try eb.printString("expected .hash = \"{s}\",", .{&actual_hex}), + })); + return error.PackageFetchFailed; + } + + return createWithDir( + gpa, + global_cache_directory, + pkg_dir_sub_path, + build_zig_basename, + ); +} + +fn fetchUnpackImport( + thread_pool: *ThreadPool, + http_client: *std.http.Client, + global_cache_directory: Compilation.Directory, + dep: Manifest.Dependency, + report: Report, build_roots_source: *std.ArrayList(u8), fqn: []const u8, all_modules: *AllModules, diff --git a/src/main.zig b/src/main.zig index a680a5d89eba..85f458fc76b5 100644 --- a/src/main.zig +++ b/src/main.zig @@ -299,6 +299,8 @@ pub fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi return cmdBuild(gpa, arena, cmd_args); } else if (mem.eql(u8, cmd, "fmt")) { return cmdFmt(gpa, arena, cmd_args); + } else if (mem.eql(u8, cmd, "pkg")) { + return cmdPkg(gpa, arena, cmd_args); } else if (mem.eql(u8, cmd, "objcopy")) { return @import("objcopy.zig").cmdObjCopy(gpa, arena, cmd_args); } else if (mem.eql(u8, cmd, "libc")) { 
@@ -4145,6 +4147,147 @@ pub fn cmdInit( } } +pub const usage_pkg = + \\Usage: zig pkg [command] [options] + \\ + \\ Runs a package command + \\ + \\Commands: + \\ fetch fetch all dependencies found in build.zig.zon. + \\ + \\Options: + \\ -h --help Print this help and exit. + \\ +; + +pub fn cmdPkg(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { + if (args.len == 0) fatal("Expected at least one argument.\n", .{}); + + for (args) |arg| { + if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { + const stdout = io.getStdOut().writer(); + try stdout.writeAll(usage_pkg); + return cleanExit(); + } + } + + const command_arg = args[0]; + if (!mem.eql(u8, command_arg, "fetch")) { + fatal("Invalid package command: {s}\n", .{command_arg}); + } + + try cmdPkgFetch(gpa, arena); +} + +pub fn cmdPkgFetch(gpa: Allocator, arena: Allocator) !void { + if (build_options.omit_pkg_fetching_code) unreachable; + + var color: Color = .auto; + + var cleanup_build_runner_dir: ?fs.Dir = null; + defer if (cleanup_build_runner_dir) |*dir| dir.close(); + const cwd_path = try process.getCwdAlloc(arena); + + var build_file: ?[]const u8 = null; + var cleanup_build_dir: ?fs.Dir = null; + defer if (cleanup_build_dir) |*dir| dir.close(); + const build_zig_basename = if (build_file) |bf| fs.path.basename(bf) else "build.zig"; + const build_directory: Compilation.Directory = blk: { + if (build_file) |bf| { + if (fs.path.dirname(bf)) |dirname| { + const dir = fs.cwd().openDir(dirname, .{}) catch |err| { + fatal("unable to open directory to build file from argument 'build-file', '{s}': {s}", .{ dirname, @errorName(err) }); + }; + cleanup_build_dir = dir; + break :blk .{ .path = dirname, .handle = dir }; + } + break :blk .{ .path = null, .handle = fs.cwd() }; + } + // Search up parent directories until we find build.zig. 
+ var dirname: []const u8 = cwd_path; + while (true) { + const joined_path = try fs.path.join(arena, &[_][]const u8{ dirname, build_zig_basename }); + if (fs.cwd().access(joined_path, .{})) |_| { + const dir = fs.cwd().openDir(dirname, .{}) catch |err| { + fatal("unable to open directory while searching for build.zig file, '{s}': {s}", .{ dirname, @errorName(err) }); + }; + if (!mem.eql(u8, dirname, cwd_path)) { + cleanup_build_dir = dir; + } + break :blk .{ .path = dirname, .handle = dir }; + } else |err| switch (err) { + error.FileNotFound => { + dirname = fs.path.dirname(dirname) orelse { + std.log.info("{s}", .{ + \\Initialize a 'build.zig' template file with `zig init-lib` or `zig init-exe`, + \\or see `zig --help` for more options. + }); + fatal("No 'build.zig' file found, in the current directory or any parent directories.", .{}); + }; + continue; + }, + else => |e| return e, + } + } + }; + + var override_global_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_GLOBAL_CACHE_DIR"); + var global_cache_directory: Compilation.Directory = l: { + const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena); + break :l .{ + .handle = try fs.cwd().makeOpenPath(p, .{}), + .path = p, + }; + }; + defer global_cache_directory.handle.close(); + + var override_local_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_LOCAL_CACHE_DIR"); + var local_cache_directory: Compilation.Directory = l: { + if (override_local_cache_dir) |local_cache_dir_path| { + break :l .{ + .handle = try fs.cwd().makeOpenPath(local_cache_dir_path, .{}), + .path = local_cache_dir_path, + }; + } + const cache_dir_path = try build_directory.join(arena, &[_][]const u8{"zig-cache"}); + break :l .{ + .handle = try build_directory.handle.makeOpenPath("zig-cache", .{}), + .path = cache_dir_path, + }; + }; + defer local_cache_directory.handle.close(); + + var wip_errors: std.zig.ErrorBundle.Wip = undefined; + try wip_errors.init(gpa); + defer 
wip_errors.deinit(); + + var thread_pool: ThreadPool = undefined; + try thread_pool.init(.{ .allocator = gpa }); + defer thread_pool.deinit(); + + var http_client: std.http.Client = .{ .allocator = gpa }; + defer http_client.deinit(); + + const fetch_result = Package.fetchDependencies( + arena, + &thread_pool, + &http_client, + build_directory, + global_cache_directory, + local_cache_directory, + "", + &wip_errors, + ); + + if (wip_errors.root_list.items.len > 0) { + var errors = try wip_errors.toOwnedBundle(""); + defer errors.deinit(gpa); + errors.renderToStdErr(renderOptions(color)); + process.exit(1); + } + try fetch_result; +} + pub const usage_build = \\Usage: zig build [steps] [options] \\ @@ -4282,6 +4425,9 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi const dir = fs.cwd().openDir(dirname, .{}) catch |err| { fatal("unable to open directory while searching for build.zig file, '{s}': {s}", .{ dirname, @errorName(err) }); }; + if (!mem.eql(u8, dirname, cwd_path)) { + cleanup_build_dir = dir; + } break :blk .{ .path = dirname, .handle = dir }; } else |err| switch (err) { error.FileNotFound => {