diff --git a/CMakeLists.txt b/CMakeLists.txt index 5afea9354e5a..b0867c220bc1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -506,7 +506,9 @@ set(ZIG_STAGE2_SOURCES "${CMAKE_SOURCE_DIR}/lib/std/Thread.zig" "${CMAKE_SOURCE_DIR}/lib/std/Thread/Futex.zig" "${CMAKE_SOURCE_DIR}/lib/std/Thread/Mutex.zig" + "${CMAKE_SOURCE_DIR}/lib/std/Thread/Pool.zig" "${CMAKE_SOURCE_DIR}/lib/std/Thread/ResetEvent.zig" + "${CMAKE_SOURCE_DIR}/lib/std/Thread/WaitGroup.zig" "${CMAKE_SOURCE_DIR}/lib/std/time.zig" "${CMAKE_SOURCE_DIR}/lib/std/treap.zig" "${CMAKE_SOURCE_DIR}/lib/std/unicode.zig" @@ -516,6 +518,7 @@ set(ZIG_STAGE2_SOURCES "${CMAKE_SOURCE_DIR}/lib/std/zig/c_builtins.zig" "${CMAKE_SOURCE_DIR}/lib/std/zig/Parse.zig" "${CMAKE_SOURCE_DIR}/lib/std/zig/render.zig" + "${CMAKE_SOURCE_DIR}/lib/std/zig/Server.zig" "${CMAKE_SOURCE_DIR}/lib/std/zig/string_literal.zig" "${CMAKE_SOURCE_DIR}/lib/std/zig/system.zig" "${CMAKE_SOURCE_DIR}/lib/std/zig/system/NativePaths.zig" @@ -530,9 +533,7 @@ set(ZIG_STAGE2_SOURCES "${CMAKE_SOURCE_DIR}/src/Package.zig" "${CMAKE_SOURCE_DIR}/src/RangeSet.zig" "${CMAKE_SOURCE_DIR}/src/Sema.zig" - "${CMAKE_SOURCE_DIR}/src/ThreadPool.zig" "${CMAKE_SOURCE_DIR}/src/TypedValue.zig" - "${CMAKE_SOURCE_DIR}/src/WaitGroup.zig" "${CMAKE_SOURCE_DIR}/src/Zir.zig" "${CMAKE_SOURCE_DIR}/src/arch/aarch64/CodeGen.zig" "${CMAKE_SOURCE_DIR}/src/arch/aarch64/Emit.zig" diff --git a/build.zig b/build.zig index 12e5d014e2ca..a0c4ca7fb45e 100644 --- a/build.zig +++ b/build.zig @@ -31,6 +31,11 @@ pub fn build(b: *std.Build) !void { const use_zig_libcxx = b.option(bool, "use-zig-libcxx", "If libc++ is needed, use zig's bundled version, don't try to integrate with the system") orelse false; const test_step = b.step("test", "Run all the tests"); + const deprecated_skip_install_lib_files = b.option(bool, "skip-install-lib-files", "deprecated. see no-lib") orelse false; + if (deprecated_skip_install_lib_files) { + std.log.warn("-Dskip-install-lib-files is deprecated in favor of -Dno-lib", .{}); + } + const skip_install_lib_files = b.option(bool, "no-lib", "skip copying of lib/ files and langref to installation prefix. Useful for development") orelse deprecated_skip_install_lib_files; const docgen_exe = b.addExecutable(.{ .name = "docgen", @@ -40,28 +45,32 @@ pub fn build(b: *std.Build) !void { }); docgen_exe.single_threaded = single_threaded; - const langref_out_path = try b.cache_root.join(b.allocator, &.{"langref.html"}); - const docgen_cmd = docgen_exe.run(); - docgen_cmd.addArgs(&[_][]const u8{ - "--zig", - b.zig_exe, - "doc" ++ fs.path.sep_str ++ "langref.html.in", - langref_out_path, - }); - docgen_cmd.step.dependOn(&docgen_exe.step); + const docgen_cmd = b.addRunArtifact(docgen_exe); + docgen_cmd.addArgs(&.{ "--zig", b.zig_exe }); + docgen_cmd.addFileSourceArg(.{ .path = "doc/langref.html.in" }); + const langref_file = docgen_cmd.addOutputFileArg("langref.html"); + const install_langref = b.addInstallFileWithDir(langref_file, .prefix, "doc/langref.html"); + if (!skip_install_lib_files) { + b.getInstallStep().dependOn(&install_langref.step); + } const docs_step = b.step("docs", "Build documentation"); docs_step.dependOn(&docgen_cmd.step); - const test_cases = b.addTest(.{ - .root_source_file = .{ .path = "src/test.zig" }, + // This is for legacy reasons, to be removed after our CI scripts are upgraded to use + // the file from the install prefix instead. 
+ const legacy_write_to_cache = b.addWriteFiles(); + legacy_write_to_cache.addCopyFileToSource(langref_file, "zig-cache/langref.html"); + docs_step.dependOn(&legacy_write_to_cache.step); + + const check_case_exe = b.addExecutable(.{ + .name = "check-case", + .root_source_file = .{ .path = "test/src/Cases.zig" }, .optimize = optimize, }); - test_cases.main_pkg_path = "."; - test_cases.stack_size = stack_size; - test_cases.single_threaded = single_threaded; - - const fmt_build_zig = b.addFmt(&[_][]const u8{"build.zig"}); + check_case_exe.main_pkg_path = "."; + check_case_exe.stack_size = stack_size; + check_case_exe.single_threaded = single_threaded; const skip_debug = b.option(bool, "skip-debug", "Main test suite skips debug builds") orelse false; const skip_release = b.option(bool, "skip-release", "Main test suite skips release builds") orelse false; @@ -74,11 +83,6 @@ pub fn build(b: *std.Build) !void { const skip_stage1 = b.option(bool, "skip-stage1", "Main test suite skips stage1 compile error tests") orelse false; const skip_run_translated_c = b.option(bool, "skip-run-translated-c", "Main test suite skips run-translated-c tests") orelse false; const skip_stage2_tests = b.option(bool, "skip-stage2-tests", "Main test suite skips self-hosted compiler tests") orelse false; - const deprecated_skip_install_lib_files = b.option(bool, "skip-install-lib-files", "deprecated. see no-lib") orelse false; - if (deprecated_skip_install_lib_files) { - std.log.warn("-Dskip-install-lib-files is deprecated in favor of -Dno-lib", .{}); - } - const skip_install_lib_files = b.option(bool, "no-lib", "skip copying of lib/ files to installation prefix. Useful for development") orelse deprecated_skip_install_lib_files; const only_install_lib_files = b.option(bool, "lib-files-only", "Only install library files") orelse false; @@ -175,13 +179,12 @@ pub fn build(b: *std.Build) !void { test_step.dependOn(&exe.step); } - b.default_step.dependOn(&exe.step); exe.single_threaded = single_threaded; if (target.isWindows() and target.getAbi() == .gnu) { // LTO is currently broken on mingw, this can be removed when it's fixed. exe.want_lto = false; - test_cases.want_lto = false; + check_case_exe.want_lto = false; } const exe_options = b.addOptions(); @@ -195,11 +198,11 @@ pub fn build(b: *std.Build) !void { exe_options.addOption(bool, "llvm_has_arc", llvm_has_arc); exe_options.addOption(bool, "force_gpa", force_gpa); exe_options.addOption(bool, "only_c", only_c); - exe_options.addOption(bool, "omit_pkg_fetching_code", false); + exe_options.addOption(bool, "omit_pkg_fetching_code", only_c); if (link_libc) { exe.linkLibC(); - test_cases.linkLibC(); + check_case_exe.linkLibC(); } const is_debug = optimize == .Debug; @@ -285,14 +288,14 @@ pub fn build(b: *std.Build) !void { } try addCmakeCfgOptionsToExe(b, cfg, exe, use_zig_libcxx); - try addCmakeCfgOptionsToExe(b, cfg, test_cases, use_zig_libcxx); + try addCmakeCfgOptionsToExe(b, cfg, check_case_exe, use_zig_libcxx); } else { // Here we are -Denable-llvm but no cmake integration. 
try addStaticLlvmOptionsToExe(exe); - try addStaticLlvmOptionsToExe(test_cases); + try addStaticLlvmOptionsToExe(check_case_exe); } if (target.isWindows()) { - inline for (.{ exe, test_cases }) |artifact| { + inline for (.{ exe, check_case_exe }) |artifact| { artifact.linkSystemLibrary("version"); artifact.linkSystemLibrary("uuid"); artifact.linkSystemLibrary("ole32"); @@ -337,8 +340,9 @@ pub fn build(b: *std.Build) !void { const test_filter = b.option([]const u8, "test-filter", "Skip tests that do not match filter"); const test_cases_options = b.addOptions(); - test_cases.addOptions("build_options", test_cases_options); + check_case_exe.addOptions("build_options", test_cases_options); + test_cases_options.addOption(bool, "enable_tracy", false); test_cases_options.addOption(bool, "enable_logging", enable_logging); test_cases_options.addOption(bool, "enable_link_snapshots", enable_link_snapshots); test_cases_options.addOption(bool, "skip_non_native", skip_non_native); @@ -361,12 +365,6 @@ pub fn build(b: *std.Build) !void { test_cases_options.addOption(std.SemanticVersion, "semver", semver); test_cases_options.addOption(?[]const u8, "test_filter", test_filter); - const test_cases_step = b.step("test-cases", "Run the main compiler test cases"); - test_cases_step.dependOn(&test_cases.step); - if (!skip_stage2_tests) { - test_step.dependOn(test_cases_step); - } - var chosen_opt_modes_buf: [4]builtin.Mode = undefined; var chosen_mode_index: usize = 0; if (!skip_debug) { @@ -387,96 +385,101 @@ pub fn build(b: *std.Build) !void { } const optimization_modes = chosen_opt_modes_buf[0..chosen_mode_index]; - // run stage1 `zig fmt` on this build.zig file just to make sure it works - test_step.dependOn(&fmt_build_zig.step); - const fmt_step = b.step("test-fmt", "Run zig fmt against build.zig to make sure it works"); - fmt_step.dependOn(&fmt_build_zig.step); - - test_step.dependOn(tests.addPkgTests( - b, - test_filter, - "test/behavior.zig", - "behavior", - "Run the behavior tests", - optimization_modes, - skip_single_threaded, - skip_non_native, - skip_libc, - skip_stage1, - skip_stage2_tests, - )); + const fmt_include_paths = &.{ "doc", "lib", "src", "test", "tools", "build.zig" }; + const fmt_exclude_paths = &.{"test/cases"}; + const do_fmt = b.addFmt(.{ + .paths = fmt_include_paths, + .exclude_paths = fmt_exclude_paths, + }); - test_step.dependOn(tests.addPkgTests( - b, - test_filter, - "lib/compiler_rt.zig", - "compiler-rt", - "Run the compiler_rt tests", - optimization_modes, - true, // skip_single_threaded - skip_non_native, - true, // skip_libc - skip_stage1, - skip_stage2_tests or true, // TODO get these all passing - )); + b.step("test-fmt", "Check source files having conforming formatting").dependOn(&b.addFmt(.{ + .paths = fmt_include_paths, + .exclude_paths = fmt_exclude_paths, + .check = true, + }).step); - test_step.dependOn(tests.addPkgTests( - b, - test_filter, - "lib/c.zig", - "universal-libc", - "Run the universal libc tests", - optimization_modes, - true, // skip_single_threaded - skip_non_native, - true, // skip_libc - skip_stage1, - skip_stage2_tests or true, // TODO get these all passing - )); + const test_cases_step = b.step("test-cases", "Run the main compiler test cases"); + try tests.addCases(b, test_cases_step, test_filter, check_case_exe); + if (!skip_stage2_tests) test_step.dependOn(test_cases_step); + + test_step.dependOn(tests.addModuleTests(b, .{ + .test_filter = test_filter, + .root_src = "test/behavior.zig", + .name = "behavior", + .desc = "Run the behavior tests", + 
.optimize_modes = optimization_modes, + .skip_single_threaded = skip_single_threaded, + .skip_non_native = skip_non_native, + .skip_libc = skip_libc, + .skip_stage1 = skip_stage1, + .skip_stage2 = skip_stage2_tests, + .max_rss = 1 * 1024 * 1024 * 1024, + })); + + test_step.dependOn(tests.addModuleTests(b, .{ + .test_filter = test_filter, + .root_src = "lib/compiler_rt.zig", + .name = "compiler-rt", + .desc = "Run the compiler_rt tests", + .optimize_modes = optimization_modes, + .skip_single_threaded = true, + .skip_non_native = skip_non_native, + .skip_libc = true, + .skip_stage1 = skip_stage1, + .skip_stage2 = true, // TODO get all these passing + })); + + test_step.dependOn(tests.addModuleTests(b, .{ + .test_filter = test_filter, + .root_src = "lib/c.zig", + .name = "universal-libc", + .desc = "Run the universal libc tests", + .optimize_modes = optimization_modes, + .skip_single_threaded = true, + .skip_non_native = skip_non_native, + .skip_libc = true, + .skip_stage1 = skip_stage1, + .skip_stage2 = true, // TODO get all these passing + })); test_step.dependOn(tests.addCompareOutputTests(b, test_filter, optimization_modes)); test_step.dependOn(tests.addStandaloneTests( b, - test_filter, optimization_modes, - skip_non_native, enable_macos_sdk, - target, skip_stage2_tests, - b.enable_darling, - b.enable_qemu, - b.enable_rosetta, - b.enable_wasmtime, - b.enable_wine, enable_symlinks_windows, )); test_step.dependOn(tests.addCAbiTests(b, skip_non_native, skip_release)); - test_step.dependOn(tests.addLinkTests(b, test_filter, optimization_modes, enable_macos_sdk, skip_stage2_tests, enable_symlinks_windows)); + test_step.dependOn(tests.addLinkTests(b, enable_macos_sdk, skip_stage2_tests, enable_symlinks_windows)); test_step.dependOn(tests.addStackTraceTests(b, test_filter, optimization_modes)); - test_step.dependOn(tests.addCliTests(b, test_filter, optimization_modes)); + test_step.dependOn(tests.addCliTests(b)); test_step.dependOn(tests.addAssembleAndLinkTests(b, test_filter, optimization_modes)); test_step.dependOn(tests.addTranslateCTests(b, test_filter)); if (!skip_run_translated_c) { test_step.dependOn(tests.addRunTranslatedCTests(b, test_filter, target)); } - // tests for this feature are disabled until we have the self-hosted compiler available - // test_step.dependOn(tests.addGenHTests(b, test_filter)); - test_step.dependOn(tests.addPkgTests( - b, - test_filter, - "lib/std/std.zig", - "std", - "Run the standard library tests", - optimization_modes, - skip_single_threaded, - skip_non_native, - skip_libc, - skip_stage1, - true, // TODO get these all passing - )); + test_step.dependOn(tests.addModuleTests(b, .{ + .test_filter = test_filter, + .root_src = "lib/std/std.zig", + .name = "std", + .desc = "Run the standard library tests", + .optimize_modes = optimization_modes, + .skip_single_threaded = skip_single_threaded, + .skip_non_native = skip_non_native, + .skip_libc = skip_libc, + .skip_stage1 = skip_stage1, + .skip_stage2 = true, // TODO get all these passing + // I observed a value of 3398275072 on my M1, and multiplied by 1.1 to + // get this amount: + .max_rss = 3738102579, + })); try addWasiUpdateStep(b, version); + + b.step("fmt", "Modify source files in place to have conforming formatting") + .dependOn(&do_fmt.step); } fn addWasiUpdateStep(b: *std.Build, version: [:0]const u8) !void { @@ -505,6 +508,7 @@ fn addWasiUpdateStep(b: *std.Build, version: [:0]const u8) !void { exe_options.addOption(bool, "enable_tracy_callstack", false); exe_options.addOption(bool, 
"enable_tracy_allocation", false); exe_options.addOption(bool, "value_tracing", false); + exe_options.addOption(bool, "omit_pkg_fetching_code", true); const run_opt = b.addSystemCommand(&.{ "wasm-opt", "-Oz", "--enable-bulk-memory" }); run_opt.addArtifactArg(exe); @@ -676,10 +680,7 @@ fn addCxxKnownPath( ) !void { if (!std.process.can_spawn) return error.RequiredLibraryNotFound; - const path_padded = try b.exec(&[_][]const u8{ - ctx.cxx_compiler, - b.fmt("-print-file-name={s}", .{objname}), - }); + const path_padded = b.exec(&.{ ctx.cxx_compiler, b.fmt("-print-file-name={s}", .{objname}) }); var tokenizer = mem.tokenize(u8, path_padded, "\r\n"); const path_unpadded = tokenizer.next().?; if (mem.eql(u8, path_unpadded, objname)) { diff --git a/ci/aarch64-linux-debug.sh b/ci/aarch64-linux-debug.sh index 94f40c557b90..ba4bcc0c9da4 100644 --- a/ci/aarch64-linux-debug.sh +++ b/ci/aarch64-linux-debug.sh @@ -67,7 +67,7 @@ stage3-debug/bin/zig build test docs \ --zig-lib-dir "$(pwd)/../lib" # Look for HTML errors. -tidy --drop-empty-elements no -qe "$ZIG_LOCAL_CACHE_DIR/langref.html" +tidy --drop-empty-elements no -qe "stage3-debug/doc/langref.html" # Produce the experimental std lib documentation. stage3-debug/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib diff --git a/ci/aarch64-linux-release.sh b/ci/aarch64-linux-release.sh index 65d6063f250e..b3837c49424d 100644 --- a/ci/aarch64-linux-release.sh +++ b/ci/aarch64-linux-release.sh @@ -67,7 +67,7 @@ stage3-release/bin/zig build test docs \ --zig-lib-dir "$(pwd)/../lib" # Look for HTML errors. -tidy --drop-empty-elements no -qe "$ZIG_LOCAL_CACHE_DIR/langref.html" +tidy --drop-empty-elements no -qe "stage3-release/doc/langref.html" # Produce the experimental std lib documentation. stage3-release/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib diff --git a/ci/x86_64-linux-debug.sh b/ci/x86_64-linux-debug.sh index 7f2382f04a69..37dcacb86d50 100755 --- a/ci/x86_64-linux-debug.sh +++ b/ci/x86_64-linux-debug.sh @@ -66,7 +66,7 @@ stage3-debug/bin/zig build test docs \ --zig-lib-dir "$(pwd)/../lib" # Look for HTML errors. -tidy --drop-empty-elements no -qe "$ZIG_LOCAL_CACHE_DIR/langref.html" +tidy --drop-empty-elements no -qe "stage3-debug/doc/langref.html" # Produce the experimental std lib documentation. stage3-debug/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib diff --git a/ci/x86_64-linux-release.sh b/ci/x86_64-linux-release.sh index cdb24e4a6f94..01088a793c8c 100755 --- a/ci/x86_64-linux-release.sh +++ b/ci/x86_64-linux-release.sh @@ -67,7 +67,7 @@ stage3-release/bin/zig build test docs \ --zig-lib-dir "$(pwd)/../lib" # Look for HTML errors. -tidy --drop-empty-elements no -qe "$ZIG_LOCAL_CACHE_DIR/langref.html" +tidy --drop-empty-elements no -qe "stage3-release/doc/langref.html" # Produce the experimental std lib documentation. 
stage3-release/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib diff --git a/doc/docgen.zig b/doc/docgen.zig index fae513f8c3cd..67163ca42714 100644 --- a/doc/docgen.zig +++ b/doc/docgen.zig @@ -1270,7 +1270,7 @@ fn genHtml( zig_exe: []const u8, do_code_tests: bool, ) !void { - var progress = Progress{}; + var progress = Progress{ .dont_print_on_dumb = true }; const root_node = progress.start("Generating docgen examples", toc.nodes.len); defer root_node.end(); diff --git a/lib/build_runner.zig b/lib/build_runner.zig index 64421d103172..5f5601a68d99 100644 --- a/lib/build_runner.zig +++ b/lib/build_runner.zig @@ -1,12 +1,14 @@ const root = @import("@build"); const std = @import("std"); const builtin = @import("builtin"); +const assert = std.debug.assert; const io = std.io; const fmt = std.fmt; const mem = std.mem; const process = std.process; const ArrayList = std.ArrayList; const File = std.fs.File; +const Step = std.Build.Step; pub const dependencies = @import("@dependencies"); @@ -14,12 +16,15 @@ pub fn main() !void { // Here we use an ArenaAllocator backed by a DirectAllocator because a build is a short-lived, // one shot program. We don't need to waste time freeing memory and finding places to squish // bytes into. So we free everything all at once at the very end. - var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator); - defer arena.deinit(); + var single_threaded_arena = std.heap.ArenaAllocator.init(std.heap.page_allocator); + defer single_threaded_arena.deinit(); - const allocator = arena.allocator(); - var args = try process.argsAlloc(allocator); - defer process.argsFree(allocator, args); + var thread_safe_arena: std.heap.ThreadSafeAllocator = .{ + .child_allocator = single_threaded_arena.allocator(), + }; + const arena = thread_safe_arena.allocator(); + + var args = try process.argsAlloc(arena); // skip my own exe name var arg_idx: usize = 1; @@ -59,18 +64,17 @@ pub fn main() !void { }; var cache: std.Build.Cache = .{ - .gpa = allocator, + .gpa = arena, .manifest_dir = try local_cache_directory.handle.makeOpenPath("h", .{}), }; cache.addPrefix(.{ .path = null, .handle = std.fs.cwd() }); cache.addPrefix(build_root_directory); cache.addPrefix(local_cache_directory); cache.addPrefix(global_cache_directory); - - //cache.hash.addBytes(builtin.zig_version); + cache.hash.addBytes(builtin.zig_version_string); const builder = try std.Build.create( - allocator, + arena, zig_exe, build_root_directory, local_cache_directory, @@ -80,35 +84,34 @@ pub fn main() !void { ); defer builder.destroy(); - var targets = ArrayList([]const u8).init(allocator); - var debug_log_scopes = ArrayList([]const u8).init(allocator); - - const stderr_stream = io.getStdErr().writer(); - const stdout_stream = io.getStdOut().writer(); + var targets = ArrayList([]const u8).init(arena); + var debug_log_scopes = ArrayList([]const u8).init(arena); + var thread_pool_options: std.Thread.Pool.Options = .{ .allocator = arena }; var install_prefix: ?[]const u8 = null; var dir_list = std.Build.DirList{}; + var enable_summary: ?bool = null; + var max_rss: usize = 0; + var color: Color = .auto; - // before arg parsing, check for the NO_COLOR environment variable - // if it exists, default the color setting to .off - // explicit --color arguments will still override this setting. 
- builder.color = if (std.process.hasEnvVarConstant("NO_COLOR")) .off else .auto; + const stderr_stream = io.getStdErr().writer(); + const stdout_stream = io.getStdOut().writer(); while (nextArg(args, &arg_idx)) |arg| { if (mem.startsWith(u8, arg, "-D")) { const option_contents = arg[2..]; if (option_contents.len == 0) { std.debug.print("Expected option name after '-D'\n\n", .{}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); } if (mem.indexOfScalar(u8, option_contents, '=')) |name_end| { const option_name = option_contents[0..name_end]; const option_value = option_contents[name_end + 1 ..]; if (try builder.addUserInputOption(option_name, option_value)) - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); } else { if (try builder.addUserInputFlag(option_contents)) - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); } } else if (mem.startsWith(u8, arg, "-")) { if (mem.eql(u8, arg, "--verbose")) { @@ -118,69 +121,83 @@ pub fn main() !void { } else if (mem.eql(u8, arg, "-p") or mem.eql(u8, arg, "--prefix")) { install_prefix = nextArg(args, &arg_idx) orelse { std.debug.print("Expected argument after {s}\n\n", .{arg}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; } else if (mem.eql(u8, arg, "-l") or mem.eql(u8, arg, "--list-steps")) { return steps(builder, false, stdout_stream); } else if (mem.eql(u8, arg, "--prefix-lib-dir")) { dir_list.lib_dir = nextArg(args, &arg_idx) orelse { std.debug.print("Expected argument after {s}\n\n", .{arg}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; } else if (mem.eql(u8, arg, "--prefix-exe-dir")) { dir_list.exe_dir = nextArg(args, &arg_idx) orelse { std.debug.print("Expected argument after {s}\n\n", .{arg}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; } else if (mem.eql(u8, arg, "--prefix-include-dir")) { dir_list.include_dir = nextArg(args, &arg_idx) orelse { std.debug.print("Expected argument after {s}\n\n", .{arg}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; } else if (mem.eql(u8, arg, "--sysroot")) { const sysroot = nextArg(args, &arg_idx) orelse { std.debug.print("Expected argument after --sysroot\n\n", .{}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; builder.sysroot = sysroot; + } else if (mem.eql(u8, arg, "--maxrss")) { + const max_rss_text = nextArg(args, &arg_idx) orelse { + std.debug.print("Expected argument after --sysroot\n\n", .{}); + usageAndErr(builder, false, stderr_stream); + }; + // TODO: support shorthand such as "2GiB", "2GB", or "2G" + max_rss = std.fmt.parseInt(usize, max_rss_text, 10) catch |err| { + std.debug.print("invalid byte size: '{s}': {s}\n", .{ + max_rss_text, @errorName(err), + }); + process.exit(1); + }; } else if (mem.eql(u8, arg, "--search-prefix")) { const search_prefix = nextArg(args, &arg_idx) orelse { std.debug.print("Expected argument after --search-prefix\n\n", .{}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; builder.addSearchPrefix(search_prefix); } else if (mem.eql(u8, arg, "--libc")) { const libc_file = nextArg(args, &arg_idx) orelse { std.debug.print("Expected argument after --libc\n\n", .{}); - return usageAndErr(builder, false, 
stderr_stream); + usageAndErr(builder, false, stderr_stream); }; builder.libc_file = libc_file; } else if (mem.eql(u8, arg, "--color")) { const next_arg = nextArg(args, &arg_idx) orelse { std.debug.print("expected [auto|on|off] after --color", .{}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; - builder.color = std.meta.stringToEnum(@TypeOf(builder.color), next_arg) orelse { + color = std.meta.stringToEnum(Color, next_arg) orelse { std.debug.print("expected [auto|on|off] after --color, found '{s}'", .{next_arg}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; } else if (mem.eql(u8, arg, "--zig-lib-dir")) { builder.zig_lib_dir = nextArg(args, &arg_idx) orelse { std.debug.print("Expected argument after --zig-lib-dir\n\n", .{}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; } else if (mem.eql(u8, arg, "--debug-log")) { const next_arg = nextArg(args, &arg_idx) orelse { std.debug.print("Expected argument after {s}\n\n", .{arg}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; try debug_log_scopes.append(next_arg); + } else if (mem.eql(u8, arg, "--debug-pkg-config")) { + builder.debug_pkg_config = true; } else if (mem.eql(u8, arg, "--debug-compile-errors")) { builder.debug_compile_errors = true; } else if (mem.eql(u8, arg, "--glibc-runtimes")) { builder.glibc_runtimes_dir = nextArg(args, &arg_idx) orelse { std.debug.print("Expected argument after --glibc-runtimes\n\n", .{}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); }; } else if (mem.eql(u8, arg, "--verbose-link")) { builder.verbose_link = true; @@ -194,8 +211,6 @@ pub fn main() !void { builder.verbose_cc = true; } else if (mem.eql(u8, arg, "--verbose-llvm-cpu-features")) { builder.verbose_llvm_cpu_features = true; - } else if (mem.eql(u8, arg, "--prominent-compile-errors")) { - builder.prominent_compile_errors = true; } else if (mem.eql(u8, arg, "-fwine")) { builder.enable_wine = true; } else if (mem.eql(u8, arg, "-fno-wine")) { @@ -216,6 +231,10 @@ pub fn main() !void { builder.enable_darling = true; } else if (mem.eql(u8, arg, "-fno-darling")) { builder.enable_darling = false; + } else if (mem.eql(u8, arg, "-fsummary")) { + enable_summary = true; + } else if (mem.eql(u8, arg, "-fno-summary")) { + enable_summary = false; } else if (mem.eql(u8, arg, "-freference-trace")) { builder.reference_trace = 256; } else if (mem.startsWith(u8, arg, "-freference-trace=")) { @@ -226,39 +245,639 @@ pub fn main() !void { }; } else if (mem.eql(u8, arg, "-fno-reference-trace")) { builder.reference_trace = null; + } else if (mem.startsWith(u8, arg, "-j")) { + const num = arg["-j".len..]; + const n_jobs = std.fmt.parseUnsigned(u32, num, 10) catch |err| { + std.debug.print("unable to parse jobs count '{s}': {s}", .{ + num, @errorName(err), + }); + process.exit(1); + }; + if (n_jobs < 1) { + std.debug.print("number of jobs must be at least 1\n", .{}); + process.exit(1); + } + thread_pool_options.n_jobs = n_jobs; } else if (mem.eql(u8, arg, "--")) { builder.args = argsRest(args, arg_idx); break; } else { std.debug.print("Unrecognized argument: {s}\n\n", .{arg}); - return usageAndErr(builder, false, stderr_stream); + usageAndErr(builder, false, stderr_stream); } } else { try targets.append(arg); } } + const stderr = std.io.getStdErr(); + const ttyconf = get_tty_conf(color, stderr); + switch 
(ttyconf) { + .no_color => try builder.env_map.put("NO_COLOR", "1"), + .escape_codes => try builder.env_map.put("ZIG_DEBUG_COLOR", "1"), + .windows_api => {}, + } + + var progress: std.Progress = .{ .dont_print_on_dumb = true }; + const main_progress_node = progress.start("", 0); + builder.debug_log_scopes = debug_log_scopes.items; builder.resolveInstallPrefix(install_prefix, dir_list); - try builder.runBuild(root); + { + var prog_node = main_progress_node.start("user build.zig logic", 0); + defer prog_node.end(); + try builder.runBuild(root); + } if (builder.validateUserInputDidItFail()) - return usageAndErr(builder, true, stderr_stream); + usageAndErr(builder, true, stderr_stream); - builder.make(targets.items) catch |err| { - switch (err) { - error.InvalidStepName => { - return usageAndErr(builder, true, stderr_stream); + var run: Run = .{ + .max_rss = max_rss, + .max_rss_is_default = false, + .max_rss_mutex = .{}, + .memory_blocked_steps = std.ArrayList(*Step).init(arena), + + .claimed_rss = 0, + .enable_summary = enable_summary, + .ttyconf = ttyconf, + .stderr = stderr, + }; + + if (run.max_rss == 0) { + run.max_rss = process.totalSystemMemory() catch std.math.maxInt(usize); + run.max_rss_is_default = true; + } + + runStepNames( + arena, + builder, + targets.items, + main_progress_node, + thread_pool_options, + &run, + ) catch |err| switch (err) { + error.UncleanExit => process.exit(1), + else => return err, + }; +} + +const Run = struct { + max_rss: usize, + max_rss_is_default: bool, + max_rss_mutex: std.Thread.Mutex, + memory_blocked_steps: std.ArrayList(*Step), + + claimed_rss: usize, + enable_summary: ?bool, + ttyconf: std.debug.TTY.Config, + stderr: std.fs.File, +}; + +fn runStepNames( + arena: std.mem.Allocator, + b: *std.Build, + step_names: []const []const u8, + parent_prog_node: *std.Progress.Node, + thread_pool_options: std.Thread.Pool.Options, + run: *Run, +) !void { + const gpa = b.allocator; + var step_stack: std.AutoArrayHashMapUnmanaged(*Step, void) = .{}; + defer step_stack.deinit(gpa); + + if (step_names.len == 0) { + try step_stack.put(gpa, b.default_step, {}); + } else { + try step_stack.ensureUnusedCapacity(gpa, step_names.len); + for (0..step_names.len) |i| { + const step_name = step_names[step_names.len - i - 1]; + const s = b.top_level_steps.get(step_name) orelse { + std.debug.print("no step named '{s}'. Access the help menu with 'zig build -h'\n", .{step_name}); + process.exit(1); + }; + step_stack.putAssumeCapacity(&s.step, {}); + } + } + + const starting_steps = try arena.dupe(*Step, step_stack.keys()); + for (starting_steps) |s| { + checkForDependencyLoop(b, s, &step_stack) catch |err| switch (err) { + error.DependencyLoopDetected => return error.UncleanExit, + else => |e| return e, + }; + } + + { + // Check that we have enough memory to complete the build. 
+ var any_problems = false; + for (step_stack.keys()) |s| { + if (s.max_rss == 0) continue; + if (s.max_rss > run.max_rss) { + std.debug.print("{s}{s}: this step declares an upper bound of {d} bytes of memory, exceeding the available {d} bytes of memory\n", .{ + s.owner.dep_prefix, s.name, s.max_rss, run.max_rss, + }); + any_problems = true; + } + } + if (any_problems) { + if (run.max_rss_is_default) { + std.debug.print("note: use --maxrss to override the default", .{}); + } + return error.UncleanExit; + } + } + + var thread_pool: std.Thread.Pool = undefined; + try thread_pool.init(thread_pool_options); + defer thread_pool.deinit(); + + { + defer parent_prog_node.end(); + + var step_prog = parent_prog_node.start("steps", step_stack.count()); + defer step_prog.end(); + + var wait_group: std.Thread.WaitGroup = .{}; + defer wait_group.wait(); + + // Here we spawn the initial set of tasks with a nice heuristic - + // dependency order. Each worker when it finishes a step will then + // check whether it should run any dependants. + const steps_slice = step_stack.keys(); + for (0..steps_slice.len) |i| { + const step = steps_slice[steps_slice.len - i - 1]; + + wait_group.start(); + thread_pool.spawn(workerMakeOneStep, .{ + &wait_group, &thread_pool, b, step, &step_prog, run, + }) catch @panic("OOM"); + } + } + assert(run.memory_blocked_steps.items.len == 0); + + var test_skip_count: usize = 0; + var test_fail_count: usize = 0; + var test_pass_count: usize = 0; + var test_leak_count: usize = 0; + var test_count: usize = 0; + + var success_count: usize = 0; + var skipped_count: usize = 0; + var failure_count: usize = 0; + var pending_count: usize = 0; + var total_compile_errors: usize = 0; + var compile_error_steps: std.ArrayListUnmanaged(*Step) = .{}; + defer compile_error_steps.deinit(gpa); + + for (step_stack.keys()) |s| { + test_fail_count += s.test_results.fail_count; + test_skip_count += s.test_results.skip_count; + test_leak_count += s.test_results.leak_count; + test_pass_count += s.test_results.passCount(); + test_count += s.test_results.test_count; + + switch (s.state) { + .precheck_unstarted => unreachable, + .precheck_started => unreachable, + .running => unreachable, + .precheck_done => { + // precheck_done is equivalent to dependency_failure in the case of + // transitive dependencies. For example: + // A -> B -> C (failure) + // B will be marked as dependency_failure, while A may never be queued, and thus + // remain in the initial state of precheck_done. + s.state = .dependency_failure; + pending_count += 1; + }, + .dependency_failure => pending_count += 1, + .success => success_count += 1, + .skipped => skipped_count += 1, + .failure => { + failure_count += 1; + const compile_errors_len = s.result_error_bundle.errorMessageCount(); + if (compile_errors_len > 0) { + total_compile_errors += compile_errors_len; + try compile_error_steps.append(gpa, s); + } }, - error.UncleanExit => process.exit(1), - // This error is intended to indicate that the step has already - // logged an error message and so printing the error return trace - // here would be unwanted extra information, unless the user opts - // into it with a debug flag. - error.StepFailed => process.exit(1), - else => return err, } - }; + } + + // A proper command line application defaults to silently succeeding. + // The user may request verbose mode if they have a different preference. 
+ if (failure_count == 0 and run.enable_summary != true) return cleanExit(); + + const ttyconf = run.ttyconf; + const stderr = run.stderr; + + if (run.enable_summary != false) { + const total_count = success_count + failure_count + pending_count + skipped_count; + ttyconf.setColor(stderr, .Cyan) catch {}; + stderr.writeAll("Build Summary:") catch {}; + ttyconf.setColor(stderr, .Reset) catch {}; + stderr.writer().print(" {d}/{d} steps succeeded", .{ success_count, total_count }) catch {}; + if (skipped_count > 0) stderr.writer().print("; {d} skipped", .{skipped_count}) catch {}; + if (failure_count > 0) stderr.writer().print("; {d} failed", .{failure_count}) catch {}; + + if (test_count > 0) stderr.writer().print("; {d}/{d} tests passed", .{ test_pass_count, test_count }) catch {}; + if (test_skip_count > 0) stderr.writer().print("; {d} skipped", .{test_skip_count}) catch {}; + if (test_fail_count > 0) stderr.writer().print("; {d} failed", .{test_fail_count}) catch {}; + if (test_leak_count > 0) stderr.writer().print("; {d} leaked", .{test_leak_count}) catch {}; + + if (run.enable_summary == null) { + ttyconf.setColor(stderr, .Dim) catch {}; + stderr.writeAll(" (disable with -fno-summary)") catch {}; + ttyconf.setColor(stderr, .Reset) catch {}; + } + stderr.writeAll("\n") catch {}; + + // Print a fancy tree with build results. + var print_node: PrintNode = .{ .parent = null }; + if (step_names.len == 0) { + print_node.last = true; + printTreeStep(b, b.default_step, stderr, ttyconf, &print_node, &step_stack) catch {}; + } else { + for (step_names, 0..) |step_name, i| { + const tls = b.top_level_steps.get(step_name).?; + print_node.last = i + 1 == b.top_level_steps.count(); + printTreeStep(b, &tls.step, stderr, ttyconf, &print_node, &step_stack) catch {}; + } + } + } + + if (failure_count == 0) return cleanExit(); + + // Finally, render compile errors at the bottom of the terminal. + // We use a separate compile_error_steps array list because step_stack is destructively + // mutated in printTreeStep above. + if (total_compile_errors > 0) { + for (compile_error_steps.items) |s| { + if (s.result_error_bundle.errorMessageCount() > 0) { + s.result_error_bundle.renderToStdErr(renderOptions(ttyconf)); + } + } + + // Signal to parent process that we have printed compile errors. The + // parent process may choose to omit the "following command failed" + // line in this case. 
+ process.exit(2); + } + + process.exit(1); +} + +const PrintNode = struct { + parent: ?*PrintNode, + last: bool = false, +}; + +fn printPrefix(node: *PrintNode, stderr: std.fs.File, ttyconf: std.debug.TTY.Config) !void { + const parent = node.parent orelse return; + if (parent.parent == null) return; + try printPrefix(parent, stderr, ttyconf); + if (parent.last) { + try stderr.writeAll(" "); + } else { + try stderr.writeAll(switch (ttyconf) { + .no_color, .windows_api => "| ", + .escape_codes => "\x1B\x28\x30\x78\x1B\x28\x42 ", // │ + }); + } +} + +fn printTreeStep( + b: *std.Build, + s: *Step, + stderr: std.fs.File, + ttyconf: std.debug.TTY.Config, + parent_node: *PrintNode, + step_stack: *std.AutoArrayHashMapUnmanaged(*Step, void), +) !void { + const first = step_stack.swapRemove(s); + try printPrefix(parent_node, stderr, ttyconf); + + if (!first) try ttyconf.setColor(stderr, .Dim); + if (parent_node.parent != null) { + if (parent_node.last) { + try stderr.writeAll(switch (ttyconf) { + .no_color, .windows_api => "+- ", + .escape_codes => "\x1B\x28\x30\x6d\x71\x1B\x28\x42 ", // └─ + }); + } else { + try stderr.writeAll(switch (ttyconf) { + .no_color, .windows_api => "+- ", + .escape_codes => "\x1B\x28\x30\x74\x71\x1B\x28\x42 ", // ├─ + }); + } + } + + // dep_prefix omitted here because it is redundant with the tree. + try stderr.writeAll(s.name); + + if (first) { + switch (s.state) { + .precheck_unstarted => unreachable, + .precheck_started => unreachable, + .precheck_done => unreachable, + .running => unreachable, + + .dependency_failure => { + try ttyconf.setColor(stderr, .Dim); + try stderr.writeAll(" transitive failure\n"); + try ttyconf.setColor(stderr, .Reset); + }, + + .success => { + try ttyconf.setColor(stderr, .Green); + if (s.result_cached) { + try stderr.writeAll(" cached"); + } else if (s.test_results.test_count > 0) { + const pass_count = s.test_results.passCount(); + try stderr.writer().print(" {d} passed", .{pass_count}); + if (s.test_results.skip_count > 0) { + try ttyconf.setColor(stderr, .Yellow); + try stderr.writer().print(" {d} skipped", .{s.test_results.skip_count}); + } + } else { + try stderr.writeAll(" success"); + } + try ttyconf.setColor(stderr, .Reset); + if (s.result_duration_ns) |ns| { + try ttyconf.setColor(stderr, .Dim); + if (ns >= std.time.ns_per_min) { + try stderr.writer().print(" {d}m", .{ns / std.time.ns_per_min}); + } else if (ns >= std.time.ns_per_s) { + try stderr.writer().print(" {d}s", .{ns / std.time.ns_per_s}); + } else if (ns >= std.time.ns_per_ms) { + try stderr.writer().print(" {d}ms", .{ns / std.time.ns_per_ms}); + } else if (ns >= std.time.ns_per_us) { + try stderr.writer().print(" {d}us", .{ns / std.time.ns_per_us}); + } else { + try stderr.writer().print(" {d}ns", .{ns}); + } + try ttyconf.setColor(stderr, .Reset); + } + if (s.result_peak_rss != 0) { + const rss = s.result_peak_rss; + try ttyconf.setColor(stderr, .Dim); + if (rss >= 1000_000_000) { + try stderr.writer().print(" MaxRSS:{d}G", .{rss / 1000_000_000}); + } else if (rss >= 1000_000) { + try stderr.writer().print(" MaxRSS:{d}M", .{rss / 1000_000}); + } else if (rss >= 1000) { + try stderr.writer().print(" MaxRSS:{d}K", .{rss / 1000}); + } else { + try stderr.writer().print(" MaxRSS:{d}B", .{rss}); + } + try ttyconf.setColor(stderr, .Reset); + } + try stderr.writeAll("\n"); + }, + + .skipped => { + try ttyconf.setColor(stderr, .Yellow); + try stderr.writeAll(" skipped\n"); + try ttyconf.setColor(stderr, .Reset); + }, + + .failure => { + if 
(s.result_error_bundle.errorMessageCount() > 0) { + try ttyconf.setColor(stderr, .Red); + try stderr.writer().print(" {d} errors\n", .{ + s.result_error_bundle.errorMessageCount(), + }); + try ttyconf.setColor(stderr, .Reset); + } else if (!s.test_results.isSuccess()) { + try stderr.writer().print(" {d}/{d} passed", .{ + s.test_results.passCount(), s.test_results.test_count, + }); + if (s.test_results.fail_count > 0) { + try stderr.writeAll(", "); + try ttyconf.setColor(stderr, .Red); + try stderr.writer().print("{d} failed", .{ + s.test_results.fail_count, + }); + try ttyconf.setColor(stderr, .Reset); + } + if (s.test_results.skip_count > 0) { + try stderr.writeAll(", "); + try ttyconf.setColor(stderr, .Yellow); + try stderr.writer().print("{d} skipped", .{ + s.test_results.skip_count, + }); + try ttyconf.setColor(stderr, .Reset); + } + if (s.test_results.leak_count > 0) { + try stderr.writeAll(", "); + try ttyconf.setColor(stderr, .Red); + try stderr.writer().print("{d} leaked", .{ + s.test_results.leak_count, + }); + try ttyconf.setColor(stderr, .Reset); + } + try stderr.writeAll("\n"); + } else { + try ttyconf.setColor(stderr, .Red); + try stderr.writeAll(" failure\n"); + try ttyconf.setColor(stderr, .Reset); + } + }, + } + + for (s.dependencies.items, 0..) |dep, i| { + var print_node: PrintNode = .{ + .parent = parent_node, + .last = i == s.dependencies.items.len - 1, + }; + try printTreeStep(b, dep, stderr, ttyconf, &print_node, step_stack); + } + } else { + if (s.dependencies.items.len == 0) { + try stderr.writeAll(" (reused)\n"); + } else { + try stderr.writer().print(" (+{d} more reused dependencies)\n", .{ + s.dependencies.items.len, + }); + } + try ttyconf.setColor(stderr, .Reset); + } +} + +fn checkForDependencyLoop( + b: *std.Build, + s: *Step, + step_stack: *std.AutoArrayHashMapUnmanaged(*Step, void), +) !void { + switch (s.state) { + .precheck_started => { + std.debug.print("dependency loop detected:\n {s}\n", .{s.name}); + return error.DependencyLoopDetected; + }, + .precheck_unstarted => { + s.state = .precheck_started; + + try step_stack.ensureUnusedCapacity(b.allocator, s.dependencies.items.len); + for (s.dependencies.items) |dep| { + try step_stack.put(b.allocator, dep, {}); + try dep.dependants.append(b.allocator, s); + checkForDependencyLoop(b, dep, step_stack) catch |err| { + if (err == error.DependencyLoopDetected) { + std.debug.print(" {s}\n", .{s.name}); + } + return err; + }; + } + + s.state = .precheck_done; + }, + .precheck_done => {}, + + // These don't happen until we actually run the step graph. + .dependency_failure => unreachable, + .running => unreachable, + .success => unreachable, + .failure => unreachable, + .skipped => unreachable, + } +} + +fn workerMakeOneStep( + wg: *std.Thread.WaitGroup, + thread_pool: *std.Thread.Pool, + b: *std.Build, + s: *Step, + prog_node: *std.Progress.Node, + run: *Run, +) void { + defer wg.finish(); + + // First, check the conditions for running this step. If they are not met, + // then we return without doing the step, relying on another worker to + // queue this step up again when dependencies are met. + for (s.dependencies.items) |dep| { + switch (@atomicLoad(Step.State, &dep.state, .SeqCst)) { + .success, .skipped => continue, + .failure, .dependency_failure => { + @atomicStore(Step.State, &s.state, .dependency_failure, .SeqCst); + return; + }, + .precheck_done, .running => { + // dependency is not finished yet. 
+ return; + }, + .precheck_unstarted => unreachable, + .precheck_started => unreachable, + } + } + + if (s.max_rss != 0) { + run.max_rss_mutex.lock(); + defer run.max_rss_mutex.unlock(); + + // Avoid running steps twice. + if (s.state != .precheck_done) { + // Another worker got the job. + return; + } + + const new_claimed_rss = run.claimed_rss + s.max_rss; + if (new_claimed_rss > run.max_rss) { + // Running this step right now could possibly exceed the allotted RSS. + // Add this step to the queue of memory-blocked steps. + run.memory_blocked_steps.append(s) catch @panic("OOM"); + return; + } + + run.claimed_rss = new_claimed_rss; + s.state = .running; + } else { + // Avoid running steps twice. + if (@cmpxchgStrong(Step.State, &s.state, .precheck_done, .running, .SeqCst, .SeqCst) != null) { + // Another worker got the job. + return; + } + } + + var sub_prog_node = prog_node.start(s.name, 0); + sub_prog_node.activate(); + defer sub_prog_node.end(); + + const make_result = s.make(&sub_prog_node); + + // No matter the result, we want to display error/warning messages. + if (s.result_error_msgs.items.len > 0) { + sub_prog_node.context.lock_stderr(); + defer sub_prog_node.context.unlock_stderr(); + + const stderr = run.stderr; + const ttyconf = run.ttyconf; + + for (s.result_error_msgs.items) |msg| { + // Sometimes it feels like you just can't catch a break. Finally, + // with Zig, you can. + ttyconf.setColor(stderr, .Bold) catch break; + stderr.writeAll(s.owner.dep_prefix) catch break; + stderr.writeAll(s.name) catch break; + stderr.writeAll(": ") catch break; + ttyconf.setColor(stderr, .Red) catch break; + stderr.writeAll("error: ") catch break; + ttyconf.setColor(stderr, .Reset) catch break; + stderr.writeAll(msg) catch break; + stderr.writeAll("\n") catch break; + } + } + + handle_result: { + if (make_result) |_| { + @atomicStore(Step.State, &s.state, .success, .SeqCst); + } else |err| switch (err) { + error.MakeFailed => { + @atomicStore(Step.State, &s.state, .failure, .SeqCst); + break :handle_result; + }, + error.MakeSkipped => @atomicStore(Step.State, &s.state, .skipped, .SeqCst), + } + + // Successful completion of a step, so we queue up its dependants as well. + for (s.dependants.items) |dep| { + wg.start(); + thread_pool.spawn(workerMakeOneStep, .{ + wg, thread_pool, b, dep, prog_node, run, + }) catch @panic("OOM"); + } + } + + // If this is a step that claims resources, we must now queue up other + // steps that are waiting for resources. + if (s.max_rss != 0) { + run.max_rss_mutex.lock(); + defer run.max_rss_mutex.unlock(); + + // Give the memory back to the scheduler. + run.claimed_rss -= s.max_rss; + // Avoid kicking off too many tasks that we already know will not have + // enough resources. 
+ var remaining = run.max_rss - run.claimed_rss; + var i: usize = 0; + var j: usize = 0; + while (j < run.memory_blocked_steps.items.len) : (j += 1) { + const dep = run.memory_blocked_steps.items[j]; + assert(dep.max_rss != 0); + if (dep.max_rss <= remaining) { + remaining -= dep.max_rss; + + wg.start(); + thread_pool.spawn(workerMakeOneStep, .{ + wg, thread_pool, b, dep, prog_node, run, + }) catch @panic("OOM"); + } else { + run.memory_blocked_steps.items[i] = dep; + i += 1; + } + } + run.memory_blocked_steps.shrinkRetainingCapacity(i); + } } fn steps(builder: *std.Build, already_ran_build: bool, out_stream: anytype) !void { @@ -269,7 +888,7 @@ fn steps(builder: *std.Build, already_ran_build: bool, out_stream: anytype) !voi } const allocator = builder.allocator; - for (builder.top_level_steps.items) |top_level_step| { + for (builder.top_level_steps.values()) |top_level_step| { const name = if (&top_level_step.step == builder.default_step) try fmt.allocPrint(allocator, "{s} (default)", .{top_level_step.step.name}) else @@ -327,6 +946,10 @@ fn usage(builder: *std.Build, already_ran_build: bool, out_stream: anytype) !voi \\ --verbose Print commands before executing them \\ --color [auto|off|on] Enable or disable colored error messages \\ --prominent-compile-errors Output compile errors formatted for a human to read + \\ -fsummary Print the build summary, even on success + \\ -fno-summary Omit the build summary, even on failure + \\ -j Limit concurrent jobs (default is to use all CPU cores) + \\ --maxrss Limit memory usage (default is to use available memory) \\ \\Project-Specific Options: \\ @@ -364,6 +987,7 @@ fn usage(builder: *std.Build, already_ran_build: bool, out_stream: anytype) !voi \\ --zig-lib-dir [arg] Override path to Zig lib directory \\ --build-runner [file] Override path to build runner \\ --debug-log [scope] Enable debugging the compiler + \\ --debug-pkg-config Fail if unknown pkg-config flags encountered \\ --verbose-link Enable compiler debug output for linking \\ --verbose-air Enable compiler debug output for Zig AIR \\ --verbose-llvm-ir Enable compiler debug output for LLVM IR @@ -374,7 +998,7 @@ fn usage(builder: *std.Build, already_ran_build: bool, out_stream: anytype) !voi ); } -fn usageAndErr(builder: *std.Build, already_ran_build: bool, out_stream: anytype) void { +fn usageAndErr(builder: *std.Build, already_ran_build: bool, out_stream: anytype) noreturn { usage(builder, already_ran_build, out_stream) catch {}; process.exit(1); } @@ -389,3 +1013,28 @@ fn argsRest(args: [][]const u8, idx: usize) ?[][]const u8 { if (idx >= args.len) return null; return args[idx..]; } + +fn cleanExit() void { + // Perhaps in the future there could be an Advanced Options flag such as + // --debug-build-runner-leaks which would make this function return instead + // of calling exit. 
+ process.exit(0); +} + +const Color = enum { auto, off, on }; + +fn get_tty_conf(color: Color, stderr: std.fs.File) std.debug.TTY.Config { + return switch (color) { + .auto => std.debug.detectTTYConfig(stderr), + .on => .escape_codes, + .off => .no_color, + }; +} + +fn renderOptions(ttyconf: std.debug.TTY.Config) std.zig.ErrorBundle.RenderOptions { + return .{ + .ttyconf = ttyconf, + .include_source_line = ttyconf != .no_color, + .include_reference_trace = ttyconf != .no_color, + }; +} diff --git a/lib/std/Build.zig b/lib/std/Build.zig index 120196f97222..279dd765b559 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -32,14 +32,12 @@ pub const Step = @import("Build/Step.zig"); pub const CheckFileStep = @import("Build/CheckFileStep.zig"); pub const CheckObjectStep = @import("Build/CheckObjectStep.zig"); pub const ConfigHeaderStep = @import("Build/ConfigHeaderStep.zig"); -pub const EmulatableRunStep = @import("Build/EmulatableRunStep.zig"); pub const FmtStep = @import("Build/FmtStep.zig"); pub const InstallArtifactStep = @import("Build/InstallArtifactStep.zig"); pub const InstallDirStep = @import("Build/InstallDirStep.zig"); pub const InstallFileStep = @import("Build/InstallFileStep.zig"); pub const ObjCopyStep = @import("Build/ObjCopyStep.zig"); pub const CompileStep = @import("Build/CompileStep.zig"); -pub const LogStep = @import("Build/LogStep.zig"); pub const OptionsStep = @import("Build/OptionsStep.zig"); pub const RemoveDirStep = @import("Build/RemoveDirStep.zig"); pub const RunStep = @import("Build/RunStep.zig"); @@ -59,15 +57,12 @@ verbose_air: bool, verbose_llvm_ir: bool, verbose_cimport: bool, verbose_llvm_cpu_features: bool, -/// The purpose of executing the command is for a human to read compile errors from the terminal -prominent_compile_errors: bool, -color: enum { auto, on, off } = .auto, reference_trace: ?u32 = null, invalid_user_input: bool, zig_exe: []const u8, default_step: *Step, env_map: *EnvMap, -top_level_steps: ArrayList(*TopLevelStep), +top_level_steps: std.StringArrayHashMapUnmanaged(*TopLevelStep), install_prefix: []const u8, dest_dir: ?[]const u8, lib_dir: []const u8, @@ -90,6 +85,7 @@ pkg_config_pkg_list: ?(PkgConfigError![]const PkgConfigPkg) = null, args: ?[][]const u8 = null, debug_log_scopes: []const []const u8 = &.{}, debug_compile_errors: bool = false, +debug_pkg_config: bool = false, /// Experimental. Use system Darling installation to run cross compiled macOS build artifacts. 
enable_darling: bool = false, @@ -198,7 +194,7 @@ pub fn create( env_map.* = try process.getEnvMap(allocator); const self = try allocator.create(Build); - self.* = Build{ + self.* = .{ .zig_exe = zig_exe, .build_root = build_root, .cache_root = cache_root, @@ -211,13 +207,12 @@ pub fn create( .verbose_llvm_ir = false, .verbose_cimport = false, .verbose_llvm_cpu_features = false, - .prominent_compile_errors = false, .invalid_user_input = false, .allocator = allocator, .user_input_options = UserInputOptionsMap.init(allocator), .available_options_map = AvailableOptionsMap.init(allocator), .available_options_list = ArrayList(AvailableOption).init(allocator), - .top_level_steps = ArrayList(*TopLevelStep).init(allocator), + .top_level_steps = .{}, .default_step = undefined, .env_map = env_map, .search_prefixes = ArrayList([]const u8).init(allocator), @@ -227,12 +222,21 @@ pub fn create( .h_dir = undefined, .dest_dir = env_map.get("DESTDIR"), .installed_files = ArrayList(InstalledFile).init(allocator), - .install_tls = TopLevelStep{ - .step = Step.initNoOp(.top_level, "install", allocator), + .install_tls = .{ + .step = Step.init(.{ + .id = .top_level, + .name = "install", + .owner = self, + }), .description = "Copy build artifacts to prefix path", }, - .uninstall_tls = TopLevelStep{ - .step = Step.init(.top_level, "uninstall", allocator, makeUninstall), + .uninstall_tls = .{ + .step = Step.init(.{ + .id = .top_level, + .name = "uninstall", + .owner = self, + .makeFn = makeUninstall, + }), .description = "Remove build artifacts from prefix path", }, .zig_lib_dir = null, @@ -241,8 +245,8 @@ pub fn create( .host = host, .modules = std.StringArrayHashMap(*Module).init(allocator), }; - try self.top_level_steps.append(&self.install_tls); - try self.top_level_steps.append(&self.uninstall_tls); + try self.top_level_steps.put(allocator, self.install_tls.step.name, &self.install_tls); + try self.top_level_steps.put(allocator, self.uninstall_tls.step.name, &self.uninstall_tls); self.default_step = &self.install_tls.step; return self; } @@ -264,11 +268,20 @@ fn createChildOnly(parent: *Build, dep_name: []const u8, build_root: Cache.Direc child.* = .{ .allocator = allocator, .install_tls = .{ - .step = Step.initNoOp(.top_level, "install", allocator), + .step = Step.init(.{ + .id = .top_level, + .name = "install", + .owner = child, + }), .description = "Copy build artifacts to prefix path", }, .uninstall_tls = .{ - .step = Step.init(.top_level, "uninstall", allocator, makeUninstall), + .step = Step.init(.{ + .id = .top_level, + .name = "uninstall", + .owner = child, + .makeFn = makeUninstall, + }), .description = "Remove build artifacts from prefix path", }, .user_input_options = UserInputOptionsMap.init(allocator), @@ -281,14 +294,12 @@ fn createChildOnly(parent: *Build, dep_name: []const u8, build_root: Cache.Direc .verbose_llvm_ir = parent.verbose_llvm_ir, .verbose_cimport = parent.verbose_cimport, .verbose_llvm_cpu_features = parent.verbose_llvm_cpu_features, - .prominent_compile_errors = parent.prominent_compile_errors, - .color = parent.color, .reference_trace = parent.reference_trace, .invalid_user_input = false, .zig_exe = parent.zig_exe, .default_step = undefined, .env_map = parent.env_map, - .top_level_steps = ArrayList(*TopLevelStep).init(allocator), + .top_level_steps = .{}, .install_prefix = undefined, .dest_dir = parent.dest_dir, .lib_dir = parent.lib_dir, @@ -306,6 +317,7 @@ fn createChildOnly(parent: *Build, dep_name: []const u8, build_root: Cache.Direc .zig_lib_dir = parent.zig_lib_dir, 
.debug_log_scopes = parent.debug_log_scopes, .debug_compile_errors = parent.debug_compile_errors, + .debug_pkg_config = parent.debug_pkg_config, .enable_darling = parent.enable_darling, .enable_qemu = parent.enable_qemu, .enable_rosetta = parent.enable_rosetta, @@ -316,8 +328,8 @@ fn createChildOnly(parent: *Build, dep_name: []const u8, build_root: Cache.Direc .dep_prefix = parent.fmt("{s}{s}.", .{ parent.dep_prefix, dep_name }), .modules = std.StringArrayHashMap(*Module).init(allocator), }; - try child.top_level_steps.append(&child.install_tls); - try child.top_level_steps.append(&child.uninstall_tls); + try child.top_level_steps.put(allocator, child.install_tls.step.name, &child.install_tls); + try child.top_level_steps.put(allocator, child.uninstall_tls.step.name, &child.uninstall_tls); child.default_step = &child.install_tls.step; return child; } @@ -372,27 +384,24 @@ fn applyArgs(b: *Build, args: anytype) !void { }, } } - const Hasher = std.crypto.auth.siphash.SipHash128(1, 3); + + // Create an installation directory local to this package. This will be used when + // dependant packages require a standard prefix, such as include directories for C headers. + var hash = b.cache.hash; // Random bytes to make unique. Refresh this with new random bytes when // implementation is modified in a non-backwards-compatible way. - var hash = Hasher.init("ZaEsvQ5ClaA2IdH9"); - hash.update(b.dep_prefix); + hash.add(@as(u32, 0xd8cb0055)); + hash.addBytes(b.dep_prefix); // TODO additionally update the hash with `args`. - - var digest: [16]u8 = undefined; - hash.final(&digest); - var hash_basename: [digest.len * 2]u8 = undefined; - _ = std.fmt.bufPrint(&hash_basename, "{s}", .{std.fmt.fmtSliceHexLower(&digest)}) catch - unreachable; - - const install_prefix = try b.cache_root.join(b.allocator, &.{ "i", &hash_basename }); + const digest = hash.final(); + const install_prefix = try b.cache_root.join(b.allocator, &.{ "i", &digest }); b.resolveInstallPrefix(install_prefix, .{}); } -pub fn destroy(self: *Build) void { - self.env_map.deinit(); - self.top_level_steps.deinit(); - self.allocator.destroy(self); +pub fn destroy(b: *Build) void { + b.env_map.deinit(); + b.top_level_steps.deinit(b.allocator); + b.allocator.destroy(b); } /// This function is intended to be called by lib/build_runner.zig, not a build.zig file. 
@@ -441,6 +450,7 @@ pub const ExecutableOptions = struct { target: CrossTarget = .{}, optimize: std.builtin.Mode = .Debug, linkage: ?CompileStep.Linkage = null, + max_rss: usize = 0, }; pub fn addExecutable(b: *Build, options: ExecutableOptions) *CompileStep { @@ -452,6 +462,7 @@ pub fn addExecutable(b: *Build, options: ExecutableOptions) *CompileStep { .optimize = options.optimize, .kind = .exe, .linkage = options.linkage, + .max_rss = options.max_rss, }); } @@ -460,6 +471,7 @@ pub const ObjectOptions = struct { root_source_file: ?FileSource = null, target: CrossTarget, optimize: std.builtin.Mode, + max_rss: usize = 0, }; pub fn addObject(b: *Build, options: ObjectOptions) *CompileStep { @@ -469,6 +481,7 @@ pub fn addObject(b: *Build, options: ObjectOptions) *CompileStep { .target = options.target, .optimize = options.optimize, .kind = .obj, + .max_rss = options.max_rss, }); } @@ -478,6 +491,7 @@ pub const SharedLibraryOptions = struct { version: ?std.builtin.Version = null, target: CrossTarget, optimize: std.builtin.Mode, + max_rss: usize = 0, }; pub fn addSharedLibrary(b: *Build, options: SharedLibraryOptions) *CompileStep { @@ -489,6 +503,7 @@ pub fn addSharedLibrary(b: *Build, options: SharedLibraryOptions) *CompileStep { .version = options.version, .target = options.target, .optimize = options.optimize, + .max_rss = options.max_rss, }); } @@ -498,6 +513,7 @@ pub const StaticLibraryOptions = struct { target: CrossTarget, optimize: std.builtin.Mode, version: ?std.builtin.Version = null, + max_rss: usize = 0, }; pub fn addStaticLibrary(b: *Build, options: StaticLibraryOptions) *CompileStep { @@ -509,25 +525,27 @@ pub fn addStaticLibrary(b: *Build, options: StaticLibraryOptions) *CompileStep { .version = options.version, .target = options.target, .optimize = options.optimize, + .max_rss = options.max_rss, }); } pub const TestOptions = struct { name: []const u8 = "test", - kind: CompileStep.Kind = .@"test", root_source_file: FileSource, target: CrossTarget = .{}, optimize: std.builtin.Mode = .Debug, version: ?std.builtin.Version = null, + max_rss: usize = 0, }; pub fn addTest(b: *Build, options: TestOptions) *CompileStep { return CompileStep.create(b, .{ .name = options.name, - .kind = options.kind, + .kind = .@"test", .root_source_file = options.root_source_file, .target = options.target, .optimize = options.optimize, + .max_rss = options.max_rss, }); } @@ -536,6 +554,7 @@ pub const AssemblyOptions = struct { source_file: FileSource, target: CrossTarget, optimize: std.builtin.Mode, + max_rss: usize = 0, }; pub fn addAssembly(b: *Build, options: AssemblyOptions) *CompileStep { @@ -545,6 +564,7 @@ pub fn addAssembly(b: *Build, options: AssemblyOptions) *CompileStep { .root_source_file = null, .target = options.target, .optimize = options.optimize, + .max_rss = options.max_rss, }); obj_step.addAssemblyFileSource(options.source_file.dupe(b)); return obj_step; @@ -605,16 +625,15 @@ pub fn addSystemCommand(self: *Build, argv: []const []const u8) *RunStep { /// Creates a `RunStep` with an executable built with `addExecutable`. /// Add command line arguments with methods of `RunStep`. pub fn addRunArtifact(b: *Build, exe: *CompileStep) *RunStep { - assert(exe.kind == .exe or exe.kind == .test_exe); - // It doesn't have to be native. We catch that if you actually try to run it. // Consider that this is declarative; the run step may not be run unless a user // option is supplied. 
- const run_step = RunStep.create(b, b.fmt("run {s}", .{exe.step.name})); + const run_step = RunStep.create(b, b.fmt("run {s}", .{exe.name})); run_step.addArtifactArg(exe); - if (exe.kind == .test_exe) { - run_step.addArg(b.zig_exe); + if (exe.kind == .@"test") { + run_step.stdio = .zig_test; + run_step.addArgs(&.{"--listen=-"}); } if (exe.vcpkg_bin_path) |path| { @@ -634,7 +653,11 @@ pub fn addConfigHeader( options: ConfigHeaderStep.Options, values: anytype, ) *ConfigHeaderStep { - const config_header_step = ConfigHeaderStep.create(b, options); + var options_copy = options; + if (options_copy.first_ret_addr == null) + options_copy.first_ret_addr = @returnAddress(); + + const config_header_step = ConfigHeaderStep.create(b, options_copy); config_header_step.addValues(values); return config_header_step; } @@ -671,17 +694,8 @@ pub fn addWriteFile(self: *Build, file_path: []const u8, data: []const u8) *Writ return write_file_step; } -pub fn addWriteFiles(self: *Build) *WriteFileStep { - const write_file_step = self.allocator.create(WriteFileStep) catch @panic("OOM"); - write_file_step.* = WriteFileStep.init(self); - return write_file_step; -} - -pub fn addLog(self: *Build, comptime format: []const u8, args: anytype) *LogStep { - const data = self.fmt(format, args); - const log_step = self.allocator.create(LogStep) catch @panic("OOM"); - log_step.* = LogStep.init(self, data); - return log_step; +pub fn addWriteFiles(b: *Build) *WriteFileStep { + return WriteFileStep.create(b); } pub fn addRemoveDirTree(self: *Build, dir_path: []const u8) *RemoveDirStep { @@ -690,32 +704,14 @@ pub fn addRemoveDirTree(self: *Build, dir_path: []const u8) *RemoveDirStep { return remove_dir_step; } -pub fn addFmt(self: *Build, paths: []const []const u8) *FmtStep { - return FmtStep.create(self, paths); +pub fn addFmt(b: *Build, options: FmtStep.Options) *FmtStep { + return FmtStep.create(b, options); } pub fn addTranslateC(self: *Build, options: TranslateCStep.Options) *TranslateCStep { return TranslateCStep.create(self, options); } -pub fn make(self: *Build, step_names: []const []const u8) !void { - var wanted_steps = ArrayList(*Step).init(self.allocator); - defer wanted_steps.deinit(); - - if (step_names.len == 0) { - try wanted_steps.append(self.default_step); - } else { - for (step_names) |step_name| { - const s = try self.getTopLevelStepByName(step_name); - try wanted_steps.append(s); - } - } - - for (wanted_steps.items) |s| { - try self.makeOneStep(s); - } -} - pub fn getInstallStep(self: *Build) *Step { return &self.install_tls.step; } @@ -724,7 +720,8 @@ pub fn getUninstallStep(self: *Build) *Step { return &self.uninstall_tls.step; } -fn makeUninstall(uninstall_step: *Step) anyerror!void { +fn makeUninstall(uninstall_step: *Step, prog_node: *std.Progress.Node) anyerror!void { + _ = prog_node; const uninstall_tls = @fieldParentPtr(TopLevelStep, "step", uninstall_step); const self = @fieldParentPtr(Build, "uninstall_tls", uninstall_tls); @@ -739,37 +736,6 @@ fn makeUninstall(uninstall_step: *Step) anyerror!void { // TODO remove empty directories } -fn makeOneStep(self: *Build, s: *Step) anyerror!void { - if (s.loop_flag) { - log.err("Dependency loop detected:\n {s}", .{s.name}); - return error.DependencyLoopDetected; - } - s.loop_flag = true; - - for (s.dependencies.items) |dep| { - self.makeOneStep(dep) catch |err| { - if (err == error.DependencyLoopDetected) { - log.err(" {s}", .{s.name}); - } - return err; - }; - } - - s.loop_flag = false; - - try s.make(); -} - -fn getTopLevelStepByName(self: *Build, name: 
[]const u8) !*Step { - for (self.top_level_steps.items) |top_level_step| { - if (mem.eql(u8, top_level_step.step.name, name)) { - return &top_level_step.step; - } - } - log.err("Cannot run step '{s}' because it does not exist", .{name}); - return error.InvalidStepName; -} - pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_raw: []const u8) ?T { const name = self.dupe(name_raw); const description = self.dupe(description_raw); @@ -906,11 +872,15 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_ pub fn step(self: *Build, name: []const u8, description: []const u8) *Step { const step_info = self.allocator.create(TopLevelStep) catch @panic("OOM"); - step_info.* = TopLevelStep{ - .step = Step.initNoOp(.top_level, name, self.allocator), + step_info.* = .{ + .step = Step.init(.{ + .id = .top_level, + .name = name, + .owner = self, + }), .description = self.dupe(description), }; - self.top_level_steps.append(step_info) catch @panic("OOM"); + self.top_level_steps.put(self.allocator, step_info.step.name, step_info) catch @panic("OOM"); return &step_info.step; } @@ -1178,50 +1148,18 @@ pub fn validateUserInputDidItFail(self: *Build) bool { return self.invalid_user_input; } -pub fn spawnChild(self: *Build, argv: []const []const u8) !void { - return self.spawnChildEnvMap(null, self.env_map, argv); -} - -fn printCmd(cwd: ?[]const u8, argv: []const []const u8) void { - if (cwd) |yes_cwd| std.debug.print("cd {s} && ", .{yes_cwd}); +fn allocPrintCmd(ally: Allocator, opt_cwd: ?[]const u8, argv: []const []const u8) ![]u8 { + var buf = ArrayList(u8).init(ally); + if (opt_cwd) |cwd| try buf.writer().print("cd {s} && ", .{cwd}); for (argv) |arg| { - std.debug.print("{s} ", .{arg}); + try buf.writer().print("{s} ", .{arg}); } - std.debug.print("\n", .{}); + return buf.toOwnedSlice(); } -pub fn spawnChildEnvMap(self: *Build, cwd: ?[]const u8, env_map: *const EnvMap, argv: []const []const u8) !void { - if (self.verbose) { - printCmd(cwd, argv); - } - - if (!std.process.can_spawn) - return error.ExecNotSupported; - - var child = std.ChildProcess.init(argv, self.allocator); - child.cwd = cwd; - child.env_map = env_map; - - const term = child.spawnAndWait() catch |err| { - log.err("Unable to spawn {s}: {s}", .{ argv[0], @errorName(err) }); - return err; - }; - - switch (term) { - .Exited => |code| { - if (code != 0) { - log.err("The following command exited with error code {}:", .{code}); - printCmd(cwd, argv); - return error.UncleanExit; - } - }, - else => { - log.err("The following command terminated unexpectedly:", .{}); - printCmd(cwd, argv); - - return error.UncleanExit; - }, - } +fn printCmd(ally: Allocator, cwd: ?[]const u8, argv: []const []const u8) void { + const text = allocPrintCmd(ally, cwd, argv) catch @panic("OOM"); + std.debug.print("{s}\n", .{text}); } pub fn installArtifact(self: *Build, artifact: *CompileStep) void { @@ -1280,12 +1218,7 @@ pub fn addInstallFileWithDir( install_dir: InstallDir, dest_rel_path: []const u8, ) *InstallFileStep { - if (dest_rel_path.len == 0) { - panic("dest_rel_path must be non-empty", .{}); - } - const install_step = self.allocator.create(InstallFileStep) catch @panic("OOM"); - install_step.* = InstallFileStep.init(self, source.dupe(self), install_dir, dest_rel_path); - return install_step; + return InstallFileStep.create(self, source.dupe(self), install_dir, dest_rel_path); } pub fn addInstallDirectory(self: *Build, options: InstallDirectoryOptions) *InstallDirStep { @@ -1294,6 +1227,14 @@ pub fn 
addInstallDirectory(self: *Build, options: InstallDirectoryOptions) *Inst return install_step; } +pub fn addCheckFile( + b: *Build, + file_source: FileSource, + options: CheckFileStep.Options, +) *CheckFileStep { + return CheckFileStep.create(b, file_source, options); +} + pub fn pushInstalledFile(self: *Build, dir: InstallDir, dest_rel_path: []const u8) void { const file = InstalledFile{ .dir = dir, @@ -1302,18 +1243,6 @@ pub fn pushInstalledFile(self: *Build, dir: InstallDir, dest_rel_path: []const u self.installed_files.append(file.dupe(self)) catch @panic("OOM"); } -pub fn updateFile(self: *Build, source_path: []const u8, dest_path: []const u8) !void { - if (self.verbose) { - log.info("cp {s} {s} ", .{ source_path, dest_path }); - } - const cwd = fs.cwd(); - const prev_status = try fs.Dir.updateFile(cwd, source_path, cwd, dest_path, .{}); - if (self.verbose) switch (prev_status) { - .stale => log.info("# installed", .{}), - .fresh => log.info("# up-to-date", .{}), - }; -} - pub fn truncateFile(self: *Build, dest_path: []const u8) !void { if (self.verbose) { log.info("truncate {s}", .{dest_path}); @@ -1397,7 +1326,7 @@ pub fn execAllowFail( ) ExecError![]u8 { assert(argv.len != 0); - if (!std.process.can_spawn) + if (!process.can_spawn) return error.ExecNotSupported; const max_output_size = 400 * 1024; @@ -1430,59 +1359,27 @@ pub fn execAllowFail( } } -pub fn execFromStep(self: *Build, argv: []const []const u8, src_step: ?*Step) ![]u8 { - assert(argv.len != 0); - - if (self.verbose) { - printCmd(null, argv); - } - - if (!std.process.can_spawn) { - if (src_step) |s| log.err("{s}...", .{s.name}); - log.err("Unable to spawn the following command: cannot spawn child process", .{}); - printCmd(null, argv); - std.os.abort(); +/// This is a helper function to be called from build.zig scripts, *not* from +/// inside step make() functions. If any errors occur, it fails the build with +/// a helpful message. 
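A configure-phase usage sketch for the `exec` helper whose doc comment appears above and whose body follows; the git invocation is illustrative, and on failure the helper prints the command and exits rather than returning an error:

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    // Runs at configure time; the trimmed output could feed an OptionsStep,
    // which would then be attached to a CompileStep elsewhere.
    const rev = b.exec(&.{ "git", "rev-parse", "--short", "HEAD" });
    const options = b.addOptions();
    options.addOption([]const u8, "git_revision", std.mem.trim(u8, rev, " \r\n"));
}
```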
+pub fn exec(b: *Build, argv: []const []const u8) []u8 { + if (!process.can_spawn) { + std.debug.print("unable to spawn the following command: cannot spawn child process\n{s}\n", .{ + try allocPrintCmd(b.allocator, null, argv), + }); + process.exit(1); } var code: u8 = undefined; - return self.execAllowFail(argv, &code, .Inherit) catch |err| switch (err) { - error.ExecNotSupported => { - if (src_step) |s| log.err("{s}...", .{s.name}); - log.err("Unable to spawn the following command: cannot spawn child process", .{}); - printCmd(null, argv); - std.os.abort(); - }, - error.FileNotFound => { - if (src_step) |s| log.err("{s}...", .{s.name}); - log.err("Unable to spawn the following command: file not found", .{}); - printCmd(null, argv); - std.os.exit(@truncate(u8, code)); - }, - error.ExitCodeFailure => { - if (src_step) |s| log.err("{s}...", .{s.name}); - if (self.prominent_compile_errors) { - log.err("The step exited with error code {d}", .{code}); - } else { - log.err("The following command exited with error code {d}:", .{code}); - printCmd(null, argv); - } - - std.os.exit(@truncate(u8, code)); - }, - error.ProcessTerminated => { - if (src_step) |s| log.err("{s}...", .{s.name}); - log.err("The following command terminated unexpectedly:", .{}); - printCmd(null, argv); - std.os.exit(@truncate(u8, code)); - }, - else => |e| return e, + return b.execAllowFail(argv, &code, .Inherit) catch |err| { + const printed_cmd = allocPrintCmd(b.allocator, null, argv) catch @panic("OOM"); + std.debug.print("unable to spawn the following command: {s}\n{s}\n", .{ + @errorName(err), printed_cmd, + }); + process.exit(1); }; } -pub fn exec(self: *Build, argv: []const []const u8) ![]u8 { - return self.execFromStep(argv, null); -} - pub fn addSearchPrefix(self: *Build, search_prefix: []const u8) void { self.search_prefixes.append(self.dupePath(search_prefix)) catch @panic("OOM"); } @@ -1547,10 +1444,29 @@ pub fn dependency(b: *Build, name: []const u8, args: anytype) *Dependency { const full_path = b.pathFromRoot("build.zig.zon"); std.debug.print("no dependency named '{s}' in '{s}'. All packages used in build.zig must be declared in this file.\n", .{ name, full_path }); - std.process.exit(1); + process.exit(1); +} + +pub fn anonymousDependency( + b: *Build, + /// The path to the directory containing the dependency's build.zig file, + /// relative to the current package's build.zig. + relative_build_root: []const u8, + /// A direct `@import` of the build.zig of the dependency. + comptime build_zig: type, + args: anytype, +) *Dependency { + const arena = b.allocator; + const build_root = b.build_root.join(arena, &.{relative_build_root}) catch @panic("OOM"); + const name = arena.dupe(u8, relative_build_root) catch @panic("OOM"); + for (name) |*byte| switch (byte.*) { + '/', '\\' => byte.* = '.', + else => continue, + }; + return dependencyInner(b, name, build_root, build_zig, args); } -fn dependencyInner( +pub fn dependencyInner( b: *Build, name: []const u8, build_root_string: []const u8, @@ -1563,7 +1479,7 @@ fn dependencyInner( std.debug.print("unable to open '{s}': {s}\n", .{ build_root_string, @errorName(err), }); - std.process.exit(1); + process.exit(1); }, }; const sub_builder = b.createChild(name, build_root, args) catch @panic("unhandled error"); @@ -1607,7 +1523,7 @@ pub const GeneratedFile = struct { pub fn getPath(self: GeneratedFile) []const u8 { return self.path orelse std.debug.panic( - "getPath() was called on a GeneratedFile that wasn't build yet. 
Is there a missing Step dependency on step '{s}'?", + "getPath() was called on a GeneratedFile that wasn't built yet. Is there a missing Step dependency on step '{s}'?", .{self.step.name}, ); } @@ -1647,12 +1563,23 @@ pub const FileSource = union(enum) { } /// Should only be called during make(), returns a path relative to the build root or absolute. - pub fn getPath(self: FileSource, builder: *Build) []const u8 { - const path = switch (self) { - .path => |p| builder.pathFromRoot(p), - .generated => |gen| gen.getPath(), - }; - return path; + pub fn getPath(self: FileSource, src_builder: *Build) []const u8 { + return getPath2(self, src_builder, null); + } + + /// Should only be called during make(), returns a path relative to the build root or absolute. + /// asking_step is only used for debugging purposes; it's the step being run that is asking for + /// the path. + pub fn getPath2(self: FileSource, src_builder: *Build, asking_step: ?*Step) []const u8 { + switch (self) { + .path => |p| return src_builder.pathFromRoot(p), + .generated => |gen| return gen.path orelse { + std.debug.getStderrMutex().lock(); + const stderr = std.io.getStdErr(); + dumpBadGetPathHelp(gen.step, stderr, src_builder, asking_step) catch {}; + @panic("misconfigured build script"); + }, + } } /// Duplicates the file source for a given builder. @@ -1664,6 +1591,54 @@ pub const FileSource = union(enum) { } }; +/// In this function the stderr mutex has already been locked. +fn dumpBadGetPathHelp( + s: *Step, + stderr: fs.File, + src_builder: *Build, + asking_step: ?*Step, +) anyerror!void { + const w = stderr.writer(); + try w.print( + \\getPath() was called on a GeneratedFile that wasn't built yet. + \\ source package path: {s} + \\ Is there a missing Step dependency on step '{s}'? + \\ + , .{ + src_builder.build_root.path orelse ".", + s.name, + }); + + const tty_config = std.debug.detectTTYConfig(stderr); + tty_config.setColor(w, .Red) catch {}; + try stderr.writeAll(" The step was created by this stack trace:\n"); + tty_config.setColor(w, .Reset) catch {}; + + const debug_info = std.debug.getSelfDebugInfo() catch |err| { + try w.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}); + return; + }; + const ally = debug_info.allocator; + std.debug.writeStackTrace(s.getStackTrace(), w, ally, debug_info, tty_config) catch |err| { + try stderr.writer().print("Unable to dump stack trace: {s}\n", .{@errorName(err)}); + return; + }; + if (asking_step) |as| { + tty_config.setColor(w, .Red) catch {}; + try stderr.writeAll(" The step that is missing a dependency on the above step was created by this stack trace:\n"); + tty_config.setColor(w, .Reset) catch {}; + + std.debug.writeStackTrace(as.getStackTrace(), w, ally, debug_info, tty_config) catch |err| { + try stderr.writer().print("Unable to dump stack trace: {s}\n", .{@errorName(err)}); + return; + }; + } + + tty_config.setColor(w, .Red) catch {}; + try stderr.writeAll(" Hope that helps. Proceeding to panic.\n"); + tty_config.setColor(w, .Reset) catch {}; +} + /// Allocates a new string for assigning a value to a named macro. /// If the value is omitted, it is set to 1. /// `name` and `value` need not live longer than the function call. @@ -1703,9 +1678,7 @@ pub const InstallDir = union(enum) { /// Duplicates the install directory including the path if set to custom. 
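Sketch of how a custom step's `make` might use the `getPath2` variant added above, so a misconfigured `GeneratedFile` reports both the producing step and the asking step. `MyStep` and its `source` field are hypothetical:

```zig
const std = @import("std");
const Step = std.Build.Step;

const MyStep = struct {
    step: Step,
    source: std.Build.FileSource,

    fn make(step: *Step, prog_node: *std.Progress.Node) !void {
        _ = prog_node;
        const self = @fieldParentPtr(MyStep, "step", step);
        // Passing the asking step lets dumpBadGetPathHelp print both stack traces.
        const in_path = self.source.getPath2(step.owner, step);
        _ = in_path; // ... read the input and produce this step's output ...
    }
};
```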
pub fn dupe(self: InstallDir, builder: *Build) InstallDir { if (self == .custom) { - // Written with this temporary to avoid RLS problems - const duped_path = builder.dupe(self.custom); - return .{ .custom = duped_path }; + return .{ .custom = builder.dupe(self.custom) }; } else { return self; } @@ -1753,17 +1726,45 @@ pub fn serializeCpu(allocator: Allocator, cpu: std.Target.Cpu) ![]const u8 { } } +/// This function is intended to be called in the `configure` phase only. +/// It returns an absolute directory path, which is potentially going to be a +/// source of API breakage in the future, so keep that in mind when using this +/// function. +pub fn makeTempPath(b: *Build) []const u8 { + const rand_int = std.crypto.random.int(u64); + const tmp_dir_sub_path = "tmp" ++ fs.path.sep_str ++ hex64(rand_int); + const result_path = b.cache_root.join(b.allocator, &.{tmp_dir_sub_path}) catch @panic("OOM"); + fs.cwd().makePath(result_path) catch |err| { + std.debug.print("unable to make tmp path '{s}': {s}\n", .{ + result_path, @errorName(err), + }); + }; + return result_path; +} + +/// There are a few copies of this function in miscellaneous places. Would be nice to find +/// a home for them. +fn hex64(x: u64) [16]u8 { + const hex_charset = "0123456789abcdef"; + var result: [16]u8 = undefined; + var i: usize = 0; + while (i < 8) : (i += 1) { + const byte = @truncate(u8, x >> @intCast(u6, 8 * i)); + result[i * 2 + 0] = hex_charset[byte >> 4]; + result[i * 2 + 1] = hex_charset[byte & 15]; + } + return result; +} + test { _ = CheckFileStep; _ = CheckObjectStep; - _ = EmulatableRunStep; _ = FmtStep; _ = InstallArtifactStep; _ = InstallDirStep; _ = InstallFileStep; _ = ObjCopyStep; _ = CompileStep; - _ = LogStep; _ = OptionsStep; _ = RemoveDirStep; _ = RunStep; diff --git a/lib/std/Build/Cache.zig b/lib/std/Build/Cache.zig index d4dbe6ec14f1..b25e34916868 100644 --- a/lib/std/Build/Cache.zig +++ b/lib/std/Build/Cache.zig @@ -7,27 +7,27 @@ pub const Directory = struct { /// directly, but it is needed when passing the directory to a child process. /// `null` means cwd. 
path: ?[]const u8, - handle: std.fs.Dir, + handle: fs.Dir, pub fn join(self: Directory, allocator: Allocator, paths: []const []const u8) ![]u8 { if (self.path) |p| { // TODO clean way to do this with only 1 allocation - const part2 = try std.fs.path.join(allocator, paths); + const part2 = try fs.path.join(allocator, paths); defer allocator.free(part2); - return std.fs.path.join(allocator, &[_][]const u8{ p, part2 }); + return fs.path.join(allocator, &[_][]const u8{ p, part2 }); } else { - return std.fs.path.join(allocator, paths); + return fs.path.join(allocator, paths); } } pub fn joinZ(self: Directory, allocator: Allocator, paths: []const []const u8) ![:0]u8 { if (self.path) |p| { // TODO clean way to do this with only 1 allocation - const part2 = try std.fs.path.join(allocator, paths); + const part2 = try fs.path.join(allocator, paths); defer allocator.free(part2); - return std.fs.path.joinZ(allocator, &[_][]const u8{ p, part2 }); + return fs.path.joinZ(allocator, &[_][]const u8{ p, part2 }); } else { - return std.fs.path.joinZ(allocator, paths); + return fs.path.joinZ(allocator, paths); } } @@ -39,6 +39,20 @@ pub const Directory = struct { if (self.path) |p| gpa.free(p); self.* = undefined; } + + pub fn format( + self: Directory, + comptime fmt_string: []const u8, + options: fmt.FormatOptions, + writer: anytype, + ) !void { + _ = options; + if (fmt_string.len != 0) fmt.invalidFmtError(fmt, self); + if (self.path) |p| { + try writer.writeAll(p); + try writer.writeAll(fs.path.sep_str); + } + } }; gpa: Allocator, @@ -243,10 +257,10 @@ pub const HashHelper = struct { hh.hasher.final(&bin_digest); var out_digest: [hex_digest_len]u8 = undefined; - _ = std.fmt.bufPrint( + _ = fmt.bufPrint( &out_digest, "{s}", - .{std.fmt.fmtSliceHexLower(&bin_digest)}, + .{fmt.fmtSliceHexLower(&bin_digest)}, ) catch unreachable; return out_digest; } @@ -365,10 +379,10 @@ pub const Manifest = struct { var bin_digest: BinDigest = undefined; self.hash.hasher.final(&bin_digest); - _ = std.fmt.bufPrint( + _ = fmt.bufPrint( &self.hex_digest, "{s}", - .{std.fmt.fmtSliceHexLower(&bin_digest)}, + .{fmt.fmtSliceHexLower(&bin_digest)}, ) catch unreachable; self.hash.hasher = hasher_init; @@ -408,7 +422,11 @@ pub const Manifest = struct { self.have_exclusive_lock = true; return false; // cache miss; exclusive lock already held } else |err| switch (err) { - error.WouldBlock => continue, + // There are no dir components, so you would think + // that this was unreachable, however we have + // observed on macOS two processes racing to do + // openat() with O_CREAT manifest in ENOENT. + error.WouldBlock, error.FileNotFound => continue, else => |e| return e, } }, @@ -425,7 +443,10 @@ pub const Manifest = struct { self.manifest_file = manifest_file; self.have_exclusive_lock = true; } else |err| switch (err) { - error.WouldBlock => { + // There are no dir components, so you would think that this was + // unreachable, however we have observed on macOS two processes racing + // to do openat() with O_CREAT manifest in ENOENT. 
+ error.WouldBlock, error.FileNotFound => { self.manifest_file = try self.cache.manifest_dir.openFile(&manifest_file_path, .{ .lock = .Shared, }); @@ -469,7 +490,7 @@ pub const Manifest = struct { cache_hash_file.stat.size = fmt.parseInt(u64, size, 10) catch return error.InvalidFormat; cache_hash_file.stat.inode = fmt.parseInt(fs.File.INode, inode, 10) catch return error.InvalidFormat; cache_hash_file.stat.mtime = fmt.parseInt(i64, mtime_nsec_str, 10) catch return error.InvalidFormat; - _ = std.fmt.hexToBytes(&cache_hash_file.bin_digest, digest_str) catch return error.InvalidFormat; + _ = fmt.hexToBytes(&cache_hash_file.bin_digest, digest_str) catch return error.InvalidFormat; const prefix = fmt.parseInt(u8, prefix_str, 10) catch return error.InvalidFormat; if (prefix >= self.cache.prefixes_len) return error.InvalidFormat; @@ -806,10 +827,10 @@ pub const Manifest = struct { self.hash.hasher.final(&bin_digest); var out_digest: [hex_digest_len]u8 = undefined; - _ = std.fmt.bufPrint( + _ = fmt.bufPrint( &out_digest, "{s}", - .{std.fmt.fmtSliceHexLower(&bin_digest)}, + .{fmt.fmtSliceHexLower(&bin_digest)}, ) catch unreachable; return out_digest; @@ -831,10 +852,10 @@ pub const Manifest = struct { var encoded_digest: [hex_digest_len]u8 = undefined; for (self.files.items) |file| { - _ = std.fmt.bufPrint( + _ = fmt.bufPrint( &encoded_digest, "{s}", - .{std.fmt.fmtSliceHexLower(&file.bin_digest)}, + .{fmt.fmtSliceHexLower(&file.bin_digest)}, ) catch unreachable; try writer.print("{d} {d} {d} {s} {d} {s}\n", .{ file.stat.size, @@ -955,16 +976,16 @@ fn hashFile(file: fs.File, bin_digest: *[Hasher.mac_length]u8) !void { } // Create/Write a file, close it, then grab its stat.mtime timestamp. -fn testGetCurrentFileTimestamp() !i128 { +fn testGetCurrentFileTimestamp(dir: fs.Dir) !i128 { const test_out_file = "test-filetimestamp.tmp"; - var file = try fs.cwd().createFile(test_out_file, .{ + var file = try dir.createFile(test_out_file, .{ .read = true, .truncate = true, }); defer { file.close(); - fs.cwd().deleteFile(test_out_file) catch {}; + dir.deleteFile(test_out_file) catch {}; } return (try file.stat()).mtime; @@ -976,16 +997,17 @@ test "cache file and then recall it" { return error.SkipZigTest; } - const cwd = fs.cwd(); + var tmp = testing.tmpDir(.{}); + defer tmp.cleanup(); const temp_file = "test.txt"; const temp_manifest_dir = "temp_manifest_dir"; - try cwd.writeFile(temp_file, "Hello, world!\n"); + try tmp.dir.writeFile(temp_file, "Hello, world!\n"); // Wait for file timestamps to tick - const initial_time = try testGetCurrentFileTimestamp(); - while ((try testGetCurrentFileTimestamp()) == initial_time) { + const initial_time = try testGetCurrentFileTimestamp(tmp.dir); + while ((try testGetCurrentFileTimestamp(tmp.dir)) == initial_time) { std.time.sleep(1); } @@ -995,9 +1017,9 @@ test "cache file and then recall it" { { var cache = Cache{ .gpa = testing.allocator, - .manifest_dir = try cwd.makeOpenPath(temp_manifest_dir, .{}), + .manifest_dir = try tmp.dir.makeOpenPath(temp_manifest_dir, .{}), }; - cache.addPrefix(.{ .path = null, .handle = fs.cwd() }); + cache.addPrefix(.{ .path = null, .handle = tmp.dir }); defer cache.manifest_dir.close(); { @@ -1033,9 +1055,6 @@ test "cache file and then recall it" { try testing.expectEqual(digest1, digest2); } - - try cwd.deleteTree(temp_manifest_dir); - try cwd.deleteFile(temp_file); } test "check that changing a file makes cache fail" { @@ -1043,21 +1062,19 @@ test "check that changing a file makes cache fail" { // 
https://github.com/ziglang/zig/issues/5437 return error.SkipZigTest; } - const cwd = fs.cwd(); + var tmp = testing.tmpDir(.{}); + defer tmp.cleanup(); const temp_file = "cache_hash_change_file_test.txt"; const temp_manifest_dir = "cache_hash_change_file_manifest_dir"; const original_temp_file_contents = "Hello, world!\n"; const updated_temp_file_contents = "Hello, world; but updated!\n"; - try cwd.deleteTree(temp_manifest_dir); - try cwd.deleteTree(temp_file); - - try cwd.writeFile(temp_file, original_temp_file_contents); + try tmp.dir.writeFile(temp_file, original_temp_file_contents); // Wait for file timestamps to tick - const initial_time = try testGetCurrentFileTimestamp(); - while ((try testGetCurrentFileTimestamp()) == initial_time) { + const initial_time = try testGetCurrentFileTimestamp(tmp.dir); + while ((try testGetCurrentFileTimestamp(tmp.dir)) == initial_time) { std.time.sleep(1); } @@ -1067,9 +1084,9 @@ test "check that changing a file makes cache fail" { { var cache = Cache{ .gpa = testing.allocator, - .manifest_dir = try cwd.makeOpenPath(temp_manifest_dir, .{}), + .manifest_dir = try tmp.dir.makeOpenPath(temp_manifest_dir, .{}), }; - cache.addPrefix(.{ .path = null, .handle = fs.cwd() }); + cache.addPrefix(.{ .path = null, .handle = tmp.dir }); defer cache.manifest_dir.close(); { @@ -1089,7 +1106,7 @@ test "check that changing a file makes cache fail" { try ch.writeManifest(); } - try cwd.writeFile(temp_file, updated_temp_file_contents); + try tmp.dir.writeFile(temp_file, updated_temp_file_contents); { var ch = cache.obtain(); @@ -1111,9 +1128,6 @@ test "check that changing a file makes cache fail" { try testing.expect(!mem.eql(u8, digest1[0..], digest2[0..])); } - - try cwd.deleteTree(temp_manifest_dir); - try cwd.deleteTree(temp_file); } test "no file inputs" { @@ -1121,18 +1135,20 @@ test "no file inputs" { // https://github.com/ziglang/zig/issues/5437 return error.SkipZigTest; } - const cwd = fs.cwd(); + + var tmp = testing.tmpDir(.{}); + defer tmp.cleanup(); + const temp_manifest_dir = "no_file_inputs_manifest_dir"; - defer cwd.deleteTree(temp_manifest_dir) catch {}; var digest1: [hex_digest_len]u8 = undefined; var digest2: [hex_digest_len]u8 = undefined; var cache = Cache{ .gpa = testing.allocator, - .manifest_dir = try cwd.makeOpenPath(temp_manifest_dir, .{}), + .manifest_dir = try tmp.dir.makeOpenPath(temp_manifest_dir, .{}), }; - cache.addPrefix(.{ .path = null, .handle = fs.cwd() }); + cache.addPrefix(.{ .path = null, .handle = tmp.dir }); defer cache.manifest_dir.close(); { @@ -1167,18 +1183,19 @@ test "Manifest with files added after initial hash work" { // https://github.com/ziglang/zig/issues/5437 return error.SkipZigTest; } - const cwd = fs.cwd(); + var tmp = testing.tmpDir(.{}); + defer tmp.cleanup(); const temp_file1 = "cache_hash_post_file_test1.txt"; const temp_file2 = "cache_hash_post_file_test2.txt"; const temp_manifest_dir = "cache_hash_post_file_manifest_dir"; - try cwd.writeFile(temp_file1, "Hello, world!\n"); - try cwd.writeFile(temp_file2, "Hello world the second!\n"); + try tmp.dir.writeFile(temp_file1, "Hello, world!\n"); + try tmp.dir.writeFile(temp_file2, "Hello world the second!\n"); // Wait for file timestamps to tick - const initial_time = try testGetCurrentFileTimestamp(); - while ((try testGetCurrentFileTimestamp()) == initial_time) { + const initial_time = try testGetCurrentFileTimestamp(tmp.dir); + while ((try testGetCurrentFileTimestamp(tmp.dir)) == initial_time) { std.time.sleep(1); } @@ -1189,9 +1206,9 @@ test "Manifest with files 
added after initial hash work" { { var cache = Cache{ .gpa = testing.allocator, - .manifest_dir = try cwd.makeOpenPath(temp_manifest_dir, .{}), + .manifest_dir = try tmp.dir.makeOpenPath(temp_manifest_dir, .{}), }; - cache.addPrefix(.{ .path = null, .handle = fs.cwd() }); + cache.addPrefix(.{ .path = null, .handle = tmp.dir }); defer cache.manifest_dir.close(); { @@ -1224,11 +1241,11 @@ test "Manifest with files added after initial hash work" { try testing.expect(mem.eql(u8, &digest1, &digest2)); // Modify the file added after initial hash - try cwd.writeFile(temp_file2, "Hello world the second, updated\n"); + try tmp.dir.writeFile(temp_file2, "Hello world the second, updated\n"); // Wait for file timestamps to tick - const initial_time2 = try testGetCurrentFileTimestamp(); - while ((try testGetCurrentFileTimestamp()) == initial_time2) { + const initial_time2 = try testGetCurrentFileTimestamp(tmp.dir); + while ((try testGetCurrentFileTimestamp(tmp.dir)) == initial_time2) { std.time.sleep(1); } @@ -1251,8 +1268,4 @@ test "Manifest with files added after initial hash work" { try testing.expect(!mem.eql(u8, &digest1, &digest3)); } - - try cwd.deleteTree(temp_manifest_dir); - try cwd.deleteFile(temp_file1); - try cwd.deleteFile(temp_file2); } diff --git a/lib/std/Build/CheckFileStep.zig b/lib/std/Build/CheckFileStep.zig index b08a797e8413..1c2b6b77867b 100644 --- a/lib/std/Build/CheckFileStep.zig +++ b/lib/std/Build/CheckFileStep.zig @@ -1,51 +1,88 @@ -const std = @import("../std.zig"); -const Step = std.Build.Step; -const fs = std.fs; -const mem = std.mem; - -const CheckFileStep = @This(); - -pub const base_id = .check_file; +//! Fail the build step if a file does not match certain checks. +//! TODO: make this more flexible, supporting more kinds of checks. +//! TODO: generalize the code in std.testing.expectEqualStrings and make this +//! CheckFileStep produce those helpful diagnostics when there is not a match. 
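A hedged build.zig sketch for the reworked CheckFileStep via `b.addCheckFile`; the checked path, the expected strings, and the step wiring are illustrative:

```zig
// Inside build(b: *std.Build):
const check = b.addCheckFile(.{ .path = "zig-out/generated.txt" }, .{
    .expected_matches = &.{ "version=", "abi=" },
});
check.setName("check generated.txt");

const check_step = b.step("check", "Verify generated.txt contents");
check_step.dependOn(&check.step);
```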
step: Step, -builder: *std.Build, expected_matches: []const []const u8, +expected_exact: ?[]const u8, source: std.Build.FileSource, max_bytes: usize = 20 * 1024 * 1024, +pub const base_id = .check_file; + +pub const Options = struct { + expected_matches: []const []const u8 = &.{}, + expected_exact: ?[]const u8 = null, +}; + pub fn create( - builder: *std.Build, + owner: *std.Build, source: std.Build.FileSource, - expected_matches: []const []const u8, + options: Options, ) *CheckFileStep { - const self = builder.allocator.create(CheckFileStep) catch @panic("OOM"); - self.* = CheckFileStep{ - .builder = builder, - .step = Step.init(.check_file, "CheckFile", builder.allocator, make), - .source = source.dupe(builder), - .expected_matches = builder.dupeStrings(expected_matches), + const self = owner.allocator.create(CheckFileStep) catch @panic("OOM"); + self.* = .{ + .step = Step.init(.{ + .id = .check_file, + .name = "CheckFile", + .owner = owner, + .makeFn = make, + }), + .source = source.dupe(owner), + .expected_matches = owner.dupeStrings(options.expected_matches), + .expected_exact = options.expected_exact, }; self.source.addStepDependencies(&self.step); return self; } -fn make(step: *Step) !void { +pub fn setName(self: *CheckFileStep, name: []const u8) void { + self.step.name = name; +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const b = step.owner; const self = @fieldParentPtr(CheckFileStep, "step", step); - const src_path = self.source.getPath(self.builder); - const contents = try fs.cwd().readFileAlloc(self.builder.allocator, src_path, self.max_bytes); + const src_path = self.source.getPath(b); + const contents = fs.cwd().readFileAlloc(b.allocator, src_path, self.max_bytes) catch |err| { + return step.fail("unable to read '{s}': {s}", .{ + src_path, @errorName(err), + }); + }; for (self.expected_matches) |expected_match| { if (mem.indexOf(u8, contents, expected_match) == null) { - std.debug.print( + return step.fail( \\ - \\========= Expected to find: =================== + \\========= expected to find: =================== \\{s} - \\========= But file does not contain it: ======= + \\========= but file does not contain it: ======= \\{s} - \\ + \\=============================================== , .{ expected_match, contents }); - return error.TestFailed; + } + } + + if (self.expected_exact) |expected_exact| { + if (!mem.eql(u8, expected_exact, contents)) { + return step.fail( + \\ + \\========= expected: ===================== + \\{s} + \\========= but found: ==================== + \\{s} + \\========= from the following file: ====== + \\{s} + , .{ expected_exact, contents, src_path }); } } } + +const CheckFileStep = @This(); +const std = @import("../std.zig"); +const Step = std.Build.Step; +const fs = std.fs; +const mem = std.mem; diff --git a/lib/std/Build/CheckObjectStep.zig b/lib/std/Build/CheckObjectStep.zig index 5cb096581f99..7cac2d04ec83 100644 --- a/lib/std/Build/CheckObjectStep.zig +++ b/lib/std/Build/CheckObjectStep.zig @@ -10,25 +10,31 @@ const CheckObjectStep = @This(); const Allocator = mem.Allocator; const Step = std.Build.Step; -const EmulatableRunStep = std.Build.EmulatableRunStep; pub const base_id = .check_object; step: Step, -builder: *std.Build, source: std.Build.FileSource, max_bytes: usize = 20 * 1024 * 1024, checks: std.ArrayList(Check), dump_symtab: bool = false, obj_format: std.Target.ObjectFormat, -pub fn create(builder: *std.Build, source: std.Build.FileSource, obj_format: std.Target.ObjectFormat) *CheckObjectStep { - const 
gpa = builder.allocator; +pub fn create( + owner: *std.Build, + source: std.Build.FileSource, + obj_format: std.Target.ObjectFormat, +) *CheckObjectStep { + const gpa = owner.allocator; const self = gpa.create(CheckObjectStep) catch @panic("OOM"); self.* = .{ - .builder = builder, - .step = Step.init(.check_file, "CheckObject", gpa, make), - .source = source.dupe(builder), + .step = Step.init(.{ + .id = .check_file, + .name = "CheckObject", + .owner = owner, + .makeFn = make, + }), + .source = source.dupe(owner), .checks = std.ArrayList(Check).init(gpa), .obj_format = obj_format, }; @@ -38,14 +44,18 @@ pub fn create(builder: *std.Build, source: std.Build.FileSource, obj_format: std /// Runs and (optionally) compares the output of a binary. /// Asserts `self` was generated from an executable step. -pub fn runAndCompare(self: *CheckObjectStep) *EmulatableRunStep { +/// TODO this doesn't actually compare, and there's no apparent reason for it +/// to depend on the check object step. I don't see why this function should exist, +/// the caller could just add the run step directly. +pub fn runAndCompare(self: *CheckObjectStep) *std.Build.RunStep { const dependencies_len = self.step.dependencies.items.len; assert(dependencies_len > 0); const exe_step = self.step.dependencies.items[dependencies_len - 1]; const exe = exe_step.cast(std.Build.CompileStep).?; - const emulatable_step = EmulatableRunStep.create(self.builder, "EmulatableRun", exe); - emulatable_step.step.dependOn(&self.step); - return emulatable_step; + const run = self.step.owner.addRunArtifact(exe); + run.skip_foreign_checks = true; + run.step.dependOn(&self.step); + return run; } /// There two types of actions currently suported: @@ -123,7 +133,8 @@ const Action = struct { /// Will return true if the `phrase` is correctly parsed into an RPN program and /// its reduced, computed value compares using `op` with the expected value, either /// a literal or another extracted variable. - fn computeCmp(act: Action, gpa: Allocator, global_vars: anytype) !bool { + fn computeCmp(act: Action, step: *Step, global_vars: anytype) !bool { + const gpa = step.owner.allocator; var op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa); var values = std.ArrayList(u64).init(gpa); @@ -140,11 +151,11 @@ const Action = struct { } else { const val = std.fmt.parseInt(u64, next, 0) catch blk: { break :blk global_vars.get(next) orelse { - std.debug.print( + try step.addError( \\ - \\========= Variable was not extracted: =========== + \\========= variable was not extracted: =========== \\{s} - \\ + \\================================================= , .{next}); return error.UnknownVariable; }; @@ -176,11 +187,11 @@ const Action = struct { const exp_value = switch (act.expected.?.value) { .variable => |name| global_vars.get(name) orelse { - std.debug.print( + try step.addError( \\ - \\========= Variable was not extracted: =========== + \\========= variable was not extracted: =========== \\{s} - \\ + \\================================================= , .{name}); return error.UnknownVariable; }, @@ -249,7 +260,7 @@ const Check = struct { /// Creates a new sequence of actions with `phrase` as the first anchor searched phrase. 
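What the TODO above suggests callers do instead of `runAndCompare`: add the run step directly and opt into skipping when the binary cannot execute on the host. A sketch with illustrative names (`exe` a *CompileStep, `test_step` a *Step, both created elsewhere in build()):

```zig
const run = b.addRunArtifact(exe);
run.skip_foreign_checks = true; // skip quietly if the target can't run on this host
test_step.dependOn(&run.step);
```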
pub fn checkStart(self: *CheckObjectStep, phrase: []const u8) void { - var new_check = Check.create(self.builder); + var new_check = Check.create(self.step.owner); new_check.match(phrase); self.checks.append(new_check) catch @panic("OOM"); } @@ -291,34 +302,34 @@ pub fn checkComputeCompare( program: []const u8, expected: ComputeCompareExpected, ) void { - var new_check = Check.create(self.builder); + var new_check = Check.create(self.step.owner); new_check.computeCmp(program, expected); self.checks.append(new_check) catch @panic("OOM"); } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const b = step.owner; + const gpa = b.allocator; const self = @fieldParentPtr(CheckObjectStep, "step", step); - const gpa = self.builder.allocator; - const src_path = self.source.getPath(self.builder); - const contents = try fs.cwd().readFileAllocOptions( + const src_path = self.source.getPath(b); + const contents = fs.cwd().readFileAllocOptions( gpa, src_path, self.max_bytes, null, @alignOf(u64), null, - ); + ) catch |err| return step.fail("unable to read '{s}': {s}", .{ src_path, @errorName(err) }); const output = switch (self.obj_format) { - .macho => try MachODumper.parseAndDump(contents, .{ - .gpa = gpa, + .macho => try MachODumper.parseAndDump(step, contents, .{ .dump_symtab = self.dump_symtab, }), .elf => @panic("TODO elf parser"), .coff => @panic("TODO coff parser"), - .wasm => try WasmDumper.parseAndDump(contents, .{ - .gpa = gpa, + .wasm => try WasmDumper.parseAndDump(step, contents, .{ .dump_symtab = self.dump_symtab, }), else => unreachable, @@ -334,54 +345,50 @@ fn make(step: *Step) !void { while (it.next()) |line| { if (try act.match(line, &vars)) break; } else { - std.debug.print( + return step.fail( \\ - \\========= Expected to find: ========================== + \\========= expected to find: ========================== \\{s} - \\========= But parsed file does not contain it: ======= + \\========= but parsed file does not contain it: ======= \\{s} - \\ + \\====================================================== , .{ act.phrase, output }); - return error.TestFailed; } }, .not_present => { while (it.next()) |line| { if (try act.match(line, &vars)) { - std.debug.print( + return step.fail( \\ - \\========= Expected not to find: =================== + \\========= expected not to find: =================== \\{s} - \\========= But parsed file does contain it: ======== + \\========= but parsed file does contain it: ======== \\{s} - \\ + \\=================================================== , .{ act.phrase, output }); - return error.TestFailed; } } }, .compute_cmp => { - const res = act.computeCmp(gpa, vars) catch |err| switch (err) { + const res = act.computeCmp(step, vars) catch |err| switch (err) { error.UnknownVariable => { - std.debug.print( - \\========= From parsed file: ===================== + return step.fail( + \\========= from parsed file: ===================== \\{s} - \\ + \\================================================= , .{output}); - return error.TestFailed; }, else => |e| return e, }; if (!res) { - std.debug.print( + return step.fail( \\ - \\========= Comparison failed for action: =========== + \\========= comparison failed for action: =========== \\{s} {} - \\========= From parsed file: ======================= + \\========= from parsed file: ======================= \\{s} - \\ + \\=================================================== , .{ act.phrase, act.expected.?, output }); - return error.TestFailed; } }, } @@ -390,7 +397,6 
@@ fn make(step: *Step) !void { } const Opts = struct { - gpa: ?Allocator = null, dump_symtab: bool = false, }; @@ -398,8 +404,8 @@ const MachODumper = struct { const LoadCommandIterator = macho.LoadCommandIterator; const symtab_label = "symtab"; - fn parseAndDump(bytes: []align(@alignOf(u64)) const u8, opts: Opts) ![]const u8 { - const gpa = opts.gpa orelse unreachable; // MachO dumper requires an allocator + fn parseAndDump(step: *Step, bytes: []align(@alignOf(u64)) const u8, opts: Opts) ![]const u8 { + const gpa = step.owner.allocator; var stream = std.io.fixedBufferStream(bytes); const reader = stream.reader(); @@ -681,8 +687,8 @@ const MachODumper = struct { const WasmDumper = struct { const symtab_label = "symbols"; - fn parseAndDump(bytes: []const u8, opts: Opts) ![]const u8 { - const gpa = opts.gpa orelse unreachable; // Wasm dumper requires an allocator + fn parseAndDump(step: *Step, bytes: []const u8, opts: Opts) ![]const u8 { + const gpa = step.owner.allocator; if (opts.dump_symtab) { @panic("TODO: Implement symbol table parsing and dumping"); } @@ -703,20 +709,24 @@ const WasmDumper = struct { const writer = output.writer(); while (reader.readByte()) |current_byte| { - const section = std.meta.intToEnum(std.wasm.Section, current_byte) catch |err| { - std.debug.print("Found invalid section id '{d}'\n", .{current_byte}); - return err; + const section = std.meta.intToEnum(std.wasm.Section, current_byte) catch { + return step.fail("Found invalid section id '{d}'", .{current_byte}); }; const section_length = try std.leb.readULEB128(u32, reader); - try parseAndDumpSection(section, bytes[fbs.pos..][0..section_length], writer); + try parseAndDumpSection(step, section, bytes[fbs.pos..][0..section_length], writer); fbs.pos += section_length; } else |_| {} // reached end of stream return output.toOwnedSlice(); } - fn parseAndDumpSection(section: std.wasm.Section, data: []const u8, writer: anytype) !void { + fn parseAndDumpSection( + step: *Step, + section: std.wasm.Section, + data: []const u8, + writer: anytype, + ) !void { var fbs = std.io.fixedBufferStream(data); const reader = fbs.reader(); @@ -739,7 +749,7 @@ const WasmDumper = struct { => { const entries = try std.leb.readULEB128(u32, reader); try writer.print("\nentries {d}\n", .{entries}); - try dumpSection(section, data[fbs.pos..], entries, writer); + try dumpSection(step, section, data[fbs.pos..], entries, writer); }, .custom => { const name_length = try std.leb.readULEB128(u32, reader); @@ -748,7 +758,7 @@ const WasmDumper = struct { try writer.print("\nname {s}\n", .{name}); if (mem.eql(u8, name, "name")) { - try parseDumpNames(reader, writer, data); + try parseDumpNames(step, reader, writer, data); } else if (mem.eql(u8, name, "producers")) { try parseDumpProducers(reader, writer, data); } else if (mem.eql(u8, name, "target_features")) { @@ -764,7 +774,7 @@ const WasmDumper = struct { } } - fn dumpSection(section: std.wasm.Section, data: []const u8, entries: u32, writer: anytype) !void { + fn dumpSection(step: *Step, section: std.wasm.Section, data: []const u8, entries: u32, writer: anytype) !void { var fbs = std.io.fixedBufferStream(data); const reader = fbs.reader(); @@ -774,19 +784,18 @@ const WasmDumper = struct { while (i < entries) : (i += 1) { const func_type = try reader.readByte(); if (func_type != std.wasm.function_type) { - std.debug.print("Expected function type, found byte '{d}'\n", .{func_type}); - return error.UnexpectedByte; + return step.fail("expected function type, found byte '{d}'", .{func_type}); } const 
params = try std.leb.readULEB128(u32, reader); try writer.print("params {d}\n", .{params}); var index: u32 = 0; while (index < params) : (index += 1) { - try parseDumpType(std.wasm.Valtype, reader, writer); + try parseDumpType(step, std.wasm.Valtype, reader, writer); } else index = 0; const returns = try std.leb.readULEB128(u32, reader); try writer.print("returns {d}\n", .{returns}); while (index < returns) : (index += 1) { - try parseDumpType(std.wasm.Valtype, reader, writer); + try parseDumpType(step, std.wasm.Valtype, reader, writer); } } }, @@ -800,9 +809,8 @@ const WasmDumper = struct { const name = data[fbs.pos..][0..name_len]; fbs.pos += name_len; - const kind = std.meta.intToEnum(std.wasm.ExternalKind, try reader.readByte()) catch |err| { - std.debug.print("Invalid import kind\n", .{}); - return err; + const kind = std.meta.intToEnum(std.wasm.ExternalKind, try reader.readByte()) catch { + return step.fail("invalid import kind", .{}); }; try writer.print( @@ -819,11 +827,11 @@ const WasmDumper = struct { try parseDumpLimits(reader, writer); }, .global => { - try parseDumpType(std.wasm.Valtype, reader, writer); + try parseDumpType(step, std.wasm.Valtype, reader, writer); try writer.print("mutable {}\n", .{0x01 == try std.leb.readULEB128(u32, reader)}); }, .table => { - try parseDumpType(std.wasm.RefType, reader, writer); + try parseDumpType(step, std.wasm.RefType, reader, writer); try parseDumpLimits(reader, writer); }, } @@ -838,7 +846,7 @@ const WasmDumper = struct { .table => { var i: u32 = 0; while (i < entries) : (i += 1) { - try parseDumpType(std.wasm.RefType, reader, writer); + try parseDumpType(step, std.wasm.RefType, reader, writer); try parseDumpLimits(reader, writer); } }, @@ -851,9 +859,9 @@ const WasmDumper = struct { .global => { var i: u32 = 0; while (i < entries) : (i += 1) { - try parseDumpType(std.wasm.Valtype, reader, writer); + try parseDumpType(step, std.wasm.Valtype, reader, writer); try writer.print("mutable {}\n", .{0x01 == try std.leb.readULEB128(u1, reader)}); - try parseDumpInit(reader, writer); + try parseDumpInit(step, reader, writer); } }, .@"export" => { @@ -863,9 +871,8 @@ const WasmDumper = struct { const name = data[fbs.pos..][0..name_len]; fbs.pos += name_len; const kind_byte = try std.leb.readULEB128(u8, reader); - const kind = std.meta.intToEnum(std.wasm.ExternalKind, kind_byte) catch |err| { - std.debug.print("invalid export kind value '{d}'\n", .{kind_byte}); - return err; + const kind = std.meta.intToEnum(std.wasm.ExternalKind, kind_byte) catch { + return step.fail("invalid export kind value '{d}'", .{kind_byte}); }; const index = try std.leb.readULEB128(u32, reader); try writer.print( @@ -880,7 +887,7 @@ const WasmDumper = struct { var i: u32 = 0; while (i < entries) : (i += 1) { try writer.print("table index {d}\n", .{try std.leb.readULEB128(u32, reader)}); - try parseDumpInit(reader, writer); + try parseDumpInit(step, reader, writer); const function_indexes = try std.leb.readULEB128(u32, reader); var function_index: u32 = 0; @@ -896,7 +903,7 @@ const WasmDumper = struct { while (i < entries) : (i += 1) { const index = try std.leb.readULEB128(u32, reader); try writer.print("memory index 0x{x}\n", .{index}); - try parseDumpInit(reader, writer); + try parseDumpInit(step, reader, writer); const size = try std.leb.readULEB128(u32, reader); try writer.print("size {d}\n", .{size}); try reader.skipBytes(size, .{}); // we do not care about the content of the segments @@ -906,11 +913,10 @@ const WasmDumper = struct { } } - fn parseDumpType(comptime 
WasmType: type, reader: anytype, writer: anytype) !void { + fn parseDumpType(step: *Step, comptime WasmType: type, reader: anytype, writer: anytype) !void { const type_byte = try reader.readByte(); - const valtype = std.meta.intToEnum(WasmType, type_byte) catch |err| { - std.debug.print("Invalid wasm type value '{d}'\n", .{type_byte}); - return err; + const valtype = std.meta.intToEnum(WasmType, type_byte) catch { + return step.fail("Invalid wasm type value '{d}'", .{type_byte}); }; try writer.print("type {s}\n", .{@tagName(valtype)}); } @@ -925,11 +931,10 @@ const WasmDumper = struct { } } - fn parseDumpInit(reader: anytype, writer: anytype) !void { + fn parseDumpInit(step: *Step, reader: anytype, writer: anytype) !void { const byte = try std.leb.readULEB128(u8, reader); - const opcode = std.meta.intToEnum(std.wasm.Opcode, byte) catch |err| { - std.debug.print("invalid wasm opcode '{d}'\n", .{byte}); - return err; + const opcode = std.meta.intToEnum(std.wasm.Opcode, byte) catch { + return step.fail("invalid wasm opcode '{d}'", .{byte}); }; switch (opcode) { .i32_const => try writer.print("i32.const {x}\n", .{try std.leb.readILEB128(i32, reader)}), @@ -941,14 +946,13 @@ const WasmDumper = struct { } const end_opcode = try std.leb.readULEB128(u8, reader); if (end_opcode != std.wasm.opcode(.end)) { - std.debug.print("expected 'end' opcode in init expression\n", .{}); - return error.MissingEndOpcode; + return step.fail("expected 'end' opcode in init expression", .{}); } } - fn parseDumpNames(reader: anytype, writer: anytype, data: []const u8) !void { + fn parseDumpNames(step: *Step, reader: anytype, writer: anytype, data: []const u8) !void { while (reader.context.pos < data.len) { - try parseDumpType(std.wasm.NameSubsection, reader, writer); + try parseDumpType(step, std.wasm.NameSubsection, reader, writer); const size = try std.leb.readULEB128(u32, reader); const entries = try std.leb.readULEB128(u32, reader); try writer.print( diff --git a/lib/std/Build/CompileStep.zig b/lib/std/Build/CompileStep.zig index 49d2fae68de1..d73e5d3b41df 100644 --- a/lib/std/Build/CompileStep.zig +++ b/lib/std/Build/CompileStep.zig @@ -1,7 +1,6 @@ const builtin = @import("builtin"); const std = @import("../std.zig"); const mem = std.mem; -const log = std.log; const fs = std.fs; const assert = std.debug.assert; const panic = std.debug.panic; @@ -22,7 +21,6 @@ const InstallDir = std.Build.InstallDir; const InstallArtifactStep = std.Build.InstallArtifactStep; const GeneratedFile = std.Build.GeneratedFile; const ObjCopyStep = std.Build.ObjCopyStep; -const EmulatableRunStep = std.Build.EmulatableRunStep; const CheckObjectStep = std.Build.CheckObjectStep; const RunStep = std.Build.RunStep; const OptionsStep = std.Build.OptionsStep; @@ -32,7 +30,6 @@ const CompileStep = @This(); pub const base_id: Step.Id = .compile; step: Step, -builder: *std.Build, name: []const u8, target: CrossTarget, target_info: NativeTargetInfo, @@ -49,9 +46,9 @@ strip: ?bool, unwind_tables: ?bool, // keep in sync with src/link.zig:CompressDebugSections compress_debug_sections: enum { none, zlib } = .none, -lib_paths: ArrayList([]const u8), -rpaths: ArrayList([]const u8), -framework_dirs: ArrayList([]const u8), +lib_paths: ArrayList(FileSource), +rpaths: ArrayList(FileSource), +framework_dirs: ArrayList(FileSource), frameworks: StringHashMap(FrameworkLinkInfo), verbose_link: bool, verbose_cc: bool, @@ -86,7 +83,6 @@ c_std: std.Build.CStd, zig_lib_dir: ?[]const u8, main_pkg_path: ?[]const u8, exec_cmd_args: ?[]const ?[]const u8, -name_prefix: 
[]const u8, filter: ?[]const u8, test_evented_io: bool = false, test_runner: ?[]const u8, @@ -210,10 +206,17 @@ want_lto: ?bool = null, use_llvm: ?bool = null, use_lld: ?bool = null, +/// This is an advanced setting that can change the intent of this CompileStep. +/// If this slice has nonzero length, it means that this CompileStep exists to +/// check for compile errors and return *success* if they match, and failure +/// otherwise. +expect_errors: []const []const u8 = &.{}, + output_path_source: GeneratedFile, output_lib_path_source: GeneratedFile, output_h_path_source: GeneratedFile, output_pdb_path_source: GeneratedFile, +output_dirname_source: GeneratedFile, pub const CSourceFiles = struct { files: []const []const u8, @@ -277,6 +280,7 @@ pub const Options = struct { kind: Kind, linkage: ?Linkage = null, version: ?std.builtin.Version = null, + max_rss: usize = 0, }; pub const Kind = enum { @@ -284,7 +288,6 @@ pub const Kind = enum { lib, obj, @"test", - test_exe, }; pub const Linkage = enum { dynamic, static }; @@ -305,18 +308,35 @@ pub const EmitOption = union(enum) { } }; -pub fn create(builder: *std.Build, options: Options) *CompileStep { - const name = builder.dupe(options.name); - const root_src: ?FileSource = if (options.root_source_file) |rsrc| rsrc.dupe(builder) else null; +pub fn create(owner: *std.Build, options: Options) *CompileStep { + const name = owner.dupe(options.name); + const root_src: ?FileSource = if (options.root_source_file) |rsrc| rsrc.dupe(owner) else null; if (mem.indexOf(u8, name, "/") != null or mem.indexOf(u8, name, "\\") != null) { panic("invalid name: '{s}'. It looks like a file path, but it is supposed to be the library or application name.", .{name}); } - const self = builder.allocator.create(CompileStep) catch @panic("OOM"); + // Avoid the common case of the step name looking like "zig test test". 
+ const name_adjusted = if (options.kind == .@"test" and mem.eql(u8, name, "test")) + "" + else + owner.fmt("{s} ", .{name}); + + const step_name = owner.fmt("{s} {s}{s} {s}", .{ + switch (options.kind) { + .exe => "zig build-exe", + .lib => "zig build-lib", + .obj => "zig build-obj", + .@"test" => "zig test", + }, + name_adjusted, + @tagName(options.optimize), + options.target.zigTriple(owner.allocator) catch @panic("OOM"), + }); + + const self = owner.allocator.create(CompileStep) catch @panic("OOM"); self.* = CompileStep{ .strip = null, .unwind_tables = null, - .builder = builder, .verbose_link = false, .verbose_cc = false, .optimize = options.optimize, @@ -325,29 +345,34 @@ pub fn create(builder: *std.Build, options: Options) *CompileStep { .kind = options.kind, .root_src = root_src, .name = name, - .frameworks = StringHashMap(FrameworkLinkInfo).init(builder.allocator), - .step = Step.init(base_id, name, builder.allocator, make), + .frameworks = StringHashMap(FrameworkLinkInfo).init(owner.allocator), + .step = Step.init(.{ + .id = base_id, + .name = step_name, + .owner = owner, + .makeFn = make, + .max_rss = options.max_rss, + }), .version = options.version, .out_filename = undefined, - .out_h_filename = builder.fmt("{s}.h", .{name}), + .out_h_filename = owner.fmt("{s}.h", .{name}), .out_lib_filename = undefined, - .out_pdb_filename = builder.fmt("{s}.pdb", .{name}), + .out_pdb_filename = owner.fmt("{s}.pdb", .{name}), .major_only_filename = null, .name_only_filename = null, - .modules = std.StringArrayHashMap(*Module).init(builder.allocator), - .include_dirs = ArrayList(IncludeDir).init(builder.allocator), - .link_objects = ArrayList(LinkObject).init(builder.allocator), - .c_macros = ArrayList([]const u8).init(builder.allocator), - .lib_paths = ArrayList([]const u8).init(builder.allocator), - .rpaths = ArrayList([]const u8).init(builder.allocator), - .framework_dirs = ArrayList([]const u8).init(builder.allocator), - .installed_headers = ArrayList(*Step).init(builder.allocator), + .modules = std.StringArrayHashMap(*Module).init(owner.allocator), + .include_dirs = ArrayList(IncludeDir).init(owner.allocator), + .link_objects = ArrayList(LinkObject).init(owner.allocator), + .c_macros = ArrayList([]const u8).init(owner.allocator), + .lib_paths = ArrayList(FileSource).init(owner.allocator), + .rpaths = ArrayList(FileSource).init(owner.allocator), + .framework_dirs = ArrayList(FileSource).init(owner.allocator), + .installed_headers = ArrayList(*Step).init(owner.allocator), .object_src = undefined, .c_std = std.Build.CStd.C99, .zig_lib_dir = null, .main_pkg_path = null, .exec_cmd_args = null, - .name_prefix = "", .filter = null, .test_runner = null, .disable_stack_probing = false, @@ -363,6 +388,7 @@ pub fn create(builder: *std.Build, options: Options) *CompileStep { .output_lib_path_source = GeneratedFile{ .step = &self.step }, .output_h_path_source = GeneratedFile{ .step = &self.step }, .output_pdb_path_source = GeneratedFile{ .step = &self.step }, + .output_dirname_source = GeneratedFile{ .step = &self.step }, .target_info = NativeTargetInfo.detect(self.target) catch @panic("unhandled error"), }; @@ -372,15 +398,16 @@ pub fn create(builder: *std.Build, options: Options) *CompileStep { } fn computeOutFileNames(self: *CompileStep) void { + const b = self.step.owner; const target = self.target_info.target; - self.out_filename = std.zig.binNameAlloc(self.builder.allocator, .{ + self.out_filename = std.zig.binNameAlloc(b.allocator, .{ .root_name = self.name, .target = target, .output_mode = 
switch (self.kind) { .lib => .Lib, .obj => .Obj, - .exe, .@"test", .test_exe => .Exe, + .exe, .@"test" => .Exe, }, .link_mode = if (self.linkage) |some| @as(std.builtin.LinkMode, switch (some) { .dynamic => .Dynamic, @@ -394,30 +421,30 @@ fn computeOutFileNames(self: *CompileStep) void { self.out_lib_filename = self.out_filename; } else if (self.version) |version| { if (target.isDarwin()) { - self.major_only_filename = self.builder.fmt("lib{s}.{d}.dylib", .{ + self.major_only_filename = b.fmt("lib{s}.{d}.dylib", .{ self.name, version.major, }); - self.name_only_filename = self.builder.fmt("lib{s}.dylib", .{self.name}); + self.name_only_filename = b.fmt("lib{s}.dylib", .{self.name}); self.out_lib_filename = self.out_filename; } else if (target.os.tag == .windows) { - self.out_lib_filename = self.builder.fmt("{s}.lib", .{self.name}); + self.out_lib_filename = b.fmt("{s}.lib", .{self.name}); } else { - self.major_only_filename = self.builder.fmt("lib{s}.so.{d}", .{ self.name, version.major }); - self.name_only_filename = self.builder.fmt("lib{s}.so", .{self.name}); + self.major_only_filename = b.fmt("lib{s}.so.{d}", .{ self.name, version.major }); + self.name_only_filename = b.fmt("lib{s}.so", .{self.name}); self.out_lib_filename = self.out_filename; } } else { if (target.isDarwin()) { self.out_lib_filename = self.out_filename; } else if (target.os.tag == .windows) { - self.out_lib_filename = self.builder.fmt("{s}.lib", .{self.name}); + self.out_lib_filename = b.fmt("{s}.lib", .{self.name}); } else { self.out_lib_filename = self.out_filename; } } if (self.output_dir != null) { - self.output_lib_path_source.path = self.builder.pathJoin( + self.output_lib_path_source.path = b.pathJoin( &.{ self.output_dir.?, self.out_lib_filename }, ); } @@ -425,17 +452,20 @@ fn computeOutFileNames(self: *CompileStep) void { } pub fn setOutputDir(self: *CompileStep, dir: []const u8) void { - self.output_dir = self.builder.dupePath(dir); + const b = self.step.owner; + self.output_dir = b.dupePath(dir); } pub fn install(self: *CompileStep) void { - self.builder.installArtifact(self); + const b = self.step.owner; + b.installArtifact(self); } -pub fn installHeader(a: *CompileStep, src_path: []const u8, dest_rel_path: []const u8) void { - const install_file = a.builder.addInstallHeaderFile(src_path, dest_rel_path); - a.builder.getInstallStep().dependOn(&install_file.step); - a.installed_headers.append(&install_file.step) catch @panic("OOM"); +pub fn installHeader(cs: *CompileStep, src_path: []const u8, dest_rel_path: []const u8) void { + const b = cs.step.owner; + const install_file = b.addInstallHeaderFile(src_path, dest_rel_path); + b.getInstallStep().dependOn(&install_file.step); + cs.installed_headers.append(&install_file.step) catch @panic("OOM"); } pub const InstallConfigHeaderOptions = struct { @@ -449,13 +479,14 @@ pub fn installConfigHeader( options: InstallConfigHeaderOptions, ) void { const dest_rel_path = options.dest_rel_path orelse config_header.include_path; - const install_file = cs.builder.addInstallFileWithDir( + const b = cs.step.owner; + const install_file = b.addInstallFileWithDir( .{ .generated = &config_header.output_file }, options.install_dir, dest_rel_path, ); install_file.step.dependOn(&config_header.step); - cs.builder.getInstallStep().dependOn(&install_file.step); + b.getInstallStep().dependOn(&install_file.step); cs.installed_headers.append(&install_file.step) catch @panic("OOM"); } @@ -472,91 +503,83 @@ pub fn installHeadersDirectory( } pub fn installHeadersDirectoryOptions( - a: 
*CompileStep, + cs: *CompileStep, options: std.Build.InstallDirStep.Options, ) void { - const install_dir = a.builder.addInstallDirectory(options); - a.builder.getInstallStep().dependOn(&install_dir.step); - a.installed_headers.append(&install_dir.step) catch @panic("OOM"); + const b = cs.step.owner; + const install_dir = b.addInstallDirectory(options); + b.getInstallStep().dependOn(&install_dir.step); + cs.installed_headers.append(&install_dir.step) catch @panic("OOM"); } -pub fn installLibraryHeaders(a: *CompileStep, l: *CompileStep) void { +pub fn installLibraryHeaders(cs: *CompileStep, l: *CompileStep) void { assert(l.kind == .lib); - const install_step = a.builder.getInstallStep(); + const b = cs.step.owner; + const install_step = b.getInstallStep(); // Copy each element from installed_headers, modifying the builder // to be the new parent's builder. for (l.installed_headers.items) |step| { const step_copy = switch (step.id) { inline .install_file, .install_dir => |id| blk: { const T = id.Type(); - const ptr = a.builder.allocator.create(T) catch @panic("OOM"); + const ptr = b.allocator.create(T) catch @panic("OOM"); ptr.* = step.cast(T).?.*; - ptr.override_source_builder = ptr.builder; - ptr.builder = a.builder; + ptr.dest_builder = b; break :blk &ptr.step; }, else => unreachable, }; - a.installed_headers.append(step_copy) catch @panic("OOM"); + cs.installed_headers.append(step_copy) catch @panic("OOM"); install_step.dependOn(step_copy); } - a.installed_headers.appendSlice(l.installed_headers.items) catch @panic("OOM"); + cs.installed_headers.appendSlice(l.installed_headers.items) catch @panic("OOM"); } pub fn addObjCopy(cs: *CompileStep, options: ObjCopyStep.Options) *ObjCopyStep { + const b = cs.step.owner; var copy = options; if (copy.basename == null) { if (options.format) |f| { - copy.basename = cs.builder.fmt("{s}.{s}", .{ cs.name, @tagName(f) }); + copy.basename = b.fmt("{s}.{s}", .{ cs.name, @tagName(f) }); } else { copy.basename = cs.name; } } - return cs.builder.addObjCopy(cs.getOutputSource(), copy); + return b.addObjCopy(cs.getOutputSource(), copy); } /// Deprecated: use `std.Build.addRunArtifact` /// This function will run in the context of the package that created the executable, /// which is undesirable when running an executable provided by a dependency package. -pub fn run(exe: *CompileStep) *RunStep { - return exe.builder.addRunArtifact(exe); -} - -/// Creates an `EmulatableRunStep` with an executable built with `addExecutable`. -/// Allows running foreign binaries through emulation platforms such as Qemu or Rosetta. -/// When a binary cannot be ran through emulation or the option is disabled, a warning -/// will be printed and the binary will *NOT* be ran. 
-pub fn runEmulatable(exe: *CompileStep) *EmulatableRunStep { - assert(exe.kind == .exe or exe.kind == .test_exe); - - const run_step = EmulatableRunStep.create(exe.builder, exe.builder.fmt("run {s}", .{exe.step.name}), exe); - if (exe.vcpkg_bin_path) |path| { - RunStep.addPathDirInternal(&run_step.step, exe.builder, path); - } - return run_step; +pub fn run(cs: *CompileStep) *RunStep { + return cs.step.owner.addRunArtifact(cs); } -pub fn checkObject(self: *CompileStep, obj_format: std.Target.ObjectFormat) *CheckObjectStep { - return CheckObjectStep.create(self.builder, self.getOutputSource(), obj_format); +pub fn checkObject(self: *CompileStep) *CheckObjectStep { + return CheckObjectStep.create(self.step.owner, self.getOutputSource(), self.target_info.target.ofmt); } pub fn setLinkerScriptPath(self: *CompileStep, source: FileSource) void { - self.linker_script = source.dupe(self.builder); + const b = self.step.owner; + self.linker_script = source.dupe(b); source.addStepDependencies(&self.step); } pub fn linkFramework(self: *CompileStep, framework_name: []const u8) void { - self.frameworks.put(self.builder.dupe(framework_name), .{}) catch @panic("OOM"); + const b = self.step.owner; + self.frameworks.put(b.dupe(framework_name), .{}) catch @panic("OOM"); } pub fn linkFrameworkNeeded(self: *CompileStep, framework_name: []const u8) void { - self.frameworks.put(self.builder.dupe(framework_name), .{ + const b = self.step.owner; + self.frameworks.put(b.dupe(framework_name), .{ .needed = true, }) catch @panic("OOM"); } pub fn linkFrameworkWeak(self: *CompileStep, framework_name: []const u8) void { - self.frameworks.put(self.builder.dupe(framework_name), .{ + const b = self.step.owner; + self.frameworks.put(b.dupe(framework_name), .{ .weak = true, }) catch @panic("OOM"); } @@ -595,7 +618,7 @@ pub fn producesPdbFile(self: *CompileStep) bool { if (!self.target.isWindows() and !self.target.isUefi()) return false; if (self.target.getObjectFormat() == .c) return false; if (self.strip == true) return false; - return self.isDynamicLibrary() or self.kind == .exe or self.kind == .test_exe; + return self.isDynamicLibrary() or self.kind == .exe or self.kind == .@"test"; } pub fn linkLibC(self: *CompileStep) void { @@ -609,21 +632,24 @@ pub fn linkLibCpp(self: *CompileStep) void { /// If the value is omitted, it is set to 1. /// `name` and `value` need not live longer than the function call. pub fn defineCMacro(self: *CompileStep, name: []const u8, value: ?[]const u8) void { - const macro = std.Build.constructCMacro(self.builder.allocator, name, value); + const b = self.step.owner; + const macro = std.Build.constructCMacro(b.allocator, name, value); self.c_macros.append(macro) catch @panic("OOM"); } /// name_and_value looks like [name]=[value]. If the value is omitted, it is set to 1. pub fn defineCMacroRaw(self: *CompileStep, name_and_value: []const u8) void { - self.c_macros.append(self.builder.dupe(name_and_value)) catch @panic("OOM"); + const b = self.step.owner; + self.c_macros.append(b.dupe(name_and_value)) catch @panic("OOM"); } /// This one has no integration with anything, it just puts -lname on the command line. /// Prefer to use `linkSystemLibrary` instead. 
pub fn linkSystemLibraryName(self: *CompileStep, name: []const u8) void { + const b = self.step.owner; self.link_objects.append(.{ .system_lib = .{ - .name = self.builder.dupe(name), + .name = b.dupe(name), .needed = false, .weak = false, .use_pkg_config = .no, @@ -634,9 +660,10 @@ pub fn linkSystemLibraryName(self: *CompileStep, name: []const u8) void { /// This one has no integration with anything, it just puts -needed-lname on the command line. /// Prefer to use `linkSystemLibraryNeeded` instead. pub fn linkSystemLibraryNeededName(self: *CompileStep, name: []const u8) void { + const b = self.step.owner; self.link_objects.append(.{ .system_lib = .{ - .name = self.builder.dupe(name), + .name = b.dupe(name), .needed = true, .weak = false, .use_pkg_config = .no, @@ -647,9 +674,10 @@ pub fn linkSystemLibraryNeededName(self: *CompileStep, name: []const u8) void { /// Darwin-only. This one has no integration with anything, it just puts -weak-lname on the /// command line. Prefer to use `linkSystemLibraryWeak` instead. pub fn linkSystemLibraryWeakName(self: *CompileStep, name: []const u8) void { + const b = self.step.owner; self.link_objects.append(.{ .system_lib = .{ - .name = self.builder.dupe(name), + .name = b.dupe(name), .needed = false, .weak = true, .use_pkg_config = .no, @@ -660,9 +688,10 @@ pub fn linkSystemLibraryWeakName(self: *CompileStep, name: []const u8) void { /// This links against a system library, exclusively using pkg-config to find the library. /// Prefer to use `linkSystemLibrary` instead. pub fn linkSystemLibraryPkgConfigOnly(self: *CompileStep, lib_name: []const u8) void { + const b = self.step.owner; self.link_objects.append(.{ .system_lib = .{ - .name = self.builder.dupe(lib_name), + .name = b.dupe(lib_name), .needed = false, .weak = false, .use_pkg_config = .force, @@ -673,9 +702,10 @@ pub fn linkSystemLibraryPkgConfigOnly(self: *CompileStep, lib_name: []const u8) /// This links against a system library, exclusively using pkg-config to find the library. /// Prefer to use `linkSystemLibraryNeeded` instead. pub fn linkSystemLibraryNeededPkgConfigOnly(self: *CompileStep, lib_name: []const u8) void { + const b = self.step.owner; self.link_objects.append(.{ .system_lib = .{ - .name = self.builder.dupe(lib_name), + .name = b.dupe(lib_name), .needed = true, .weak = false, .use_pkg_config = .force, @@ -685,14 +715,15 @@ pub fn linkSystemLibraryNeededPkgConfigOnly(self: *CompileStep, lib_name: []cons /// Run pkg-config for the given library name and parse the output, returning the arguments /// that should be passed to zig to link the given library. -pub fn runPkgConfig(self: *CompileStep, lib_name: []const u8) ![]const []const u8 { +fn runPkgConfig(self: *CompileStep, lib_name: []const u8) ![]const []const u8 { + const b = self.step.owner; const pkg_name = match: { // First we have to map the library name to pkg config name. Unfortunately, // there are several examples where this is not straightforward: // -lSDL2 -> pkg-config sdl2 // -lgdk-3 -> pkg-config gdk-3.0 // -latk-1.0 -> pkg-config atk - const pkgs = try getPkgConfigList(self.builder); + const pkgs = try getPkgConfigList(b); // Exact match means instant winner. 
for (pkgs) |pkg| { @@ -732,7 +763,7 @@ pub fn runPkgConfig(self: *CompileStep, lib_name: []const u8) ![]const []const u }; var code: u8 = undefined; - const stdout = if (self.builder.execAllowFail(&[_][]const u8{ + const stdout = if (b.execAllowFail(&[_][]const u8{ "pkg-config", pkg_name, "--cflags", @@ -745,7 +776,7 @@ pub fn runPkgConfig(self: *CompileStep, lib_name: []const u8) ![]const []const u else => return err, }; - var zig_args = ArrayList([]const u8).init(self.builder.allocator); + var zig_args = ArrayList([]const u8).init(b.allocator); defer zig_args.deinit(); var it = mem.tokenize(u8, stdout, " \r\n\t"); @@ -770,8 +801,8 @@ pub fn runPkgConfig(self: *CompileStep, lib_name: []const u8) ![]const []const u try zig_args.appendSlice(&[_][]const u8{ "-D", macro }); } else if (mem.startsWith(u8, tok, "-D")) { try zig_args.append(tok); - } else if (self.builder.verbose) { - log.warn("Ignoring pkg-config flag '{s}'", .{tok}); + } else if (b.debug_pkg_config) { + return self.step.fail("unknown pkg-config flag '{s}'", .{tok}); } } @@ -794,6 +825,7 @@ fn linkSystemLibraryInner(self: *CompileStep, name: []const u8, opts: struct { needed: bool = false, weak: bool = false, }) void { + const b = self.step.owner; if (isLibCLibrary(name)) { self.linkLibC(); return; @@ -805,7 +837,7 @@ fn linkSystemLibraryInner(self: *CompileStep, name: []const u8, opts: struct { self.link_objects.append(.{ .system_lib = .{ - .name = self.builder.dupe(name), + .name = b.dupe(name), .needed = opts.needed, .weak = opts.weak, .use_pkg_config = .yes, @@ -813,27 +845,31 @@ fn linkSystemLibraryInner(self: *CompileStep, name: []const u8, opts: struct { }) catch @panic("OOM"); } -pub fn setNamePrefix(self: *CompileStep, text: []const u8) void { - assert(self.kind == .@"test" or self.kind == .test_exe); - self.name_prefix = self.builder.dupe(text); +pub fn setName(self: *CompileStep, text: []const u8) void { + const b = self.step.owner; + assert(self.kind == .@"test"); + self.name = b.dupe(text); } pub fn setFilter(self: *CompileStep, text: ?[]const u8) void { - assert(self.kind == .@"test" or self.kind == .test_exe); - self.filter = if (text) |t| self.builder.dupe(t) else null; + const b = self.step.owner; + assert(self.kind == .@"test"); + self.filter = if (text) |t| b.dupe(t) else null; } pub fn setTestRunner(self: *CompileStep, path: ?[]const u8) void { - assert(self.kind == .@"test" or self.kind == .test_exe); - self.test_runner = if (path) |p| self.builder.dupePath(p) else null; + const b = self.step.owner; + assert(self.kind == .@"test"); + self.test_runner = if (path) |p| b.dupePath(p) else null; } /// Handy when you have many C/C++ source files and want them all to have the same flags. 
pub fn addCSourceFiles(self: *CompileStep, files: []const []const u8, flags: []const []const u8) void { - const c_source_files = self.builder.allocator.create(CSourceFiles) catch @panic("OOM"); + const b = self.step.owner; + const c_source_files = b.allocator.create(CSourceFiles) catch @panic("OOM"); - const files_copy = self.builder.dupeStrings(files); - const flags_copy = self.builder.dupeStrings(flags); + const files_copy = b.dupeStrings(files); + const flags_copy = b.dupeStrings(flags); c_source_files.* = .{ .files = files_copy, @@ -850,8 +886,9 @@ pub fn addCSourceFile(self: *CompileStep, file: []const u8, flags: []const []con } pub fn addCSourceFileSource(self: *CompileStep, source: CSourceFile) void { - const c_source_file = self.builder.allocator.create(CSourceFile) catch @panic("OOM"); - c_source_file.* = source.dupe(self.builder); + const b = self.step.owner; + const c_source_file = b.allocator.create(CSourceFile) catch @panic("OOM"); + c_source_file.* = source.dupe(b); self.link_objects.append(.{ .c_source_file = c_source_file }) catch @panic("OOM"); source.source.addStepDependencies(&self.step); } @@ -865,52 +902,61 @@ pub fn setVerboseCC(self: *CompileStep, value: bool) void { } pub fn overrideZigLibDir(self: *CompileStep, dir_path: []const u8) void { - self.zig_lib_dir = self.builder.dupePath(dir_path); + const b = self.step.owner; + self.zig_lib_dir = b.dupePath(dir_path); } pub fn setMainPkgPath(self: *CompileStep, dir_path: []const u8) void { - self.main_pkg_path = self.builder.dupePath(dir_path); + const b = self.step.owner; + self.main_pkg_path = b.dupePath(dir_path); } pub fn setLibCFile(self: *CompileStep, libc_file: ?FileSource) void { - self.libc_file = if (libc_file) |f| f.dupe(self.builder) else null; + const b = self.step.owner; + self.libc_file = if (libc_file) |f| f.dupe(b) else null; } /// Returns the generated executable, library or object file. /// To run an executable built with zig build, use `run`, or create an install step and invoke it. pub fn getOutputSource(self: *CompileStep) FileSource { - return FileSource{ .generated = &self.output_path_source }; + return .{ .generated = &self.output_path_source }; +} + +pub fn getOutputDirectorySource(self: *CompileStep) FileSource { + return .{ .generated = &self.output_dirname_source }; } /// Returns the generated import library. This function can only be called for libraries. pub fn getOutputLibSource(self: *CompileStep) FileSource { assert(self.kind == .lib); - return FileSource{ .generated = &self.output_lib_path_source }; + return .{ .generated = &self.output_lib_path_source }; } /// Returns the generated header file. /// This function can only be called for libraries or object files which have `emit_h` set. pub fn getOutputHSource(self: *CompileStep) FileSource { - assert(self.kind != .exe and self.kind != .test_exe and self.kind != .@"test"); + assert(self.kind != .exe and self.kind != .@"test"); assert(self.emit_h); - return FileSource{ .generated = &self.output_h_path_source }; + return .{ .generated = &self.output_h_path_source }; } /// Returns the generated PDB file. This function can only be called for Windows and UEFI. pub fn getOutputPdbSource(self: *CompileStep) FileSource { // TODO: Is this right? Isn't PDB for *any* PE/COFF file? 
assert(self.target.isWindows() or self.target.isUefi()); - return FileSource{ .generated = &self.output_pdb_path_source }; + return .{ .generated = &self.output_pdb_path_source }; } pub fn addAssemblyFile(self: *CompileStep, path: []const u8) void { + const b = self.step.owner; self.link_objects.append(.{ - .assembly_file = .{ .path = self.builder.dupe(path) }, + .assembly_file = .{ .path = b.dupe(path) }, }) catch @panic("OOM"); } pub fn addAssemblyFileSource(self: *CompileStep, source: FileSource) void { - const source_duped = source.dupe(self.builder); + const b = self.step.owner; + const source_duped = source.dupe(b); self.link_objects.append(.{ .assembly_file = source_duped }) catch @panic("OOM"); source_duped.addStepDependencies(&self.step); } @@ -920,7 +966,8 @@ pub fn addObjectFile(self: *CompileStep, source_file: []const u8) void { } pub fn addObjectFileSource(self: *CompileStep, source: FileSource) void { - self.link_objects.append(.{ .static_path = source.dupe(self.builder) }) catch @panic("OOM"); + const b = self.step.owner; + self.link_objects.append(.{ .static_path = source.dupe(b) }) catch @panic("OOM"); source.addStepDependencies(&self.step); } @@ -935,11 +982,13 @@ pub const addLibPath = @compileError("deprecated, use addLibraryPath"); pub const addFrameworkDir = @compileError("deprecated, use addFrameworkPath"); pub fn addSystemIncludePath(self: *CompileStep, path: []const u8) void { - self.include_dirs.append(IncludeDir{ .raw_path_system = self.builder.dupe(path) }) catch @panic("OOM"); + const b = self.step.owner; + self.include_dirs.append(IncludeDir{ .raw_path_system = b.dupe(path) }) catch @panic("OOM"); } pub fn addIncludePath(self: *CompileStep, path: []const u8) void { - self.include_dirs.append(IncludeDir{ .raw_path = self.builder.dupe(path) }) catch @panic("OOM"); + const b = self.step.owner; + self.include_dirs.append(IncludeDir{ .raw_path = b.dupe(path) }) catch @panic("OOM"); } pub fn addConfigHeader(self: *CompileStep, config_header: *ConfigHeaderStep) void { @@ -948,23 +997,42 @@ pub fn addConfigHeader(self: *CompileStep, config_header: *ConfigHeaderStep) voi } pub fn addLibraryPath(self: *CompileStep, path: []const u8) void { - self.lib_paths.append(self.builder.dupe(path)) catch @panic("OOM"); + const b = self.step.owner; + self.lib_paths.append(.{ .path = b.dupe(path) }) catch @panic("OOM"); +} + +pub fn addLibraryPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { + self.lib_paths.append(directory_source) catch @panic("OOM"); + directory_source.addStepDependencies(&self.step); } pub fn addRPath(self: *CompileStep, path: []const u8) void { - self.rpaths.append(self.builder.dupe(path)) catch @panic("OOM"); + const b = self.step.owner; + self.rpaths.append(.{ .path = b.dupe(path) }) catch @panic("OOM"); +} + +pub fn addRPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { + self.rpaths.append(directory_source) catch @panic("OOM"); + directory_source.addStepDependencies(&self.step); } pub fn addFrameworkPath(self: *CompileStep, dir_path: []const u8) void { - self.framework_dirs.append(self.builder.dupe(dir_path)) catch @panic("OOM"); + const b = self.step.owner; + self.framework_dirs.append(.{ .path = b.dupe(dir_path) }) catch @panic("OOM"); +} + +pub fn addFrameworkPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { + self.framework_dirs.append(directory_source) catch @panic("OOM"); + directory_source.addStepDependencies(&self.step); } /// Adds a module to be used with `@import` and 
exposing it in the current /// package's module table using `name`. pub fn addModule(cs: *CompileStep, name: []const u8, module: *Module) void { - cs.modules.put(cs.builder.dupe(name), module) catch @panic("OOM"); + const b = cs.step.owner; + cs.modules.put(b.dupe(name), module) catch @panic("OOM"); - var done = std.AutoHashMap(*Module, void).init(cs.builder.allocator); + var done = std.AutoHashMap(*Module, void).init(b.allocator); defer done.deinit(); cs.addRecursiveBuildDeps(module, &done) catch @panic("OOM"); } @@ -972,7 +1040,8 @@ pub fn addModule(cs: *CompileStep, name: []const u8, module: *Module) void { /// Adds a module to be used with `@import` without exposing it in the current /// package's module table. pub fn addAnonymousModule(cs: *CompileStep, name: []const u8, options: std.Build.CreateModuleOptions) void { - const module = cs.builder.createModule(options); + const b = cs.step.owner; + const module = b.createModule(options); return addModule(cs, name, module); } @@ -992,12 +1061,13 @@ fn addRecursiveBuildDeps(cs: *CompileStep, module: *Module, done: *std.AutoHashM /// If Vcpkg was found on the system, it will be added to include and lib /// paths for the specified target. pub fn addVcpkgPaths(self: *CompileStep, linkage: CompileStep.Linkage) !void { + const b = self.step.owner; // Ideally in the Unattempted case we would call the function recursively // after findVcpkgRoot and have only one switch statement, but the compiler // cannot resolve the error set. - switch (self.builder.vcpkg_root) { + switch (b.vcpkg_root) { .unattempted => { - self.builder.vcpkg_root = if (try findVcpkgRoot(self.builder.allocator)) |root| + b.vcpkg_root = if (try findVcpkgRoot(b.allocator)) |root| VcpkgRoot{ .found = root } else .not_found; @@ -1006,31 +1076,32 @@ pub fn addVcpkgPaths(self: *CompileStep, linkage: CompileStep.Linkage) !void { .found => {}, } - switch (self.builder.vcpkg_root) { + switch (b.vcpkg_root) { .unattempted => unreachable, .not_found => return error.VcpkgNotFound, .found => |root| { - const allocator = self.builder.allocator; + const allocator = b.allocator; const triplet = try self.target.vcpkgTriplet(allocator, if (linkage == .static) .Static else .Dynamic); - defer self.builder.allocator.free(triplet); + defer b.allocator.free(triplet); - const include_path = self.builder.pathJoin(&.{ root, "installed", triplet, "include" }); + const include_path = b.pathJoin(&.{ root, "installed", triplet, "include" }); errdefer allocator.free(include_path); try self.include_dirs.append(IncludeDir{ .raw_path = include_path }); - const lib_path = self.builder.pathJoin(&.{ root, "installed", triplet, "lib" }); - try self.lib_paths.append(lib_path); + const lib_path = b.pathJoin(&.{ root, "installed", triplet, "lib" }); + try self.lib_paths.append(.{ .path = lib_path }); - self.vcpkg_bin_path = self.builder.pathJoin(&.{ root, "installed", triplet, "bin" }); + self.vcpkg_bin_path = b.pathJoin(&.{ root, "installed", triplet, "bin" }); }, } } pub fn setExecCmd(self: *CompileStep, args: []const ?[]const u8) void { + const b = self.step.owner; assert(self.kind == .@"test"); - const duped_args = self.builder.allocator.alloc(?[]u8, args.len) catch @panic("OOM"); + const duped_args = b.allocator.alloc(?[]u8, args.len) catch @panic("OOM"); for (args, 0..) 
|arg, i| { - duped_args[i] = if (arg) |a| self.builder.dupe(a) else null; + duped_args[i] = if (arg) |a| b.dupe(a) else null; } self.exec_cmd_args = duped_args; } @@ -1039,22 +1110,27 @@ fn linkLibraryOrObject(self: *CompileStep, other: *CompileStep) void { self.step.dependOn(&other.step); self.link_objects.append(.{ .other_step = other }) catch @panic("OOM"); self.include_dirs.append(.{ .other_step = other }) catch @panic("OOM"); + + for (other.installed_headers.items) |install_step| { + self.step.dependOn(install_step); + } } fn appendModuleArgs( cs: *CompileStep, zig_args: *ArrayList([]const u8), ) error{OutOfMemory}!void { + const b = cs.step.owner; // First, traverse the whole dependency graph and give every module a unique name, ideally one // named after what it's called somewhere in the graph. It will help here to have both a mapping // from module to name and a set of all the currently-used names. - var mod_names = std.AutoHashMap(*Module, []const u8).init(cs.builder.allocator); - var names = std.StringHashMap(void).init(cs.builder.allocator); + var mod_names = std.AutoHashMap(*Module, []const u8).init(b.allocator); + var names = std.StringHashMap(void).init(b.allocator); var to_name = std.ArrayList(struct { name: []const u8, mod: *Module, - }).init(cs.builder.allocator); + }).init(b.allocator); { var it = cs.modules.iterator(); while (it.next()) |kv| { @@ -1075,7 +1151,7 @@ fn appendModuleArgs( if (mod_names.contains(dep.mod)) continue; // We'll use this buffer to store the name we decide on - var buf = try cs.builder.allocator.alloc(u8, dep.name.len + 32); + var buf = try b.allocator.alloc(u8, dep.name.len + 32); // First, try just the exposed dependency name std.mem.copy(u8, buf, dep.name); var name = buf[0..dep.name.len]; @@ -1112,15 +1188,15 @@ fn appendModuleArgs( const mod = kv.key_ptr.*; const name = kv.value_ptr.*; - const deps_str = try constructDepString(cs.builder.allocator, mod_names, mod.dependencies); + const deps_str = try constructDepString(b.allocator, mod_names, mod.dependencies); const src = mod.builder.pathFromRoot(mod.source_file.getPath(mod.builder)); try zig_args.append("--mod"); - try zig_args.append(try std.fmt.allocPrint(cs.builder.allocator, "{s}:{s}:{s}", .{ name, deps_str, src })); + try zig_args.append(try std.fmt.allocPrint(b.allocator, "{s}:{s}:{s}", .{ name, deps_str, src })); } } // Lastly, output the root dependencies - const deps_str = try constructDepString(cs.builder.allocator, mod_names, cs.modules); + const deps_str = try constructDepString(b.allocator, mod_names, cs.modules); if (deps_str.len > 0) { try zig_args.append("--deps"); try zig_args.append(deps_str); @@ -1150,43 +1226,36 @@ fn constructDepString( } } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + const b = step.owner; const self = @fieldParentPtr(CompileStep, "step", step); - const builder = self.builder; if (self.root_src == null and self.link_objects.items.len == 0) { - log.err("{s}: linker needs 1 or more objects to link", .{self.step.name}); - return error.NeedAnObject; + return step.fail("the linker needs one or more objects to link", .{}); } - var zig_args = ArrayList([]const u8).init(builder.allocator); + var zig_args = ArrayList([]const u8).init(b.allocator); defer zig_args.deinit(); - try zig_args.append(builder.zig_exe); + try zig_args.append(b.zig_exe); const cmd = switch (self.kind) { .lib => "build-lib", .exe => "build-exe", .obj => "build-obj", .@"test" => "test", - .test_exe => "test", }; try zig_args.append(cmd); - if 
(builder.color != .auto) { - try zig_args.append("--color"); - try zig_args.append(@tagName(builder.color)); - } - - if (builder.reference_trace) |some| { - try zig_args.append(try std.fmt.allocPrint(builder.allocator, "-freference-trace={d}", .{some})); + if (b.reference_trace) |some| { + try zig_args.append(try std.fmt.allocPrint(b.allocator, "-freference-trace={d}", .{some})); } try addFlag(&zig_args, "LLVM", self.use_llvm); try addFlag(&zig_args, "LLD", self.use_lld); if (self.target.ofmt) |ofmt| { - try zig_args.append(try std.fmt.allocPrint(builder.allocator, "-ofmt={s}", .{@tagName(ofmt)})); + try zig_args.append(try std.fmt.allocPrint(b.allocator, "-ofmt={s}", .{@tagName(ofmt)})); } if (self.entry_symbol_name) |entry| { @@ -1196,18 +1265,18 @@ fn make(step: *Step) !void { if (self.stack_size) |stack_size| { try zig_args.append("--stack"); - try zig_args.append(try std.fmt.allocPrint(builder.allocator, "{}", .{stack_size})); + try zig_args.append(try std.fmt.allocPrint(b.allocator, "{}", .{stack_size})); } - if (self.root_src) |root_src| try zig_args.append(root_src.getPath(builder)); + if (self.root_src) |root_src| try zig_args.append(root_src.getPath(b)); // We will add link objects from transitive dependencies, but we want to keep // all link objects in the same order provided. // This array is used to keep self.link_objects immutable. var transitive_deps: TransitiveDeps = .{ - .link_objects = ArrayList(LinkObject).init(builder.allocator), - .seen_system_libs = StringHashMap(void).init(builder.allocator), - .seen_steps = std.AutoHashMap(*const Step, void).init(builder.allocator), + .link_objects = ArrayList(LinkObject).init(b.allocator), + .seen_system_libs = StringHashMap(void).init(b.allocator), + .seen_steps = std.AutoHashMap(*const Step, void).init(b.allocator), .is_linking_libcpp = self.is_linking_libcpp, .is_linking_libc = self.is_linking_libc, .frameworks = &self.frameworks, @@ -1220,14 +1289,13 @@ fn make(step: *Step) !void { for (transitive_deps.link_objects.items) |link_object| { switch (link_object) { - .static_path => |static_path| try zig_args.append(static_path.getPath(builder)), + .static_path => |static_path| try zig_args.append(static_path.getPath(b)), .other_step => |other| switch (other.kind) { .exe => @panic("Cannot link with an executable build artifact"), - .test_exe => @panic("Cannot link with an executable build artifact"), .@"test" => @panic("Cannot link with a test"), .obj => { - try zig_args.append(other.getOutputSource().getPath(builder)); + try zig_args.append(other.getOutputSource().getPath(b)); }, .lib => l: { if (self.isStaticLibrary() and other.isStaticLibrary()) { @@ -1235,7 +1303,7 @@ fn make(step: *Step) !void { break :l; } - const full_path_lib = other.getOutputLibSource().getPath(builder); + const full_path_lib = other.getOutputLibSource().getPath(b); try zig_args.append(full_path_lib); if (other.linkage == Linkage.dynamic and !self.target.isWindows()) { @@ -1250,14 +1318,11 @@ fn make(step: *Step) !void { .system_lib => |system_lib| { const prefix: []const u8 = prefix: { if (system_lib.needed) break :prefix "-needed-l"; - if (system_lib.weak) { - if (self.target.isDarwin()) break :prefix "-weak-l"; - log.warn("Weak library import used for a non-darwin target, this will be converted to normally library import `-lname`", .{}); - } + if (system_lib.weak) break :prefix "-weak-l"; break :prefix "-l"; }; switch (system_lib.use_pkg_config) { - .no => try zig_args.append(builder.fmt("{s}{s}", .{ prefix, system_lib.name })), + .no => try 
zig_args.append(b.fmt("{s}{s}", .{ prefix, system_lib.name })), .yes, .force => { if (self.runPkgConfig(system_lib.name)) |args| { try zig_args.appendSlice(args); @@ -1271,7 +1336,7 @@ fn make(step: *Step) !void { .yes => { // pkg-config failed, so fall back to linking the library // by name directly. - try zig_args.append(builder.fmt("{s}{s}", .{ + try zig_args.append(b.fmt("{s}{s}", .{ prefix, system_lib.name, })); @@ -1294,7 +1359,7 @@ fn make(step: *Step) !void { try zig_args.append("--"); prev_has_extra_flags = false; } - try zig_args.append(asm_file.getPath(builder)); + try zig_args.append(asm_file.getPath(b)); }, .c_source_file => |c_source_file| { @@ -1311,7 +1376,7 @@ fn make(step: *Step) !void { } try zig_args.append("--"); } - try zig_args.append(c_source_file.source.getPath(builder)); + try zig_args.append(c_source_file.source.getPath(b)); }, .c_source_files => |c_source_files| { @@ -1329,7 +1394,7 @@ fn make(step: *Step) !void { try zig_args.append("--"); } for (c_source_files.files) |file| { - try zig_args.append(builder.pathFromRoot(file)); + try zig_args.append(b.pathFromRoot(file)); } }, } @@ -1345,7 +1410,7 @@ fn make(step: *Step) !void { if (self.image_base) |image_base| { try zig_args.append("--image-base"); - try zig_args.append(builder.fmt("0x{x}", .{image_base})); + try zig_args.append(b.fmt("0x{x}", .{image_base})); } if (self.filter) |filter| { @@ -1357,39 +1422,34 @@ fn make(step: *Step) !void { try zig_args.append("--test-evented-io"); } - if (self.name_prefix.len != 0) { - try zig_args.append("--test-name-prefix"); - try zig_args.append(self.name_prefix); - } - if (self.test_runner) |test_runner| { try zig_args.append("--test-runner"); - try zig_args.append(builder.pathFromRoot(test_runner)); + try zig_args.append(b.pathFromRoot(test_runner)); } - for (builder.debug_log_scopes) |log_scope| { + for (b.debug_log_scopes) |log_scope| { try zig_args.append("--debug-log"); try zig_args.append(log_scope); } - if (builder.debug_compile_errors) { + if (b.debug_compile_errors) { try zig_args.append("--debug-compile-errors"); } - if (builder.verbose_cimport) try zig_args.append("--verbose-cimport"); - if (builder.verbose_air) try zig_args.append("--verbose-air"); - if (builder.verbose_llvm_ir) try zig_args.append("--verbose-llvm-ir"); - if (builder.verbose_link or self.verbose_link) try zig_args.append("--verbose-link"); - if (builder.verbose_cc or self.verbose_cc) try zig_args.append("--verbose-cc"); - if (builder.verbose_llvm_cpu_features) try zig_args.append("--verbose-llvm-cpu-features"); + if (b.verbose_cimport) try zig_args.append("--verbose-cimport"); + if (b.verbose_air) try zig_args.append("--verbose-air"); + if (b.verbose_llvm_ir) try zig_args.append("--verbose-llvm-ir"); + if (b.verbose_link or self.verbose_link) try zig_args.append("--verbose-link"); + if (b.verbose_cc or self.verbose_cc) try zig_args.append("--verbose-cc"); + if (b.verbose_llvm_cpu_features) try zig_args.append("--verbose-llvm-cpu-features"); - if (self.emit_analysis.getArg(builder, "emit-analysis")) |arg| try zig_args.append(arg); - if (self.emit_asm.getArg(builder, "emit-asm")) |arg| try zig_args.append(arg); - if (self.emit_bin.getArg(builder, "emit-bin")) |arg| try zig_args.append(arg); - if (self.emit_docs.getArg(builder, "emit-docs")) |arg| try zig_args.append(arg); - if (self.emit_implib.getArg(builder, "emit-implib")) |arg| try zig_args.append(arg); - if (self.emit_llvm_bc.getArg(builder, "emit-llvm-bc")) |arg| try zig_args.append(arg); - if (self.emit_llvm_ir.getArg(builder, 
"emit-llvm-ir")) |arg| try zig_args.append(arg); + if (self.emit_analysis.getArg(b, "emit-analysis")) |arg| try zig_args.append(arg); + if (self.emit_asm.getArg(b, "emit-asm")) |arg| try zig_args.append(arg); + if (self.emit_bin.getArg(b, "emit-bin")) |arg| try zig_args.append(arg); + if (self.emit_docs.getArg(b, "emit-docs")) |arg| try zig_args.append(arg); + if (self.emit_implib.getArg(b, "emit-implib")) |arg| try zig_args.append(arg); + if (self.emit_llvm_bc.getArg(b, "emit-llvm-bc")) |arg| try zig_args.append(arg); + if (self.emit_llvm_ir.getArg(b, "emit-llvm-ir")) |arg| try zig_args.append(arg); if (self.emit_h) try zig_args.append("-femit-h"); @@ -1430,31 +1490,31 @@ fn make(step: *Step) !void { } if (self.link_z_common_page_size) |size| { try zig_args.append("-z"); - try zig_args.append(builder.fmt("common-page-size={d}", .{size})); + try zig_args.append(b.fmt("common-page-size={d}", .{size})); } if (self.link_z_max_page_size) |size| { try zig_args.append("-z"); - try zig_args.append(builder.fmt("max-page-size={d}", .{size})); + try zig_args.append(b.fmt("max-page-size={d}", .{size})); } if (self.libc_file) |libc_file| { try zig_args.append("--libc"); - try zig_args.append(libc_file.getPath(builder)); - } else if (builder.libc_file) |libc_file| { + try zig_args.append(libc_file.getPath(b)); + } else if (b.libc_file) |libc_file| { try zig_args.append("--libc"); try zig_args.append(libc_file); } switch (self.optimize) { .Debug => {}, // Skip since it's the default. - else => try zig_args.append(builder.fmt("-O{s}", .{@tagName(self.optimize)})), + else => try zig_args.append(b.fmt("-O{s}", .{@tagName(self.optimize)})), } try zig_args.append("--cache-dir"); - try zig_args.append(builder.cache_root.path orelse "."); + try zig_args.append(b.cache_root.path orelse "."); try zig_args.append("--global-cache-dir"); - try zig_args.append(builder.global_cache_root.path orelse "."); + try zig_args.append(b.global_cache_root.path orelse "."); try zig_args.append("--name"); try zig_args.append(self.name); @@ -1466,11 +1526,11 @@ fn make(step: *Step) !void { if (self.kind == .lib and self.linkage != null and self.linkage.? 
== .dynamic) { if (self.version) |version| { try zig_args.append("--version"); - try zig_args.append(builder.fmt("{}", .{version})); + try zig_args.append(b.fmt("{}", .{version})); } if (self.target.isDarwin()) { - const install_name = self.install_name orelse builder.fmt("@rpath/{s}{s}{s}", .{ + const install_name = self.install_name orelse b.fmt("@rpath/{s}{s}{s}", .{ self.target.libPrefix(), self.name, self.target.dynamicLibSuffix(), @@ -1484,7 +1544,7 @@ fn make(step: *Step) !void { try zig_args.appendSlice(&[_][]const u8{ "--entitlements", entitlements }); } if (self.pagezero_size) |pagezero_size| { - const size = try std.fmt.allocPrint(builder.allocator, "{x}", .{pagezero_size}); + const size = try std.fmt.allocPrint(b.allocator, "{x}", .{pagezero_size}); try zig_args.appendSlice(&[_][]const u8{ "-pagezero_size", size }); } if (self.search_strategy) |strat| switch (strat) { @@ -1492,7 +1552,7 @@ fn make(step: *Step) !void { .dylibs_first => try zig_args.append("-search_dylibs_first"), }; if (self.headerpad_size) |headerpad_size| { - const size = try std.fmt.allocPrint(builder.allocator, "{x}", .{headerpad_size}); + const size = try std.fmt.allocPrint(b.allocator, "{x}", .{headerpad_size}); try zig_args.appendSlice(&[_][]const u8{ "-headerpad", size }); } if (self.headerpad_max_install_names) { @@ -1540,16 +1600,16 @@ fn make(step: *Step) !void { try zig_args.append("--export-table"); } if (self.initial_memory) |initial_memory| { - try zig_args.append(builder.fmt("--initial-memory={d}", .{initial_memory})); + try zig_args.append(b.fmt("--initial-memory={d}", .{initial_memory})); } if (self.max_memory) |max_memory| { - try zig_args.append(builder.fmt("--max-memory={d}", .{max_memory})); + try zig_args.append(b.fmt("--max-memory={d}", .{max_memory})); } if (self.shared_memory) { try zig_args.append("--shared-memory"); } if (self.global_base) |global_base| { - try zig_args.append(builder.fmt("--global-base={d}", .{global_base})); + try zig_args.append(b.fmt("--global-base={d}", .{global_base})); } if (self.code_model != .default) { @@ -1557,16 +1617,16 @@ fn make(step: *Step) !void { try zig_args.append(@tagName(self.code_model)); } if (self.wasi_exec_model) |model| { - try zig_args.append(builder.fmt("-mexec-model={s}", .{@tagName(model)})); + try zig_args.append(b.fmt("-mexec-model={s}", .{@tagName(model)})); } for (self.export_symbol_names) |symbol_name| { - try zig_args.append(builder.fmt("--export={s}", .{symbol_name})); + try zig_args.append(b.fmt("--export={s}", .{symbol_name})); } if (!self.target.isNative()) { try zig_args.appendSlice(&.{ - "-target", try self.target.zigTriple(builder.allocator), - "-mcpu", try std.Build.serializeCpu(builder.allocator, self.target.getCpu()), + "-target", try self.target.zigTriple(b.allocator), + "-mcpu", try std.Build.serializeCpu(b.allocator, self.target.getCpu()), }); if (self.target.dynamic_linker.get()) |dynamic_linker| { @@ -1577,12 +1637,12 @@ fn make(step: *Step) !void { if (self.linker_script) |linker_script| { try zig_args.append("--script"); - try zig_args.append(linker_script.getPath(builder)); + try zig_args.append(linker_script.getPath(b)); } if (self.version_script) |version_script| { try zig_args.append("--version-script"); - try zig_args.append(builder.pathFromRoot(version_script)); + try zig_args.append(b.pathFromRoot(version_script)); } if (self.kind == .@"test") { @@ -1595,83 +1655,7 @@ fn make(step: *Step) !void { try zig_args.append("--test-cmd-bin"); } } - } else { - const need_cross_glibc = self.target.isGnuLibC() and 
transitive_deps.is_linking_libc; - - switch (builder.host.getExternalExecutor(self.target_info, .{ - .qemu_fixes_dl = need_cross_glibc and builder.glibc_runtimes_dir != null, - .link_libc = transitive_deps.is_linking_libc, - })) { - .native => {}, - .bad_dl, .bad_os_or_cpu => { - try zig_args.append("--test-no-exec"); - }, - .rosetta => if (builder.enable_rosetta) { - try zig_args.append("--test-cmd-bin"); - } else { - try zig_args.append("--test-no-exec"); - }, - .qemu => |bin_name| ok: { - if (builder.enable_qemu) qemu: { - const glibc_dir_arg = if (need_cross_glibc) - builder.glibc_runtimes_dir orelse break :qemu - else - null; - try zig_args.append("--test-cmd"); - try zig_args.append(bin_name); - if (glibc_dir_arg) |dir| { - // TODO look into making this a call to `linuxTriple`. This - // needs the directory to be called "i686" rather than - // "x86" which is why we do it manually here. - const fmt_str = "{s}" ++ fs.path.sep_str ++ "{s}-{s}-{s}"; - const cpu_arch = self.target.getCpuArch(); - const os_tag = self.target.getOsTag(); - const abi = self.target.getAbi(); - const cpu_arch_name: []const u8 = if (cpu_arch == .x86) - "i686" - else - @tagName(cpu_arch); - const full_dir = try std.fmt.allocPrint(builder.allocator, fmt_str, .{ - dir, cpu_arch_name, @tagName(os_tag), @tagName(abi), - }); - - try zig_args.append("--test-cmd"); - try zig_args.append("-L"); - try zig_args.append("--test-cmd"); - try zig_args.append(full_dir); - } - try zig_args.append("--test-cmd-bin"); - break :ok; - } - try zig_args.append("--test-no-exec"); - }, - .wine => |bin_name| if (builder.enable_wine) { - try zig_args.append("--test-cmd"); - try zig_args.append(bin_name); - try zig_args.append("--test-cmd-bin"); - } else { - try zig_args.append("--test-no-exec"); - }, - .wasmtime => |bin_name| if (builder.enable_wasmtime) { - try zig_args.append("--test-cmd"); - try zig_args.append(bin_name); - try zig_args.append("--test-cmd"); - try zig_args.append("--dir=."); - try zig_args.append("--test-cmd-bin"); - } else { - try zig_args.append("--test-no-exec"); - }, - .darling => |bin_name| if (builder.enable_darling) { - try zig_args.append("--test-cmd"); - try zig_args.append(bin_name); - try zig_args.append("--test-cmd-bin"); - } else { - try zig_args.append("--test-no-exec"); - }, - } } - } else if (self.kind == .test_exe) { - try zig_args.append("--test-no-exec"); } try self.appendModuleArgs(&zig_args); @@ -1680,18 +1664,18 @@ fn make(step: *Step) !void { switch (include_dir) { .raw_path => |include_path| { try zig_args.append("-I"); - try zig_args.append(builder.pathFromRoot(include_path)); + try zig_args.append(b.pathFromRoot(include_path)); }, .raw_path_system => |include_path| { - if (builder.sysroot != null) { + if (b.sysroot != null) { try zig_args.append("-iwithsysroot"); } else { try zig_args.append("-isystem"); } - const resolved_include_path = builder.pathFromRoot(include_path); + const resolved_include_path = b.pathFromRoot(include_path); - const common_include_path = if (builtin.os.tag == .windows and builder.sysroot != null and fs.path.isAbsolute(resolved_include_path)) blk: { + const common_include_path = if (builtin.os.tag == .windows and b.sysroot != null and fs.path.isAbsolute(resolved_include_path)) blk: { // We need to check for disk designator and strip it out from dir path so // that zig/clang can concat resolved_include_path with sysroot. 
const disk_designator = fs.path.diskDesignatorWindows(resolved_include_path); @@ -1707,17 +1691,14 @@ fn make(step: *Step) !void { }, .other_step => |other| { if (other.emit_h) { - const h_path = other.getOutputHSource().getPath(builder); + const h_path = other.getOutputHSource().getPath(b); try zig_args.append("-isystem"); try zig_args.append(fs.path.dirname(h_path).?); } if (other.installed_headers.items.len > 0) { - for (other.installed_headers.items) |install_step| { - try install_step.make(); - } try zig_args.append("-I"); - try zig_args.append(builder.pathJoin(&.{ - other.builder.install_prefix, "include", + try zig_args.append(b.pathJoin(&.{ + other.step.owner.install_prefix, "include", })); } }, @@ -1729,33 +1710,35 @@ fn make(step: *Step) !void { } } - for (self.lib_paths.items) |lib_path| { - try zig_args.append("-L"); - try zig_args.append(lib_path); + for (self.c_macros.items) |c_macro| { + try zig_args.append("-D"); + try zig_args.append(c_macro); } - for (self.rpaths.items) |rpath| { - try zig_args.append("-rpath"); - try zig_args.append(rpath); + try zig_args.ensureUnusedCapacity(2 * self.lib_paths.items.len); + for (self.lib_paths.items) |lib_path| { + zig_args.appendAssumeCapacity("-L"); + zig_args.appendAssumeCapacity(lib_path.getPath2(b, step)); } - for (self.c_macros.items) |c_macro| { - try zig_args.append("-D"); - try zig_args.append(c_macro); + try zig_args.ensureUnusedCapacity(2 * self.rpaths.items.len); + for (self.rpaths.items) |rpath| { + zig_args.appendAssumeCapacity("-rpath"); + zig_args.appendAssumeCapacity(rpath.getPath2(b, step)); } - if (self.target.isDarwin()) { - for (self.framework_dirs.items) |dir| { - if (builder.sysroot != null) { - try zig_args.append("-iframeworkwithsysroot"); - } else { - try zig_args.append("-iframework"); - } - try zig_args.append(dir); - try zig_args.append("-F"); - try zig_args.append(dir); + for (self.framework_dirs.items) |directory_source| { + if (b.sysroot != null) { + try zig_args.append("-iframeworkwithsysroot"); + } else { + try zig_args.append("-iframework"); } + try zig_args.append(directory_source.getPath2(b, step)); + try zig_args.append("-F"); + try zig_args.append(directory_source.getPath2(b, step)); + } + { var it = self.frameworks.iterator(); while (it.next()) |entry| { const name = entry.key_ptr.*; @@ -1769,29 +1752,45 @@ fn make(step: *Step) !void { } try zig_args.append(name); } - } else { - if (self.framework_dirs.items.len > 0) { - log.info("Framework directories have been added for a non-darwin target, this will have no affect on the build", .{}); - } - - if (self.frameworks.count() > 0) { - log.info("Frameworks have been added for a non-darwin target, this will have no affect on the build", .{}); - } } - if (builder.sysroot) |sysroot| { + if (b.sysroot) |sysroot| { try zig_args.appendSlice(&[_][]const u8{ "--sysroot", sysroot }); } - for (builder.search_prefixes.items) |search_prefix| { - try zig_args.append("-L"); - try zig_args.append(builder.pathJoin(&.{ - search_prefix, "lib", - })); - try zig_args.append("-I"); - try zig_args.append(builder.pathJoin(&.{ - search_prefix, "include", - })); + for (b.search_prefixes.items) |search_prefix| { + var prefix_dir = fs.cwd().openDir(search_prefix, .{}) catch |err| { + return step.fail("unable to open prefix directory '{s}': {s}", .{ + search_prefix, @errorName(err), + }); + }; + defer prefix_dir.close(); + + // Avoid passing -L and -I flags for nonexistent directories. 
+ // This prevents a warning, that should probably be upgraded to an error in Zig's + // CLI parsing code, when the linker sees an -L directory that does not exist. + + if (prefix_dir.accessZ("lib", .{})) |_| { + try zig_args.appendSlice(&.{ + "-L", try fs.path.join(b.allocator, &.{ search_prefix, "lib" }), + }); + } else |err| switch (err) { + error.FileNotFound => {}, + else => |e| return step.fail("unable to access '{s}/lib' directory: {s}", .{ + search_prefix, @errorName(e), + }), + } + + if (prefix_dir.accessZ("include", .{})) |_| { + try zig_args.appendSlice(&.{ + "-I", try fs.path.join(b.allocator, &.{ search_prefix, "include" }), + }); + } else |err| switch (err) { + error.FileNotFound => {}, + else => |e| return step.fail("unable to access '{s}/include' directory: {s}", .{ + search_prefix, @errorName(e), + }), + } } try addFlag(&zig_args, "valgrind", self.valgrind_support); @@ -1800,15 +1799,15 @@ fn make(step: *Step) !void { if (self.zig_lib_dir) |dir| { try zig_args.append("--zig-lib-dir"); - try zig_args.append(builder.pathFromRoot(dir)); - } else if (builder.zig_lib_dir) |dir| { + try zig_args.append(b.pathFromRoot(dir)); + } else if (b.zig_lib_dir) |dir| { try zig_args.append("--zig-lib-dir"); try zig_args.append(dir); } if (self.main_pkg_path) |dir| { try zig_args.append("--main-pkg-path"); - try zig_args.append(builder.pathFromRoot(dir)); + try zig_args.append(b.pathFromRoot(dir)); } try addFlag(&zig_args, "PIC", self.force_pic); @@ -1830,6 +1829,7 @@ fn make(step: *Step) !void { } try zig_args.append("--enable-cache"); + try zig_args.append("--listen=-"); // Windows has an argument length limit of 32,766 characters, macOS 262,144 and Linux // 2,097,152. If our args exceed 30 KiB, we instead write them to a "response file" and @@ -1840,15 +1840,15 @@ fn make(step: *Step) !void { args_length += arg.len + 1; // +1 to account for null terminator } if (args_length >= 30 * 1024) { - try builder.cache_root.handle.makePath("args"); + try b.cache_root.handle.makePath("args"); const args_to_escape = zig_args.items[2..]; - var escaped_args = try ArrayList([]const u8).initCapacity(builder.allocator, args_to_escape.len); + var escaped_args = try ArrayList([]const u8).initCapacity(b.allocator, args_to_escape.len); arg_blk: for (args_to_escape) |arg| { for (arg, 0..) |c, arg_idx| { if (c == '\\' or c == '"') { // Slow path for arguments that need to be escaped. We'll need to allocate and copy - var escaped = try ArrayList(u8).initCapacity(builder.allocator, arg.len + 1); + var escaped = try ArrayList(u8).initCapacity(b.allocator, arg.len + 1); const writer = escaped.writer(); try writer.writeAll(arg[0..arg_idx]); for (arg[arg_idx..]) |to_escape| { @@ -1864,8 +1864,8 @@ fn make(step: *Step) !void { // Write the args to zig-cache/args/ to avoid conflicts with // other zig build commands running in parallel. 
- const partially_quoted = try std.mem.join(builder.allocator, "\" \"", escaped_args.items); - const args = try std.mem.concat(builder.allocator, u8, &[_][]const u8{ "\"", partially_quoted, "\"" }); + const partially_quoted = try std.mem.join(b.allocator, "\" \"", escaped_args.items); + const args = try std.mem.concat(b.allocator, u8, &[_][]const u8{ "\"", partially_quoted, "\"" }); var args_hash: [Sha256.digest_length]u8 = undefined; Sha256.hash(args, &args_hash, .{}); @@ -1877,28 +1877,35 @@ fn make(step: *Step) !void { ); const args_file = "args" ++ fs.path.sep_str ++ args_hex_hash; - try builder.cache_root.handle.writeFile(args_file, args); + try b.cache_root.handle.writeFile(args_file, args); - const resolved_args_file = try mem.concat(builder.allocator, u8, &.{ + const resolved_args_file = try mem.concat(b.allocator, u8, &.{ "@", - try builder.cache_root.join(builder.allocator, &.{args_file}), + try b.cache_root.join(b.allocator, &.{args_file}), }); zig_args.shrinkRetainingCapacity(2); try zig_args.append(resolved_args_file); } - const output_dir_nl = try builder.execFromStep(zig_args.items, &self.step); - const build_output_dir = mem.trimRight(u8, output_dir_nl, "\r\n"); + const output_bin_path = step.evalZigProcess(zig_args.items, prog_node) catch |err| switch (err) { + error.NeedCompileErrorCheck => { + assert(self.expect_errors.len != 0); + try checkCompileErrors(self); + return; + }, + else => |e| return e, + }; + const build_output_dir = fs.path.dirname(output_bin_path).?; if (self.output_dir) |output_dir| { - var src_dir = try std.fs.cwd().openIterableDir(build_output_dir, .{}); + var src_dir = try fs.cwd().openIterableDir(build_output_dir, .{}); defer src_dir.close(); // Create the output directory if it doesn't exist. - try std.fs.cwd().makePath(output_dir); + try fs.cwd().makePath(output_dir); - var dest_dir = try std.fs.cwd().openDir(output_dir, .{}); + var dest_dir = try fs.cwd().openDir(output_dir, .{}); defer dest_dir.close(); var it = src_dir.iterate(); @@ -1922,25 +1929,34 @@ fn make(step: *Step) !void { // Update generated files if (self.output_dir != null) { - self.output_path_source.path = builder.pathJoin( + self.output_dirname_source.path = self.output_dir.?; + + self.output_path_source.path = b.pathJoin( &.{ self.output_dir.?, self.out_filename }, ); if (self.emit_h) { - self.output_h_path_source.path = builder.pathJoin( + self.output_h_path_source.path = b.pathJoin( &.{ self.output_dir.?, self.out_h_filename }, ); } if (self.target.isWindows() or self.target.isUefi()) { - self.output_pdb_path_source.path = builder.pathJoin( + self.output_pdb_path_source.path = b.pathJoin( &.{ self.output_dir.?, self.out_pdb_filename }, ); } } - if (self.kind == .lib and self.linkage != null and self.linkage.? == .dynamic and self.version != null and self.target.wantSharedLibSymLinks()) { - try doAtomicSymLinks(builder.allocator, self.getOutputSource().getPath(builder), self.major_only_filename.?, self.name_only_filename.?); + if (self.kind == .lib and self.linkage != null and self.linkage.? 
== .dynamic and + self.version != null and self.target.wantSharedLibSymLinks()) + { + try doAtomicSymLinks( + step, + self.getOutputSource().getPath(b), + self.major_only_filename.?, + self.name_only_filename.?, + ); } } @@ -1982,30 +1998,27 @@ fn findVcpkgRoot(allocator: Allocator) !?[]const u8 { } pub fn doAtomicSymLinks( - allocator: Allocator, + step: *Step, output_path: []const u8, filename_major_only: []const u8, filename_name_only: []const u8, ) !void { + const arena = step.owner.allocator; const out_dir = fs.path.dirname(output_path) orelse "."; const out_basename = fs.path.basename(output_path); // sym link for libfoo.so.1 to libfoo.so.1.2.3 - const major_only_path = try fs.path.join( - allocator, - &[_][]const u8{ out_dir, filename_major_only }, - ); - fs.atomicSymLink(allocator, out_basename, major_only_path) catch |err| { - log.err("Unable to symlink {s} -> {s}", .{ major_only_path, out_basename }); - return err; + const major_only_path = try fs.path.join(arena, &.{ out_dir, filename_major_only }); + fs.atomicSymLink(arena, out_basename, major_only_path) catch |err| { + return step.fail("unable to symlink {s} -> {s}: {s}", .{ + major_only_path, out_basename, @errorName(err), + }); }; // sym link for libfoo.so to libfoo.so.1 - const name_only_path = try fs.path.join( - allocator, - &[_][]const u8{ out_dir, filename_name_only }, - ); - fs.atomicSymLink(allocator, filename_major_only, name_only_path) catch |err| { - log.err("Unable to symlink {s} -> {s}", .{ name_only_path, filename_major_only }); - return err; + const name_only_path = try fs.path.join(arena, &.{ out_dir, filename_name_only }); + fs.atomicSymLink(arena, filename_major_only, name_only_path) catch |err| { + return step.fail("Unable to symlink {s} -> {s}: {s}", .{ + name_only_path, filename_major_only, @errorName(err), + }); }; } @@ -2117,3 +2130,57 @@ const TransitiveDeps = struct { } } }; + +fn checkCompileErrors(self: *CompileStep) !void { + // Clear this field so that it does not get printed by the build runner. + const actual_eb = self.step.result_error_bundle; + self.step.result_error_bundle = std.zig.ErrorBundle.empty; + + const arena = self.step.owner.allocator; + + var actual_stderr_list = std.ArrayList(u8).init(arena); + try actual_eb.renderToWriter(.{ + .ttyconf = .no_color, + .include_reference_trace = false, + .include_source_line = false, + }, actual_stderr_list.writer()); + const actual_stderr = try actual_stderr_list.toOwnedSlice(); + + // Render the expected lines into a string that we can compare verbatim. 
+ var expected_generated = std.ArrayList(u8).init(arena); + + var actual_line_it = mem.split(u8, actual_stderr, "\n"); + for (self.expect_errors) |expect_line| { + const actual_line = actual_line_it.next() orelse { + try expected_generated.appendSlice(expect_line); + try expected_generated.append('\n'); + continue; + }; + if (mem.endsWith(u8, actual_line, expect_line)) { + try expected_generated.appendSlice(actual_line); + try expected_generated.append('\n'); + continue; + } + if (mem.startsWith(u8, expect_line, ":?:?: ")) { + if (mem.endsWith(u8, actual_line, expect_line[":?:?: ".len..])) { + try expected_generated.appendSlice(actual_line); + try expected_generated.append('\n'); + continue; + } + } + try expected_generated.appendSlice(expect_line); + try expected_generated.append('\n'); + } + + if (mem.eql(u8, expected_generated.items, actual_stderr)) return; + + // TODO merge this with the testing.expectEqualStrings logic, and also CheckFile + return self.step.fail( + \\ + \\========= expected: ===================== + \\{s} + \\========= but found: ==================== + \\{s} + \\========================================= + , .{ expected_generated.items, actual_stderr }); +} diff --git a/lib/std/Build/ConfigHeaderStep.zig b/lib/std/Build/ConfigHeaderStep.zig index 595c1018f709..c1849b410e0b 100644 --- a/lib/std/Build/ConfigHeaderStep.zig +++ b/lib/std/Build/ConfigHeaderStep.zig @@ -1,9 +1,3 @@ -const std = @import("../std.zig"); -const ConfigHeaderStep = @This(); -const Step = std.Build.Step; - -pub const base_id: Step.Id = .config_header; - pub const Style = union(enum) { /// The configure format supported by autotools. It uses `#undef foo` to /// mark lines that can be substituted with different values. @@ -34,7 +28,6 @@ pub const Value = union(enum) { }; step: Step, -builder: *std.Build, values: std.StringArrayHashMap(Value), output_file: std.Build.GeneratedFile, @@ -42,43 +35,57 @@ style: Style, max_bytes: usize, include_path: []const u8, +pub const base_id: Step.Id = .config_header; + pub const Options = struct { style: Style = .blank, max_bytes: usize = 2 * 1024 * 1024, include_path: ?[]const u8 = null, + first_ret_addr: ?usize = null, }; -pub fn create(builder: *std.Build, options: Options) *ConfigHeaderStep { - const self = builder.allocator.create(ConfigHeaderStep) catch @panic("OOM"); - const name = if (options.style.getFileSource()) |s| - builder.fmt("configure {s} header {s}", .{ @tagName(options.style), s.getDisplayName() }) - else - builder.fmt("configure {s} header", .{@tagName(options.style)}); - self.* = .{ - .builder = builder, - .step = Step.init(base_id, name, builder.allocator, make), - .style = options.style, - .values = std.StringArrayHashMap(Value).init(builder.allocator), +pub fn create(owner: *std.Build, options: Options) *ConfigHeaderStep { + const self = owner.allocator.create(ConfigHeaderStep) catch @panic("OOM"); - .max_bytes = options.max_bytes, - .include_path = "config.h", - .output_file = .{ .step = &self.step }, - }; + var include_path: []const u8 = "config.h"; if (options.style.getFileSource()) |s| switch (s) { .path => |p| { const basename = std.fs.path.basename(p); if (std.mem.endsWith(u8, basename, ".h.in")) { - self.include_path = basename[0 .. basename.len - 3]; + include_path = basename[0 .. 
basename.len - 3]; } }, else => {}, }; - if (options.include_path) |include_path| { - self.include_path = include_path; + if (options.include_path) |p| { + include_path = p; } + const name = if (options.style.getFileSource()) |s| + owner.fmt("configure {s} header {s} to {s}", .{ + @tagName(options.style), s.getDisplayName(), include_path, + }) + else + owner.fmt("configure {s} header to {s}", .{ @tagName(options.style), include_path }); + + self.* = .{ + .step = Step.init(.{ + .id = base_id, + .name = name, + .owner = owner, + .makeFn = make, + .first_ret_addr = options.first_ret_addr orelse @returnAddress(), + }), + .style = options.style, + .values = std.StringArrayHashMap(Value).init(owner.allocator), + + .max_bytes = options.max_bytes, + .include_path = include_path, + .output_file = .{ .step = &self.step }, + }; + return self; } @@ -146,26 +153,20 @@ fn putValue(self: *ConfigHeaderStep, field_name: []const u8, comptime T: type, v } } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const b = step.owner; const self = @fieldParentPtr(ConfigHeaderStep, "step", step); - const gpa = self.builder.allocator; - - // The cache is used here not really as a way to speed things up - because writing - // the data to a file would probably be very fast - but as a way to find a canonical - // location to put build artifacts. + const gpa = b.allocator; + const arena = b.allocator; - // If, for example, a hard-coded path was used as the location to put ConfigHeaderStep - // files, then two ConfigHeaderStep executing in parallel might clobber each other. + var man = b.cache.obtain(); + defer man.deinit(); - // TODO port the cache system from the compiler to zig std lib. Until then - // we construct the path directly, and no "cache hit" detection happens; - // the files are always written. - // Note there is very similar code over in WriteFileStep - const Hasher = std.crypto.auth.siphash.SipHash128(1, 3); // Random bytes to make ConfigHeaderStep unique. Refresh this with new // random bytes when ConfigHeaderStep implementation is modified in a // non-backwards-compatible way. 
- var hash = Hasher.init("PGuDTpidxyMqnkGM"); + man.hash.add(@as(u32, 0xdef08d23)); var output = std.ArrayList(u8).init(gpa); defer output.deinit(); @@ -177,15 +178,15 @@ fn make(step: *Step) !void { switch (self.style) { .autoconf => |file_source| { try output.appendSlice(c_generated_line); - const src_path = file_source.getPath(self.builder); - const contents = try std.fs.cwd().readFileAlloc(gpa, src_path, self.max_bytes); - try render_autoconf(contents, &output, self.values, src_path); + const src_path = file_source.getPath(b); + const contents = try std.fs.cwd().readFileAlloc(arena, src_path, self.max_bytes); + try render_autoconf(step, contents, &output, self.values, src_path); }, .cmake => |file_source| { try output.appendSlice(c_generated_line); - const src_path = file_source.getPath(self.builder); - const contents = try std.fs.cwd().readFileAlloc(gpa, src_path, self.max_bytes); - try render_cmake(contents, &output, self.values, src_path); + const src_path = file_source.getPath(b); + const contents = try std.fs.cwd().readFileAlloc(arena, src_path, self.max_bytes); + try render_cmake(step, contents, &output, self.values, src_path); }, .blank => { try output.appendSlice(c_generated_line); @@ -197,43 +198,44 @@ fn make(step: *Step) !void { }, } - hash.update(output.items); + man.hash.addBytes(output.items); - var digest: [16]u8 = undefined; - hash.final(&digest); - var hash_basename: [digest.len * 2]u8 = undefined; - _ = std.fmt.bufPrint( - &hash_basename, - "{s}", - .{std.fmt.fmtSliceHexLower(&digest)}, - ) catch unreachable; + if (try step.cacheHit(&man)) { + const digest = man.final(); + self.output_file.path = try b.cache_root.join(arena, &.{ + "o", &digest, self.include_path, + }); + return; + } - const output_dir = try self.builder.cache_root.join(gpa, &.{ "o", &hash_basename }); + const digest = man.final(); // If output_path has directory parts, deal with them. 
Example: // output_dir is zig-cache/o/HASH // output_path is libavutil/avconfig.h // We want to open directory zig-cache/o/HASH/libavutil/ // but keep output_dir as zig-cache/o/HASH for -I include - const sub_dir_path = if (std.fs.path.dirname(self.include_path)) |d| - try std.fs.path.join(gpa, &.{ output_dir, d }) - else - output_dir; + const sub_path = try std.fs.path.join(arena, &.{ "o", &digest, self.include_path }); + const sub_path_dirname = std.fs.path.dirname(sub_path).?; - var dir = std.fs.cwd().makeOpenPath(sub_dir_path, .{}) catch |err| { - std.debug.print("unable to make path {s}: {s}\n", .{ output_dir, @errorName(err) }); - return err; + b.cache_root.handle.makePath(sub_path_dirname) catch |err| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.cache_root, sub_path_dirname, @errorName(err), + }); }; - defer dir.close(); - try dir.writeFile(std.fs.path.basename(self.include_path), output.items); + b.cache_root.handle.writeFile(sub_path, output.items) catch |err| { + return step.fail("unable to write file '{}{s}': {s}", .{ + b.cache_root, sub_path, @errorName(err), + }); + }; - self.output_file.path = try std.fs.path.join(self.builder.allocator, &.{ - output_dir, self.include_path, - }); + self.output_file.path = try b.cache_root.join(arena, &.{sub_path}); + try man.writeManifest(); } fn render_autoconf( + step: *Step, contents: []const u8, output: *std.ArrayList(u8), values: std.StringArrayHashMap(Value), @@ -260,7 +262,7 @@ fn render_autoconf( } const name = it.rest(); const kv = values_copy.fetchSwapRemove(name) orelse { - std.debug.print("{s}:{d}: error: unspecified config header value: '{s}'\n", .{ + try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{ src_path, line_index + 1, name, }); any_errors = true; @@ -270,15 +272,17 @@ fn render_autoconf( } for (values_copy.keys()) |name| { - std.debug.print("{s}: error: config header value unused: '{s}'\n", .{ src_path, name }); + try step.addError("{s}: error: config header value unused: '{s}'", .{ src_path, name }); + any_errors = true; } if (any_errors) { - return error.HeaderConfigFailed; + return error.MakeFailed; } } fn render_cmake( + step: *Step, contents: []const u8, output: *std.ArrayList(u8), values: std.StringArrayHashMap(Value), @@ -304,14 +308,14 @@ fn render_cmake( continue; } const name = it.next() orelse { - std.debug.print("{s}:{d}: error: missing define name\n", .{ + try step.addError("{s}:{d}: error: missing define name", .{ src_path, line_index + 1, }); any_errors = true; continue; }; const kv = values_copy.fetchSwapRemove(name) orelse { - std.debug.print("{s}:{d}: error: unspecified config header value: '{s}'\n", .{ + try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{ src_path, line_index + 1, name, }); any_errors = true; @@ -321,7 +325,8 @@ fn render_cmake( } for (values_copy.keys()) |name| { - std.debug.print("{s}: error: config header value unused: '{s}'\n", .{ src_path, name }); + try step.addError("{s}: error: config header value unused: '{s}'", .{ src_path, name }); + any_errors = true; } if (any_errors) { @@ -426,3 +431,7 @@ fn renderValueNasm(output: *std.ArrayList(u8), name: []const u8, value: Value) ! }, } } + +const std = @import("../std.zig"); +const ConfigHeaderStep = @This(); +const Step = std.Build.Step; diff --git a/lib/std/Build/EmulatableRunStep.zig b/lib/std/Build/EmulatableRunStep.zig deleted file mode 100644 index d4b5238524f9..000000000000 --- a/lib/std/Build/EmulatableRunStep.zig +++ /dev/null @@ -1,213 +0,0 @@ -//! 
Unlike `RunStep` this step will provide emulation, when enabled, to run foreign binaries. -//! When a binary is foreign, but emulation for the target is disabled, the specified binary -//! will not be run and therefore also not validated against its output. -//! This step can be useful when wishing to run a built binary on multiple platforms, -//! without having to verify if it's possible to be ran against. - -const std = @import("../std.zig"); -const Step = std.Build.Step; -const CompileStep = std.Build.CompileStep; -const RunStep = std.Build.RunStep; - -const fs = std.fs; -const process = std.process; -const EnvMap = process.EnvMap; - -const EmulatableRunStep = @This(); - -pub const base_id = .emulatable_run; - -const max_stdout_size = 1 * 1024 * 1024; // 1 MiB - -step: Step, -builder: *std.Build, - -/// The artifact (executable) to be run by this step -exe: *CompileStep, - -/// Set this to `null` to ignore the exit code for the purpose of determining a successful execution -expected_term: ?std.ChildProcess.Term = .{ .Exited = 0 }, - -/// Override this field to modify the environment -env_map: ?*EnvMap, - -/// Set this to modify the current working directory -cwd: ?[]const u8, - -stdout_action: RunStep.StdIoAction = .inherit, -stderr_action: RunStep.StdIoAction = .inherit, - -/// When set to true, hides the warning of skipping a foreign binary which cannot be run on the host -/// or through emulation. -hide_foreign_binaries_warning: bool, - -/// Creates a step that will execute the given artifact. This step will allow running the -/// binary through emulation when any of the emulation options such as `enable_rosetta` are set to true. -/// When set to false, and the binary is foreign, running the executable is skipped. -/// Asserts given artifact is an executable. 
-pub fn create(builder: *std.Build, name: []const u8, artifact: *CompileStep) *EmulatableRunStep { - std.debug.assert(artifact.kind == .exe or artifact.kind == .test_exe); - const self = builder.allocator.create(EmulatableRunStep) catch @panic("OOM"); - - const option_name = "hide-foreign-warnings"; - const hide_warnings = if (builder.available_options_map.get(option_name) == null) warn: { - break :warn builder.option(bool, option_name, "Hide the warning when a foreign binary which is incompatible is skipped") orelse false; - } else false; - - self.* = .{ - .builder = builder, - .step = Step.init(.emulatable_run, name, builder.allocator, make), - .exe = artifact, - .env_map = null, - .cwd = null, - .hide_foreign_binaries_warning = hide_warnings, - }; - self.step.dependOn(&artifact.step); - - return self; -} - -fn make(step: *Step) !void { - const self = @fieldParentPtr(EmulatableRunStep, "step", step); - const host_info = self.builder.host; - - var argv_list = std.ArrayList([]const u8).init(self.builder.allocator); - defer argv_list.deinit(); - - const need_cross_glibc = self.exe.target.isGnuLibC() and self.exe.is_linking_libc; - switch (host_info.getExternalExecutor(self.exe.target_info, .{ - .qemu_fixes_dl = need_cross_glibc and self.builder.glibc_runtimes_dir != null, - .link_libc = self.exe.is_linking_libc, - })) { - .native => {}, - .rosetta => if (!self.builder.enable_rosetta) return warnAboutForeignBinaries(self), - .wine => |bin_name| if (self.builder.enable_wine) { - try argv_list.append(bin_name); - } else return, - .qemu => |bin_name| if (self.builder.enable_qemu) { - const glibc_dir_arg = if (need_cross_glibc) - self.builder.glibc_runtimes_dir orelse return - else - null; - try argv_list.append(bin_name); - if (glibc_dir_arg) |dir| { - // TODO look into making this a call to `linuxTriple`. This - // needs the directory to be called "i686" rather than - // "x86" which is why we do it manually here. 
- const fmt_str = "{s}" ++ fs.path.sep_str ++ "{s}-{s}-{s}"; - const cpu_arch = self.exe.target.getCpuArch(); - const os_tag = self.exe.target.getOsTag(); - const abi = self.exe.target.getAbi(); - const cpu_arch_name: []const u8 = if (cpu_arch == .x86) - "i686" - else - @tagName(cpu_arch); - const full_dir = try std.fmt.allocPrint(self.builder.allocator, fmt_str, .{ - dir, cpu_arch_name, @tagName(os_tag), @tagName(abi), - }); - - try argv_list.append("-L"); - try argv_list.append(full_dir); - } - } else return warnAboutForeignBinaries(self), - .darling => |bin_name| if (self.builder.enable_darling) { - try argv_list.append(bin_name); - } else return warnAboutForeignBinaries(self), - .wasmtime => |bin_name| if (self.builder.enable_wasmtime) { - try argv_list.append(bin_name); - try argv_list.append("--dir=."); - } else return warnAboutForeignBinaries(self), - else => return warnAboutForeignBinaries(self), - } - - if (self.exe.target.isWindows()) { - // On Windows we don't have rpaths so we have to add .dll search paths to PATH - RunStep.addPathForDynLibsInternal(&self.step, self.builder, self.exe); - } - - const executable_path = self.exe.installed_path orelse self.exe.getOutputSource().getPath(self.builder); - try argv_list.append(executable_path); - - try RunStep.runCommand( - argv_list.items, - self.builder, - self.expected_term, - self.stdout_action, - self.stderr_action, - .Inherit, - self.env_map, - self.cwd, - false, - ); -} - -pub fn expectStdErrEqual(self: *EmulatableRunStep, bytes: []const u8) void { - self.stderr_action = .{ .expect_exact = self.builder.dupe(bytes) }; -} - -pub fn expectStdOutEqual(self: *EmulatableRunStep, bytes: []const u8) void { - self.stdout_action = .{ .expect_exact = self.builder.dupe(bytes) }; -} - -fn warnAboutForeignBinaries(step: *EmulatableRunStep) void { - if (step.hide_foreign_binaries_warning) return; - const builder = step.builder; - const artifact = step.exe; - - const host_name = builder.host.target.zigTriple(builder.allocator) catch @panic("unhandled error"); - const foreign_name = artifact.target.zigTriple(builder.allocator) catch @panic("unhandled error"); - const target_info = std.zig.system.NativeTargetInfo.detect(artifact.target) catch @panic("unhandled error"); - const need_cross_glibc = artifact.target.isGnuLibC() and artifact.is_linking_libc; - switch (builder.host.getExternalExecutor(target_info, .{ - .qemu_fixes_dl = need_cross_glibc and builder.glibc_runtimes_dir != null, - .link_libc = artifact.is_linking_libc, - })) { - .native => unreachable, - .bad_dl => |foreign_dl| { - const host_dl = builder.host.dynamic_linker.get() orelse "(none)"; - std.debug.print("the host system does not appear to be capable of executing binaries from the target because the host dynamic linker is '{s}', while the target dynamic linker is '{s}'. Consider setting the dynamic linker as '{s}'.\n", .{ - host_dl, foreign_dl, host_dl, - }); - }, - .bad_os_or_cpu => { - std.debug.print("the host system ({s}) does not appear to be capable of executing binaries from the target ({s}).\n", .{ - host_name, foreign_name, - }); - }, - .darling => if (!builder.enable_darling) { - std.debug.print( - "the host system ({s}) does not appear to be capable of executing binaries " ++ - "from the target ({s}). Consider enabling darling.\n", - .{ host_name, foreign_name }, - ); - }, - .rosetta => if (!builder.enable_rosetta) { - std.debug.print( - "the host system ({s}) does not appear to be capable of executing binaries " ++ - "from the target ({s}). 
Consider enabling rosetta.\n", - .{ host_name, foreign_name }, - ); - }, - .wine => if (!builder.enable_wine) { - std.debug.print( - "the host system ({s}) does not appear to be capable of executing binaries " ++ - "from the target ({s}). Consider enabling wine.\n", - .{ host_name, foreign_name }, - ); - }, - .qemu => if (!builder.enable_qemu) { - std.debug.print( - "the host system ({s}) does not appear to be capable of executing binaries " ++ - "from the target ({s}). Consider enabling qemu.\n", - .{ host_name, foreign_name }, - ); - }, - .wasmtime => { - std.debug.print( - "the host system ({s}) does not appear to be capable of executing binaries " ++ - "from the target ({s}). Consider enabling wasmtime.\n", - .{ host_name, foreign_name }, - ); - }, - } -} diff --git a/lib/std/Build/FmtStep.zig b/lib/std/Build/FmtStep.zig index 4a5efde2bd81..2a823423362d 100644 --- a/lib/std/Build/FmtStep.zig +++ b/lib/std/Build/FmtStep.zig @@ -1,32 +1,73 @@ -const std = @import("../std.zig"); -const Step = std.Build.Step; -const FmtStep = @This(); +//! This step has two modes: +//! * Modify mode: directly modify source files, formatting them in place. +//! * Check mode: fail the step if a non-conforming file is found. + +step: Step, +paths: []const []const u8, +exclude_paths: []const []const u8, +check: bool, pub const base_id = .fmt; -step: Step, -builder: *std.Build, -argv: [][]const u8, - -pub fn create(builder: *std.Build, paths: []const []const u8) *FmtStep { - const self = builder.allocator.create(FmtStep) catch @panic("OOM"); - const name = "zig fmt"; - self.* = FmtStep{ - .step = Step.init(.fmt, name, builder.allocator, make), - .builder = builder, - .argv = builder.allocator.alloc([]u8, paths.len + 2) catch @panic("OOM"), - }; +pub const Options = struct { + paths: []const []const u8 = &.{}, + exclude_paths: []const []const u8 = &.{}, + /// If true, fails the build step when any non-conforming files are encountered. + check: bool = false, +}; - self.argv[0] = builder.zig_exe; - self.argv[1] = "fmt"; - for (paths, 0..) |path, i| { - self.argv[2 + i] = builder.pathFromRoot(path); - } +pub fn create(owner: *std.Build, options: Options) *FmtStep { + const self = owner.allocator.create(FmtStep) catch @panic("OOM"); + const name = if (options.check) "zig fmt --check" else "zig fmt"; + self.* = .{ + .step = Step.init(.{ + .id = base_id, + .name = name, + .owner = owner, + .makeFn = make, + }), + .paths = options.paths, + .exclude_paths = options.exclude_paths, + .check = options.check, + }; return self; } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + // zig fmt is fast enough that no progress is needed. + _ = prog_node; + + // TODO: if check=false, this means we are modifying source files in place, which + // is an operation that could race against other operations also modifying source files + // in place. In this case, this step should obtain a write lock while making those + // modifications. 
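+
+    // For clarity, the child process assembled below is, illustratively:
+    //   zig fmt [--check] <paths...> [--exclude <path>]...
+    // where every path is resolved relative to the build root via pathFromRoot.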
+ + const b = step.owner; + const arena = b.allocator; const self = @fieldParentPtr(FmtStep, "step", step); - return self.builder.spawnChild(self.argv); + var argv: std.ArrayListUnmanaged([]const u8) = .{}; + try argv.ensureUnusedCapacity(arena, 2 + 1 + self.paths.len + 2 * self.exclude_paths.len); + + argv.appendAssumeCapacity(b.zig_exe); + argv.appendAssumeCapacity("fmt"); + + if (self.check) { + argv.appendAssumeCapacity("--check"); + } + + for (self.paths) |p| { + argv.appendAssumeCapacity(b.pathFromRoot(p)); + } + + for (self.exclude_paths) |p| { + argv.appendAssumeCapacity("--exclude"); + argv.appendAssumeCapacity(b.pathFromRoot(p)); + } + + return step.evalChildProcess(argv.items); } + +const std = @import("../std.zig"); +const Step = std.Build.Step; +const FmtStep = @This(); diff --git a/lib/std/Build/InstallArtifactStep.zig b/lib/std/Build/InstallArtifactStep.zig index c419c85fdf16..445f1e8ea8da 100644 --- a/lib/std/Build/InstallArtifactStep.zig +++ b/lib/std/Build/InstallArtifactStep.zig @@ -3,83 +3,133 @@ const Step = std.Build.Step; const CompileStep = std.Build.CompileStep; const InstallDir = std.Build.InstallDir; const InstallArtifactStep = @This(); +const fs = std.fs; pub const base_id = .install_artifact; step: Step, -builder: *std.Build, +dest_builder: *std.Build, artifact: *CompileStep, dest_dir: InstallDir, pdb_dir: ?InstallDir, h_dir: ?InstallDir, +/// If non-null, adds additional path components relative to dest_dir, and +/// overrides the basename of the CompileStep. +dest_sub_path: ?[]const u8, -pub fn create(builder: *std.Build, artifact: *CompileStep) *InstallArtifactStep { +pub fn create(owner: *std.Build, artifact: *CompileStep) *InstallArtifactStep { if (artifact.install_step) |s| return s; - const self = builder.allocator.create(InstallArtifactStep) catch @panic("OOM"); + const self = owner.allocator.create(InstallArtifactStep) catch @panic("OOM"); self.* = InstallArtifactStep{ - .builder = builder, - .step = Step.init(.install_artifact, builder.fmt("install {s}", .{artifact.step.name}), builder.allocator, make), + .step = Step.init(.{ + .id = base_id, + .name = owner.fmt("install {s}", .{artifact.name}), + .owner = owner, + .makeFn = make, + }), + .dest_builder = owner, .artifact = artifact, .dest_dir = artifact.override_dest_dir orelse switch (artifact.kind) { .obj => @panic("Cannot install a .obj build artifact."), - .@"test" => @panic("Cannot install a .test build artifact, use .test_exe instead."), - .exe, .test_exe => InstallDir{ .bin = {} }, + .exe, .@"test" => InstallDir{ .bin = {} }, .lib => InstallDir{ .lib = {} }, }, .pdb_dir = if (artifact.producesPdbFile()) blk: { - if (artifact.kind == .exe or artifact.kind == .test_exe) { + if (artifact.kind == .exe or artifact.kind == .@"test") { break :blk InstallDir{ .bin = {} }; } else { break :blk InstallDir{ .lib = {} }; } } else null, .h_dir = if (artifact.kind == .lib and artifact.emit_h) .header else null, + .dest_sub_path = null, }; self.step.dependOn(&artifact.step); artifact.install_step = self; - builder.pushInstalledFile(self.dest_dir, artifact.out_filename); + owner.pushInstalledFile(self.dest_dir, artifact.out_filename); if (self.artifact.isDynamicLibrary()) { if (artifact.major_only_filename) |name| { - builder.pushInstalledFile(.lib, name); + owner.pushInstalledFile(.lib, name); } if (artifact.name_only_filename) |name| { - builder.pushInstalledFile(.lib, name); + owner.pushInstalledFile(.lib, name); } if (self.artifact.target.isWindows()) { - builder.pushInstalledFile(.lib, 
artifact.out_lib_filename); + owner.pushInstalledFile(.lib, artifact.out_lib_filename); } } if (self.pdb_dir) |pdb_dir| { - builder.pushInstalledFile(pdb_dir, artifact.out_pdb_filename); + owner.pushInstalledFile(pdb_dir, artifact.out_pdb_filename); } if (self.h_dir) |h_dir| { - builder.pushInstalledFile(h_dir, artifact.out_h_filename); + owner.pushInstalledFile(h_dir, artifact.out_h_filename); } return self; } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const src_builder = step.owner; const self = @fieldParentPtr(InstallArtifactStep, "step", step); - const builder = self.builder; + const dest_builder = self.dest_builder; - const full_dest_path = builder.getInstallPath(self.dest_dir, self.artifact.out_filename); - try builder.updateFile(self.artifact.getOutputSource().getPath(builder), full_dest_path); - if (self.artifact.isDynamicLibrary() and self.artifact.version != null and self.artifact.target.wantSharedLibSymLinks()) { - try CompileStep.doAtomicSymLinks(builder.allocator, full_dest_path, self.artifact.major_only_filename.?, self.artifact.name_only_filename.?); + const dest_sub_path = if (self.dest_sub_path) |sub_path| sub_path else self.artifact.out_filename; + const full_dest_path = dest_builder.getInstallPath(self.dest_dir, dest_sub_path); + const cwd = fs.cwd(); + + var all_cached = true; + + { + const full_src_path = self.artifact.getOutputSource().getPath(src_builder); + const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_dest_path, .{}) catch |err| { + return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ + full_src_path, full_dest_path, @errorName(err), + }); + }; + all_cached = all_cached and p == .fresh; + } + + if (self.artifact.isDynamicLibrary() and + self.artifact.version != null and + self.artifact.target.wantSharedLibSymLinks()) + { + try CompileStep.doAtomicSymLinks(step, full_dest_path, self.artifact.major_only_filename.?, self.artifact.name_only_filename.?); } - if (self.artifact.isDynamicLibrary() and self.artifact.target.isWindows() and self.artifact.emit_implib != .no_emit) { - const full_implib_path = builder.getInstallPath(self.dest_dir, self.artifact.out_lib_filename); - try builder.updateFile(self.artifact.getOutputLibSource().getPath(builder), full_implib_path); + if (self.artifact.isDynamicLibrary() and + self.artifact.target.isWindows() and + self.artifact.emit_implib != .no_emit) + { + const full_src_path = self.artifact.getOutputLibSource().getPath(src_builder); + const full_implib_path = dest_builder.getInstallPath(self.dest_dir, self.artifact.out_lib_filename); + const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_implib_path, .{}) catch |err| { + return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ + full_src_path, full_implib_path, @errorName(err), + }); + }; + all_cached = all_cached and p == .fresh; } if (self.pdb_dir) |pdb_dir| { - const full_pdb_path = builder.getInstallPath(pdb_dir, self.artifact.out_pdb_filename); - try builder.updateFile(self.artifact.getOutputPdbSource().getPath(builder), full_pdb_path); + const full_src_path = self.artifact.getOutputPdbSource().getPath(src_builder); + const full_pdb_path = dest_builder.getInstallPath(pdb_dir, self.artifact.out_pdb_filename); + const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_pdb_path, .{}) catch |err| { + return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ + full_src_path, full_pdb_path, @errorName(err), + }); + }; + all_cached = all_cached and p == 
.fresh; } if (self.h_dir) |h_dir| { - const full_h_path = builder.getInstallPath(h_dir, self.artifact.out_h_filename); - try builder.updateFile(self.artifact.getOutputHSource().getPath(builder), full_h_path); + const full_src_path = self.artifact.getOutputHSource().getPath(src_builder); + const full_h_path = dest_builder.getInstallPath(h_dir, self.artifact.out_h_filename); + const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_h_path, .{}) catch |err| { + return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ + full_src_path, full_h_path, @errorName(err), + }); + }; + all_cached = all_cached and p == .fresh; } self.artifact.installed_path = full_dest_path; + step.result_cached = all_cached; } diff --git a/lib/std/Build/InstallDirStep.zig b/lib/std/Build/InstallDirStep.zig index 41dbb3e35a5f..d9ea24891346 100644 --- a/lib/std/Build/InstallDirStep.zig +++ b/lib/std/Build/InstallDirStep.zig @@ -4,14 +4,12 @@ const fs = std.fs; const Step = std.Build.Step; const InstallDir = std.Build.InstallDir; const InstallDirStep = @This(); -const log = std.log; step: Step, -builder: *std.Build, options: Options, /// This is used by the build system when a file being installed comes from one /// package but is being installed by another. -override_source_builder: ?*std.Build = null, +dest_builder: *std.Build, pub const base_id = .install_dir; @@ -40,31 +38,35 @@ pub const Options = struct { } }; -pub fn init( - builder: *std.Build, - options: Options, -) InstallDirStep { - builder.pushInstalledFile(options.install_dir, options.install_subdir); - return InstallDirStep{ - .builder = builder, - .step = Step.init(.install_dir, builder.fmt("install {s}/", .{options.source_dir}), builder.allocator, make), - .options = options.dupe(builder), +pub fn init(owner: *std.Build, options: Options) InstallDirStep { + owner.pushInstalledFile(options.install_dir, options.install_subdir); + return .{ + .step = Step.init(.{ + .id = .install_dir, + .name = owner.fmt("install {s}/", .{options.source_dir}), + .owner = owner, + .makeFn = make, + }), + .options = options.dupe(owner), + .dest_builder = owner, }; } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; const self = @fieldParentPtr(InstallDirStep, "step", step); - const dest_prefix = self.builder.getInstallPath(self.options.install_dir, self.options.install_subdir); - const src_builder = self.override_source_builder orelse self.builder; - const full_src_dir = src_builder.pathFromRoot(self.options.source_dir); - var src_dir = std.fs.cwd().openIterableDir(full_src_dir, .{}) catch |err| { - log.err("InstallDirStep: unable to open source directory '{s}': {s}", .{ - full_src_dir, @errorName(err), + const dest_builder = self.dest_builder; + const arena = dest_builder.allocator; + const dest_prefix = dest_builder.getInstallPath(self.options.install_dir, self.options.install_subdir); + const src_builder = self.step.owner; + var src_dir = src_builder.build_root.handle.openIterableDir(self.options.source_dir, .{}) catch |err| { + return step.fail("unable to open source directory '{}{s}': {s}", .{ + src_builder.build_root, self.options.source_dir, @errorName(err), }); - return error.StepFailed; }; defer src_dir.close(); - var it = try src_dir.walk(self.builder.allocator); + var it = try src_dir.walk(arena); + var all_cached = true; next_entry: while (try it.next()) |entry| { for (self.options.exclude_extensions) |ext| { if (mem.endsWith(u8, entry.path, ext)) { @@ -72,22 +74,37 @@ fn make(step: *Step) 
!void { } } - const full_path = self.builder.pathJoin(&.{ full_src_dir, entry.path }); - const dest_path = self.builder.pathJoin(&.{ dest_prefix, entry.path }); + // relative to src build root + const src_sub_path = try fs.path.join(arena, &.{ self.options.source_dir, entry.path }); + const dest_path = try fs.path.join(arena, &.{ dest_prefix, entry.path }); + const cwd = fs.cwd(); switch (entry.kind) { - .Directory => try fs.cwd().makePath(dest_path), + .Directory => try cwd.makePath(dest_path), .File => { for (self.options.blank_extensions) |ext| { if (mem.endsWith(u8, entry.path, ext)) { - try self.builder.truncateFile(dest_path); + try dest_builder.truncateFile(dest_path); continue :next_entry; } } - try self.builder.updateFile(full_path, dest_path); + const prev_status = fs.Dir.updateFile( + src_builder.build_root.handle, + src_sub_path, + cwd, + dest_path, + .{}, + ) catch |err| { + return step.fail("unable to update file from '{}{s}' to '{s}': {s}", .{ + src_builder.build_root, src_sub_path, dest_path, @errorName(err), + }); + }; + all_cached = all_cached and prev_status == .fresh; }, else => continue, } } + + step.result_cached = all_cached; } diff --git a/lib/std/Build/InstallFileStep.zig b/lib/std/Build/InstallFileStep.zig index 8c8d8ad2d451..011ad4820853 100644 --- a/lib/std/Build/InstallFileStep.zig +++ b/lib/std/Build/InstallFileStep.zig @@ -3,38 +3,55 @@ const Step = std.Build.Step; const FileSource = std.Build.FileSource; const InstallDir = std.Build.InstallDir; const InstallFileStep = @This(); +const assert = std.debug.assert; pub const base_id = .install_file; step: Step, -builder: *std.Build, source: FileSource, dir: InstallDir, dest_rel_path: []const u8, /// This is used by the build system when a file being installed comes from one /// package but is being installed by another. 
-override_source_builder: ?*std.Build = null, +dest_builder: *std.Build, -pub fn init( - builder: *std.Build, +pub fn create( + owner: *std.Build, source: FileSource, dir: InstallDir, dest_rel_path: []const u8, -) InstallFileStep { - builder.pushInstalledFile(dir, dest_rel_path); - return InstallFileStep{ - .builder = builder, - .step = Step.init(.install_file, builder.fmt("install {s} to {s}", .{ source.getDisplayName(), dest_rel_path }), builder.allocator, make), - .source = source.dupe(builder), - .dir = dir.dupe(builder), - .dest_rel_path = builder.dupePath(dest_rel_path), +) *InstallFileStep { + assert(dest_rel_path.len != 0); + owner.pushInstalledFile(dir, dest_rel_path); + const self = owner.allocator.create(InstallFileStep) catch @panic("OOM"); + self.* = .{ + .step = Step.init(.{ + .id = base_id, + .name = owner.fmt("install {s} to {s}", .{ source.getDisplayName(), dest_rel_path }), + .owner = owner, + .makeFn = make, + }), + .source = source.dupe(owner), + .dir = dir.dupe(owner), + .dest_rel_path = owner.dupePath(dest_rel_path), + .dest_builder = owner, }; + source.addStepDependencies(&self.step); + return self; } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const src_builder = step.owner; const self = @fieldParentPtr(InstallFileStep, "step", step); - const src_builder = self.override_source_builder orelse self.builder; - const full_src_path = self.source.getPath(src_builder); - const full_dest_path = self.builder.getInstallPath(self.dir, self.dest_rel_path); - try self.builder.updateFile(full_src_path, full_dest_path); + const dest_builder = self.dest_builder; + const full_src_path = self.source.getPath2(src_builder, step); + const full_dest_path = dest_builder.getInstallPath(self.dir, self.dest_rel_path); + const cwd = std.fs.cwd(); + const prev = std.fs.Dir.updateFile(cwd, full_src_path, cwd, full_dest_path, .{}) catch |err| { + return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ + full_src_path, full_dest_path, @errorName(err), + }); + }; + step.result_cached = prev == .fresh; } diff --git a/lib/std/Build/LogStep.zig b/lib/std/Build/LogStep.zig deleted file mode 100644 index 6d51df8cbd47..000000000000 --- a/lib/std/Build/LogStep.zig +++ /dev/null @@ -1,23 +0,0 @@ -const std = @import("../std.zig"); -const log = std.log; -const Step = std.Build.Step; -const LogStep = @This(); - -pub const base_id = .log; - -step: Step, -builder: *std.Build, -data: []const u8, - -pub fn init(builder: *std.Build, data: []const u8) LogStep { - return LogStep{ - .builder = builder, - .step = Step.init(.log, builder.fmt("log {s}", .{data}), builder.allocator, make), - .data = builder.dupe(data), - }; -} - -fn make(step: *Step) anyerror!void { - const self = @fieldParentPtr(LogStep, "step", step); - log.info("{s}", .{self.data}); -} diff --git a/lib/std/Build/ObjCopyStep.zig b/lib/std/Build/ObjCopyStep.zig index aea5b8975cf6..608c56591f93 100644 --- a/lib/std/Build/ObjCopyStep.zig +++ b/lib/std/Build/ObjCopyStep.zig @@ -21,7 +21,6 @@ pub const RawFormat = enum { }; step: Step, -builder: *std.Build, file_source: std.Build.FileSource, basename: []const u8, output_file: std.Build.GeneratedFile, @@ -38,19 +37,18 @@ pub const Options = struct { }; pub fn create( - builder: *std.Build, + owner: *std.Build, file_source: std.Build.FileSource, options: Options, ) *ObjCopyStep { - const self = builder.allocator.create(ObjCopyStep) catch @panic("OOM"); + const self = owner.allocator.create(ObjCopyStep) catch @panic("OOM"); 
self.* = ObjCopyStep{ - .step = Step.init( - base_id, - builder.fmt("objcopy {s}", .{file_source.getDisplayName()}), - builder.allocator, - make, - ), - .builder = builder, + .step = Step.init(.{ + .id = base_id, + .name = owner.fmt("objcopy {s}", .{file_source.getDisplayName()}), + .owner = owner, + .makeFn = make, + }), .file_source = file_source, .basename = options.basename orelse file_source.getDisplayName(), .output_file = std.Build.GeneratedFile{ .step = &self.step }, @@ -67,9 +65,9 @@ pub fn getOutputSource(self: *const ObjCopyStep) std.Build.FileSource { return .{ .generated = &self.output_file }; } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + const b = step.owner; const self = @fieldParentPtr(ObjCopyStep, "step", step); - const b = self.builder; var man = b.cache.obtain(); defer man.deinit(); @@ -84,7 +82,7 @@ fn make(step: *Step) !void { man.hash.addOptional(self.pad_to); man.hash.addOptional(self.format); - if (man.hit() catch |err| failWithCacheError(man, err)) { + if (try step.cacheHit(&man)) { // Cache hit, skip subprocess execution. const digest = man.final(); self.output_file.path = try b.cache_root.join(b.allocator, &.{ @@ -97,8 +95,7 @@ fn make(step: *Step) !void { const full_dest_path = try b.cache_root.join(b.allocator, &.{ "o", &digest, self.basename }); const cache_path = "o" ++ fs.path.sep_str ++ digest; b.cache_root.handle.makePath(cache_path) catch |err| { - std.debug.print("unable to make path {s}: {s}\n", .{ cache_path, @errorName(err) }); - return err; + return step.fail("unable to make path {s}: {s}", .{ cache_path, @errorName(err) }); }; var argv = std.ArrayList([]const u8).init(b.allocator); @@ -116,23 +113,10 @@ fn make(step: *Step) !void { }; try argv.appendSlice(&.{ full_src_path, full_dest_path }); - _ = try self.builder.execFromStep(argv.items, &self.step); + + try argv.append("--listen=-"); + _ = try step.evalZigProcess(argv.items, prog_node); self.output_file.path = full_dest_path; try man.writeManifest(); } - -/// TODO consolidate this with the same function in RunStep? 
-/// Also properly deal with concurrency (see open PR) -fn failWithCacheError(man: std.Build.Cache.Manifest, err: anyerror) noreturn { - const i = man.failed_file_index orelse failWithSimpleError(err); - const pp = man.files.items[i].prefixed_path orelse failWithSimpleError(err); - const prefix = man.cache.prefixes()[pp.prefix].path orelse ""; - std.debug.print("{s}: {s}/{s}\n", .{ @errorName(err), prefix, pp.sub_path }); - std.process.exit(1); -} - -fn failWithSimpleError(err: anyerror) noreturn { - std.debug.print("{s}\n", .{@errorName(err)}); - std.process.exit(1); -} diff --git a/lib/std/Build/OptionsStep.zig b/lib/std/Build/OptionsStep.zig index e5c3e2382169..859d0b68c934 100644 --- a/lib/std/Build/OptionsStep.zig +++ b/lib/std/Build/OptionsStep.zig @@ -12,21 +12,24 @@ pub const base_id = .options; step: Step, generated_file: GeneratedFile, -builder: *std.Build, contents: std.ArrayList(u8), artifact_args: std.ArrayList(OptionArtifactArg), file_source_args: std.ArrayList(OptionFileSourceArg), -pub fn create(builder: *std.Build) *OptionsStep { - const self = builder.allocator.create(OptionsStep) catch @panic("OOM"); +pub fn create(owner: *std.Build) *OptionsStep { + const self = owner.allocator.create(OptionsStep) catch @panic("OOM"); self.* = .{ - .builder = builder, - .step = Step.init(.options, "options", builder.allocator, make), + .step = Step.init(.{ + .id = base_id, + .name = "options", + .owner = owner, + .makeFn = make, + }), .generated_file = undefined, - .contents = std.ArrayList(u8).init(builder.allocator), - .artifact_args = std.ArrayList(OptionArtifactArg).init(builder.allocator), - .file_source_args = std.ArrayList(OptionFileSourceArg).init(builder.allocator), + .contents = std.ArrayList(u8).init(owner.allocator), + .artifact_args = std.ArrayList(OptionArtifactArg).init(owner.allocator), + .file_source_args = std.ArrayList(OptionFileSourceArg).init(owner.allocator), }; self.generated_file = .{ .step = &self.step }; @@ -192,7 +195,7 @@ pub fn addOptionFileSource( ) void { self.file_source_args.append(.{ .name = name, - .source = source.dupe(self.builder), + .source = source.dupe(self.step.owner), }) catch @panic("OOM"); source.addStepDependencies(&self.step); } @@ -200,12 +203,12 @@ pub fn addOptionFileSource( /// The value is the path in the cache dir. /// Adds a dependency automatically. pub fn addOptionArtifact(self: *OptionsStep, name: []const u8, artifact: *CompileStep) void { - self.artifact_args.append(.{ .name = self.builder.dupe(name), .artifact = artifact }) catch @panic("OOM"); + self.artifact_args.append(.{ .name = self.step.owner.dupe(name), .artifact = artifact }) catch @panic("OOM"); self.step.dependOn(&artifact.step); } pub fn createModule(self: *OptionsStep) *std.Build.Module { - return self.builder.createModule(.{ + return self.step.owner.createModule(.{ .source_file = self.getSource(), .dependencies = &.{}, }); @@ -215,14 +218,18 @@ pub fn getSource(self: *OptionsStep) FileSource { return .{ .generated = &self.generated_file }; } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + // This step completes so quickly that no progress is necessary. 
+ _ = prog_node; + + const b = step.owner; const self = @fieldParentPtr(OptionsStep, "step", step); for (self.artifact_args.items) |item| { self.addOption( []const u8, item.name, - self.builder.pathFromRoot(item.artifact.getOutputSource().getPath(self.builder)), + b.pathFromRoot(item.artifact.getOutputSource().getPath(b)), ); } @@ -230,20 +237,18 @@ fn make(step: *Step) !void { self.addOption( []const u8, item.name, - item.source.getPath(self.builder), + item.source.getPath(b), ); } - var options_dir = try self.builder.cache_root.handle.makeOpenPath("options", .{}); + var options_dir = try b.cache_root.handle.makeOpenPath("options", .{}); defer options_dir.close(); const basename = self.hashContentsToFileName(); try options_dir.writeFile(&basename, self.contents.items); - self.generated_file.path = try self.builder.cache_root.join(self.builder.allocator, &.{ - "options", &basename, - }); + self.generated_file.path = try b.cache_root.join(b.allocator, &.{ "options", &basename }); } fn hashContentsToFileName(self: *OptionsStep) [64]u8 { diff --git a/lib/std/Build/RemoveDirStep.zig b/lib/std/Build/RemoveDirStep.zig index f3b71dcec168..a5bf3c32565d 100644 --- a/lib/std/Build/RemoveDirStep.zig +++ b/lib/std/Build/RemoveDirStep.zig @@ -1,5 +1,4 @@ const std = @import("../std.zig"); -const log = std.log; const fs = std.fs; const Step = std.Build.Step; const RemoveDirStep = @This(); @@ -7,23 +6,37 @@ const RemoveDirStep = @This(); pub const base_id = .remove_dir; step: Step, -builder: *std.Build, dir_path: []const u8, -pub fn init(builder: *std.Build, dir_path: []const u8) RemoveDirStep { +pub fn init(owner: *std.Build, dir_path: []const u8) RemoveDirStep { return RemoveDirStep{ - .builder = builder, - .step = Step.init(.remove_dir, builder.fmt("RemoveDir {s}", .{dir_path}), builder.allocator, make), - .dir_path = builder.dupePath(dir_path), + .step = Step.init(.{ + .id = .remove_dir, + .name = owner.fmt("RemoveDir {s}", .{dir_path}), + .owner = owner, + .makeFn = make, + }), + .dir_path = owner.dupePath(dir_path), }; } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + // TODO update progress node while walking file system. + // Should the standard library support this use case?? 
+ _ = prog_node; + + const b = step.owner; const self = @fieldParentPtr(RemoveDirStep, "step", step); - const full_path = self.builder.pathFromRoot(self.dir_path); - fs.cwd().deleteTree(full_path) catch |err| { - log.err("Unable to remove {s}: {s}", .{ full_path, @errorName(err) }); - return err; + b.build_root.handle.deleteTree(self.dir_path) catch |err| { + if (b.build_root.path) |base| { + return step.fail("unable to recursively delete path '{s}/{s}': {s}", .{ + base, self.dir_path, @errorName(err), + }); + } else { + return step.fail("unable to recursively delete path '{s}': {s}", .{ + self.dir_path, @errorName(err), + }); + } }; } diff --git a/lib/std/Build/RunStep.zig b/lib/std/Build/RunStep.zig index 1aae37d2f398..feeb64f6ca7c 100644 --- a/lib/std/Build/RunStep.zig +++ b/lib/std/Build/RunStep.zig @@ -10,76 +10,136 @@ const ArrayList = std.ArrayList; const EnvMap = process.EnvMap; const Allocator = mem.Allocator; const ExecError = std.Build.ExecError; - -const max_stdout_size = 1 * 1024 * 1024; // 1 MiB +const assert = std.debug.assert; const RunStep = @This(); pub const base_id: Step.Id = .run; step: Step, -builder: *std.Build, /// See also addArg and addArgs to modifying this directly argv: ArrayList(Arg), /// Set this to modify the current working directory +/// TODO change this to a Build.Cache.Directory to better integrate with +/// future child process cwd API. cwd: ?[]const u8, /// Override this field to modify the environment, or use setEnvironmentVariable env_map: ?*EnvMap, -stdout_action: StdIoAction = .inherit, -stderr_action: StdIoAction = .inherit, - -stdin_behavior: std.ChildProcess.StdIo = .Inherit, - -/// Set this to `null` to ignore the exit code for the purpose of determining a successful execution -expected_term: ?std.ChildProcess.Term = .{ .Exited = 0 }, - -/// Print the command before running it -print: bool, -/// Controls whether execution is skipped if the output file is up-to-date. -/// The default is to always run if there is no output file, and to skip -/// running if all output files are up-to-date. -condition: enum { output_outdated, always } = .output_outdated, +/// Configures whether the RunStep is considered to have side-effects, and also +/// whether the RunStep will inherit stdio streams, forwarding them to the +/// parent process, in which case will require a global lock to prevent other +/// steps from interfering with stdio while the subprocess associated with this +/// RunStep is running. +/// If the RunStep is determined to not have side-effects, then execution will +/// be skipped if all output files are up-to-date and input files are +/// unchanged. +stdio: StdIo = .infer_from_args, +/// This field must be `null` if stdio is `inherit`. +stdin: ?[]const u8 = null, /// Additional file paths relative to build.zig that, when modified, indicate /// that the RunStep should be re-executed. +/// If the RunStep is determined to have side-effects, this field is ignored +/// and the RunStep is always executed when it appears in the build graph. extra_file_dependencies: []const []const u8 = &.{}, -pub const StdIoAction = union(enum) { +/// After adding an output argument, this step will by default rename itself +/// for a better display name in the build summary. +/// This can be disabled by setting this to false. 
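+/// For example, a step named "run codegen" (hypothetical) that calls
+/// `addOutputFileArg("out.zig")` is renamed to "run codegen (out.zig)"; see
+/// `addPrefixedOutputFileArg` below.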
+rename_step_with_output_arg: bool = true,
+
+/// If this is true, a RunStep which is configured to check the output of the
+/// executed binary will not fail the build if the binary cannot be executed
+/// due to being a foreign binary to the host system which is running the
+/// build graph.
+/// Command-line arguments such as -fqemu and -fwasmtime may affect whether a
+/// binary is detected as foreign, as well as system configuration such as
+/// Rosetta (macOS) and binfmt_misc (Linux).
+/// If this RunStep is considered to have side-effects, then this flag does
+/// nothing.
+skip_foreign_checks: bool = false,
+
+/// If stderr or stdout exceeds this amount, the child process is killed and
+/// the step fails.
+max_stdio_size: usize = 10 * 1024 * 1024,
+
+captured_stdout: ?*Output = null,
+captured_stderr: ?*Output = null,
+
+has_side_effects: bool = false,
+
+pub const StdIo = union(enum) {
+    /// Whether the RunStep has side-effects will be determined by whether or not one
+    /// of the args is an output file (added with `addOutputFileArg`).
+    /// If the RunStep is determined to have side-effects, this is the same as `inherit`.
+    /// The step will fail if the subprocess crashes or returns a non-zero exit code.
+    infer_from_args,
+    /// Causes the RunStep to be considered to have side-effects, and therefore
+    /// always execute when it appears in the build graph.
+    /// It also means that this step will obtain a global lock to prevent other
+    /// steps from running in the meantime.
+    /// The step will fail if the subprocess crashes or returns a non-zero exit code.
     inherit,
-    ignore,
-    expect_exact: []const u8,
-    expect_matches: []const []const u8,
+    /// Causes the RunStep to be considered to *not* have side-effects. The
+    /// process will be re-executed if any of the input dependencies are
+    /// modified. The exit code and standard I/O streams will be checked for
+    /// certain conditions, and the step will succeed or fail based on these
+    /// conditions.
+    /// Note that an explicit check for exit code 0 needs to be added to this
+    /// list if such a check is desirable.
+    check: std.ArrayList(Check),
+    /// This RunStep is running a zig unit test binary and will communicate
+    /// extra metadata over the IPC protocol.
+ zig_test, + + pub const Check = union(enum) { + expect_stderr_exact: []const u8, + expect_stderr_match: []const u8, + expect_stdout_exact: []const u8, + expect_stdout_match: []const u8, + expect_term: std.process.Child.Term, + }; }; pub const Arg = union(enum) { artifact: *CompileStep, file_source: std.Build.FileSource, + directory_source: std.Build.FileSource, bytes: []u8, - output: Output, + output: *Output, +}; - pub const Output = struct { - generated_file: *std.Build.GeneratedFile, - basename: []const u8, - }; +pub const Output = struct { + generated_file: std.Build.GeneratedFile, + prefix: []const u8, + basename: []const u8, }; -pub fn create(builder: *std.Build, name: []const u8) *RunStep { - const self = builder.allocator.create(RunStep) catch @panic("OOM"); - self.* = RunStep{ - .builder = builder, - .step = Step.init(base_id, name, builder.allocator, make), - .argv = ArrayList(Arg).init(builder.allocator), +pub fn create(owner: *std.Build, name: []const u8) *RunStep { + const self = owner.allocator.create(RunStep) catch @panic("OOM"); + self.* = .{ + .step = Step.init(.{ + .id = base_id, + .name = name, + .owner = owner, + .makeFn = make, + }), + .argv = ArrayList(Arg).init(owner.allocator), .cwd = null, .env_map = null, - .print = builder.verbose, }; return self; } +pub fn setName(self: *RunStep, name: []const u8) void { + self.step.name = name; + self.rename_step_with_output_arg = false; +} + pub fn addArtifactArg(self: *RunStep, artifact: *CompileStep) void { self.argv.append(Arg{ .artifact = artifact }) catch @panic("OOM"); self.step.dependOn(&artifact.step); @@ -89,25 +149,47 @@ pub fn addArtifactArg(self: *RunStep, artifact: *CompileStep) void { /// run, and returns a FileSource which can be used as inputs to other APIs /// throughout the build system. 
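+/// For example (hypothetical step names, illustrative only):
+///     const generated = tool_run.addOutputFileArg("data.bin");
+///     consumer_run.addFileSourceArg(generated);
+/// makes `consumer_run` depend on `tool_run` and re-run when the file changes.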
pub fn addOutputFileArg(rs: *RunStep, basename: []const u8) std.Build.FileSource { - const generated_file = rs.builder.allocator.create(std.Build.GeneratedFile) catch @panic("OOM"); - generated_file.* = .{ .step = &rs.step }; - rs.argv.append(.{ .output = .{ - .generated_file = generated_file, - .basename = rs.builder.dupe(basename), - } }) catch @panic("OOM"); + return addPrefixedOutputFileArg(rs, "", basename); +} - return .{ .generated = generated_file }; +pub fn addPrefixedOutputFileArg( + rs: *RunStep, + prefix: []const u8, + basename: []const u8, +) std.Build.FileSource { + const b = rs.step.owner; + + const output = b.allocator.create(Output) catch @panic("OOM"); + output.* = .{ + .prefix = prefix, + .basename = basename, + .generated_file = .{ .step = &rs.step }, + }; + rs.argv.append(.{ .output = output }) catch @panic("OOM"); + + if (rs.rename_step_with_output_arg) { + rs.setName(b.fmt("{s} ({s})", .{ rs.step.name, basename })); + } + + return .{ .generated = &output.generated_file }; } pub fn addFileSourceArg(self: *RunStep, file_source: std.Build.FileSource) void { - self.argv.append(Arg{ - .file_source = file_source.dupe(self.builder), + self.argv.append(.{ + .file_source = file_source.dupe(self.step.owner), }) catch @panic("OOM"); file_source.addStepDependencies(&self.step); } +pub fn addDirectorySourceArg(self: *RunStep, directory_source: std.Build.FileSource) void { + self.argv.append(.{ + .directory_source = directory_source.dupe(self.step.owner), + }) catch @panic("OOM"); + directory_source.addStepDependencies(&self.step); +} + pub fn addArg(self: *RunStep, arg: []const u8) void { - self.argv.append(Arg{ .bytes = self.builder.dupe(arg) }) catch @panic("OOM"); + self.argv.append(.{ .bytes = self.step.owner.dupe(arg) }) catch @panic("OOM"); } pub fn addArgs(self: *RunStep, args: []const []const u8) void { @@ -117,102 +199,183 @@ pub fn addArgs(self: *RunStep, args: []const []const u8) void { } pub fn clearEnvironment(self: *RunStep) void { - const new_env_map = self.builder.allocator.create(EnvMap) catch @panic("OOM"); - new_env_map.* = EnvMap.init(self.builder.allocator); + const b = self.step.owner; + const new_env_map = b.allocator.create(EnvMap) catch @panic("OOM"); + new_env_map.* = EnvMap.init(b.allocator); self.env_map = new_env_map; } pub fn addPathDir(self: *RunStep, search_path: []const u8) void { - addPathDirInternal(&self.step, self.builder, search_path); -} - -/// For internal use only, users of `RunStep` should use `addPathDir` directly. 
-pub fn addPathDirInternal(step: *Step, builder: *std.Build, search_path: []const u8) void { - const env_map = getEnvMapInternal(step, builder.allocator); + const b = self.step.owner; + const env_map = getEnvMapInternal(self); const key = "PATH"; var prev_path = env_map.get(key); if (prev_path) |pp| { - const new_path = builder.fmt("{s}" ++ [1]u8{fs.path.delimiter} ++ "{s}", .{ pp, search_path }); + const new_path = b.fmt("{s}" ++ [1]u8{fs.path.delimiter} ++ "{s}", .{ pp, search_path }); env_map.put(key, new_path) catch @panic("OOM"); } else { - env_map.put(key, builder.dupePath(search_path)) catch @panic("OOM"); + env_map.put(key, b.dupePath(search_path)) catch @panic("OOM"); } } pub fn getEnvMap(self: *RunStep) *EnvMap { - return getEnvMapInternal(&self.step, self.builder.allocator); + return getEnvMapInternal(self); } -fn getEnvMapInternal(step: *Step, allocator: Allocator) *EnvMap { - const maybe_env_map = switch (step.id) { - .run => step.cast(RunStep).?.env_map, - .emulatable_run => step.cast(std.Build.EmulatableRunStep).?.env_map, - else => unreachable, - }; - return maybe_env_map orelse { - const env_map = allocator.create(EnvMap) catch @panic("OOM"); - env_map.* = process.getEnvMap(allocator) catch @panic("unhandled error"); - switch (step.id) { - .run => step.cast(RunStep).?.env_map = env_map, - .emulatable_run => step.cast(RunStep).?.env_map = env_map, - else => unreachable, - } +fn getEnvMapInternal(self: *RunStep) *EnvMap { + const arena = self.step.owner.allocator; + return self.env_map orelse { + const env_map = arena.create(EnvMap) catch @panic("OOM"); + env_map.* = process.getEnvMap(arena) catch @panic("unhandled error"); + self.env_map = env_map; return env_map; }; } pub fn setEnvironmentVariable(self: *RunStep, key: []const u8, value: []const u8) void { + const b = self.step.owner; const env_map = self.getEnvMap(); - env_map.put( - self.builder.dupe(key), - self.builder.dupe(value), - ) catch @panic("unhandled error"); + env_map.put(b.dupe(key), b.dupe(value)) catch @panic("unhandled error"); +} + +pub fn removeEnvironmentVariable(self: *RunStep, key: []const u8) void { + self.getEnvMap().remove(key); } +/// Adds a check for exact stderr match. Does not add any other checks. pub fn expectStdErrEqual(self: *RunStep, bytes: []const u8) void { - self.stderr_action = .{ .expect_exact = self.builder.dupe(bytes) }; + const new_check: StdIo.Check = .{ .expect_stderr_exact = self.step.owner.dupe(bytes) }; + self.addCheck(new_check); } +/// Adds a check for exact stdout match as well as a check for exit code 0, if +/// there is not already an expected termination check. 
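+/// For example, with `run` a `*RunStep`, `run.expectStdOutEqual("hello\n")`
+/// appends an `.expect_stdout_exact` check and, when no `.expect_term` check
+/// exists yet, also calls `expectExitCode(0)`.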
 pub fn expectStdOutEqual(self: *RunStep, bytes: []const u8) void {
-    self.stdout_action = .{ .expect_exact = self.builder.dupe(bytes) };
+    const new_check: StdIo.Check = .{ .expect_stdout_exact = self.step.owner.dupe(bytes) };
+    self.addCheck(new_check);
+    if (!self.hasTermCheck()) {
+        self.expectExitCode(0);
+    }
 }
 
-fn stdIoActionToBehavior(action: StdIoAction) std.ChildProcess.StdIo {
-    return switch (action) {
-        .ignore => .Ignore,
-        .inherit => .Inherit,
-        .expect_exact, .expect_matches => .Pipe,
+pub fn expectExitCode(self: *RunStep, code: u8) void {
+    const new_check: StdIo.Check = .{ .expect_term = .{ .Exited = code } };
+    self.addCheck(new_check);
+}
+
+pub fn hasTermCheck(self: RunStep) bool {
+    for (self.stdio.check.items) |check| switch (check) {
+        .expect_term => return true,
+        else => continue,
     };
+    return false;
 }
 
-fn needOutputCheck(self: RunStep) bool {
-    switch (self.condition) {
-        .always => return false,
-        .output_outdated => {},
+pub fn addCheck(self: *RunStep, new_check: StdIo.Check) void {
+    switch (self.stdio) {
+        .infer_from_args => {
+            self.stdio = .{ .check = std.ArrayList(StdIo.Check).init(self.step.owner.allocator) };
+            self.stdio.check.append(new_check) catch @panic("OOM");
+        },
+        .check => |*checks| checks.append(new_check) catch @panic("OOM"),
+        else => @panic("illegal call to addCheck: conflicting helper method calls. Suggest to directly set stdio field of RunStep instead"),
     }
-    if (self.extra_file_dependencies.len > 0) return true;
+}
+
+pub fn captureStdErr(self: *RunStep) std.Build.FileSource {
+    assert(self.stdio != .inherit);
+
+    if (self.captured_stderr) |output| return .{ .generated = &output.generated_file };
+
+    const output = self.step.owner.allocator.create(Output) catch @panic("OOM");
+    output.* = .{
+        .prefix = "",
+        .basename = "stderr",
+        .generated_file = .{ .step = &self.step },
+    };
+    self.captured_stderr = output;
+    return .{ .generated = &output.generated_file };
+}
+
+pub fn captureStdOut(self: *RunStep) std.Build.FileSource {
+    assert(self.stdio != .inherit);
+    if (self.captured_stdout) |output| return .{ .generated = &output.generated_file };
+
+    const output = self.step.owner.allocator.create(Output) catch @panic("OOM");
+    output.* = .{
+        .prefix = "",
+        .basename = "stdout",
+        .generated_file = .{ .step = &self.step },
+    };
+    self.captured_stdout = output;
+    return .{ .generated = &output.generated_file };
+}
+
+/// Returns whether the RunStep has side effects *other than* updating the output arguments.
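+/// For example, a RunStep left at `.infer_from_args` with no output arguments
+/// and no captured stdout/stderr is treated as having side effects, so it is
+/// re-executed on every build rather than cached.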
+fn hasSideEffects(self: RunStep) bool { + if (self.has_side_effects) return true; + return switch (self.stdio) { + .infer_from_args => !self.hasAnyOutputArgs(), + .inherit => true, + .check => false, + .zig_test => false, + }; +} + +fn hasAnyOutputArgs(self: RunStep) bool { + if (self.captured_stdout != null) return true; + if (self.captured_stderr != null) return true; for (self.argv.items) |arg| switch (arg) { .output => return true, else => continue, }; + return false; +} + +fn checksContainStdout(checks: []const StdIo.Check) bool { + for (checks) |check| switch (check) { + .expect_stderr_exact, + .expect_stderr_match, + .expect_term, + => continue, + + .expect_stdout_exact, + .expect_stdout_match, + => return true, + }; + return false; +} +fn checksContainStderr(checks: []const StdIo.Check) bool { + for (checks) |check| switch (check) { + .expect_stdout_exact, + .expect_stdout_match, + .expect_term, + => continue, + + .expect_stderr_exact, + .expect_stderr_match, + => return true, + }; return false; } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + const b = step.owner; + const arena = b.allocator; const self = @fieldParentPtr(RunStep, "step", step); - const need_output_check = self.needOutputCheck(); + const has_side_effects = self.hasSideEffects(); - var argv_list = ArrayList([]const u8).init(self.builder.allocator); + var argv_list = ArrayList([]const u8).init(arena); var output_placeholders = ArrayList(struct { index: usize, - output: Arg.Output, - }).init(self.builder.allocator); + output: *Output, + }).init(arena); - var man = self.builder.cache.obtain(); + var man = b.cache.obtain(); defer man.deinit(); for (self.argv.items) |arg| { @@ -222,23 +385,29 @@ fn make(step: *Step) !void { man.hash.addBytes(bytes); }, .file_source => |file| { - const file_path = file.getPath(self.builder); + const file_path = file.getPath(b); try argv_list.append(file_path); _ = try man.addFile(file_path, null); }, + .directory_source => |file| { + const file_path = file.getPath(b); + try argv_list.append(file_path); + man.hash.addBytes(file_path); + }, .artifact => |artifact| { if (artifact.target.isWindows()) { // On Windows we don't have rpaths so we have to add .dll search paths to PATH self.addPathForDynLibs(artifact); } const file_path = artifact.installed_path orelse - artifact.getOutputSource().getPath(self.builder); + artifact.getOutputSource().getPath(b); try argv_list.append(file_path); _ = try man.addFile(file_path, null); }, .output => |output| { + man.hash.addBytes(output.prefix); man.hash.addBytes(output.basename); // Add a placeholder into the argument list because we need the // manifest hash to be updated with all arguments before the @@ -252,60 +421,77 @@ fn make(step: *Step) !void { } } - if (need_output_check) { - for (self.extra_file_dependencies) |file_path| { - _ = try man.addFile(self.builder.pathFromRoot(file_path), null); - } + if (self.captured_stdout) |output| { + man.hash.addBytes(output.basename); + } - if (man.hit() catch |err| failWithCacheError(man, err)) { - // cache hit, skip running command - const digest = man.final(); - for (output_placeholders.items) |placeholder| { - placeholder.output.generated_file.path = try self.builder.cache_root.join( - self.builder.allocator, - &.{ "o", &digest, placeholder.output.basename }, - ); - } - return; - } + if (self.captured_stderr) |output| { + man.hash.addBytes(output.basename); + } - const digest = man.final(); + hashStdIo(&man.hash, self.stdio); + if (has_side_effects) { + try 
runCommand(self, argv_list.items, has_side_effects, null, prog_node); + return; + } + + for (self.extra_file_dependencies) |file_path| { + _ = try man.addFile(b.pathFromRoot(file_path), null); + } + + if (try step.cacheHit(&man)) { + // cache hit, skip running command + const digest = man.final(); for (output_placeholders.items) |placeholder| { - const output_path = try self.builder.cache_root.join( - self.builder.allocator, - &.{ "o", &digest, placeholder.output.basename }, - ); - const output_dir = fs.path.dirname(output_path).?; - fs.cwd().makePath(output_dir) catch |err| { - std.debug.print("unable to make path {s}: {s}\n", .{ output_dir, @errorName(err) }); - return err; - }; + placeholder.output.generated_file.path = try b.cache_root.join(arena, &.{ + "o", &digest, placeholder.output.basename, + }); + } + + if (self.captured_stdout) |output| { + output.generated_file.path = try b.cache_root.join(arena, &.{ + "o", &digest, output.basename, + }); + } - placeholder.output.generated_file.path = output_path; - argv_list.items[placeholder.index] = output_path; + if (self.captured_stderr) |output| { + output.generated_file.path = try b.cache_root.join(arena, &.{ + "o", &digest, output.basename, + }); } + + step.result_cached = true; + return; } - try runCommand( - argv_list.items, - self.builder, - self.expected_term, - self.stdout_action, - self.stderr_action, - self.stdin_behavior, - self.env_map, - self.cwd, - self.print, - ); - - if (need_output_check) { - try man.writeManifest(); + const digest = man.final(); + + for (output_placeholders.items) |placeholder| { + const output_components = .{ "o", &digest, placeholder.output.basename }; + const output_sub_path = try fs.path.join(arena, &output_components); + const output_sub_dir_path = fs.path.dirname(output_sub_path).?; + b.cache_root.handle.makePath(output_sub_dir_path) catch |err| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.cache_root, output_sub_dir_path, @errorName(err), + }); + }; + const output_path = try b.cache_root.join(arena, &output_components); + placeholder.output.generated_file.path = output_path; + const cli_arg = if (placeholder.output.prefix.len == 0) + output_path + else + b.fmt("{s}{s}", .{ placeholder.output.prefix, output_path }); + argv_list.items[placeholder.index] = cli_arg; } + + try runCommand(self, argv_list.items, has_side_effects, &digest, prog_node); + + try step.writeManifest(&man); } fn formatTerm( - term: ?std.ChildProcess.Term, + term: ?std.process.Child.Term, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype, @@ -321,11 +507,11 @@ fn formatTerm( try writer.writeAll("exited with any code"); } } -fn fmtTerm(term: ?std.ChildProcess.Term) std.fmt.Formatter(formatTerm) { +fn fmtTerm(term: ?std.process.Child.Term) std.fmt.Formatter(formatTerm) { return .{ .data = term }; } -fn termMatches(expected: ?std.ChildProcess.Term, actual: std.ChildProcess.Term) bool { +fn termMatches(expected: ?std.process.Child.Term, actual: std.process.Child.Term) bool { return if (expected) |e| switch (e) { .Exited => |expected_code| switch (actual) { .Exited => |actual_code| expected_code == actual_code, @@ -349,183 +535,701 @@ fn termMatches(expected: ?std.ChildProcess.Term, actual: std.ChildProcess.Term) }; } -pub fn runCommand( +fn runCommand( + self: *RunStep, argv: []const []const u8, - builder: *std.Build, - expected_term: ?std.ChildProcess.Term, - stdout_action: StdIoAction, - stderr_action: StdIoAction, - stdin_behavior: std.ChildProcess.StdIo, - env_map: ?*EnvMap, - maybe_cwd: 
?[]const u8, - print: bool, + has_side_effects: bool, + digest: ?*const [std.Build.Cache.hex_digest_len]u8, + prog_node: *std.Progress.Node, ) !void { - const cwd = if (maybe_cwd) |cwd| builder.pathFromRoot(cwd) else builder.build_root.path; - - if (!std.process.can_spawn) { - const cmd = try std.mem.join(builder.allocator, " ", argv); - std.debug.print("the following command cannot be executed ({s} does not support spawning a child process):\n{s}", .{ - @tagName(builtin.os.tag), cmd, - }); - builder.allocator.free(cmd); - return ExecError.ExecNotSupported; - } + const step = &self.step; + const b = step.owner; + const arena = b.allocator; + + try step.handleChildProcUnsupported(self.cwd, argv); + try Step.handleVerbose2(step.owner, self.cwd, self.env_map, argv); + + const allow_skip = switch (self.stdio) { + .check, .zig_test => self.skip_foreign_checks, + else => false, + }; + + var interp_argv = std.ArrayList([]const u8).init(b.allocator); + defer interp_argv.deinit(); + + const result = spawnChildAndCollect(self, argv, has_side_effects, prog_node) catch |err| term: { + // InvalidExe: cpu arch mismatch + // FileNotFound: can happen with a wrong dynamic linker path + if (err == error.InvalidExe or err == error.FileNotFound) interpret: { + // TODO: learn the target from the binary directly rather than from + // relying on it being a CompileStep. This will make this logic + // work even for the edge case that the binary was produced by a + // third party. + const exe = switch (self.argv.items[0]) { + .artifact => |exe| exe, + else => break :interpret, + }; + switch (exe.kind) { + .exe, .@"test" => {}, + else => break :interpret, + } + + const need_cross_glibc = exe.target.isGnuLibC() and exe.is_linking_libc; + switch (b.host.getExternalExecutor(exe.target_info, .{ + .qemu_fixes_dl = need_cross_glibc and b.glibc_runtimes_dir != null, + .link_libc = exe.is_linking_libc, + })) { + .native, .rosetta => { + if (allow_skip) return error.MakeSkipped; + break :interpret; + }, + .wine => |bin_name| { + if (b.enable_wine) { + try interp_argv.append(bin_name); + try interp_argv.appendSlice(argv); + } else { + return failForeign(self, "-fwine", argv[0], exe); + } + }, + .qemu => |bin_name| { + if (b.enable_qemu) { + const glibc_dir_arg = if (need_cross_glibc) + b.glibc_runtimes_dir orelse return + else + null; + + try interp_argv.append(bin_name); + + if (glibc_dir_arg) |dir| { + // TODO look into making this a call to `linuxTriple`. This + // needs the directory to be called "i686" rather than + // "x86" which is why we do it manually here. 
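+                            // The resulting directory is handed to qemu via `-L`,
+                            // pointing the emulator at the cross-compiled glibc
+                            // runtime and its dynamic linker.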
+ const fmt_str = "{s}" ++ fs.path.sep_str ++ "{s}-{s}-{s}"; + const cpu_arch = exe.target.getCpuArch(); + const os_tag = exe.target.getOsTag(); + const abi = exe.target.getAbi(); + const cpu_arch_name: []const u8 = if (cpu_arch == .x86) + "i686" + else + @tagName(cpu_arch); + const full_dir = try std.fmt.allocPrint(b.allocator, fmt_str, .{ + dir, cpu_arch_name, @tagName(os_tag), @tagName(abi), + }); + + try interp_argv.append("-L"); + try interp_argv.append(full_dir); + } + + try interp_argv.appendSlice(argv); + } else { + return failForeign(self, "-fqemu", argv[0], exe); + } + }, + .darling => |bin_name| { + if (b.enable_darling) { + try interp_argv.append(bin_name); + try interp_argv.appendSlice(argv); + } else { + return failForeign(self, "-fdarling", argv[0], exe); + } + }, + .wasmtime => |bin_name| { + if (b.enable_wasmtime) { + try interp_argv.append(bin_name); + try interp_argv.append("--dir=."); + try interp_argv.append(argv[0]); + try interp_argv.append("--"); + try interp_argv.appendSlice(argv[1..]); + } else { + return failForeign(self, "-fwasmtime", argv[0], exe); + } + }, + .bad_dl => |foreign_dl| { + if (allow_skip) return error.MakeSkipped; + + const host_dl = b.host.dynamic_linker.get() orelse "(none)"; + + return step.fail( + \\the host system is unable to execute binaries from the target + \\ because the host dynamic linker is '{s}', + \\ while the target dynamic linker is '{s}'. + \\ consider setting the dynamic linker or enabling skip_foreign_checks in the Run step + , .{ host_dl, foreign_dl }); + }, + .bad_os_or_cpu => { + if (allow_skip) return error.MakeSkipped; + + const host_name = try b.host.target.zigTriple(b.allocator); + const foreign_name = try exe.target.zigTriple(b.allocator); + + return step.fail("the host system ({s}) is unable to execute binaries from the target ({s})", .{ + host_name, foreign_name, + }); + }, + } - var child = std.ChildProcess.init(argv, builder.allocator); - child.cwd = cwd; - child.env_map = env_map orelse builder.env_map; + if (exe.target.isWindows()) { + // On Windows we don't have rpaths so we have to add .dll search paths to PATH + self.addPathForDynLibs(exe); + } - child.stdin_behavior = stdin_behavior; - child.stdout_behavior = stdIoActionToBehavior(stdout_action); - child.stderr_behavior = stdIoActionToBehavior(stderr_action); + try Step.handleVerbose2(step.owner, self.cwd, self.env_map, interp_argv.items); - if (print) - printCmd(cwd, argv); + break :term spawnChildAndCollect(self, interp_argv.items, has_side_effects, prog_node) catch |e| { + return step.fail("unable to spawn interpreter {s}: {s}", .{ + interp_argv.items[0], @errorName(e), + }); + }; + } - child.spawn() catch |err| { - std.debug.print("Unable to spawn {s}: {s}\n", .{ argv[0], @errorName(err) }); - return err; + return step.fail("unable to spawn {s}: {s}", .{ argv[0], @errorName(err) }); }; - // TODO need to poll to read these streams to prevent a deadlock (or rely on evented I/O). + step.result_duration_ns = result.elapsed_ns; + step.result_peak_rss = result.peak_rss; + step.test_results = result.stdio.test_results; - var stdout: ?[]const u8 = null; - defer if (stdout) |s| builder.allocator.free(s); + // Capture stdout and stderr to GeneratedFile objects. 
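+    // Each captured stream is written to o/<digest>/<basename> in the cache root
+    // and its generated_file.path is set, so dependent steps can read the output
+    // through the FileSource returned by captureStdOut/captureStdErr.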
+ const Stream = struct { + captured: ?*Output, + is_null: bool, + bytes: []const u8, + }; + for ([_]Stream{ + .{ + .captured = self.captured_stdout, + .is_null = result.stdio.stdout_null, + .bytes = result.stdio.stdout, + }, + .{ + .captured = self.captured_stderr, + .is_null = result.stdio.stderr_null, + .bytes = result.stdio.stderr, + }, + }) |stream| { + if (stream.captured) |output| { + assert(!stream.is_null); + + const output_components = .{ "o", digest.?, output.basename }; + const output_path = try b.cache_root.join(arena, &output_components); + output.generated_file.path = output_path; + + const sub_path = try fs.path.join(arena, &output_components); + const sub_path_dirname = fs.path.dirname(sub_path).?; + b.cache_root.handle.makePath(sub_path_dirname) catch |err| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.cache_root, sub_path_dirname, @errorName(err), + }); + }; + b.cache_root.handle.writeFile(sub_path, stream.bytes) catch |err| { + return step.fail("unable to write file '{}{s}': {s}", .{ + b.cache_root, sub_path, @errorName(err), + }); + }; + } + } - switch (stdout_action) { - .expect_exact, .expect_matches => { - stdout = try child.stdout.?.reader().readAllAlloc(builder.allocator, max_stdout_size); + const final_argv = if (interp_argv.items.len == 0) argv else interp_argv.items; + + switch (self.stdio) { + .check => |checks| for (checks.items) |check| switch (check) { + .expect_stderr_exact => |expected_bytes| { + assert(!result.stdio.stderr_null); + if (!mem.eql(u8, expected_bytes, result.stdio.stderr)) { + return step.fail( + \\ + \\========= expected this stderr: ========= + \\{s} + \\========= but found: ==================== + \\{s} + \\========= from the following command: === + \\{s} + , .{ + expected_bytes, + result.stdio.stderr, + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + }, + .expect_stderr_match => |match| { + assert(!result.stdio.stderr_null); + if (mem.indexOf(u8, result.stdio.stderr, match) == null) { + return step.fail( + \\ + \\========= expected to find in stderr: ========= + \\{s} + \\========= but stderr does not contain it: ===== + \\{s} + \\========= from the following command: ========= + \\{s} + , .{ + match, + result.stdio.stderr, + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + }, + .expect_stdout_exact => |expected_bytes| { + assert(!result.stdio.stdout_null); + if (!mem.eql(u8, expected_bytes, result.stdio.stdout)) { + return step.fail( + \\ + \\========= expected this stdout: ========= + \\{s} + \\========= but found: ==================== + \\{s} + \\========= from the following command: === + \\{s} + , .{ + expected_bytes, + result.stdio.stdout, + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + }, + .expect_stdout_match => |match| { + assert(!result.stdio.stdout_null); + if (mem.indexOf(u8, result.stdio.stdout, match) == null) { + return step.fail( + \\ + \\========= expected to find in stdout: ========= + \\{s} + \\========= but stdout does not contain it: ===== + \\{s} + \\========= from the following command: ========= + \\{s} + , .{ + match, + result.stdio.stdout, + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + }, + .expect_term => |expected_term| { + if (!termMatches(expected_term, result.term)) { + return step.fail("the following command {} (expected {}):\n{s}", .{ + fmtTerm(result.term), + fmtTerm(expected_term), + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + }, + }, + .zig_test => { + const expected_term: 
std.process.Child.Term = .{ .Exited = 0 }; + if (!termMatches(expected_term, result.term)) { + return step.fail("the following command {} (expected {}):\n{s}", .{ + fmtTerm(result.term), + fmtTerm(expected_term), + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + if (!result.stdio.test_results.isSuccess()) { + return step.fail( + "the following test command failed:\n{s}", + .{try Step.allocPrintCmd(arena, self.cwd, final_argv)}, + ); + } + }, + else => { + try step.handleChildProcessTerm(result.term, self.cwd, final_argv); }, - .inherit, .ignore => {}, } +} - var stderr: ?[]const u8 = null; - defer if (stderr) |s| builder.allocator.free(s); +const ChildProcResult = struct { + term: std.process.Child.Term, + elapsed_ns: u64, + peak_rss: usize, - switch (stderr_action) { - .expect_exact, .expect_matches => { - stderr = try child.stderr.?.reader().readAllAlloc(builder.allocator, max_stdout_size); - }, - .inherit, .ignore => {}, + stdio: StdIoResult, +}; + +fn spawnChildAndCollect( + self: *RunStep, + argv: []const []const u8, + has_side_effects: bool, + prog_node: *std.Progress.Node, +) !ChildProcResult { + const b = self.step.owner; + const arena = b.allocator; + + var child = std.process.Child.init(argv, arena); + if (self.cwd) |cwd| { + child.cwd = b.pathFromRoot(cwd); + } else { + child.cwd = b.build_root.path; + child.cwd_dir = b.build_root.handle; } + child.env_map = self.env_map orelse b.env_map; + child.request_resource_usage_statistics = true; - const term = child.wait() catch |err| { - std.debug.print("Unable to spawn {s}: {s}\n", .{ argv[0], @errorName(err) }); - return err; + child.stdin_behavior = switch (self.stdio) { + .infer_from_args => if (has_side_effects) .Inherit else .Close, + .inherit => .Inherit, + .check => .Close, + .zig_test => .Pipe, }; + child.stdout_behavior = switch (self.stdio) { + .infer_from_args => if (has_side_effects) .Inherit else .Ignore, + .inherit => .Inherit, + .check => |checks| if (checksContainStdout(checks.items)) .Pipe else .Ignore, + .zig_test => .Pipe, + }; + child.stderr_behavior = switch (self.stdio) { + .infer_from_args => if (has_side_effects) .Inherit else .Pipe, + .inherit => .Inherit, + .check => .Pipe, + .zig_test => .Pipe, + }; + if (self.captured_stdout != null) child.stdout_behavior = .Pipe; + if (self.captured_stderr != null) child.stderr_behavior = .Pipe; + if (self.stdin != null) { + assert(child.stdin_behavior != .Inherit); + child.stdin_behavior = .Pipe; + } - if (!termMatches(expected_term, term)) { - if (builder.prominent_compile_errors) { - std.debug.print("Run step {} (expected {})\n", .{ fmtTerm(term), fmtTerm(expected_term) }); - } else { - std.debug.print("The following command {} (expected {}):\n", .{ fmtTerm(term), fmtTerm(expected_term) }); - printCmd(cwd, argv); + try child.spawn(); + var timer = try std.time.Timer.start(); + + const result = if (self.stdio == .zig_test) + evalZigTest(self, &child, prog_node) + else + evalGeneric(self, &child); + + const term = try child.wait(); + const elapsed_ns = timer.read(); + + return .{ + .stdio = try result, + .term = term, + .elapsed_ns = elapsed_ns, + .peak_rss = child.resource_usage_statistics.getMaxRss() orelse 0, + }; +} + +const StdIoResult = struct { + // These use boolean flags instead of optionals as a workaround for + // https://github.com/ziglang/zig/issues/14783 + stdout: []const u8, + stderr: []const u8, + stdout_null: bool, + stderr_null: bool, + test_results: Step.TestResults, +}; + +fn evalZigTest( + self: *RunStep, + child: *std.process.Child, 
+ prog_node: *std.Progress.Node, +) !StdIoResult { + const gpa = self.step.owner.allocator; + const arena = self.step.owner.allocator; + + var poller = std.io.poll(gpa, enum { stdout, stderr }, .{ + .stdout = child.stdout.?, + .stderr = child.stderr.?, + }); + defer poller.deinit(); + + try sendMessage(child.stdin.?, .query_test_metadata); + + const Header = std.zig.Server.Message.Header; + + const stdout = poller.fifo(.stdout); + const stderr = poller.fifo(.stderr); + + var fail_count: u32 = 0; + var skip_count: u32 = 0; + var leak_count: u32 = 0; + var test_count: u32 = 0; + + var metadata: ?TestMetadata = null; + + var sub_prog_node: ?std.Progress.Node = null; + defer if (sub_prog_node) |*n| n.end(); + + poll: while (true) { + while (stdout.readableLength() < @sizeOf(Header)) { + if (!(try poller.poll())) break :poll; + } + const header = stdout.reader().readStruct(Header) catch unreachable; + while (stdout.readableLength() < header.bytes_len) { + if (!(try poller.poll())) break :poll; } - return error.UnexpectedExit; + const body = stdout.readableSliceOfLen(header.bytes_len); + + switch (header.tag) { + .zig_version => { + if (!std.mem.eql(u8, builtin.zig_version_string, body)) { + return self.step.fail( + "zig version mismatch build runner vs compiler: '{s}' vs '{s}'", + .{ builtin.zig_version_string, body }, + ); + } + }, + .test_metadata => { + const TmHdr = std.zig.Server.Message.TestMetadata; + const tm_hdr = @ptrCast(*align(1) const TmHdr, body); + test_count = tm_hdr.tests_len; + + const names_bytes = body[@sizeOf(TmHdr)..][0 .. test_count * @sizeOf(u32)]; + const async_frame_lens_bytes = body[@sizeOf(TmHdr) + names_bytes.len ..][0 .. test_count * @sizeOf(u32)]; + const expected_panic_msgs_bytes = body[@sizeOf(TmHdr) + names_bytes.len + async_frame_lens_bytes.len ..][0 .. 
test_count * @sizeOf(u32)]; + const string_bytes = body[@sizeOf(TmHdr) + names_bytes.len + async_frame_lens_bytes.len + expected_panic_msgs_bytes.len ..][0..tm_hdr.string_bytes_len]; + + const names = std.mem.bytesAsSlice(u32, names_bytes); + const async_frame_lens = std.mem.bytesAsSlice(u32, async_frame_lens_bytes); + const expected_panic_msgs = std.mem.bytesAsSlice(u32, expected_panic_msgs_bytes); + const names_aligned = try arena.alloc(u32, names.len); + for (names_aligned, names) |*dest, src| dest.* = src; + + const async_frame_lens_aligned = try arena.alloc(u32, async_frame_lens.len); + for (async_frame_lens_aligned, async_frame_lens) |*dest, src| dest.* = src; + + const expected_panic_msgs_aligned = try arena.alloc(u32, expected_panic_msgs.len); + for (expected_panic_msgs_aligned, expected_panic_msgs) |*dest, src| dest.* = src; + + prog_node.setEstimatedTotalItems(names.len); + metadata = .{ + .string_bytes = try arena.dupe(u8, string_bytes), + .names = names_aligned, + .async_frame_lens = async_frame_lens_aligned, + .expected_panic_msgs = expected_panic_msgs_aligned, + .next_index = 0, + .prog_node = prog_node, + }; + + try requestNextTest(child.stdin.?, &metadata.?, &sub_prog_node); + }, + .test_results => { + const md = metadata.?; + + const TrHdr = std.zig.Server.Message.TestResults; + const tr_hdr = @ptrCast(*align(1) const TrHdr, body); + fail_count += @boolToInt(tr_hdr.flags.fail); + skip_count += @boolToInt(tr_hdr.flags.skip); + leak_count += @boolToInt(tr_hdr.flags.leak); + + if (tr_hdr.flags.fail or tr_hdr.flags.leak) { + const name = std.mem.sliceTo(md.string_bytes[md.names[tr_hdr.index]..], 0); + const msg = std.mem.trim(u8, stderr.readableSlice(0), "\n"); + const label = if (tr_hdr.flags.fail) "failed" else "leaked"; + if (msg.len > 0) { + try self.step.addError("'{s}' {s}: {s}", .{ name, label, msg }); + } else { + try self.step.addError("'{s}' {s}", .{ name, label }); + } + stderr.discard(msg.len); + } + + try requestNextTest(child.stdin.?, &metadata.?, &sub_prog_node); + }, + else => {}, // ignore other messages + } + + stdout.discard(body.len); } - switch (stderr_action) { - .inherit, .ignore => {}, - .expect_exact => |expected_bytes| { - if (!mem.eql(u8, expected_bytes, stderr.?)) { - std.debug.print( - \\ - \\========= Expected this stderr: ========= - \\{s} - \\========= But found: ==================== - \\{s} - \\ - , .{ expected_bytes, stderr.? }); - printCmd(cwd, argv); - return error.TestFailed; - } - }, - .expect_matches => |matches| for (matches) |match| { - if (mem.indexOf(u8, stderr.?, match) == null) { - std.debug.print( - \\ - \\========= Expected to find in stderr: ========= - \\{s} - \\========= But stderr does not contain it: ===== - \\{s} - \\ - , .{ match, stderr.? }); - printCmd(cwd, argv); - return error.TestFailed; - } - }, + if (stderr.readableLength() > 0) { + const msg = std.mem.trim(u8, try stderr.toOwnedSlice(), "\n"); + if (msg.len > 0) try self.step.result_error_msgs.append(arena, msg); } - switch (stdout_action) { - .inherit, .ignore => {}, - .expect_exact => |expected_bytes| { - if (!mem.eql(u8, expected_bytes, stdout.?)) { - std.debug.print( - \\ - \\========= Expected this stdout: ========= - \\{s} - \\========= But found: ==================== - \\{s} - \\ - , .{ expected_bytes, stdout.? 
}); - printCmd(cwd, argv); - return error.TestFailed; - } - }, - .expect_matches => |matches| for (matches) |match| { - if (mem.indexOf(u8, stdout.?, match) == null) { - std.debug.print( - \\ - \\========= Expected to find in stdout: ========= - \\{s} - \\========= But stdout does not contain it: ===== - \\{s} - \\ - , .{ match, stdout.? }); - printCmd(cwd, argv); - return error.TestFailed; - } + // Send EOF to stdin. + child.stdin.?.close(); + child.stdin = null; + + return .{ + .stdout = &.{}, + .stderr = &.{}, + .stdout_null = true, + .stderr_null = true, + .test_results = .{ + .test_count = test_count, + .fail_count = fail_count, + .skip_count = skip_count, + .leak_count = leak_count, }, + }; +} + +const TestMetadata = struct { + names: []const u32, + async_frame_lens: []const u32, + expected_panic_msgs: []const u32, + string_bytes: []const u8, + next_index: u32, + prog_node: *std.Progress.Node, + + fn testName(tm: TestMetadata, index: u32) []const u8 { + return std.mem.sliceTo(tm.string_bytes[tm.names[index]..], 0); + } +}; + +fn requestNextTest(in: fs.File, metadata: *TestMetadata, sub_prog_node: *?std.Progress.Node) !void { + while (metadata.next_index < metadata.names.len) { + const i = metadata.next_index; + metadata.next_index += 1; + + if (metadata.async_frame_lens[i] != 0) continue; + if (metadata.expected_panic_msgs[i] != 0) continue; + + const name = metadata.testName(i); + if (sub_prog_node.*) |*n| n.end(); + sub_prog_node.* = metadata.prog_node.start(name, 0); + + try sendRunTestMessage(in, i); + return; + } else { + try sendMessage(in, .exit); } } -fn failWithCacheError(man: std.Build.Cache.Manifest, err: anyerror) noreturn { - const i = man.failed_file_index orelse failWithSimpleError(err); - const pp = man.files.items[i].prefixed_path orelse failWithSimpleError(err); - const prefix = man.cache.prefixes()[pp.prefix].path orelse ""; - std.debug.print("{s}: {s}/{s}\n", .{ @errorName(err), prefix, pp.sub_path }); - std.process.exit(1); +fn sendMessage(file: std.fs.File, tag: std.zig.Client.Message.Tag) !void { + const header: std.zig.Client.Message.Header = .{ + .tag = tag, + .bytes_len = 0, + }; + try file.writeAll(std.mem.asBytes(&header)); } -fn failWithSimpleError(err: anyerror) noreturn { - std.debug.print("{s}\n", .{@errorName(err)}); - std.process.exit(1); +fn sendRunTestMessage(file: std.fs.File, index: u32) !void { + const header: std.zig.Client.Message.Header = .{ + .tag = .run_test, + .bytes_len = 4, + }; + const full_msg = std.mem.asBytes(&header) ++ std.mem.asBytes(&index); + try file.writeAll(full_msg); } -fn printCmd(cwd: ?[]const u8, argv: []const []const u8) void { - if (cwd) |yes_cwd| std.debug.print("cd {s} && ", .{yes_cwd}); - for (argv) |arg| { - std.debug.print("{s} ", .{arg}); +fn evalGeneric(self: *RunStep, child: *std.process.Child) !StdIoResult { + const arena = self.step.owner.allocator; + + if (self.stdin) |stdin| { + child.stdin.?.writeAll(stdin) catch |err| { + return self.step.fail("unable to write stdin: {s}", .{@errorName(err)}); + }; + child.stdin.?.close(); + child.stdin = null; } - std.debug.print("\n", .{}); -} -fn addPathForDynLibs(self: *RunStep, artifact: *CompileStep) void { - addPathForDynLibsInternal(&self.step, self.builder, artifact); + // These are not optionals, as a workaround for + // https://github.com/ziglang/zig/issues/14783 + var stdout_bytes: []const u8 = undefined; + var stderr_bytes: []const u8 = undefined; + var stdout_null = true; + var stderr_null = true; + + if (child.stdout) |stdout| { + if (child.stderr) |stderr| 
{ + var poller = std.io.poll(arena, enum { stdout, stderr }, .{ + .stdout = stdout, + .stderr = stderr, + }); + defer poller.deinit(); + + while (try poller.poll()) { + if (poller.fifo(.stdout).count > self.max_stdio_size) + return error.StdoutStreamTooLong; + if (poller.fifo(.stderr).count > self.max_stdio_size) + return error.StderrStreamTooLong; + } + + stdout_bytes = try poller.fifo(.stdout).toOwnedSlice(); + stderr_bytes = try poller.fifo(.stderr).toOwnedSlice(); + stdout_null = false; + stderr_null = false; + } else { + stdout_bytes = try stdout.reader().readAllAlloc(arena, self.max_stdio_size); + stdout_null = false; + } + } else if (child.stderr) |stderr| { + stderr_bytes = try stderr.reader().readAllAlloc(arena, self.max_stdio_size); + stderr_null = false; + } + + if (!stderr_null and stderr_bytes.len > 0) { + // Treat stderr as an error message. + const stderr_is_diagnostic = self.captured_stderr == null and switch (self.stdio) { + .check => |checks| !checksContainStderr(checks.items), + else => true, + }; + if (stderr_is_diagnostic) { + try self.step.result_error_msgs.append(arena, stderr_bytes); + } + } + + return .{ + .stdout = stdout_bytes, + .stderr = stderr_bytes, + .stdout_null = stdout_null, + .stderr_null = stderr_null, + .test_results = .{}, + }; } -/// This should only be used for internal usage, this is called automatically -/// for the user. -pub fn addPathForDynLibsInternal(step: *Step, builder: *std.Build, artifact: *CompileStep) void { +fn addPathForDynLibs(self: *RunStep, artifact: *CompileStep) void { + const b = self.step.owner; for (artifact.link_objects.items) |link_object| { switch (link_object) { .other_step => |other| { if (other.target.isWindows() and other.isDynamicLibrary()) { - addPathDirInternal(step, builder, fs.path.dirname(other.getOutputSource().getPath(builder)).?); - addPathForDynLibsInternal(step, builder, other); + addPathDir(self, fs.path.dirname(other.getOutputSource().getPath(b)).?); + addPathForDynLibs(self, other); } }, else => {}, } } } + +fn failForeign( + self: *RunStep, + suggested_flag: []const u8, + argv0: []const u8, + exe: *CompileStep, +) error{ MakeFailed, MakeSkipped, OutOfMemory } { + switch (self.stdio) { + .check, .zig_test => { + if (self.skip_foreign_checks) + return error.MakeSkipped; + + const b = self.step.owner; + const host_name = try b.host.target.zigTriple(b.allocator); + const foreign_name = try exe.target.zigTriple(b.allocator); + + return self.step.fail( + \\unable to spawn foreign binary '{s}' ({s}) on host system ({s}) + \\ consider using {s} or enabling skip_foreign_checks in the Run step + , .{ argv0, foreign_name, host_name, suggested_flag }); + }, + else => { + return self.step.fail("unable to spawn foreign binary '{s}'", .{argv0}); + }, + } +} + +fn hashStdIo(hh: *std.Build.Cache.HashHelper, stdio: StdIo) void { + switch (stdio) { + .infer_from_args, .inherit, .zig_test => {}, + .check => |checks| for (checks.items) |check| { + hh.add(@as(std.meta.Tag(StdIo.Check), check)); + switch (check) { + .expect_stderr_exact, + .expect_stderr_match, + .expect_stdout_exact, + .expect_stdout_match, + => |s| hh.addBytes(s), + + .expect_term => |term| { + hh.add(@as(std.meta.Tag(std.process.Child.Term), term)); + switch (term) { + .Exited => |x| hh.add(x), + .Signal, .Stopped, .Unknown => |x| hh.add(x), + } + }, + } + }, + } +} diff --git a/lib/std/Build/Step.zig b/lib/std/Build/Step.zig index 82c39ac2cc1a..88580a6cbcda 100644 --- a/lib/std/Build/Step.zig +++ b/lib/std/Build/Step.zig @@ -1,9 +1,77 @@ id: Id, name: 
[]const u8,
-makeFn: *const fn (self: *Step) anyerror!void,
+owner: *Build,
+makeFn: MakeFn,
+
 dependencies: std.ArrayList(*Step),
-loop_flag: bool,
-done_flag: bool,
+/// This field is empty during execution of the user's build script, and
+/// then populated during dependency loop checking in the build runner.
+dependants: std.ArrayListUnmanaged(*Step),
+state: State,
+/// Set this field to declare an upper bound on the amount of bytes of memory it will
+/// take to run the step. Zero means no limit.
+///
+/// The idea is to annotate steps that might use a high amount of RAM with an
+/// upper bound. For example, perhaps a particular set of unit tests require 4
+/// GiB of RAM, and those tests will be run under 4 different build
+/// configurations at once. This would potentially require 16 GiB of memory on
+/// the system if all 4 steps executed simultaneously, which could easily be
+/// greater than what is actually available, potentially causing the system to
+/// crash when using `zig build` at the default concurrency level.
+///
+/// This field causes the build runner to do two things:
+/// 1. ulimit child processes, so that they will fail if it would exceed this
+/// memory limit. This serves to enforce that this upper bound value is
+/// correct.
+/// 2. Ensure that the set of concurrent steps at any given time have a total
+/// max_rss value that does not exceed the `max_total_rss` value of the build
+/// runner. This value is configurable on the command line, and defaults to the
+/// total system memory available.
+max_rss: usize,
+
+result_error_msgs: std.ArrayListUnmanaged([]const u8),
+result_error_bundle: std.zig.ErrorBundle,
+result_cached: bool,
+result_duration_ns: ?u64,
+/// 0 means unavailable or not reported.
+result_peak_rss: usize,
+test_results: TestResults,
+
+/// The return address associated with creation of this step that can be useful
+/// to print along with debugging messages.
+debug_stack_trace: [n_debug_stack_frames]usize,
+
+pub const TestResults = struct {
+    fail_count: u32 = 0,
+    skip_count: u32 = 0,
+    leak_count: u32 = 0,
+    test_count: u32 = 0,
+
+    pub fn isSuccess(tr: TestResults) bool {
+        return tr.fail_count == 0 and tr.leak_count == 0;
+    }
+
+    pub fn passCount(tr: TestResults) u32 {
+        return tr.test_count - tr.fail_count - tr.skip_count;
+    }
+};
+
+pub const MakeFn = *const fn (self: *Step, prog_node: *std.Progress.Node) anyerror!void;
+
+const n_debug_stack_frames = 4;
+
+pub const State = enum {
+    precheck_unstarted,
+    precheck_started,
+    precheck_done,
+    running,
+    dependency_failure,
+    success,
+    failure,
+    /// This state indicates that the step did not complete, however, it also did not fail,
+    /// and it is safe to continue executing its dependencies.
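+    /// For example, a Run step returns `error.MakeSkipped` when `skip_foreign_checks`
+    /// is enabled and the host is unable to execute the target binary.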
+ skipped, +}; pub const Id = enum { top_level, @@ -17,7 +85,6 @@ pub const Id = enum { translate_c, write_file, run, - emulatable_run, check_file, check_object, config_header, @@ -38,7 +105,6 @@ pub const Id = enum { .translate_c => Build.TranslateCStep, .write_file => Build.WriteFileStep, .run => Build.RunStep, - .emulatable_run => Build.EmulatableRunStep, .check_file => Build.CheckFileStep, .check_object => Build.CheckObjectStep, .config_header => Build.ConfigHeaderStep, @@ -49,39 +115,99 @@ pub const Id = enum { } }; -pub fn init( +pub const Options = struct { id: Id, name: []const u8, - allocator: Allocator, - makeFn: *const fn (self: *Step) anyerror!void, -) Step { - return Step{ - .id = id, - .name = allocator.dupe(u8, name) catch @panic("OOM"), - .makeFn = makeFn, - .dependencies = std.ArrayList(*Step).init(allocator), - .loop_flag = false, - .done_flag = false, + owner: *Build, + makeFn: MakeFn = makeNoOp, + first_ret_addr: ?usize = null, + max_rss: usize = 0, +}; + +pub fn init(options: Options) Step { + const arena = options.owner.allocator; + + var addresses = [1]usize{0} ** n_debug_stack_frames; + const first_ret_addr = options.first_ret_addr orelse @returnAddress(); + var stack_trace = std.builtin.StackTrace{ + .instruction_addresses = &addresses, + .index = 0, }; -} + std.debug.captureStackTrace(first_ret_addr, &stack_trace); -pub fn initNoOp(id: Id, name: []const u8, allocator: Allocator) Step { - return init(id, name, allocator, makeNoOp); + return .{ + .id = options.id, + .name = arena.dupe(u8, options.name) catch @panic("OOM"), + .owner = options.owner, + .makeFn = options.makeFn, + .dependencies = std.ArrayList(*Step).init(arena), + .dependants = .{}, + .state = .precheck_unstarted, + .max_rss = options.max_rss, + .debug_stack_trace = addresses, + .result_error_msgs = .{}, + .result_error_bundle = std.zig.ErrorBundle.empty, + .result_cached = false, + .result_duration_ns = null, + .result_peak_rss = 0, + .test_results = .{}, + }; } -pub fn make(self: *Step) !void { - if (self.done_flag) return; +/// If the Step's `make` function reports `error.MakeFailed`, it indicates they +/// have already reported the error. Otherwise, we add a simple error report +/// here. 
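+/// Additionally, a failing `test_results` or a peak RSS that exceeds `max_rss`
+/// is converted into `error.MakeFailed` here.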
+pub fn make(s: *Step, prog_node: *std.Progress.Node) error{ MakeFailed, MakeSkipped }!void { + const arena = s.owner.allocator; + + s.makeFn(s, prog_node) catch |err| switch (err) { + error.MakeFailed => return error.MakeFailed, + error.MakeSkipped => return error.MakeSkipped, + else => { + s.result_error_msgs.append(arena, @errorName(err)) catch @panic("OOM"); + return error.MakeFailed; + }, + }; + + if (!s.test_results.isSuccess()) { + return error.MakeFailed; + } - try self.makeFn(self); - self.done_flag = true; + if (s.max_rss != 0 and s.result_peak_rss > s.max_rss) { + const msg = std.fmt.allocPrint(arena, "memory usage peaked at {d} bytes, exceeding the declared upper bound of {d}", .{ + s.result_peak_rss, s.max_rss, + }) catch @panic("OOM"); + s.result_error_msgs.append(arena, msg) catch @panic("OOM"); + return error.MakeFailed; + } } pub fn dependOn(self: *Step, other: *Step) void { self.dependencies.append(other) catch @panic("OOM"); } -fn makeNoOp(self: *Step) anyerror!void { - _ = self; +pub fn getStackTrace(s: *Step) std.builtin.StackTrace { + const stack_addresses = &s.debug_stack_trace; + var len: usize = 0; + while (len < n_debug_stack_frames and stack_addresses[len] != 0) { + len += 1; + } + return .{ + .instruction_addresses = stack_addresses, + .index = len, + }; +} + +fn makeNoOp(step: *Step, prog_node: *std.Progress.Node) anyerror!void { + _ = prog_node; + + var all_cached = true; + + for (step.dependencies.items) |dep| { + all_cached = all_cached and dep.result_cached; + } + + step.result_cached = all_cached; } pub fn cast(step: *Step, comptime T: type) ?*T { @@ -91,7 +217,323 @@ pub fn cast(step: *Step, comptime T: type) ?*T { return null; } +/// For debugging purposes, prints identifying information about this Step. +pub fn dump(step: *Step) void { + std.debug.getStderrMutex().lock(); + defer std.debug.getStderrMutex().unlock(); + + const stderr = std.io.getStdErr(); + const w = stderr.writer(); + const tty_config = std.debug.detectTTYConfig(stderr); + const debug_info = std.debug.getSelfDebugInfo() catch |err| { + w.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{ + @errorName(err), + }) catch {}; + return; + }; + const ally = debug_info.allocator; + w.print("name: '{s}'. 
creation stack trace:\n", .{step.name}) catch {}; + std.debug.writeStackTrace(step.getStackTrace(), w, ally, debug_info, tty_config) catch |err| { + stderr.writer().print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch {}; + return; + }; +} + const Step = @This(); const std = @import("../std.zig"); const Build = std.Build; const Allocator = std.mem.Allocator; +const assert = std.debug.assert; +const builtin = @import("builtin"); + +pub fn evalChildProcess(s: *Step, argv: []const []const u8) !void { + const arena = s.owner.allocator; + + try handleChildProcUnsupported(s, null, argv); + try handleVerbose(s.owner, null, argv); + + const result = std.ChildProcess.exec(.{ + .allocator = arena, + .argv = argv, + }) catch |err| return s.fail("unable to spawn {s}: {s}", .{ argv[0], @errorName(err) }); + + if (result.stderr.len > 0) { + try s.result_error_msgs.append(arena, result.stderr); + } + + try handleChildProcessTerm(s, result.term, null, argv); +} + +pub fn fail(step: *Step, comptime fmt: []const u8, args: anytype) error{ OutOfMemory, MakeFailed } { + try step.addError(fmt, args); + return error.MakeFailed; +} + +pub fn addError(step: *Step, comptime fmt: []const u8, args: anytype) error{OutOfMemory}!void { + const arena = step.owner.allocator; + const msg = try std.fmt.allocPrint(arena, fmt, args); + try step.result_error_msgs.append(arena, msg); +} + +/// Assumes that argv contains `--listen=-` and that the process being spawned +/// is the zig compiler - the same version that compiled the build runner. +pub fn evalZigProcess( + s: *Step, + argv: []const []const u8, + prog_node: *std.Progress.Node, +) ![]const u8 { + assert(argv.len != 0); + const b = s.owner; + const arena = b.allocator; + const gpa = arena; + + try handleChildProcUnsupported(s, null, argv); + try handleVerbose(s.owner, null, argv); + + var child = std.ChildProcess.init(argv, arena); + child.env_map = b.env_map; + child.stdin_behavior = .Pipe; + child.stdout_behavior = .Pipe; + child.stderr_behavior = .Pipe; + child.request_resource_usage_statistics = true; + + child.spawn() catch |err| return s.fail("unable to spawn {s}: {s}", .{ + argv[0], @errorName(err), + }); + var timer = try std.time.Timer.start(); + + var poller = std.io.poll(gpa, enum { stdout, stderr }, .{ + .stdout = child.stdout.?, + .stderr = child.stderr.?, + }); + defer poller.deinit(); + + try sendMessage(child.stdin.?, .update); + try sendMessage(child.stdin.?, .exit); + + const Header = std.zig.Server.Message.Header; + var result: ?[]const u8 = null; + + var node_name: std.ArrayListUnmanaged(u8) = .{}; + defer node_name.deinit(gpa); + var sub_prog_node = prog_node.start("", 0); + defer sub_prog_node.end(); + + const stdout = poller.fifo(.stdout); + + poll: while (true) { + while (stdout.readableLength() < @sizeOf(Header)) { + if (!(try poller.poll())) break :poll; + } + const header = stdout.reader().readStruct(Header) catch unreachable; + while (stdout.readableLength() < header.bytes_len) { + if (!(try poller.poll())) break :poll; + } + const body = stdout.readableSliceOfLen(header.bytes_len); + + switch (header.tag) { + .zig_version => { + if (!std.mem.eql(u8, builtin.zig_version_string, body)) { + return s.fail( + "zig version mismatch build runner vs compiler: '{s}' vs '{s}'", + .{ builtin.zig_version_string, body }, + ); + } + }, + .error_bundle => { + const EbHdr = std.zig.Server.Message.ErrorBundle; + const eb_hdr = @ptrCast(*align(1) const EbHdr, body); + const extra_bytes = + body[@sizeOf(EbHdr)..][0 .. 
@sizeOf(u32) * eb_hdr.extra_len]; + const string_bytes = + body[@sizeOf(EbHdr) + extra_bytes.len ..][0..eb_hdr.string_bytes_len]; + // TODO: use @ptrCast when the compiler supports it + const unaligned_extra = std.mem.bytesAsSlice(u32, extra_bytes); + const extra_array = try arena.alloc(u32, unaligned_extra.len); + // TODO: use @memcpy when it supports slices + for (extra_array, unaligned_extra) |*dst, src| dst.* = src; + s.result_error_bundle = .{ + .string_bytes = try arena.dupe(u8, string_bytes), + .extra = extra_array, + }; + }, + .progress => { + node_name.clearRetainingCapacity(); + try node_name.appendSlice(gpa, body); + sub_prog_node.setName(node_name.items); + }, + .emit_bin_path => { + const EbpHdr = std.zig.Server.Message.EmitBinPath; + const ebp_hdr = @ptrCast(*align(1) const EbpHdr, body); + s.result_cached = ebp_hdr.flags.cache_hit; + result = try arena.dupe(u8, body[@sizeOf(EbpHdr)..]); + }, + else => {}, // ignore other messages + } + + stdout.discard(body.len); + } + + const stderr = poller.fifo(.stderr); + if (stderr.readableLength() > 0) { + try s.result_error_msgs.append(arena, try stderr.toOwnedSlice()); + } + + // Send EOF to stdin. + child.stdin.?.close(); + child.stdin = null; + + const term = child.wait() catch |err| { + return s.fail("unable to wait for {s}: {s}", .{ argv[0], @errorName(err) }); + }; + s.result_duration_ns = timer.read(); + s.result_peak_rss = child.resource_usage_statistics.getMaxRss() orelse 0; + + // Special handling for CompileStep that is expecting compile errors. + if (s.cast(Build.CompileStep)) |compile| switch (term) { + .Exited => { + // Note that the exit code may be 0 in this case due to the + // compiler server protocol. + if (compile.expect_errors.len != 0 and s.result_error_bundle.errorMessageCount() > 0) { + return error.NeedCompileErrorCheck; + } + }, + else => {}, + }; + + try handleChildProcessTerm(s, term, null, argv); + + if (s.result_error_bundle.errorMessageCount() > 0) { + return s.fail("the following command failed with {d} compilation errors:\n{s}", .{ + s.result_error_bundle.errorMessageCount(), + try allocPrintCmd(arena, null, argv), + }); + } + + return result orelse return s.fail( + "the following command failed to communicate the compilation result:\n{s}", + .{try allocPrintCmd(arena, null, argv)}, + ); +} + +fn sendMessage(file: std.fs.File, tag: std.zig.Client.Message.Tag) !void { + const header: std.zig.Client.Message.Header = .{ + .tag = tag, + .bytes_len = 0, + }; + try file.writeAll(std.mem.asBytes(&header)); +} + +pub fn handleVerbose( + b: *Build, + opt_cwd: ?[]const u8, + argv: []const []const u8, +) error{OutOfMemory}!void { + return handleVerbose2(b, opt_cwd, null, argv); +} + +pub fn handleVerbose2( + b: *Build, + opt_cwd: ?[]const u8, + opt_env: ?*const std.process.EnvMap, + argv: []const []const u8, +) error{OutOfMemory}!void { + if (b.verbose) { + // Intention of verbose is to print all sub-process command lines to + // stderr before spawning them. 
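+        // allocPrintCmd2 prints the cwd (when set), environment variables that
+        // differ from the parent process, and then the argv.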
+ const text = try allocPrintCmd2(b.allocator, opt_cwd, opt_env, argv); + std.debug.print("{s}\n", .{text}); + } +} + +pub inline fn handleChildProcUnsupported( + s: *Step, + opt_cwd: ?[]const u8, + argv: []const []const u8, +) error{ OutOfMemory, MakeFailed }!void { + if (!std.process.can_spawn) { + return s.fail( + "unable to execute the following command: host cannot spawn child processes\n{s}", + .{try allocPrintCmd(s.owner.allocator, opt_cwd, argv)}, + ); + } +} + +pub fn handleChildProcessTerm( + s: *Step, + term: std.ChildProcess.Term, + opt_cwd: ?[]const u8, + argv: []const []const u8, +) error{ MakeFailed, OutOfMemory }!void { + const arena = s.owner.allocator; + switch (term) { + .Exited => |code| { + if (code != 0) { + return s.fail( + "the following command exited with error code {d}:\n{s}", + .{ code, try allocPrintCmd(arena, opt_cwd, argv) }, + ); + } + }, + .Signal, .Stopped, .Unknown => { + return s.fail( + "the following command terminated unexpectedly:\n{s}", + .{try allocPrintCmd(arena, opt_cwd, argv)}, + ); + }, + } +} + +pub fn allocPrintCmd( + arena: Allocator, + opt_cwd: ?[]const u8, + argv: []const []const u8, +) Allocator.Error![]u8 { + return allocPrintCmd2(arena, opt_cwd, null, argv); +} + +pub fn allocPrintCmd2( + arena: Allocator, + opt_cwd: ?[]const u8, + opt_env: ?*const std.process.EnvMap, + argv: []const []const u8, +) Allocator.Error![]u8 { + var buf: std.ArrayListUnmanaged(u8) = .{}; + if (opt_cwd) |cwd| try buf.writer(arena).print("cd {s} && ", .{cwd}); + if (opt_env) |env| { + const process_env_map = std.process.getEnvMap(arena) catch std.process.EnvMap.init(arena); + var it = env.iterator(); + while (it.next()) |entry| { + const key = entry.key_ptr.*; + const value = entry.value_ptr.*; + if (process_env_map.get(key)) |process_value| { + if (std.mem.eql(u8, value, process_value)) continue; + } + try buf.writer(arena).print("{s}={s} ", .{ key, value }); + } + } + for (argv) |arg| { + try buf.writer(arena).print("{s} ", .{arg}); + } + return buf.toOwnedSlice(arena); +} + +pub fn cacheHit(s: *Step, man: *std.Build.Cache.Manifest) !bool { + s.result_cached = man.hit() catch |err| return failWithCacheError(s, man, err); + return s.result_cached; +} + +fn failWithCacheError(s: *Step, man: *const std.Build.Cache.Manifest, err: anyerror) anyerror { + const i = man.failed_file_index orelse return err; + const pp = man.files.items[i].prefixed_path orelse return err; + const prefix = man.cache.prefixes()[pp.prefix].path orelse ""; + return s.fail("{s}: {s}/{s}", .{ @errorName(err), prefix, pp.sub_path }); +} + +pub fn writeManifest(s: *Step, man: *std.Build.Cache.Manifest) !void { + if (s.test_results.isSuccess()) { + man.writeManifest() catch |err| { + try s.addError("unable to write cache manifest: {s}", .{@errorName(err)}); + }; + } +} diff --git a/lib/std/Build/TranslateCStep.zig b/lib/std/Build/TranslateCStep.zig index fb0adfd0aec9..0cfd5d85a883 100644 --- a/lib/std/Build/TranslateCStep.zig +++ b/lib/std/Build/TranslateCStep.zig @@ -11,7 +11,6 @@ const TranslateCStep = @This(); pub const base_id = .translate_c; step: Step, -builder: *std.Build, source: std.Build.FileSource, include_dirs: std.ArrayList([]const u8), c_macros: std.ArrayList([]const u8), @@ -26,15 +25,19 @@ pub const Options = struct { optimize: std.builtin.OptimizeMode, }; -pub fn create(builder: *std.Build, options: Options) *TranslateCStep { - const self = builder.allocator.create(TranslateCStep) catch @panic("OOM"); - const source = options.source_file.dupe(builder); +pub fn create(owner: 
*std.Build, options: Options) *TranslateCStep { + const self = owner.allocator.create(TranslateCStep) catch @panic("OOM"); + const source = options.source_file.dupe(owner); self.* = TranslateCStep{ - .step = Step.init(.translate_c, "translate-c", builder.allocator, make), - .builder = builder, + .step = Step.init(.{ + .id = .translate_c, + .name = "translate-c", + .owner = owner, + .makeFn = make, + }), .source = source, - .include_dirs = std.ArrayList([]const u8).init(builder.allocator), - .c_macros = std.ArrayList([]const u8).init(builder.allocator), + .include_dirs = std.ArrayList([]const u8).init(owner.allocator), + .c_macros = std.ArrayList([]const u8).init(owner.allocator), .out_basename = undefined, .target = options.target, .optimize = options.optimize, @@ -54,7 +57,7 @@ pub const AddExecutableOptions = struct { /// Creates a step to build an executable from the translated source. pub fn addExecutable(self: *TranslateCStep, options: AddExecutableOptions) *CompileStep { - return self.builder.addExecutable(.{ + return self.step.owner.addExecutable(.{ .root_source_file = .{ .generated = &self.output_file }, .name = options.name orelse "translated_c", .version = options.version, @@ -65,43 +68,49 @@ pub fn addExecutable(self: *TranslateCStep, options: AddExecutableOptions) *Comp } pub fn addIncludeDir(self: *TranslateCStep, include_dir: []const u8) void { - self.include_dirs.append(self.builder.dupePath(include_dir)) catch @panic("OOM"); + self.include_dirs.append(self.step.owner.dupePath(include_dir)) catch @panic("OOM"); } pub fn addCheckFile(self: *TranslateCStep, expected_matches: []const []const u8) *CheckFileStep { - return CheckFileStep.create(self.builder, .{ .generated = &self.output_file }, self.builder.dupeStrings(expected_matches)); + return CheckFileStep.create( + self.step.owner, + .{ .generated = &self.output_file }, + .{ .expected_matches = expected_matches }, + ); } /// If the value is omitted, it is set to 1. /// `name` and `value` need not live longer than the function call. pub fn defineCMacro(self: *TranslateCStep, name: []const u8, value: ?[]const u8) void { - const macro = std.Build.constructCMacro(self.builder.allocator, name, value); + const macro = std.Build.constructCMacro(self.step.owner.allocator, name, value); self.c_macros.append(macro) catch @panic("OOM"); } /// name_and_value looks like [name]=[value]. If the value is omitted, it is set to 1. pub fn defineCMacroRaw(self: *TranslateCStep, name_and_value: []const u8) void { - self.c_macros.append(self.builder.dupe(name_and_value)) catch @panic("OOM"); + self.c_macros.append(self.step.owner.dupe(name_and_value)) catch @panic("OOM"); } -fn make(step: *Step) !void { +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + const b = step.owner; const self = @fieldParentPtr(TranslateCStep, "step", step); - var argv_list = std.ArrayList([]const u8).init(self.builder.allocator); - try argv_list.append(self.builder.zig_exe); + var argv_list = std.ArrayList([]const u8).init(b.allocator); + try argv_list.append(b.zig_exe); try argv_list.append("translate-c"); try argv_list.append("-lc"); try argv_list.append("--enable-cache"); + try argv_list.append("--listen=-"); if (!self.target.isNative()) { try argv_list.append("-target"); - try argv_list.append(try self.target.zigTriple(self.builder.allocator)); + try argv_list.append(try self.target.zigTriple(b.allocator)); } switch (self.optimize) { .Debug => {}, // Skip since it's the default. 
- else => try argv_list.append(self.builder.fmt("-O{s}", .{@tagName(self.optimize)})), + else => try argv_list.append(b.fmt("-O{s}", .{@tagName(self.optimize)})), } for (self.include_dirs.items) |include_dir| { @@ -114,16 +123,15 @@ fn make(step: *Step) !void { try argv_list.append(c_macro); } - try argv_list.append(self.source.getPath(self.builder)); + try argv_list.append(self.source.getPath(b)); - const output_path_nl = try self.builder.execFromStep(argv_list.items, &self.step); - const output_path = mem.trimRight(u8, output_path_nl, "\r\n"); + const output_path = try step.evalZigProcess(argv_list.items, prog_node); self.out_basename = fs.path.basename(output_path); const output_dir = fs.path.dirname(output_path).?; self.output_file.path = try fs.path.join( - self.builder.allocator, + b.allocator, &[_][]const u8{ output_dir, self.out_basename }, ); } diff --git a/lib/std/Build/WriteFileStep.zig b/lib/std/Build/WriteFileStep.zig index 3a30aba19053..dee79af5bee5 100644 --- a/lib/std/Build/WriteFileStep.zig +++ b/lib/std/Build/WriteFileStep.zig @@ -10,11 +10,11 @@ //! control. step: Step, -builder: *std.Build, /// The elements here are pointers because we need stable pointers for the /// GeneratedFile field. files: std.ArrayListUnmanaged(*File), output_source_files: std.ArrayListUnmanaged(OutputSourceFile), +generated_directory: std.Build.GeneratedFile, pub const base_id = .write_file; @@ -34,24 +34,34 @@ pub const Contents = union(enum) { copy: std.Build.FileSource, }; -pub fn init(builder: *std.Build) WriteFileStep { - return .{ - .builder = builder, - .step = Step.init(.write_file, "writefile", builder.allocator, make), +pub fn create(owner: *std.Build) *WriteFileStep { + const wf = owner.allocator.create(WriteFileStep) catch @panic("OOM"); + wf.* = .{ + .step = Step.init(.{ + .id = .write_file, + .name = "WriteFile", + .owner = owner, + .makeFn = make, + }), .files = .{}, .output_source_files = .{}, + .generated_directory = .{ .step = &wf.step }, }; + return wf; } pub fn add(wf: *WriteFileStep, sub_path: []const u8, bytes: []const u8) void { - const gpa = wf.builder.allocator; + const b = wf.step.owner; + const gpa = b.allocator; const file = gpa.create(File) catch @panic("OOM"); file.* = .{ .generated_file = .{ .step = &wf.step }, - .sub_path = wf.builder.dupePath(sub_path), - .contents = .{ .bytes = wf.builder.dupe(bytes) }, + .sub_path = b.dupePath(sub_path), + .contents = .{ .bytes = b.dupe(bytes) }, }; wf.files.append(gpa, file) catch @panic("OOM"); + + wf.maybeUpdateName(); } /// Place the file into the generated directory within the local cache, @@ -62,14 +72,18 @@ pub fn add(wf: *WriteFileStep, sub_path: []const u8, bytes: []const u8) void { /// required sub-path exists. /// This is the option expected to be used most commonly with `addCopyFile`. pub fn addCopyFile(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: []const u8) void { - const gpa = wf.builder.allocator; + const b = wf.step.owner; + const gpa = b.allocator; const file = gpa.create(File) catch @panic("OOM"); file.* = .{ .generated_file = .{ .step = &wf.step }, - .sub_path = wf.builder.dupePath(sub_path), + .sub_path = b.dupePath(sub_path), .contents = .{ .copy = source }, }; wf.files.append(gpa, file) catch @panic("OOM"); + + wf.maybeUpdateName(); + source.addStepDependencies(&wf.step); } /// A path relative to the package root. @@ -79,10 +93,26 @@ pub fn addCopyFile(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: [ /// those changes to version control. 
/// A file added this way is not available with `getFileSource`. pub fn addCopyFileToSource(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: []const u8) void { - wf.output_source_files.append(wf.builder.allocator, .{ + const b = wf.step.owner; + wf.output_source_files.append(b.allocator, .{ .contents = .{ .copy = source }, .sub_path = sub_path, }) catch @panic("OOM"); + source.addStepDependencies(&wf.step); +} + +/// A path relative to the package root. +/// Be careful with this because it updates source files. This should not be +/// used as part of the normal build process, but as a utility occasionally +/// run by a developer with intent to modify source files and then commit +/// those changes to version control. +/// A file added this way is not available with `getFileSource`. +pub fn addBytesToSource(wf: *WriteFileStep, bytes: []const u8, sub_path: []const u8) void { + const b = wf.step.owner; + wf.output_source_files.append(b.allocator, .{ + .contents = .{ .bytes = bytes }, + .sub_path = sub_path, + }) catch @panic("OOM"); } /// Gets a file source for the given sub_path. If the file does not exist, returns `null`. @@ -95,21 +125,63 @@ pub fn getFileSource(wf: *WriteFileStep, sub_path: []const u8) ?std.Build.FileSo return null; } -fn make(step: *Step) !void { +/// Returns a `FileSource` representing the base directory that contains all the +/// files from this `WriteFileStep`. +pub fn getDirectorySource(wf: *WriteFileStep) std.Build.FileSource { + return .{ .generated = &wf.generated_directory }; +} + +fn maybeUpdateName(wf: *WriteFileStep) void { + if (wf.files.items.len == 1) { + // First time adding a file; update name. + if (std.mem.eql(u8, wf.step.name, "WriteFile")) { + wf.step.name = wf.step.owner.fmt("WriteFile {s}", .{wf.files.items[0].sub_path}); + } + } +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const b = step.owner; const wf = @fieldParentPtr(WriteFileStep, "step", step); // Writing to source files is kind of an extra capability of this // WriteFileStep - arguably it should be a different step. But anyway here // it is, it happens unconditionally and does not interact with the other // files here. 
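+    // These writes go directly into the build root rather than the cache
+    // directory; for copies, fs.Dir.updateFile rewrites the destination only
+    // when it is out of date.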
+ var any_miss = false; for (wf.output_source_files.items) |output_source_file| { - const basename = fs.path.basename(output_source_file.sub_path); if (fs.path.dirname(output_source_file.sub_path)) |dirname| { - var dir = try wf.builder.build_root.handle.makeOpenPath(dirname, .{}); - defer dir.close(); - try writeFile(wf, dir, output_source_file.contents, basename); - } else { - try writeFile(wf, wf.builder.build_root.handle, output_source_file.contents, basename); + b.build_root.handle.makePath(dirname) catch |err| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.build_root, dirname, @errorName(err), + }); + }; + } + switch (output_source_file.contents) { + .bytes => |bytes| { + b.build_root.handle.writeFile(output_source_file.sub_path, bytes) catch |err| { + return step.fail("unable to write file '{}{s}': {s}", .{ + b.build_root, output_source_file.sub_path, @errorName(err), + }); + }; + any_miss = true; + }, + .copy => |file_source| { + const source_path = file_source.getPath(b); + const prev_status = fs.Dir.updateFile( + fs.cwd(), + source_path, + b.build_root.handle, + output_source_file.sub_path, + .{}, + ) catch |err| { + return step.fail("unable to update file from '{s}' to '{}{s}': {s}", .{ + source_path, b.build_root, output_source_file.sub_path, @errorName(err), + }); + }; + any_miss = any_miss or prev_status == .stale; + }, } } @@ -120,7 +192,7 @@ fn make(step: *Step) !void { // If, for example, a hard-coded path was used as the location to put WriteFileStep // files, then two WriteFileSteps executing in parallel might clobber each other. - var man = wf.builder.cache.obtain(); + var man = b.cache.obtain(); defer man.deinit(); // Random bytes to make WriteFileStep unique. Refresh this with @@ -135,76 +207,82 @@ fn make(step: *Step) !void { man.hash.addBytes(bytes); }, .copy => |file_source| { - _ = try man.addFile(file_source.getPath(wf.builder), null); + _ = try man.addFile(file_source.getPath(b), null); }, } } - if (man.hit() catch |err| failWithCacheError(man, err)) { - // Cache hit, skip writing file data. 
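+    // Step.cacheHit records `result_cached` and reports manifest failures as a
+    // step error instead of exiting the whole process.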
+ if (try step.cacheHit(&man)) { const digest = man.final(); for (wf.files.items) |file| { - file.generated_file.path = try wf.builder.cache_root.join( - wf.builder.allocator, - &.{ "o", &digest, file.sub_path }, - ); + file.generated_file.path = try b.cache_root.join(b.allocator, &.{ + "o", &digest, file.sub_path, + }); } + wf.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest }); return; } const digest = man.final(); const cache_path = "o" ++ fs.path.sep_str ++ digest; - var cache_dir = wf.builder.cache_root.handle.makeOpenPath(cache_path, .{}) catch |err| { - std.debug.print("unable to make path {s}: {s}\n", .{ cache_path, @errorName(err) }); - return err; + wf.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest }); + + var cache_dir = b.cache_root.handle.makeOpenPath(cache_path, .{}) catch |err| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.cache_root, cache_path, @errorName(err), + }); }; defer cache_dir.close(); for (wf.files.items) |file| { - const basename = fs.path.basename(file.sub_path); if (fs.path.dirname(file.sub_path)) |dirname| { - var dir = try wf.builder.cache_root.handle.makeOpenPath(dirname, .{}); - defer dir.close(); - try writeFile(wf, dir, file.contents, basename); - } else { - try writeFile(wf, cache_dir, file.contents, basename); + cache_dir.makePath(dirname) catch |err| { + return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{ + b.cache_root, cache_path, fs.path.sep, dirname, @errorName(err), + }); + }; + } + switch (file.contents) { + .bytes => |bytes| { + cache_dir.writeFile(file.sub_path, bytes) catch |err| { + return step.fail("unable to write file '{}{s}{c}{s}': {s}", .{ + b.cache_root, cache_path, fs.path.sep, file.sub_path, @errorName(err), + }); + }; + }, + .copy => |file_source| { + const source_path = file_source.getPath(b); + const prev_status = fs.Dir.updateFile( + fs.cwd(), + source_path, + cache_dir, + file.sub_path, + .{}, + ) catch |err| { + return step.fail("unable to update file from '{s}' to '{}{s}{c}{s}': {s}", .{ + source_path, + b.cache_root, + cache_path, + fs.path.sep, + file.sub_path, + @errorName(err), + }); + }; + // At this point we already will mark the step as a cache miss. + // But this is kind of a partial cache hit since individual + // file copies may be avoided. Oh well, this information is + // discarded. + _ = prev_status; + }, } - file.generated_file.path = try wf.builder.cache_root.join( - wf.builder.allocator, - &.{ cache_path, file.sub_path }, - ); - } - - try man.writeManifest(); -} - -fn writeFile(wf: *WriteFileStep, dir: fs.Dir, contents: Contents, basename: []const u8) !void { - // TODO after landing concurrency PR, improve error reporting here - switch (contents) { - .bytes => |bytes| return dir.writeFile(basename, bytes), - .copy => |file_source| { - const source_path = file_source.getPath(wf.builder); - const prev_status = try fs.Dir.updateFile(fs.cwd(), source_path, dir, basename, .{}); - _ = prev_status; // TODO logging (affected by open PR regarding concurrency) - }, + file.generated_file.path = try b.cache_root.join(b.allocator, &.{ + cache_path, file.sub_path, + }); } -} - -/// TODO consolidate this with the same function in RunStep? 
-/// Also properly deal with concurrency (see open PR) -fn failWithCacheError(man: std.Build.Cache.Manifest, err: anyerror) noreturn { - const i = man.failed_file_index orelse failWithSimpleError(err); - const pp = man.files.items[i].prefixed_path orelse failWithSimpleError(err); - const prefix = man.cache.prefixes()[pp.prefix].path orelse ""; - std.debug.print("{s}: {s}/{s}\n", .{ @errorName(err), prefix, pp.sub_path }); - std.process.exit(1); -} -fn failWithSimpleError(err: anyerror) noreturn { - std.debug.print("{s}\n", .{@errorName(err)}); - std.process.exit(1); + try step.writeManifest(&man); } const std = @import("../std.zig"); diff --git a/lib/std/Progress.zig b/lib/std/Progress.zig index f0b0e2dbd5fe..dba71663981f 100644 --- a/lib/std/Progress.zig +++ b/lib/std/Progress.zig @@ -126,6 +126,21 @@ pub const Node = struct { } } + /// Thread-safe. + pub fn setName(self: *Node, name: []const u8) void { + const progress = self.context; + progress.update_mutex.lock(); + defer progress.update_mutex.unlock(); + self.name = name; + if (self.parent) |parent| { + @atomicStore(?*Node, &parent.recently_updated_child, self, .Release); + if (parent.parent) |grand_parent| { + @atomicStore(?*Node, &grand_parent.recently_updated_child, parent, .Release); + } + if (progress.timer) |*timer| progress.maybeRefreshWithHeldLock(timer); + } + } + /// Thread-safe. 0 means unknown. pub fn setEstimatedTotalItems(self: *Node, count: usize) void { @atomicStore(usize, &self.unprotected_estimated_total_items, count, .Monotonic); @@ -174,16 +189,20 @@ pub fn maybeRefresh(self: *Progress) void { if (self.timer) |*timer| { if (!self.update_mutex.tryLock()) return; defer self.update_mutex.unlock(); - const now = timer.read(); - if (now < self.initial_delay_ns) return; - // TODO I have observed this to happen sometimes. I think we need to follow Rust's - // lead and guarantee monotonically increasing times in the std lib itself. - if (now < self.prev_refresh_timestamp) return; - if (now - self.prev_refresh_timestamp < self.refresh_rate_ns) return; - return self.refreshWithHeldLock(); + maybeRefreshWithHeldLock(self, timer); } } +fn maybeRefreshWithHeldLock(self: *Progress, timer: *std.time.Timer) void { + const now = timer.read(); + if (now < self.initial_delay_ns) return; + // TODO I have observed this to happen sometimes. I think we need to follow Rust's + // lead and guarantee monotonically increasing times in the std lib itself. + if (now < self.prev_refresh_timestamp) return; + if (now - self.prev_refresh_timestamp < self.refresh_rate_ns) return; + return self.refreshWithHeldLock(); +} + /// Updates the terminal and resets `self.next_refresh_timestamp`. Thread-safe. 
pub fn refresh(self: *Progress) void { if (!self.update_mutex.tryLock()) return; @@ -192,32 +211,28 @@ pub fn refresh(self: *Progress) void { return self.refreshWithHeldLock(); } -fn refreshWithHeldLock(self: *Progress) void { - const is_dumb = !self.supports_ansi_escape_codes and !self.is_windows_terminal; - if (is_dumb and self.dont_print_on_dumb) return; - - const file = self.terminal orelse return; - - var end: usize = 0; - if (self.columns_written > 0) { +fn clearWithHeldLock(p: *Progress, end_ptr: *usize) void { + const file = p.terminal orelse return; + var end = end_ptr.*; + if (p.columns_written > 0) { // restore the cursor position by moving the cursor // `columns_written` cells to the left, then clear the rest of the // line - if (self.supports_ansi_escape_codes) { - end += (std.fmt.bufPrint(self.output_buffer[end..], "\x1b[{d}D", .{self.columns_written}) catch unreachable).len; - end += (std.fmt.bufPrint(self.output_buffer[end..], "\x1b[0K", .{}) catch unreachable).len; + if (p.supports_ansi_escape_codes) { + end += (std.fmt.bufPrint(p.output_buffer[end..], "\x1b[{d}D", .{p.columns_written}) catch unreachable).len; + end += (std.fmt.bufPrint(p.output_buffer[end..], "\x1b[0K", .{}) catch unreachable).len; } else if (builtin.os.tag == .windows) winapi: { - std.debug.assert(self.is_windows_terminal); + std.debug.assert(p.is_windows_terminal); var info: windows.CONSOLE_SCREEN_BUFFER_INFO = undefined; if (windows.kernel32.GetConsoleScreenBufferInfo(file.handle, &info) != windows.TRUE) { // stop trying to write to this file - self.terminal = null; + p.terminal = null; break :winapi; } var cursor_pos = windows.COORD{ - .X = info.dwCursorPosition.X - @intCast(windows.SHORT, self.columns_written), + .X = info.dwCursorPosition.X - @intCast(windows.SHORT, p.columns_written), .Y = info.dwCursorPosition.Y, }; @@ -235,7 +250,7 @@ fn refreshWithHeldLock(self: *Progress) void { &written, ) != windows.TRUE) { // stop trying to write to this file - self.terminal = null; + p.terminal = null; break :winapi; } if (windows.kernel32.FillConsoleOutputCharacterW( @@ -246,22 +261,33 @@ fn refreshWithHeldLock(self: *Progress) void { &written, ) != windows.TRUE) { // stop trying to write to this file - self.terminal = null; + p.terminal = null; break :winapi; } if (windows.kernel32.SetConsoleCursorPosition(file.handle, cursor_pos) != windows.TRUE) { // stop trying to write to this file - self.terminal = null; + p.terminal = null; break :winapi; } } else { // we are in a "dumb" terminal like in acme or writing to a file - self.output_buffer[end] = '\n'; + p.output_buffer[end] = '\n'; end += 1; } - self.columns_written = 0; + p.columns_written = 0; } + end_ptr.* = end; +} + +fn refreshWithHeldLock(self: *Progress) void { + const is_dumb = !self.supports_ansi_escape_codes and !self.is_windows_terminal; + if (is_dumb and self.dont_print_on_dumb) return; + + const file = self.terminal orelse return; + + var end: usize = 0; + clearWithHeldLock(self, &end); if (!self.done) { var need_ellipse = false; @@ -318,6 +344,26 @@ pub fn log(self: *Progress, comptime format: []const u8, args: anytype) void { self.columns_written = 0; } +/// Allows the caller to freely write to stderr until unlock_stderr() is called. +/// During the lock, the progress information is cleared from the terminal. 
+pub fn lock_stderr(p: *Progress) void { + p.update_mutex.lock(); + if (p.terminal) |file| { + var end: usize = 0; + clearWithHeldLock(p, &end); + _ = file.write(p.output_buffer[0..end]) catch { + // stop trying to write to this file + p.terminal = null; + }; + } + std.debug.getStderrMutex().lock(); +} + +pub fn unlock_stderr(p: *Progress) void { + std.debug.getStderrMutex().unlock(); + p.update_mutex.unlock(); +} + fn bufWrite(self: *Progress, end: *usize, comptime format: []const u8, args: anytype) void { if (std.fmt.bufPrint(self.output_buffer[end.*..], format, args)) |written| { const amt = written.len; diff --git a/lib/std/Thread.zig b/lib/std/Thread.zig index 27f7fa5030e9..e3345e4a4200 100644 --- a/lib/std/Thread.zig +++ b/lib/std/Thread.zig @@ -16,6 +16,8 @@ pub const Mutex = @import("Thread/Mutex.zig"); pub const Semaphore = @import("Thread/Semaphore.zig"); pub const Condition = @import("Thread/Condition.zig"); pub const RwLock = @import("Thread/RwLock.zig"); +pub const Pool = @import("Thread/Pool.zig"); +pub const WaitGroup = @import("Thread/WaitGroup.zig"); pub const use_pthreads = target.os.tag != .windows and target.os.tag != .wasi and builtin.link_libc; const is_gnu = target.abi.isGnu(); diff --git a/src/ThreadPool.zig b/lib/std/Thread/Pool.zig similarity index 86% rename from src/ThreadPool.zig rename to lib/std/Thread/Pool.zig index fde5ed27db15..ed1a4dc052fe 100644 --- a/src/ThreadPool.zig +++ b/lib/std/Thread/Pool.zig @@ -1,6 +1,6 @@ const std = @import("std"); const builtin = @import("builtin"); -const ThreadPool = @This(); +const Pool = @This(); const WaitGroup = @import("WaitGroup.zig"); mutex: std.Thread.Mutex = .{}, @@ -17,7 +17,14 @@ const Runnable = struct { const RunProto = *const fn (*Runnable) void; -pub fn init(pool: *ThreadPool, allocator: std.mem.Allocator) !void { +pub const Options = struct { + allocator: std.mem.Allocator, + n_jobs: ?u32 = null, +}; + +pub fn init(pool: *Pool, options: Options) !void { + const allocator = options.allocator; + pool.* = .{ .allocator = allocator, .threads = &[_]std.Thread{}, @@ -27,7 +34,7 @@ pub fn init(pool: *ThreadPool, allocator: std.mem.Allocator) !void { return; } - const thread_count = std.math.max(1, std.Thread.getCpuCount() catch 1); + const thread_count = options.n_jobs orelse @max(1, std.Thread.getCpuCount() catch 1); pool.threads = try allocator.alloc(std.Thread, thread_count); errdefer allocator.free(pool.threads); @@ -41,12 +48,12 @@ pub fn init(pool: *ThreadPool, allocator: std.mem.Allocator) !void { } } -pub fn deinit(pool: *ThreadPool) void { +pub fn deinit(pool: *Pool) void { pool.join(pool.threads.len); // kill and join all threads. 
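A minimal sketch of the relocated thread pool API (std.Thread.Pool with the new Options struct), assuming WaitGroup keeps its existing start/finish interface after the move; the job count is arbitrary:

    const std = @import("std");

    test "Thread.Pool sketch" {
        var pool: std.Thread.Pool = undefined;
        // n_jobs is optional; null means one thread per detected CPU.
        try pool.init(.{ .allocator = std.testing.allocator, .n_jobs = 4 });
        defer pool.deinit();

        var wait_group: std.Thread.WaitGroup = .{};

        wait_group.start();
        try pool.spawn(struct {
            fn run(wg: *std.Thread.WaitGroup) void {
                defer wg.finish();
                // ... parallel work goes here ...
            }
        }.run, .{&wait_group});

        // Blocks until the wait group is done, helping with queued work meanwhile.
        pool.waitAndWork(&wait_group);
    }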
pool.* = undefined; } -fn join(pool: *ThreadPool, spawned: usize) void { +fn join(pool: *Pool, spawned: usize) void { if (builtin.single_threaded) { return; } @@ -69,7 +76,7 @@ fn join(pool: *ThreadPool, spawned: usize) void { pool.allocator.free(pool.threads); } -pub fn spawn(pool: *ThreadPool, comptime func: anytype, args: anytype) !void { +pub fn spawn(pool: *Pool, comptime func: anytype, args: anytype) !void { if (builtin.single_threaded) { @call(.auto, func, args); return; @@ -78,7 +85,7 @@ pub fn spawn(pool: *ThreadPool, comptime func: anytype, args: anytype) !void { const Args = @TypeOf(args); const Closure = struct { arguments: Args, - pool: *ThreadPool, + pool: *Pool, run_node: RunQueue.Node = .{ .data = .{ .runFn = runFn } }, fn runFn(runnable: *Runnable) void { @@ -112,7 +119,7 @@ pub fn spawn(pool: *ThreadPool, comptime func: anytype, args: anytype) !void { pool.cond.signal(); } -fn worker(pool: *ThreadPool) void { +fn worker(pool: *Pool) void { pool.mutex.lock(); defer pool.mutex.unlock(); @@ -135,7 +142,7 @@ fn worker(pool: *ThreadPool) void { } } -pub fn waitAndWork(pool: *ThreadPool, wait_group: *WaitGroup) void { +pub fn waitAndWork(pool: *Pool, wait_group: *WaitGroup) void { while (!wait_group.isDone()) { if (blk: { pool.mutex.lock(); diff --git a/src/WaitGroup.zig b/lib/std/Thread/WaitGroup.zig similarity index 100% rename from src/WaitGroup.zig rename to lib/std/Thread/WaitGroup.zig diff --git a/lib/std/c.zig b/lib/std/c.zig index 9fc3b1d57e8a..a0b65c31c827 100644 --- a/lib/std/c.zig +++ b/lib/std/c.zig @@ -153,7 +153,8 @@ pub extern "c" fn linkat(oldfd: c.fd_t, oldpath: [*:0]const u8, newfd: c.fd_t, n pub extern "c" fn unlink(path: [*:0]const u8) c_int; pub extern "c" fn unlinkat(dirfd: c.fd_t, path: [*:0]const u8, flags: c_uint) c_int; pub extern "c" fn getcwd(buf: [*]u8, size: usize) ?[*]u8; -pub extern "c" fn waitpid(pid: c.pid_t, stat_loc: ?*c_int, options: c_int) c.pid_t; +pub extern "c" fn waitpid(pid: c.pid_t, status: ?*c_int, options: c_int) c.pid_t; +pub extern "c" fn wait4(pid: c.pid_t, status: ?*c_int, options: c_int, ru: ?*c.rusage) c.pid_t; pub extern "c" fn fork() c_int; pub extern "c" fn access(path: [*:0]const u8, mode: c_uint) c_int; pub extern "c" fn faccessat(dirfd: c.fd_t, path: [*:0]const u8, mode: c_uint, flags: c_uint) c_int; diff --git a/lib/std/child_process.zig b/lib/std/child_process.zig index 3bdef3177a1a..3748ca687768 100644 --- a/lib/std/child_process.zig +++ b/lib/std/child_process.zig @@ -17,10 +17,12 @@ const Os = std.builtin.Os; const TailQueue = std.TailQueue; const maxInt = std.math.maxInt; const assert = std.debug.assert; +const is_darwin = builtin.target.isDarwin(); pub const ChildProcess = struct { pub const Id = switch (builtin.os.tag) { .windows => windows.HANDLE, + .wasi => void, else => os.pid_t, }; @@ -70,6 +72,43 @@ pub const ChildProcess = struct { /// Darwin-only. Start child process in suspended state as if SIGSTOP was sent. start_suspended: bool = false, + /// Set to true to obtain rusage information for the child process. + /// Depending on the target platform and implementation status, the + /// requested statistics may or may not be available. If they are + /// available, then the `resource_usage_statistics` field will be populated + /// after calling `wait`. + /// On Linux, this obtains rusage statistics from wait4(). + request_resource_usage_statistics: bool = false, + + /// This is available after calling wait if + /// `request_resource_usage_statistics` was set to `true` before calling + /// `spawn`. 
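A hedged sketch of requesting child-process resource usage with the new field; `gpa` is a caller-supplied allocator, the argv is arbitrary, and `getMaxRss` refers to the accessor defined in the ResourceUsageStatistics struct that follows below:

    const std = @import("std");

    fn reportPeakRss(gpa: std.mem.Allocator) !void {
        var child = std.ChildProcess.init(&.{ "zig", "version" }, gpa);
        child.request_resource_usage_statistics = true;

        try child.spawn();
        _ = try child.wait();

        // Populated only where supported (currently Linux, via wait4()).
        if (child.resource_usage_statistics.getMaxRss()) |peak_rss| {
            std.debug.print("peak RSS: {d} bytes\n", .{peak_rss});
        }
    }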
+ resource_usage_statistics: ResourceUsageStatistics = .{}, + + pub const ResourceUsageStatistics = struct { + rusage: @TypeOf(rusage_init) = rusage_init, + + /// Returns the peak resident set size of the child process, in bytes, + /// if available. + pub inline fn getMaxRss(rus: ResourceUsageStatistics) ?usize { + switch (builtin.os.tag) { + .linux => { + if (rus.rusage) |ru| { + return @intCast(usize, ru.maxrss) * 1024; + } else { + return null; + } + }, + else => return null, + } + } + + const rusage_init = switch (builtin.os.tag) { + .linux => @as(?std.os.rusage, null), + else => {}, + }; + }; + pub const Arg0Expand = os.Arg0Expand; pub const SpawnError = error{ @@ -332,7 +371,16 @@ pub const ChildProcess = struct { } fn waitUnwrapped(self: *ChildProcess) !void { - const res: os.WaitPidResult = os.waitpid(self.id, 0); + const res: os.WaitPidResult = res: { + if (builtin.os.tag == .linux and self.request_resource_usage_statistics) { + var ru: std.os.rusage = undefined; + const res = os.wait4(self.id, 0, &ru); + self.resource_usage_statistics.rusage = ru; + break :res res; + } + + break :res os.waitpid(self.id, 0); + }; const status = res.status; self.cleanupStreams(); self.handleWaitResult(status); diff --git a/lib/std/debug.zig b/lib/std/debug.zig index 97acf81af60a..6abceed9b8d4 100644 --- a/lib/std/debug.zig +++ b/lib/std/debug.zig @@ -635,6 +635,7 @@ pub const TTY = struct { pub const Color = enum { Red, Green, + Yellow, Cyan, White, Dim, @@ -659,6 +660,7 @@ pub const TTY = struct { const color_string = switch (color) { .Red => "\x1b[31;1m", .Green => "\x1b[32;1m", + .Yellow => "\x1b[33;1m", .Cyan => "\x1b[36;1m", .White => "\x1b[37;1m", .Bold => "\x1b[1m", @@ -671,6 +673,7 @@ pub const TTY = struct { const attributes = switch (color) { .Red => windows.FOREGROUND_RED | windows.FOREGROUND_INTENSITY, .Green => windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY, + .Yellow => windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY, .Cyan => windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY, .White, .Bold => windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY, .Dim => windows.FOREGROUND_INTENSITY, @@ -682,6 +685,36 @@ pub const TTY = struct { }, }; } + + pub fn writeDEC(conf: Config, writer: anytype, codepoint: u8) !void { + const bytes = switch (conf) { + .no_color, .windows_api => switch (codepoint) { + 0x50...0x5e => @as(*const [1]u8, &codepoint), + 0x6a => "+", // ┘ + 0x6b => "+", // ┐ + 0x6c => "+", // ┌ + 0x6d => "+", // └ + 0x6e => "+", // ┼ + 0x71 => "-", // ─ + 0x74 => "+", // ├ + 0x75 => "+", // ┤ + 0x76 => "+", // ┴ + 0x77 => "+", // ┬ + 0x78 => "|", // │ + else => " ", // TODO + }, + .escape_codes => switch (codepoint) { + // Here we avoid writing the DEC beginning sequence and + // ending sequence in separate syscalls by putting the + // beginning and ending sequence into the same string + // literals, to prevent terminals ending up in bad states + // in case a crash happens between syscalls. 
+ inline 0x50...0x7f => |x| "\x1B\x28\x30" ++ [1]u8{x} ++ "\x1B\x28\x42", + else => unreachable, + }, + }; + return writer.writeAll(bytes); + } }; }; diff --git a/lib/std/fifo.zig b/lib/std/fifo.zig index b7c8f761d3dd..eddbff5af072 100644 --- a/lib/std/fifo.zig +++ b/lib/std/fifo.zig @@ -164,6 +164,17 @@ pub fn LinearFifo( return self.readableSliceMut(offset); } + pub fn readableSliceOfLen(self: *Self, len: usize) []const T { + assert(len <= self.count); + const buf = self.readableSlice(0); + if (buf.len >= len) { + return buf[0..len]; + } else { + self.realign(); + return self.readableSlice(0)[0..len]; + } + } + /// Discard first `count` items in the fifo pub fn discard(self: *Self, count: usize) void { assert(count <= self.count); @@ -383,6 +394,22 @@ pub fn LinearFifo( self.discard(try dest_writer.write(self.readableSlice(0))); } } + + pub fn toOwnedSlice(self: *Self) Allocator.Error![]T { + if (self.head != 0) self.realign(); + assert(self.head == 0); + assert(self.count <= self.buf.len); + const allocator = self.allocator; + if (allocator.resize(self.buf, self.count)) { + const result = self.buf[0..self.count]; + self.* = Self.init(allocator); + return result; + } + const new_memory = try allocator.dupe(T, self.buf[0..self.count]); + allocator.free(self.buf); + self.* = Self.init(allocator); + return new_memory; + } }; } diff --git a/lib/std/fs/file.zig b/lib/std/fs/file.zig index bf93a6123931..8235b8aecf8f 100644 --- a/lib/std/fs/file.zig +++ b/lib/std/fs/file.zig @@ -1048,12 +1048,27 @@ pub const File = struct { /// Returns the number of bytes read. If the number read is smaller than the total bytes /// from all the buffers, it means the file reached the end. Reaching the end of a file /// is not an error condition. - /// The `iovecs` parameter is mutable because this function needs to mutate the fields in - /// order to handle partial reads from the underlying OS layer. - /// See https://github.com/ziglang/zig/issues/7699 + /// + /// The `iovecs` parameter is mutable because: + /// * This function needs to mutate the fields in order to handle partial + /// reads from the underlying OS layer. + /// * The OS layer expects pointer addresses to be inside the application's address space + /// even if the length is zero. Meanwhile, in Zig, slices may have undefined pointer + /// addresses when the length is zero. So this function modifies the iov_base fields + /// when the length is zero. + /// + /// Related open issue: https://github.com/ziglang/zig/issues/7699 pub fn readvAll(self: File, iovecs: []os.iovec) ReadError!usize { if (iovecs.len == 0) return 0; + // We use the address of this local variable for all zero-length + // vectors so that the OS does not complain that we are giving it + // addresses outside the application's address space. + var garbage: [1]u8 = undefined; + for (iovecs) |*v| { + if (v.iov_len == 0) v.iov_base = &garbage; + } + var i: usize = 0; var off: usize = 0; while (true) { @@ -1181,13 +1196,26 @@ pub const File = struct { } } - /// The `iovecs` parameter is mutable because this function needs to mutate the fields in - /// order to handle partial writes from the underlying OS layer. + /// The `iovecs` parameter is mutable because: + /// * This function needs to mutate the fields in order to handle partial + /// writes from the underlying OS layer. + /// * The OS layer expects pointer addresses to be inside the application's address space + /// even if the length is zero. 
Meanwhile, in Zig, slices may have undefined pointer + /// addresses when the length is zero. So this function modifies the iov_base fields + /// when the length is zero. /// See https://github.com/ziglang/zig/issues/7699 /// See equivalent function: `std.net.Stream.writevAll`. pub fn writevAll(self: File, iovecs: []os.iovec_const) WriteError!void { if (iovecs.len == 0) return; + // We use the address of this local variable for all zero-length + // vectors so that the OS does not complain that we are giving it + // addresses outside the application's address space. + var garbage: [1]u8 = undefined; + for (iovecs) |*v| { + if (v.iov_len == 0) v.iov_base = &garbage; + } + var i: usize = 0; while (true) { var amt = try self.writev(iovecs[i..]); diff --git a/lib/std/fs/test.zig b/lib/std/fs/test.zig index 16458d7dc490..1fbd7dbd6348 100644 --- a/lib/std/fs/test.zig +++ b/lib/std/fs/test.zig @@ -1124,17 +1124,31 @@ test "open file with exclusive lock twice, make sure second lock waits" { test "open file with exclusive nonblocking lock twice (absolute paths)" { if (builtin.os.tag == .wasi) return error.SkipZigTest; - const allocator = testing.allocator; + var random_bytes: [12]u8 = undefined; + std.crypto.random.bytes(&random_bytes); - const cwd = try std.process.getCwdAlloc(allocator); - defer allocator.free(cwd); - const file_paths: [2][]const u8 = .{ cwd, "zig-test-absolute-paths.txt" }; - const filename = try fs.path.resolve(allocator, &file_paths); - defer allocator.free(filename); + var random_b64: [fs.base64_encoder.calcSize(random_bytes.len)]u8 = undefined; + _ = fs.base64_encoder.encode(&random_b64, &random_bytes); - const file1 = try fs.createFileAbsolute(filename, .{ .lock = .Exclusive, .lock_nonblocking = true }); + const sub_path = random_b64 ++ "-zig-test-absolute-paths.txt"; - const file2 = fs.createFileAbsolute(filename, .{ .lock = .Exclusive, .lock_nonblocking = true }); + const gpa = testing.allocator; + + const cwd = try std.process.getCwdAlloc(gpa); + defer gpa.free(cwd); + + const filename = try fs.path.resolve(gpa, &[_][]const u8{ cwd, sub_path }); + defer gpa.free(filename); + + const file1 = try fs.createFileAbsolute(filename, .{ + .lock = .Exclusive, + .lock_nonblocking = true, + }); + + const file2 = fs.createFileAbsolute(filename, .{ + .lock = .Exclusive, + .lock_nonblocking = true, + }); file1.close(); try testing.expectError(error.WouldBlock, file2); diff --git a/lib/std/heap.zig b/lib/std/heap.zig index c15e5d0ec29b..e2d000f31823 100644 --- a/lib/std/heap.zig +++ b/lib/std/heap.zig @@ -19,6 +19,7 @@ pub const GeneralPurposeAllocator = @import("heap/general_purpose_allocator.zig" pub const WasmAllocator = @import("heap/WasmAllocator.zig"); pub const WasmPageAllocator = @import("heap/WasmPageAllocator.zig"); pub const PageAllocator = @import("heap/PageAllocator.zig"); +pub const ThreadSafeAllocator = @import("heap/ThreadSafeAllocator.zig"); const memory_pool = @import("heap/memory_pool.zig"); pub const MemoryPool = memory_pool.MemoryPool; diff --git a/lib/std/heap/ThreadSafeAllocator.zig b/lib/std/heap/ThreadSafeAllocator.zig new file mode 100644 index 000000000000..fe10eb2fdb27 --- /dev/null +++ b/lib/std/heap/ThreadSafeAllocator.zig @@ -0,0 +1,45 @@ +//! Wraps a non-thread-safe allocator and makes it thread-safe. 
+ +child_allocator: Allocator, +mutex: std.Thread.Mutex = .{}, + +pub fn allocator(self: *ThreadSafeAllocator) Allocator { + return .{ + .ptr = self, + .vtable = &.{ + .alloc = alloc, + .resize = resize, + .free = free, + }, + }; +} + +fn alloc(ctx: *anyopaque, n: usize, log2_ptr_align: u8, ra: usize) ?[*]u8 { + const self = @ptrCast(*ThreadSafeAllocator, @alignCast(@alignOf(ThreadSafeAllocator), ctx)); + self.mutex.lock(); + defer self.mutex.unlock(); + + return self.child_allocator.rawAlloc(n, log2_ptr_align, ra); +} + +fn resize(ctx: *anyopaque, buf: []u8, log2_buf_align: u8, new_len: usize, ret_addr: usize) bool { + const self = @ptrCast(*ThreadSafeAllocator, @alignCast(@alignOf(ThreadSafeAllocator), ctx)); + + self.mutex.lock(); + defer self.mutex.unlock(); + + return self.child_allocator.rawResize(buf, log2_buf_align, new_len, ret_addr); +} + +fn free(ctx: *anyopaque, buf: []u8, log2_buf_align: u8, ret_addr: usize) void { + const self = @ptrCast(*ThreadSafeAllocator, @alignCast(@alignOf(ThreadSafeAllocator), ctx)); + + self.mutex.lock(); + defer self.mutex.unlock(); + + return self.child_allocator.rawFree(buf, log2_buf_align, ret_addr); +} + +const std = @import("../std.zig"); +const ThreadSafeAllocator = @This(); +const Allocator = std.mem.Allocator; diff --git a/lib/std/mem.zig b/lib/std/mem.zig index b9b5fb100404..a486713e1c63 100644 --- a/lib/std/mem.zig +++ b/lib/std/mem.zig @@ -196,13 +196,8 @@ test "Allocator.resize" { /// dest.len must be >= source.len. /// If the slices overlap, dest.ptr must be <= src.ptr. pub fn copy(comptime T: type, dest: []T, source: []const T) void { - // TODO instead of manually doing this check for the whole array - // and turning off runtime safety, the compiler should detect loops like - // this and automatically omit safety checks for loops - @setRuntimeSafety(false); - assert(dest.len >= source.len); - for (source, 0..) |s, i| - dest[i] = s; + for (dest[0..source.len], source) |*d, s| + d.* = s; } /// Copy all of source into dest at position 0. @@ -611,8 +606,8 @@ test "lessThan" { pub fn eql(comptime T: type, a: []const T, b: []const T) bool { if (a.len != b.len) return false; if (a.ptr == b.ptr) return true; - for (a, 0..) |item, index| { - if (b[index] != item) return false; + for (a, b) |a_elem, b_elem| { + if (a_elem != b_elem) return false; } return true; } diff --git a/lib/std/os.zig b/lib/std/os.zig index 069a910408c4..722028b3f88b 100644 --- a/lib/std/os.zig +++ b/lib/std/os.zig @@ -766,6 +766,9 @@ pub fn read(fd: fd_t, buf: []u8) ReadError!usize { /// This operation is non-atomic on the following systems: /// * Windows /// On these systems, the read races with concurrent writes to the same file descriptor. +/// +/// This function assumes that all vectors, including zero-length vectors, have +/// a pointer within the address space of the application. pub fn readv(fd: fd_t, iov: []const iovec) ReadError!usize { if (builtin.os.tag == .windows) { // TODO improve this to use ReadFileScatter @@ -1167,6 +1170,9 @@ pub fn write(fd: fd_t, bytes: []const u8) WriteError!usize { /// used to perform the I/O. `error.WouldBlock` is not possible on Windows. /// /// If `iov.len` is larger than `IOV_MAX`, a partial write will occur. +/// +/// This function assumes that all vectors, including zero-length vectors, have +/// a pointer within the address space of the application. 
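A small usage sketch for the new ThreadSafeAllocator, assuming a GeneralPurposeAllocator as the (non-thread-safe) child allocator:

    const std = @import("std");

    test "ThreadSafeAllocator sketch" {
        var gpa_state = std.heap.GeneralPurposeAllocator(.{}){};
        defer _ = gpa_state.deinit();

        var tsa = std.heap.ThreadSafeAllocator{ .child_allocator = gpa_state.allocator() };
        const allocator = tsa.allocator();

        // Each alloc/resize/free takes the wrapper's mutex before forwarding
        // to the child allocator, so `allocator` may be shared across threads.
        const buf = try allocator.alloc(u8, 123);
        defer allocator.free(buf);
    }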
pub fn writev(fd: fd_t, iov: []const iovec_const) WriteError!usize { if (builtin.os.tag == .windows) { // TODO improve this to use WriteFileScatter @@ -4000,8 +4006,28 @@ pub const WaitPidResult = struct { pub fn waitpid(pid: pid_t, flags: u32) WaitPidResult { const Status = if (builtin.link_libc) c_int else u32; var status: Status = undefined; + const coerced_flags = if (builtin.link_libc) @intCast(c_int, flags) else flags; + while (true) { + const rc = system.waitpid(pid, &status, coerced_flags); + switch (errno(rc)) { + .SUCCESS => return .{ + .pid = @intCast(pid_t, rc), + .status = @bitCast(u32, status), + }, + .INTR => continue, + .CHILD => unreachable, // The process specified does not exist. It would be a race condition to handle this error. + .INVAL => unreachable, // Invalid flags. + else => unreachable, + } + } +} + +pub fn wait4(pid: pid_t, flags: u32, ru: ?*rusage) WaitPidResult { + const Status = if (builtin.link_libc) c_int else u32; + var status: Status = undefined; + const coerced_flags = if (builtin.link_libc) @intCast(c_int, flags) else flags; while (true) { - const rc = system.waitpid(pid, &status, if (builtin.link_libc) @intCast(c_int, flags) else flags); + const rc = system.wait4(pid, &status, coerced_flags, ru); switch (errno(rc)) { .SUCCESS => return .{ .pid = @intCast(pid_t, rc), diff --git a/lib/std/os/linux.zig b/lib/std/os/linux.zig index b151e5f235d5..53f6030b5fad 100644 --- a/lib/std/os/linux.zig +++ b/lib/std/os/linux.zig @@ -944,6 +944,16 @@ pub fn waitpid(pid: pid_t, status: *u32, flags: u32) usize { return syscall4(.wait4, @bitCast(usize, @as(isize, pid)), @ptrToInt(status), flags, 0); } +pub fn wait4(pid: pid_t, status: *u32, flags: u32, usage: ?*rusage) usize { + return syscall4( + .wait4, + @bitCast(usize, @as(isize, pid)), + @ptrToInt(status), + flags, + @ptrToInt(usage), + ); +} + pub fn waitid(id_type: P, id: i32, infop: *siginfo_t, flags: u32) usize { return syscall5(.waitid, @enumToInt(id_type), @bitCast(usize, @as(isize, id)), @ptrToInt(infop), flags, 0); } @@ -1716,26 +1726,26 @@ pub fn pidfd_send_signal(pidfd: fd_t, sig: i32, info: ?*siginfo_t, flags: u32) u ); } -pub fn process_vm_readv(pid: pid_t, local: [*]const iovec, local_count: usize, remote: [*]const iovec, remote_count: usize, flags: usize) usize { +pub fn process_vm_readv(pid: pid_t, local: []iovec, remote: []const iovec_const, flags: usize) usize { return syscall6( .process_vm_readv, @bitCast(usize, @as(isize, pid)), - @ptrToInt(local), - local_count, - @ptrToInt(remote), - remote_count, + @ptrToInt(local.ptr), + local.len, + @ptrToInt(remote.ptr), + remote.len, flags, ); } -pub fn process_vm_writev(pid: pid_t, local: [*]const iovec, local_count: usize, remote: [*]const iovec, remote_count: usize, flags: usize) usize { +pub fn process_vm_writev(pid: pid_t, local: []const iovec_const, remote: []const iovec_const, flags: usize) usize { return syscall6( .process_vm_writev, @bitCast(usize, @as(isize, pid)), - @ptrToInt(local), - local_count, - @ptrToInt(remote), - remote_count, + @ptrToInt(local.ptr), + local.len, + @ptrToInt(remote.ptr), + remote.len, flags, ); } @@ -1820,6 +1830,23 @@ pub fn seccomp(operation: u32, flags: u32, args: ?*const anyopaque) usize { return syscall3(.seccomp, operation, flags, @ptrToInt(args)); } +pub fn ptrace( + req: u32, + pid: pid_t, + addr: usize, + data: usize, + addr2: usize, +) usize { + return syscall5( + .ptrace, + req, + @bitCast(usize, @as(isize, pid)), + addr, + data, + addr2, + ); +} + pub const E = switch (native_arch) { .mips, .mipsel => 
@import("linux/errno/mips.zig").E, .sparc, .sparcel, .sparc64 => @import("linux/errno/sparc.zig").E, @@ -5721,3 +5748,40 @@ pub const AUDIT = struct { } }; }; + +pub const PTRACE = struct { + pub const TRACEME = 0; + pub const PEEKTEXT = 1; + pub const PEEKDATA = 2; + pub const PEEKUSER = 3; + pub const POKETEXT = 4; + pub const POKEDATA = 5; + pub const POKEUSER = 6; + pub const CONT = 7; + pub const KILL = 8; + pub const SINGLESTEP = 9; + pub const GETREGS = 12; + pub const SETREGS = 13; + pub const GETFPREGS = 14; + pub const SETFPREGS = 15; + pub const ATTACH = 16; + pub const DETACH = 17; + pub const GETFPXREGS = 18; + pub const SETFPXREGS = 19; + pub const SYSCALL = 24; + pub const SETOPTIONS = 0x4200; + pub const GETEVENTMSG = 0x4201; + pub const GETSIGINFO = 0x4202; + pub const SETSIGINFO = 0x4203; + pub const GETREGSET = 0x4204; + pub const SETREGSET = 0x4205; + pub const SEIZE = 0x4206; + pub const INTERRUPT = 0x4207; + pub const LISTEN = 0x4208; + pub const PEEKSIGINFO = 0x4209; + pub const GETSIGMASK = 0x420a; + pub const SETSIGMASK = 0x420b; + pub const SECCOMP_GET_FILTER = 0x420c; + pub const SECCOMP_GET_METADATA = 0x420d; + pub const GET_SYSCALL_INFO = 0x420e; +}; diff --git a/lib/std/os/linux/io_uring.zig b/lib/std/os/linux/io_uring.zig index 61bf39105f2f..4dbf87c501a2 100644 --- a/lib/std/os/linux/io_uring.zig +++ b/lib/std/os/linux/io_uring.zig @@ -1728,10 +1728,12 @@ test "writev/fsync/readv" { }; defer ring.deinit(); + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + const path = "test_io_uring_writev_fsync_readv"; - const file = try std.fs.cwd().createFile(path, .{ .read = true, .truncate = true }); + const file = try tmp.dir.createFile(path, .{ .read = true, .truncate = true }); defer file.close(); - defer std.fs.cwd().deleteFile(path) catch {}; const fd = file.handle; const buffer_write = [_]u8{42} ** 128; @@ -1796,10 +1798,11 @@ test "write/read" { }; defer ring.deinit(); + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); const path = "test_io_uring_write_read"; - const file = try std.fs.cwd().createFile(path, .{ .read = true, .truncate = true }); + const file = try tmp.dir.createFile(path, .{ .read = true, .truncate = true }); defer file.close(); - defer std.fs.cwd().deleteFile(path) catch {}; const fd = file.handle; const buffer_write = [_]u8{97} ** 20; @@ -1842,10 +1845,12 @@ test "write_fixed/read_fixed" { }; defer ring.deinit(); + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + const path = "test_io_uring_write_read_fixed"; - const file = try std.fs.cwd().createFile(path, .{ .read = true, .truncate = true }); + const file = try tmp.dir.createFile(path, .{ .read = true, .truncate = true }); defer file.close(); - defer std.fs.cwd().deleteFile(path) catch {}; const fd = file.handle; var raw_buffers: [2][11]u8 = undefined; @@ -1899,8 +1904,10 @@ test "openat" { }; defer ring.deinit(); + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + const path = "test_io_uring_openat"; - defer std.fs.cwd().deleteFile(path) catch {}; // Workaround for LLVM bug: https://github.com/ziglang/zig/issues/12014 const path_addr = if (builtin.zig_backend == .stage2_llvm) p: { @@ -1910,12 +1917,12 @@ test "openat" { const flags: u32 = os.O.CLOEXEC | os.O.RDWR | os.O.CREAT; const mode: os.mode_t = 0o666; - const sqe_openat = try ring.openat(0x33333333, linux.AT.FDCWD, path, flags, mode); + const sqe_openat = try ring.openat(0x33333333, tmp.dir.fd, path, flags, mode); try testing.expectEqual(linux.io_uring_sqe{ .opcode = .OPENAT, .flags = 0, .ioprio 
= 0, - .fd = linux.AT.FDCWD, + .fd = tmp.dir.fd, .off = 0, .addr = path_addr, .len = mode, @@ -1931,12 +1938,6 @@ test "openat" { const cqe_openat = try ring.copy_cqe(); try testing.expectEqual(@as(u64, 0x33333333), cqe_openat.user_data); if (cqe_openat.err() == .INVAL) return error.SkipZigTest; - // AT.FDCWD is not fully supported before kernel 5.6: - // See https://lore.kernel.org/io-uring/20200207155039.12819-1-axboe@kernel.dk/T/ - // We use IORING_FEAT_RW_CUR_POS to know if we are pre-5.6 since that feature was added in 5.6. - if (cqe_openat.err() == .BADF and (ring.features & linux.IORING_FEAT_RW_CUR_POS) == 0) { - return error.SkipZigTest; - } if (cqe_openat.res <= 0) std.debug.print("\ncqe_openat.res={}\n", .{cqe_openat.res}); try testing.expect(cqe_openat.res > 0); try testing.expectEqual(@as(u32, 0), cqe_openat.flags); @@ -1954,10 +1955,12 @@ test "close" { }; defer ring.deinit(); + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + const path = "test_io_uring_close"; - const file = try std.fs.cwd().createFile(path, .{}); + const file = try tmp.dir.createFile(path, .{}); errdefer file.close(); - defer std.fs.cwd().deleteFile(path) catch {}; const sqe_close = try ring.close(0x44444444, file.handle); try testing.expectEqual(linux.IORING_OP.CLOSE, sqe_close.opcode); @@ -1976,6 +1979,11 @@ test "close" { test "accept/connect/send/recv" { if (builtin.os.tag != .linux) return error.SkipZigTest; + if (true) { + // https://github.com/ziglang/zig/issues/14907 + return error.SkipZigTest; + } + var ring = IO_Uring.init(16, 0) catch |err| switch (err) { error.SystemOutdated => return error.SkipZigTest, error.PermissionDenied => return error.SkipZigTest, @@ -2017,6 +2025,11 @@ test "accept/connect/send/recv" { test "sendmsg/recvmsg" { if (builtin.os.tag != .linux) return error.SkipZigTest; + if (true) { + // https://github.com/ziglang/zig/issues/14907 + return error.SkipZigTest; + } + var ring = IO_Uring.init(2, 0) catch |err| switch (err) { error.SystemOutdated => return error.SkipZigTest, error.PermissionDenied => return error.SkipZigTest, @@ -2024,6 +2037,7 @@ test "sendmsg/recvmsg" { }; defer ring.deinit(); + if (true) @compileError("don't hard code port numbers in unit tests"); // https://github.com/ziglang/zig/issues/14907 const address_server = try net.Address.parseIp4("127.0.0.1", 3131); const server = try os.socket(address_server.any.family, os.SOCK.DGRAM, 0); @@ -2223,6 +2237,11 @@ test "timeout_remove" { test "accept/connect/recv/link_timeout" { if (builtin.os.tag != .linux) return error.SkipZigTest; + if (true) { + // https://github.com/ziglang/zig/issues/14907 + return error.SkipZigTest; + } + var ring = IO_Uring.init(16, 0) catch |err| switch (err) { error.SystemOutdated => return error.SkipZigTest, error.PermissionDenied => return error.SkipZigTest, @@ -2279,10 +2298,12 @@ test "fallocate" { }; defer ring.deinit(); + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + const path = "test_io_uring_fallocate"; - const file = try std.fs.cwd().createFile(path, .{ .truncate = true, .mode = 0o666 }); + const file = try tmp.dir.createFile(path, .{ .truncate = true, .mode = 0o666 }); defer file.close(); - defer std.fs.cwd().deleteFile(path) catch {}; try testing.expectEqual(@as(u64, 0), (try file.stat()).size); @@ -2323,10 +2344,11 @@ test "statx" { }; defer ring.deinit(); + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); const path = "test_io_uring_statx"; - const file = try std.fs.cwd().createFile(path, .{ .truncate = true, .mode = 0o666 }); + const file = 
try tmp.dir.createFile(path, .{ .truncate = true, .mode = 0o666 }); defer file.close(); - defer std.fs.cwd().deleteFile(path) catch {}; try testing.expectEqual(@as(u64, 0), (try file.stat()).size); @@ -2335,14 +2357,14 @@ test "statx" { var buf: linux.Statx = undefined; const sqe = try ring.statx( 0xaaaaaaaa, - linux.AT.FDCWD, + tmp.dir.fd, path, 0, linux.STATX_SIZE, &buf, ); try testing.expectEqual(linux.IORING_OP.STATX, sqe.opcode); - try testing.expectEqual(@as(i32, linux.AT.FDCWD), sqe.fd); + try testing.expectEqual(@as(i32, tmp.dir.fd), sqe.fd); try testing.expectEqual(@as(u32, 1), try ring.submit()); const cqe = try ring.copy_cqe(); @@ -2355,8 +2377,6 @@ test "statx" { // The filesystem containing the file referred to by fd does not support this operation; // or the mode is not supported by the filesystem containing the file referred to by fd: .OPNOTSUPP => return error.SkipZigTest, - // The kernel is too old to support FDCWD for dir_fd - .BADF => return error.SkipZigTest, else => |errno| std.debug.panic("unhandled errno: {}", .{errno}), } try testing.expectEqual(linux.io_uring_cqe{ @@ -2372,6 +2392,11 @@ test "statx" { test "accept/connect/recv/cancel" { if (builtin.os.tag != .linux) return error.SkipZigTest; + if (true) { + // https://github.com/ziglang/zig/issues/14907 + return error.SkipZigTest; + } + var ring = IO_Uring.init(16, 0) catch |err| switch (err) { error.SystemOutdated => return error.SkipZigTest, error.PermissionDenied => return error.SkipZigTest, @@ -2509,6 +2534,11 @@ test "register_files_update" { test "shutdown" { if (builtin.os.tag != .linux) return error.SkipZigTest; + if (true) { + // https://github.com/ziglang/zig/issues/14907 + return error.SkipZigTest; + } + var ring = IO_Uring.init(16, 0) catch |err| switch (err) { error.SystemOutdated => return error.SkipZigTest, error.PermissionDenied => return error.SkipZigTest, @@ -2516,6 +2546,7 @@ test "shutdown" { }; defer ring.deinit(); + if (true) @compileError("don't hard code port numbers in unit tests"); // https://github.com/ziglang/zig/issues/14907 const address = try net.Address.parseIp4("127.0.0.1", 3131); // Socket bound, expect shutdown to work @@ -2579,28 +2610,28 @@ test "renameat" { const old_path = "test_io_uring_renameat_old"; const new_path = "test_io_uring_renameat_new"; + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + // Write old file with data - const old_file = try std.fs.cwd().createFile(old_path, .{ .truncate = true, .mode = 0o666 }); - defer { - old_file.close(); - std.fs.cwd().deleteFile(new_path) catch {}; - } + const old_file = try tmp.dir.createFile(old_path, .{ .truncate = true, .mode = 0o666 }); + defer old_file.close(); try old_file.writeAll("hello"); // Submit renameat var sqe = try ring.renameat( 0x12121212, - linux.AT.FDCWD, + tmp.dir.fd, old_path, - linux.AT.FDCWD, + tmp.dir.fd, new_path, 0, ); try testing.expectEqual(linux.IORING_OP.RENAMEAT, sqe.opcode); - try testing.expectEqual(@as(i32, linux.AT.FDCWD), sqe.fd); - try testing.expectEqual(@as(i32, linux.AT.FDCWD), @bitCast(i32, sqe.len)); + try testing.expectEqual(@as(i32, tmp.dir.fd), sqe.fd); + try testing.expectEqual(@as(i32, tmp.dir.fd), @bitCast(i32, sqe.len)); try testing.expectEqual(@as(u32, 1), try ring.submit()); const cqe = try ring.copy_cqe(); @@ -2618,7 +2649,7 @@ test "renameat" { // Validate that the old file doesn't exist anymore { - _ = std.fs.cwd().openFile(old_path, .{}) catch |err| switch (err) { + _ = tmp.dir.openFile(old_path, .{}) catch |err| switch (err) { error.FileNotFound => {}, else => 
std.debug.panic("unexpected error: {}", .{err}), }; @@ -2626,7 +2657,7 @@ test "renameat" { // Validate that the new file exists with the proper content { - const new_file = try std.fs.cwd().openFile(new_path, .{}); + const new_file = try tmp.dir.openFile(new_path, .{}); defer new_file.close(); var new_file_data: [16]u8 = undefined; @@ -2647,22 +2678,24 @@ test "unlinkat" { const path = "test_io_uring_unlinkat"; + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + // Write old file with data - const file = try std.fs.cwd().createFile(path, .{ .truncate = true, .mode = 0o666 }); + const file = try tmp.dir.createFile(path, .{ .truncate = true, .mode = 0o666 }); defer file.close(); - defer std.fs.cwd().deleteFile(path) catch {}; // Submit unlinkat var sqe = try ring.unlinkat( 0x12121212, - linux.AT.FDCWD, + tmp.dir.fd, path, 0, ); try testing.expectEqual(linux.IORING_OP.UNLINKAT, sqe.opcode); - try testing.expectEqual(@as(i32, linux.AT.FDCWD), sqe.fd); + try testing.expectEqual(@as(i32, tmp.dir.fd), sqe.fd); try testing.expectEqual(@as(u32, 1), try ring.submit()); const cqe = try ring.copy_cqe(); @@ -2679,7 +2712,7 @@ test "unlinkat" { }, cqe); // Validate that the file doesn't exist anymore - _ = std.fs.cwd().openFile(path, .{}) catch |err| switch (err) { + _ = tmp.dir.openFile(path, .{}) catch |err| switch (err) { error.FileNotFound => {}, else => std.debug.panic("unexpected error: {}", .{err}), }; @@ -2695,20 +2728,21 @@ test "mkdirat" { }; defer ring.deinit(); - const path = "test_io_uring_mkdirat"; + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); - defer std.fs.cwd().deleteDir(path) catch {}; + const path = "test_io_uring_mkdirat"; // Submit mkdirat var sqe = try ring.mkdirat( 0x12121212, - linux.AT.FDCWD, + tmp.dir.fd, path, 0o0755, ); try testing.expectEqual(linux.IORING_OP.MKDIRAT, sqe.opcode); - try testing.expectEqual(@as(i32, linux.AT.FDCWD), sqe.fd); + try testing.expectEqual(@as(i32, tmp.dir.fd), sqe.fd); try testing.expectEqual(@as(u32, 1), try ring.submit()); const cqe = try ring.copy_cqe(); @@ -2725,7 +2759,7 @@ test "mkdirat" { }, cqe); // Validate that the directory exist - _ = try std.fs.cwd().openDir(path, .{}); + _ = try tmp.dir.openDir(path, .{}); } test "symlinkat" { @@ -2738,26 +2772,25 @@ test "symlinkat" { }; defer ring.deinit(); + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + const path = "test_io_uring_symlinkat"; const link_path = "test_io_uring_symlinkat_link"; - const file = try std.fs.cwd().createFile(path, .{ .truncate = true, .mode = 0o666 }); - defer { - file.close(); - std.fs.cwd().deleteFile(path) catch {}; - std.fs.cwd().deleteFile(link_path) catch {}; - } + const file = try tmp.dir.createFile(path, .{ .truncate = true, .mode = 0o666 }); + defer file.close(); // Submit symlinkat var sqe = try ring.symlinkat( 0x12121212, path, - linux.AT.FDCWD, + tmp.dir.fd, link_path, ); try testing.expectEqual(linux.IORING_OP.SYMLINKAT, sqe.opcode); - try testing.expectEqual(@as(i32, linux.AT.FDCWD), sqe.fd); + try testing.expectEqual(@as(i32, tmp.dir.fd), sqe.fd); try testing.expectEqual(@as(u32, 1), try ring.submit()); const cqe = try ring.copy_cqe(); @@ -2774,7 +2807,7 @@ test "symlinkat" { }, cqe); // Validate that the symlink exist - _ = try std.fs.cwd().openFile(link_path, .{}); + _ = try tmp.dir.openFile(link_path, .{}); } test "linkat" { @@ -2787,32 +2820,31 @@ test "linkat" { }; defer ring.deinit(); + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + const first_path = "test_io_uring_linkat_first"; const second_path = 
"test_io_uring_linkat_second"; // Write file with data - const first_file = try std.fs.cwd().createFile(first_path, .{ .truncate = true, .mode = 0o666 }); - defer { - first_file.close(); - std.fs.cwd().deleteFile(first_path) catch {}; - std.fs.cwd().deleteFile(second_path) catch {}; - } + const first_file = try tmp.dir.createFile(first_path, .{ .truncate = true, .mode = 0o666 }); + defer first_file.close(); try first_file.writeAll("hello"); // Submit linkat var sqe = try ring.linkat( 0x12121212, - linux.AT.FDCWD, + tmp.dir.fd, first_path, - linux.AT.FDCWD, + tmp.dir.fd, second_path, 0, ); try testing.expectEqual(linux.IORING_OP.LINKAT, sqe.opcode); - try testing.expectEqual(@as(i32, linux.AT.FDCWD), sqe.fd); - try testing.expectEqual(@as(i32, linux.AT.FDCWD), @bitCast(i32, sqe.len)); + try testing.expectEqual(@as(i32, tmp.dir.fd), sqe.fd); + try testing.expectEqual(@as(i32, tmp.dir.fd), @bitCast(i32, sqe.len)); try testing.expectEqual(@as(u32, 1), try ring.submit()); const cqe = try ring.copy_cqe(); @@ -2829,7 +2861,7 @@ test "linkat" { }, cqe); // Validate the second file - const second_file = try std.fs.cwd().openFile(second_path, .{}); + const second_file = try tmp.dir.openFile(second_path, .{}); defer second_file.close(); var second_file_data: [16]u8 = undefined; @@ -3060,6 +3092,11 @@ test "remove_buffers" { test "provide_buffers: accept/connect/send/recv" { if (builtin.os.tag != .linux) return error.SkipZigTest; + if (true) { + // https://github.com/ziglang/zig/issues/14907 + return error.SkipZigTest; + } + var ring = IO_Uring.init(16, 0) catch |err| switch (err) { error.SystemOutdated => return error.SkipZigTest, error.PermissionDenied => return error.SkipZigTest, @@ -3236,6 +3273,7 @@ const SocketTestHarness = struct { fn createSocketTestHarness(ring: *IO_Uring) !SocketTestHarness { // Create a TCP server socket + if (true) @compileError("don't hard code port numbers in unit tests"); // https://github.com/ziglang/zig/issues/14907 const address = try net.Address.parseIp4("127.0.0.1", 3131); const kernel_backlog = 1; const listener_socket = try os.socket(address.any.family, os.SOCK.STREAM | os.SOCK.CLOEXEC, 0); diff --git a/lib/std/os/linux/test.zig b/lib/std/os/linux/test.zig index 63deab3edc42..e1ad36b2e555 100644 --- a/lib/std/os/linux/test.zig +++ b/lib/std/os/linux/test.zig @@ -8,10 +8,12 @@ const expectEqual = std.testing.expectEqual; const fs = std.fs; test "fallocate" { + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + const path = "test_fallocate"; - const file = try fs.cwd().createFile(path, .{ .truncate = true, .mode = 0o666 }); + const file = try tmp.dir.createFile(path, .{ .truncate = true, .mode = 0o666 }); defer file.close(); - defer fs.cwd().deleteFile(path) catch {}; try expect((try file.stat()).size == 0); @@ -67,12 +69,12 @@ test "timer" { } test "statx" { + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + const tmp_file_name = "just_a_temporary_file.txt"; - var file = try fs.cwd().createFile(tmp_file_name, .{}); - defer { - file.close(); - fs.cwd().deleteFile(tmp_file_name) catch {}; - } + var file = try tmp.dir.createFile(tmp_file_name, .{}); + defer file.close(); var statx_buf: linux.Statx = undefined; switch (linux.getErrno(linux.statx(file.handle, "", linux.AT.EMPTY_PATH, linux.STATX_BASIC_STATS, &statx_buf))) { @@ -105,21 +107,16 @@ test "user and group ids" { } test "fadvise" { + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + const tmp_file_name = "temp_posix_fadvise.txt"; - var file = try 
fs.cwd().createFile(tmp_file_name, .{}); - defer { - file.close(); - fs.cwd().deleteFile(tmp_file_name) catch {}; - } + var file = try tmp.dir.createFile(tmp_file_name, .{}); + defer file.close(); var buf: [2048]u8 = undefined; try file.writeAll(&buf); - const ret = linux.fadvise( - file.handle, - 0, - 0, - linux.POSIX_FADV.SEQUENTIAL, - ); + const ret = linux.fadvise(file.handle, 0, 0, linux.POSIX_FADV.SEQUENTIAL); try expectEqual(@as(usize, 0), ret); } diff --git a/lib/std/os/windows.zig b/lib/std/os/windows.zig index b63fdb9f9285..5576200ea538 100644 --- a/lib/std/os/windows.zig +++ b/lib/std/os/windows.zig @@ -105,41 +105,53 @@ pub fn OpenFile(sub_path_w: []const u16, options: OpenFileOptions) OpenError!HAN // If we're not following symlinks, we need to ensure we don't pass in any synchronization flags such as FILE_SYNCHRONOUS_IO_NONALERT. const flags: ULONG = if (options.follow_symlinks) file_or_dir_flag | blocking_flag else file_or_dir_flag | FILE_OPEN_REPARSE_POINT; - const rc = ntdll.NtCreateFile( - &result, - options.access_mask, - &attr, - &io, - null, - FILE_ATTRIBUTE_NORMAL, - options.share_access, - options.creation, - flags, - null, - 0, - ); - switch (rc) { - .SUCCESS => { - if (std.io.is_async and options.io_mode == .evented) { - _ = CreateIoCompletionPort(result, std.event.Loop.instance.?.os_data.io_port, undefined, undefined) catch undefined; - } - return result; - }, - .OBJECT_NAME_INVALID => unreachable, - .OBJECT_NAME_NOT_FOUND => return error.FileNotFound, - .OBJECT_PATH_NOT_FOUND => return error.FileNotFound, - .NO_MEDIA_IN_DEVICE => return error.NoDevice, - .INVALID_PARAMETER => unreachable, - .SHARING_VIOLATION => return error.AccessDenied, - .ACCESS_DENIED => return error.AccessDenied, - .PIPE_BUSY => return error.PipeBusy, - .OBJECT_PATH_SYNTAX_BAD => unreachable, - .OBJECT_NAME_COLLISION => return error.PathAlreadyExists, - .FILE_IS_A_DIRECTORY => return error.IsDir, - .NOT_A_DIRECTORY => return error.NotDir, - .USER_MAPPED_FILE => return error.AccessDenied, - .INVALID_HANDLE => unreachable, - else => return unexpectedStatus(rc), + while (true) { + const rc = ntdll.NtCreateFile( + &result, + options.access_mask, + &attr, + &io, + null, + FILE_ATTRIBUTE_NORMAL, + options.share_access, + options.creation, + flags, + null, + 0, + ); + switch (rc) { + .SUCCESS => { + if (std.io.is_async and options.io_mode == .evented) { + _ = CreateIoCompletionPort(result, std.event.Loop.instance.?.os_data.io_port, undefined, undefined) catch undefined; + } + return result; + }, + .OBJECT_NAME_INVALID => unreachable, + .OBJECT_NAME_NOT_FOUND => return error.FileNotFound, + .OBJECT_PATH_NOT_FOUND => return error.FileNotFound, + .NO_MEDIA_IN_DEVICE => return error.NoDevice, + .INVALID_PARAMETER => unreachable, + .SHARING_VIOLATION => return error.AccessDenied, + .ACCESS_DENIED => return error.AccessDenied, + .PIPE_BUSY => return error.PipeBusy, + .OBJECT_PATH_SYNTAX_BAD => unreachable, + .OBJECT_NAME_COLLISION => return error.PathAlreadyExists, + .FILE_IS_A_DIRECTORY => return error.IsDir, + .NOT_A_DIRECTORY => return error.NotDir, + .USER_MAPPED_FILE => return error.AccessDenied, + .INVALID_HANDLE => unreachable, + .DELETE_PENDING => { + // This error means that there *was* a file in this location on + // the file system, but it was deleted. However, the OS is not + // finished with the deletion operation, and so this CreateFile + // call has failed. There is not really a sane way to handle + // this other than retrying the creation after the OS finishes + // the deletion. 
+ std.time.sleep(std.time.ns_per_ms); + continue; + }, + else => return unexpectedStatus(rc), + } } } diff --git a/lib/std/os/windows/kernel32.zig b/lib/std/os/windows/kernel32.zig index d3bfeaaf2c3b..1fd8a406d5c4 100644 --- a/lib/std/os/windows/kernel32.zig +++ b/lib/std/os/windows/kernel32.zig @@ -67,6 +67,7 @@ const RUNTIME_FUNCTION = windows.RUNTIME_FUNCTION; const KNONVOLATILE_CONTEXT_POINTERS = windows.KNONVOLATILE_CONTEXT_POINTERS; const EXCEPTION_ROUTINE = windows.EXCEPTION_ROUTINE; const MODULEENTRY32 = windows.MODULEENTRY32; +const ULONGLONG = windows.ULONGLONG; pub extern "kernel32" fn AddVectoredExceptionHandler(First: c_ulong, Handler: ?VECTORED_EXCEPTION_HANDLER) callconv(WINAPI) ?*anyopaque; pub extern "kernel32" fn RemoveVectoredExceptionHandler(Handle: HANDLE) callconv(WINAPI) c_ulong; @@ -457,3 +458,5 @@ pub extern "kernel32" fn RegOpenKeyExW( samDesired: REGSAM, phkResult: *HKEY, ) callconv(WINAPI) LSTATUS; + +pub extern "kernel32" fn GetPhysicallyInstalledSystemMemory(TotalMemoryInKilobytes: *ULONGLONG) BOOL; diff --git a/lib/std/process.zig b/lib/std/process.zig index eff29e86fa09..d06a012af2a8 100644 --- a/lib/std/process.zig +++ b/lib/std/process.zig @@ -828,24 +828,6 @@ pub fn argsWithAllocator(allocator: Allocator) ArgIterator.InitError!ArgIterator return ArgIterator.initWithAllocator(allocator); } -test "args iterator" { - var ga = std.testing.allocator; - var it = try argsWithAllocator(ga); - defer it.deinit(); // no-op unless WASI or Windows - - const prog_name = it.next() orelse unreachable; - const expected_suffix = switch (builtin.os.tag) { - .wasi => "test.wasm", - .windows => "test.exe", - else => "test", - }; - const given_suffix = std.fs.path.basename(prog_name); - - try testing.expect(mem.eql(u8, expected_suffix, given_suffix)); - try testing.expect(it.next() == null); - try testing.expect(!it.skip()); -} - /// Caller must call argsFree on result. pub fn argsAlloc(allocator: Allocator) ![][:0]u8 { // TODO refactor to only make 1 allocation. @@ -1169,3 +1151,51 @@ pub fn execve( return os.execvpeZ_expandArg0(.no_expand, argv_buf.ptr[0].?, argv_buf.ptr, envp); } + +pub const TotalSystemMemoryError = error{ + UnknownTotalSystemMemory, +}; + +/// Returns the total system memory, in bytes. +pub fn totalSystemMemory() TotalSystemMemoryError!usize { + switch (builtin.os.tag) { + .linux => { + return totalSystemMemoryLinux() catch return error.UnknownTotalSystemMemory; + }, + .windows => { + var kilobytes: std.os.windows.ULONGLONG = undefined; + assert(std.os.windows.kernel32.GetPhysicallyInstalledSystemMemory(&kilobytes) == std.os.windows.TRUE); + return kilobytes * 1024; + }, + else => return error.UnknownTotalSystemMemory, + } +} + +fn totalSystemMemoryLinux() !usize { + var file = try std.fs.openFileAbsoluteZ("/proc/meminfo", .{}); + defer file.close(); + var buf: [50]u8 = undefined; + const amt = try file.read(&buf); + if (amt != 50) return error.Unexpected; + var it = std.mem.tokenize(u8, buf[0..amt], " \n"); + const label = it.next().?; + if (!std.mem.eql(u8, label, "MemTotal:")) return error.Unexpected; + const int_text = it.next() orelse return error.Unexpected; + const units = it.next() orelse return error.Unexpected; + if (!std.mem.eql(u8, units, "kB")) return error.Unexpected; + const kilobytes = try std.fmt.parseInt(usize, int_text, 10); + return kilobytes * 1024; +} + +/// Indicate that we are now terminating with a successful exit code. 
+/// In debug builds, this is a no-op, so that the calling code's +/// cleanup mechanisms are tested and so that external tools that +/// check for resource leaks can be accurate. In release builds, this +/// calls exit(0), and does not return. +pub fn cleanExit() void { + if (builtin.mode == .Debug) { + return; + } else { + exit(0); + } +} diff --git a/lib/std/zig.zig b/lib/std/zig.zig index f85cf75e60ff..98edeabd1023 100644 --- a/lib/std/zig.zig +++ b/lib/std/zig.zig @@ -3,6 +3,9 @@ const tokenizer = @import("zig/tokenizer.zig"); const fmt = @import("zig/fmt.zig"); const assert = std.debug.assert; +pub const ErrorBundle = @import("zig/ErrorBundle.zig"); +pub const Server = @import("zig/Server.zig"); +pub const Client = @import("zig/Client.zig"); pub const Token = tokenizer.Token; pub const Tokenizer = tokenizer.Tokenizer; pub const fmtId = fmt.fmtId; diff --git a/lib/std/zig/Client.zig b/lib/std/zig/Client.zig new file mode 100644 index 000000000000..af4c29d37d6d --- /dev/null +++ b/lib/std/zig/Client.zig @@ -0,0 +1,39 @@ +pub const Message = struct { + pub const Header = extern struct { + tag: Tag, + /// Size of the body only; does not include this Header. + bytes_len: u32, + }; + + pub const Tag = enum(u32) { + /// Tells the compiler to shut down cleanly. + /// No body. + exit, + /// Tells the compiler to detect changes in source files and update the + /// affected output compilation artifacts. + /// If one of the compilation artifacts is an executable that is + /// running as a child process, the compiler will wait for it to exit + /// before performing the update. + /// No body. + update, + /// Tells the compiler to execute the executable as a child process. + /// No body. + run, + /// Tells the compiler to detect changes in source files and update the + /// affected output compilation artifacts. + /// If one of the compilation artifacts is an executable that is + /// running as a child process, the compiler will perform a hot code + /// swap. + /// No body. + hot_update, + /// Ask the test runner for metadata about all the unit tests that can + /// be run. Server will respond with a `test_metadata` message. + /// No body. + query_test_metadata, + /// Ask the test runner to run a particular test. + /// The message body is a u32 test index. + run_test, + + _, + }; +}; diff --git a/lib/std/zig/ErrorBundle.zig b/lib/std/zig/ErrorBundle.zig new file mode 100644 index 000000000000..ffe748203e82 --- /dev/null +++ b/lib/std/zig/ErrorBundle.zig @@ -0,0 +1,515 @@ +//! To support incremental compilation, errors are stored in various places +//! so that they can be created and destroyed appropriately. This structure +//! is used to collect all the errors from the various places into one +//! convenient place for API users to consume. +//! +//! There is one special encoding for this data structure. If both arrays are +//! empty, it means there are no errors. This special encoding exists so that +//! heap allocation is not needed in the common case of no errors. + +string_bytes: []const u8, +/// The first thing in this array is an `ErrorMessageList`. +extra: []const u32, + +/// Special encoding when there are no errors. +pub const empty: ErrorBundle = .{ + .string_bytes = &.{}, + .extra = &.{}, +}; + +// An index into `extra` pointing at an `ErrorMessage`. +pub const MessageIndex = enum(u32) { + _, +}; + +// An index into `extra` pointing at an `SourceLocation`. +pub const SourceLocationIndex = enum(u32) { + none = 0, + _, +}; + +/// There will be a MessageIndex for each len at start. 
+pub const ErrorMessageList = struct { + len: u32, + start: u32, + /// null-terminated string index. 0 means no compile log text. + compile_log_text: u32, +}; + +/// Trailing: +/// * ReferenceTrace for each reference_trace_len +pub const SourceLocation = struct { + /// null terminated string index + src_path: u32, + line: u32, + column: u32, + /// byte offset of starting token + span_start: u32, + /// byte offset of main error location + span_main: u32, + /// byte offset of end of last token + span_end: u32, + /// null terminated string index, possibly null. + /// Does not include the trailing newline. + source_line: u32 = 0, + reference_trace_len: u32 = 0, +}; + +/// Trailing: +/// * MessageIndex for each notes_len. +pub const ErrorMessage = struct { + /// null terminated string index + msg: u32, + /// Usually one, but incremented for redundant messages. + count: u32 = 1, + src_loc: SourceLocationIndex = .none, + notes_len: u32 = 0, +}; + +pub const ReferenceTrace = struct { + /// null terminated string index + /// Except for the sentinel ReferenceTrace element, in which case: + /// * 0 means remaining references hidden + /// * >0 means N references hidden + decl_name: u32, + /// Index into extra of a SourceLocation + /// If this is 0, this is the sentinel ReferenceTrace element. + src_loc: SourceLocationIndex, +}; + +pub fn deinit(eb: *ErrorBundle, gpa: Allocator) void { + gpa.free(eb.string_bytes); + gpa.free(eb.extra); + eb.* = undefined; +} + +pub fn errorMessageCount(eb: ErrorBundle) u32 { + if (eb.extra.len == 0) return 0; + return eb.getErrorMessageList().len; +} + +pub fn getErrorMessageList(eb: ErrorBundle) ErrorMessageList { + return eb.extraData(ErrorMessageList, 0).data; +} + +pub fn getMessages(eb: ErrorBundle) []const MessageIndex { + const list = eb.getErrorMessageList(); + return @ptrCast([]const MessageIndex, eb.extra[list.start..][0..list.len]); +} + +pub fn getErrorMessage(eb: ErrorBundle, index: MessageIndex) ErrorMessage { + return eb.extraData(ErrorMessage, @enumToInt(index)).data; +} + +pub fn getSourceLocation(eb: ErrorBundle, index: SourceLocationIndex) SourceLocation { + assert(index != .none); + return eb.extraData(SourceLocation, @enumToInt(index)).data; +} + +pub fn getNotes(eb: ErrorBundle, index: MessageIndex) []const MessageIndex { + const notes_len = eb.getErrorMessage(index).notes_len; + const start = @enumToInt(index) + @typeInfo(ErrorMessage).Struct.fields.len; + return @ptrCast([]const MessageIndex, eb.extra[start..][0..notes_len]); +} + +pub fn getCompileLogOutput(eb: ErrorBundle) [:0]const u8 { + return nullTerminatedString(eb, getErrorMessageList(eb).compile_log_text); +} + +/// Returns the requested data, as well as the new index which is at the start of the +/// trailers for the object. +fn extraData(eb: ErrorBundle, comptime T: type, index: usize) struct { data: T, end: usize } { + const fields = @typeInfo(T).Struct.fields; + var i: usize = index; + var result: T = undefined; + inline for (fields) |field| { + @field(result, field.name) = switch (field.type) { + u32 => eb.extra[i], + MessageIndex => @intToEnum(MessageIndex, eb.extra[i]), + SourceLocationIndex => @intToEnum(SourceLocationIndex, eb.extra[i]), + else => @compileError("bad field type"), + }; + i += 1; + } + return .{ + .data = result, + .end = i, + }; +} + +/// Given an index into `string_bytes` returns the null-terminated string found there. 
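A hedged usage sketch (illustrative, not part of the change) of the accessors above (errorMessageCount, getMessages, getErrorMessage, getNotes) together with nullTerminatedString, which the comment above documents and whose implementation follows:

    const std = @import("std");

    // Walk every root error message and its notes in a bundle, printing them
    // without the built-in renderer. `eb` is any bundle produced by the compiler.
    fn dumpBundle(eb: std.zig.ErrorBundle) void {
        // The no-error case is specially encoded as two empty arrays.
        if (eb.errorMessageCount() == 0) return;
        for (eb.getMessages()) |msg_index| {
            const msg = eb.getErrorMessage(msg_index);
            std.debug.print("error: {s}\n", .{eb.nullTerminatedString(msg.msg)});
            for (eb.getNotes(msg_index)) |note_index| {
                const note = eb.getErrorMessage(note_index);
                std.debug.print("  note: {s}\n", .{eb.nullTerminatedString(note.msg)});
            }
        }
    }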
+pub fn nullTerminatedString(eb: ErrorBundle, index: usize) [:0]const u8 { + const string_bytes = eb.string_bytes; + var end: usize = index; + while (string_bytes[end] != 0) { + end += 1; + } + return string_bytes[index..end :0]; +} + +pub const RenderOptions = struct { + ttyconf: std.debug.TTY.Config, + include_reference_trace: bool = true, + include_source_line: bool = true, + include_log_text: bool = true, +}; + +pub fn renderToStdErr(eb: ErrorBundle, options: RenderOptions) void { + std.debug.getStderrMutex().lock(); + defer std.debug.getStderrMutex().unlock(); + const stderr = std.io.getStdErr(); + return renderToWriter(eb, options, stderr.writer()) catch return; +} + +pub fn renderToWriter(eb: ErrorBundle, options: RenderOptions, writer: anytype) anyerror!void { + for (eb.getMessages()) |err_msg| { + try renderErrorMessageToWriter(eb, options, err_msg, writer, "error", .Red, 0); + } + + if (options.include_log_text) { + const log_text = eb.getCompileLogOutput(); + if (log_text.len != 0) { + try writer.writeAll("\nCompile Log Output:\n"); + try writer.writeAll(log_text); + } + } +} + +fn renderErrorMessageToWriter( + eb: ErrorBundle, + options: RenderOptions, + err_msg_index: MessageIndex, + stderr: anytype, + kind: []const u8, + color: std.debug.TTY.Color, + indent: usize, +) anyerror!void { + const ttyconf = options.ttyconf; + var counting_writer = std.io.countingWriter(stderr); + const counting_stderr = counting_writer.writer(); + const err_msg = eb.getErrorMessage(err_msg_index); + if (err_msg.src_loc != .none) { + const src = eb.extraData(SourceLocation, @enumToInt(err_msg.src_loc)); + try counting_stderr.writeByteNTimes(' ', indent); + try ttyconf.setColor(stderr, .Bold); + try counting_stderr.print("{s}:{d}:{d}: ", .{ + eb.nullTerminatedString(src.data.src_path), + src.data.line + 1, + src.data.column + 1, + }); + try ttyconf.setColor(stderr, color); + try counting_stderr.writeAll(kind); + try counting_stderr.writeAll(": "); + // This is the length of the part before the error message: + // e.g. 
"file.zig:4:5: error: " + const prefix_len = @intCast(usize, counting_stderr.context.bytes_written); + try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .Bold); + if (err_msg.count == 1) { + try writeMsg(eb, err_msg, stderr, prefix_len); + try stderr.writeByte('\n'); + } else { + try writeMsg(eb, err_msg, stderr, prefix_len); + try ttyconf.setColor(stderr, .Dim); + try stderr.print(" ({d} times)\n", .{err_msg.count}); + } + try ttyconf.setColor(stderr, .Reset); + if (src.data.source_line != 0 and options.include_source_line) { + const line = eb.nullTerminatedString(src.data.source_line); + for (line) |b| switch (b) { + '\t' => try stderr.writeByte(' '), + else => try stderr.writeByte(b), + }; + try stderr.writeByte('\n'); + // TODO basic unicode code point monospace width + const before_caret = src.data.span_main - src.data.span_start; + // -1 since span.main includes the caret + const after_caret = src.data.span_end - src.data.span_main -| 1; + try stderr.writeByteNTimes(' ', src.data.column - before_caret); + try ttyconf.setColor(stderr, .Green); + try stderr.writeByteNTimes('~', before_caret); + try stderr.writeByte('^'); + try stderr.writeByteNTimes('~', after_caret); + try stderr.writeByte('\n'); + try ttyconf.setColor(stderr, .Reset); + } + for (eb.getNotes(err_msg_index)) |note| { + try renderErrorMessageToWriter(eb, options, note, stderr, "note", .Cyan, indent); + } + if (src.data.reference_trace_len > 0 and options.include_reference_trace) { + try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .Dim); + try stderr.print("referenced by:\n", .{}); + var ref_index = src.end; + for (0..src.data.reference_trace_len) |_| { + const ref_trace = eb.extraData(ReferenceTrace, ref_index); + ref_index = ref_trace.end; + if (ref_trace.data.src_loc != .none) { + const ref_src = eb.getSourceLocation(ref_trace.data.src_loc); + try stderr.print(" {s}: {s}:{d}:{d}\n", .{ + eb.nullTerminatedString(ref_trace.data.decl_name), + eb.nullTerminatedString(ref_src.src_path), + ref_src.line + 1, + ref_src.column + 1, + }); + } else if (ref_trace.data.decl_name != 0) { + const count = ref_trace.data.decl_name; + try stderr.print( + " {d} reference(s) hidden; use '-freference-trace={d}' to see all references\n", + .{ count, count + src.data.reference_trace_len - 1 }, + ); + } else { + try stderr.print( + " remaining reference traces hidden; use '-freference-trace' to see all reference traces\n", + .{}, + ); + } + } + try stderr.writeByte('\n'); + try ttyconf.setColor(stderr, .Reset); + } + } else { + try ttyconf.setColor(stderr, color); + try stderr.writeByteNTimes(' ', indent); + try stderr.writeAll(kind); + try stderr.writeAll(": "); + try ttyconf.setColor(stderr, .Reset); + const msg = eb.nullTerminatedString(err_msg.msg); + if (err_msg.count == 1) { + try stderr.print("{s}\n", .{msg}); + } else { + try stderr.print("{s}", .{msg}); + try ttyconf.setColor(stderr, .Dim); + try stderr.print(" ({d} times)\n", .{err_msg.count}); + } + try ttyconf.setColor(stderr, .Reset); + for (eb.getNotes(err_msg_index)) |note| { + try renderErrorMessageToWriter(eb, options, note, stderr, "note", .Cyan, indent + 4); + } + } +} + +/// Splits the error message up into lines to properly indent them +/// to allow for long, good-looking error messages. +/// +/// This is used to split the message in `@compileError("hello\nworld")` for example. 
+fn writeMsg(eb: ErrorBundle, err_msg: ErrorMessage, stderr: anytype, indent: usize) !void { + var lines = std.mem.split(u8, eb.nullTerminatedString(err_msg.msg), "\n"); + while (lines.next()) |line| { + try stderr.writeAll(line); + if (lines.index == null) break; + try stderr.writeByte('\n'); + try stderr.writeByteNTimes(' ', indent); + } +} + +const std = @import("std"); +const ErrorBundle = @This(); +const Allocator = std.mem.Allocator; +const assert = std.debug.assert; + +pub const Wip = struct { + gpa: Allocator, + string_bytes: std.ArrayListUnmanaged(u8), + /// The first thing in this array is a ErrorMessageList. + extra: std.ArrayListUnmanaged(u32), + root_list: std.ArrayListUnmanaged(MessageIndex), + + pub fn init(wip: *Wip, gpa: Allocator) !void { + wip.* = .{ + .gpa = gpa, + .string_bytes = .{}, + .extra = .{}, + .root_list = .{}, + }; + + // So that 0 can be used to indicate a null string. + try wip.string_bytes.append(gpa, 0); + + assert(0 == try addExtra(wip, ErrorMessageList{ + .len = 0, + .start = 0, + .compile_log_text = 0, + })); + } + + pub fn deinit(wip: *Wip) void { + const gpa = wip.gpa; + wip.root_list.deinit(gpa); + wip.string_bytes.deinit(gpa); + wip.extra.deinit(gpa); + wip.* = undefined; + } + + pub fn toOwnedBundle(wip: *Wip, compile_log_text: []const u8) !ErrorBundle { + const gpa = wip.gpa; + if (wip.root_list.items.len == 0) { + assert(compile_log_text.len == 0); + // Special encoding when there are no errors. + wip.deinit(); + wip.* = .{ + .gpa = gpa, + .string_bytes = .{}, + .extra = .{}, + .root_list = .{}, + }; + return empty; + } + + const compile_log_str_index = if (compile_log_text.len == 0) 0 else str: { + const str = @intCast(u32, wip.string_bytes.items.len); + try wip.string_bytes.ensureUnusedCapacity(gpa, compile_log_text.len + 1); + wip.string_bytes.appendSliceAssumeCapacity(compile_log_text); + wip.string_bytes.appendAssumeCapacity(0); + break :str str; + }; + + wip.setExtra(0, ErrorMessageList{ + .len = @intCast(u32, wip.root_list.items.len), + .start = @intCast(u32, wip.extra.items.len), + .compile_log_text = compile_log_str_index, + }); + try wip.extra.appendSlice(gpa, @ptrCast([]const u32, wip.root_list.items)); + wip.root_list.clearAndFree(gpa); + return .{ + .string_bytes = try wip.string_bytes.toOwnedSlice(gpa), + .extra = try wip.extra.toOwnedSlice(gpa), + }; + } + + pub fn tmpBundle(wip: Wip) ErrorBundle { + return .{ + .string_bytes = wip.string_bytes.items, + .extra = wip.extra.items, + }; + } + + pub fn addString(wip: *Wip, s: []const u8) !u32 { + const gpa = wip.gpa; + const index = @intCast(u32, wip.string_bytes.items.len); + try wip.string_bytes.ensureUnusedCapacity(gpa, s.len + 1); + wip.string_bytes.appendSliceAssumeCapacity(s); + wip.string_bytes.appendAssumeCapacity(0); + return index; + } + + pub fn printString(wip: *Wip, comptime fmt: []const u8, args: anytype) !u32 { + const gpa = wip.gpa; + const index = @intCast(u32, wip.string_bytes.items.len); + try wip.string_bytes.writer(gpa).print(fmt, args); + try wip.string_bytes.append(gpa, 0); + return index; + } + + pub fn addRootErrorMessage(wip: *Wip, em: ErrorMessage) !void { + try wip.root_list.ensureUnusedCapacity(wip.gpa, 1); + wip.root_list.appendAssumeCapacity(try addErrorMessage(wip, em)); + } + + pub fn addErrorMessage(wip: *Wip, em: ErrorMessage) !MessageIndex { + return @intToEnum(MessageIndex, try addExtra(wip, em)); + } + + pub fn addErrorMessageAssumeCapacity(wip: *Wip, em: ErrorMessage) MessageIndex { + return @intToEnum(MessageIndex, addExtraAssumeCapacity(wip, 
em)); + } + + pub fn addSourceLocation(wip: *Wip, sl: SourceLocation) !SourceLocationIndex { + return @intToEnum(SourceLocationIndex, try addExtra(wip, sl)); + } + + pub fn addReferenceTrace(wip: *Wip, rt: ReferenceTrace) !void { + _ = try addExtra(wip, rt); + } + + pub fn addBundle(wip: *Wip, other: ErrorBundle) !void { + const gpa = wip.gpa; + + try wip.string_bytes.ensureUnusedCapacity(gpa, other.string_bytes.len); + try wip.extra.ensureUnusedCapacity(gpa, other.extra.len); + + const other_list = other.getMessages(); + + // The ensureUnusedCapacity call above guarantees this. + const notes_start = wip.reserveNotes(@intCast(u32, other_list.len)) catch unreachable; + for (notes_start.., other_list) |note, message| { + wip.extra.items[note] = @enumToInt(wip.addOtherMessage(other, message) catch unreachable); + } + } + + pub fn reserveNotes(wip: *Wip, notes_len: u32) !u32 { + try wip.extra.ensureUnusedCapacity(wip.gpa, notes_len + + notes_len * @typeInfo(ErrorBundle.ErrorMessage).Struct.fields.len); + wip.extra.items.len += notes_len; + return @intCast(u32, wip.extra.items.len - notes_len); + } + + fn addOtherMessage(wip: *Wip, other: ErrorBundle, msg_index: MessageIndex) !MessageIndex { + const other_msg = other.getErrorMessage(msg_index); + const src_loc = try wip.addOtherSourceLocation(other, other_msg.src_loc); + const msg = try wip.addErrorMessage(.{ + .msg = try wip.addString(other.nullTerminatedString(other_msg.msg)), + .count = other_msg.count, + .src_loc = src_loc, + .notes_len = other_msg.notes_len, + }); + const notes_start = try wip.reserveNotes(other_msg.notes_len); + for (notes_start.., other.getNotes(msg_index)) |note, other_note| { + wip.extra.items[note] = @enumToInt(try wip.addOtherMessage(other, other_note)); + } + return msg; + } + + fn addOtherSourceLocation( + wip: *Wip, + other: ErrorBundle, + index: SourceLocationIndex, + ) !SourceLocationIndex { + if (index == .none) return .none; + const other_sl = other.getSourceLocation(index); + + const src_loc = try wip.addSourceLocation(.{ + .src_path = try wip.addString(other.nullTerminatedString(other_sl.src_path)), + .line = other_sl.line, + .column = other_sl.column, + .span_start = other_sl.span_start, + .span_main = other_sl.span_main, + .span_end = other_sl.span_end, + .source_line = try wip.addString(other.nullTerminatedString(other_sl.source_line)), + .reference_trace_len = other_sl.reference_trace_len, + }); + + // TODO: also add the reference trace + + return src_loc; + } + + fn addExtra(wip: *Wip, extra: anytype) Allocator.Error!u32 { + const gpa = wip.gpa; + const fields = @typeInfo(@TypeOf(extra)).Struct.fields; + try wip.extra.ensureUnusedCapacity(gpa, fields.len); + return addExtraAssumeCapacity(wip, extra); + } + + fn addExtraAssumeCapacity(wip: *Wip, extra: anytype) u32 { + const fields = @typeInfo(@TypeOf(extra)).Struct.fields; + const result = @intCast(u32, wip.extra.items.len); + wip.extra.items.len += fields.len; + setExtra(wip, result, extra); + return result; + } + + fn setExtra(wip: *Wip, index: usize, extra: anytype) void { + const fields = @typeInfo(@TypeOf(extra)).Struct.fields; + var i = index; + inline for (fields) |field| { + wip.extra.items[i] = switch (field.type) { + u32 => @field(extra, field.name), + MessageIndex => @enumToInt(@field(extra, field.name)), + SourceLocationIndex => @enumToInt(@field(extra, field.name)), + else => @compileError("bad field type"), + }; + i += 1; + } + } +}; diff --git a/lib/std/zig/Server.zig b/lib/std/zig/Server.zig new file mode 100644 index 
000000000000..788e36178244 --- /dev/null +++ b/lib/std/zig/Server.zig @@ -0,0 +1,305 @@ +in: std.fs.File, +out: std.fs.File, +receive_fifo: std.fifo.LinearFifo(u8, .Dynamic), + +pub const Message = struct { + pub const Header = extern struct { + tag: Tag, + /// Size of the body only; does not include this Header. + bytes_len: u32, + }; + + pub const Tag = enum(u32) { + /// Body is a UTF-8 string. + zig_version, + /// Body is an ErrorBundle. + error_bundle, + /// Body is a UTF-8 string. + progress, + /// Body is a EmitBinPath. + emit_bin_path, + /// Body is a TestMetadata + test_metadata, + /// Body is a TestResults + test_results, + + _, + }; + + /// Trailing: + /// * extra: [extra_len]u32, + /// * string_bytes: [string_bytes_len]u8, + /// See `std.zig.ErrorBundle`. + pub const ErrorBundle = extern struct { + extra_len: u32, + string_bytes_len: u32, + }; + + /// Trailing: + /// * name: [tests_len]u32 + /// - null-terminated string_bytes index + /// * async_frame_len: [tests_len]u32, + /// - 0 means not async + /// * expected_panic_msg: [tests_len]u32, + /// - null-terminated string_bytes index + /// - 0 means does not expect pani + /// * string_bytes: [string_bytes_len]u8, + pub const TestMetadata = extern struct { + string_bytes_len: u32, + tests_len: u32, + }; + + pub const TestResults = extern struct { + index: u32, + flags: Flags, + + pub const Flags = packed struct(u8) { + fail: bool, + skip: bool, + leak: bool, + + reserved: u5 = 0, + }; + }; + + /// Trailing: + /// * the file system path the emitted binary can be found + pub const EmitBinPath = extern struct { + flags: Flags, + + pub const Flags = packed struct(u8) { + cache_hit: bool, + reserved: u7 = 0, + }; + }; +}; + +pub const Options = struct { + gpa: Allocator, + in: std.fs.File, + out: std.fs.File, + zig_version: []const u8, +}; + +pub fn init(options: Options) !Server { + var s: Server = .{ + .in = options.in, + .out = options.out, + .receive_fifo = std.fifo.LinearFifo(u8, .Dynamic).init(options.gpa), + }; + try s.serveStringMessage(.zig_version, options.zig_version); + return s; +} + +pub fn deinit(s: *Server) void { + s.receive_fifo.deinit(); + s.* = undefined; +} + +pub fn receiveMessage(s: *Server) !InMessage.Header { + const Header = InMessage.Header; + const fifo = &s.receive_fifo; + + while (true) { + const buf = fifo.readableSlice(0); + assert(fifo.readableLength() == buf.len); + if (buf.len >= @sizeOf(Header)) { + const header = @ptrCast(*align(1) const Header, buf[0..@sizeOf(Header)]); + // workaround for https://github.com/ziglang/zig/issues/14904 + const bytes_len = bswap_and_workaround_u32(&header.bytes_len); + // workaround for https://github.com/ziglang/zig/issues/14904 + const tag = bswap_and_workaround_tag(&header.tag); + + if (buf.len - @sizeOf(Header) >= bytes_len) { + fifo.discard(@sizeOf(Header)); + return .{ + .tag = tag, + .bytes_len = bytes_len, + }; + } else { + const needed = bytes_len - (buf.len - @sizeOf(Header)); + const write_buffer = try fifo.writableWithSize(needed); + const amt = try s.in.read(write_buffer); + fifo.update(amt); + continue; + } + } + + const write_buffer = try fifo.writableWithSize(256); + const amt = try s.in.read(write_buffer); + fifo.update(amt); + } +} + +pub fn receiveBody_u32(s: *Server) !u32 { + const fifo = &s.receive_fifo; + const buf = fifo.readableSlice(0); + const result = @ptrCast(*align(1) const u32, buf[0..4]).*; + fifo.discard(4); + return bswap(result); +} + +pub fn serveStringMessage(s: *Server, tag: OutMessage.Tag, msg: []const u8) !void { + return 
s.serveMessage(.{ + .tag = tag, + .bytes_len = @intCast(u32, msg.len), + }, &.{msg}); +} + +pub fn serveMessage( + s: *const Server, + header: OutMessage.Header, + bufs: []const []const u8, +) !void { + var iovecs: [10]std.os.iovec_const = undefined; + const header_le = bswap(header); + iovecs[0] = .{ + .iov_base = @ptrCast([*]const u8, &header_le), + .iov_len = @sizeOf(OutMessage.Header), + }; + for (bufs, iovecs[1 .. bufs.len + 1]) |buf, *iovec| { + iovec.* = .{ + .iov_base = buf.ptr, + .iov_len = buf.len, + }; + } + try s.out.writevAll(iovecs[0 .. bufs.len + 1]); +} + +pub fn serveEmitBinPath( + s: *Server, + fs_path: []const u8, + header: OutMessage.EmitBinPath, +) !void { + try s.serveMessage(.{ + .tag = .emit_bin_path, + .bytes_len = @intCast(u32, fs_path.len + @sizeOf(OutMessage.EmitBinPath)), + }, &.{ + std.mem.asBytes(&header), + fs_path, + }); +} + +pub fn serveTestResults( + s: *Server, + msg: OutMessage.TestResults, +) !void { + const msg_le = bswap(msg); + try s.serveMessage(.{ + .tag = .test_results, + .bytes_len = @intCast(u32, @sizeOf(OutMessage.TestResults)), + }, &.{ + std.mem.asBytes(&msg_le), + }); +} + +pub fn serveErrorBundle(s: *Server, error_bundle: std.zig.ErrorBundle) !void { + const eb_hdr: OutMessage.ErrorBundle = .{ + .extra_len = @intCast(u32, error_bundle.extra.len), + .string_bytes_len = @intCast(u32, error_bundle.string_bytes.len), + }; + const bytes_len = @sizeOf(OutMessage.ErrorBundle) + + 4 * error_bundle.extra.len + error_bundle.string_bytes.len; + try s.serveMessage(.{ + .tag = .error_bundle, + .bytes_len = @intCast(u32, bytes_len), + }, &.{ + std.mem.asBytes(&eb_hdr), + // TODO: implement @ptrCast between slices changing the length + std.mem.sliceAsBytes(error_bundle.extra), + error_bundle.string_bytes, + }); +} + +pub const TestMetadata = struct { + names: []u32, + async_frame_sizes: []u32, + expected_panic_msgs: []u32, + string_bytes: []const u8, +}; + +pub fn serveTestMetadata(s: *Server, test_metadata: TestMetadata) !void { + const header: OutMessage.TestMetadata = .{ + .tests_len = bswap(@intCast(u32, test_metadata.names.len)), + .string_bytes_len = bswap(@intCast(u32, test_metadata.string_bytes.len)), + }; + const bytes_len = @sizeOf(OutMessage.TestMetadata) + + 3 * 4 * test_metadata.names.len + test_metadata.string_bytes.len; + + if (need_bswap) { + bswap_u32_array(test_metadata.names); + bswap_u32_array(test_metadata.async_frame_sizes); + bswap_u32_array(test_metadata.expected_panic_msgs); + } + defer if (need_bswap) { + bswap_u32_array(test_metadata.names); + bswap_u32_array(test_metadata.async_frame_sizes); + bswap_u32_array(test_metadata.expected_panic_msgs); + }; + + return s.serveMessage(.{ + .tag = .test_metadata, + .bytes_len = @intCast(u32, bytes_len), + }, &.{ + std.mem.asBytes(&header), + // TODO: implement @ptrCast between slices changing the length + std.mem.sliceAsBytes(test_metadata.names), + std.mem.sliceAsBytes(test_metadata.async_frame_sizes), + std.mem.sliceAsBytes(test_metadata.expected_panic_msgs), + test_metadata.string_bytes, + }); +} + +fn bswap(x: anytype) @TypeOf(x) { + if (!need_bswap) return x; + + const T = @TypeOf(x); + switch (@typeInfo(T)) { + .Enum => return @intToEnum(T, @byteSwap(@enumToInt(x))), + .Int => return @byteSwap(x), + .Struct => |info| switch (info.layout) { + .Extern => { + var result: T = undefined; + inline for (info.fields) |field| { + @field(result, field.name) = bswap(@field(x, field.name)); + } + return result; + }, + .Packed => { + const I = info.backing_integer.?; + return @bitCast(T, 
@byteSwap(@bitCast(I, x))); + }, + .Auto => @compileError("auto layout struct"), + }, + else => @compileError("bswap on type " ++ @typeName(T)), + } +} + +fn bswap_u32_array(slice: []u32) void { + comptime assert(need_bswap); + for (slice) |*elem| elem.* = @byteSwap(elem.*); +} + +/// workaround for https://github.com/ziglang/zig/issues/14904 +fn bswap_and_workaround_u32(x: *align(1) const u32) u32 { + const bytes_ptr = @ptrCast(*const [4]u8, x); + return std.mem.readIntLittle(u32, bytes_ptr); +} + +/// workaround for https://github.com/ziglang/zig/issues/14904 +fn bswap_and_workaround_tag(x: *align(1) const InMessage.Tag) InMessage.Tag { + const bytes_ptr = @ptrCast(*const [4]u8, x); + const int = std.mem.readIntLittle(u32, bytes_ptr); + return @intToEnum(InMessage.Tag, int); +} + +const OutMessage = std.zig.Server.Message; +const InMessage = std.zig.Client.Message; + +const Server = @This(); +const builtin = @import("builtin"); +const std = @import("std"); +const Allocator = std.mem.Allocator; +const assert = std.debug.assert; +const native_endian = builtin.target.cpu.arch.endian(); +const need_bswap = native_endian != .Little; diff --git a/lib/std/zig/system/NativeTargetInfo.zig b/lib/std/zig/system/NativeTargetInfo.zig index dbbebb43c936..987358ed5aa6 100644 --- a/lib/std/zig/system/NativeTargetInfo.zig +++ b/lib/std/zig/system/NativeTargetInfo.zig @@ -1090,6 +1090,11 @@ pub fn getExternalExecutor( switch (candidate.target.os.tag) { .windows => { if (options.allow_wine) { + // x86_64 wine does not support emulating aarch64-windows and + // vice versa. + if (candidate.target.cpu.arch != builtin.cpu.arch) { + return bad_result; + } switch (candidate.target.cpu.arch.ptrBitWidth()) { 32 => return Executor{ .wine = "wine" }, 64 => return Executor{ .wine = "wine64" }, diff --git a/lib/test_runner.zig b/lib/test_runner.zig index 5968fdaa5433..3d872648516e 100644 --- a/lib/test_runner.zig +++ b/lib/test_runner.zig @@ -8,14 +8,126 @@ pub const std_options = struct { }; var log_err_count: usize = 0; +var cmdline_buffer: [4096]u8 = undefined; +var fba = std.heap.FixedBufferAllocator.init(&cmdline_buffer); pub fn main() void { - if (builtin.zig_backend != .stage1 and - builtin.zig_backend != .stage2_llvm and - builtin.zig_backend != .stage2_c) + if (builtin.zig_backend == .stage2_wasm or + builtin.zig_backend == .stage2_x86_64 or + builtin.zig_backend == .stage2_aarch64) { - return main2() catch @panic("test failure"); + return mainSimple() catch @panic("test failure"); + } + + const args = std.process.argsAlloc(fba.allocator()) catch + @panic("unable to parse command line args"); + + var listen = false; + + for (args[1..]) |arg| { + if (std.mem.eql(u8, arg, "--listen=-")) { + listen = true; + } else { + @panic("unrecognized command line argument"); + } + } + + if (listen) { + return mainServer() catch @panic("internal test runner failure"); + } else { + return mainTerminal(); + } +} + +fn mainServer() !void { + var server = try std.zig.Server.init(.{ + .gpa = fba.allocator(), + .in = std.io.getStdIn(), + .out = std.io.getStdOut(), + .zig_version = builtin.zig_version_string, + }); + defer server.deinit(); + + while (true) { + const hdr = try server.receiveMessage(); + switch (hdr.tag) { + .exit => { + return std.process.exit(0); + }, + .query_test_metadata => { + std.testing.allocator_instance = .{}; + defer if (std.testing.allocator_instance.deinit()) { + @panic("internal test runner memory leak"); + }; + + var string_bytes: std.ArrayListUnmanaged(u8) = .{}; + defer 
string_bytes.deinit(std.testing.allocator); + try string_bytes.append(std.testing.allocator, 0); // Reserve 0 for null. + + const test_fns = builtin.test_functions; + const names = try std.testing.allocator.alloc(u32, test_fns.len); + defer std.testing.allocator.free(names); + const async_frame_sizes = try std.testing.allocator.alloc(u32, test_fns.len); + defer std.testing.allocator.free(async_frame_sizes); + const expected_panic_msgs = try std.testing.allocator.alloc(u32, test_fns.len); + defer std.testing.allocator.free(expected_panic_msgs); + + for (test_fns, names, async_frame_sizes, expected_panic_msgs) |test_fn, *name, *async_frame_size, *expected_panic_msg| { + name.* = @intCast(u32, string_bytes.items.len); + try string_bytes.ensureUnusedCapacity(std.testing.allocator, test_fn.name.len + 1); + string_bytes.appendSliceAssumeCapacity(test_fn.name); + string_bytes.appendAssumeCapacity(0); + + async_frame_size.* = @intCast(u32, test_fn.async_frame_size orelse 0); + expected_panic_msg.* = 0; + } + + try server.serveTestMetadata(.{ + .names = names, + .async_frame_sizes = async_frame_sizes, + .expected_panic_msgs = expected_panic_msgs, + .string_bytes = string_bytes.items, + }); + }, + + .run_test => { + std.testing.allocator_instance = .{}; + const index = try server.receiveBody_u32(); + const test_fn = builtin.test_functions[index]; + if (test_fn.async_frame_size != null) + @panic("TODO test runner implement async tests"); + var fail = false; + var skip = false; + var leak = false; + test_fn.func() catch |err| switch (err) { + error.SkipZigTest => skip = true, + else => { + fail = true; + if (@errorReturnTrace()) |trace| { + std.debug.dumpStackTrace(trace.*); + } + }, + }; + leak = std.testing.allocator_instance.deinit(); + try server.serveTestResults(.{ + .index = index, + .flags = .{ + .fail = fail, + .skip = skip, + .leak = leak, + }, + }); + }, + + else => { + std.debug.print("unsupported message: {x}", .{@enumToInt(hdr.tag)}); + std.process.exit(1); + }, + } } +} + +fn mainTerminal() void { const test_fn_list = builtin.test_functions; var ok_count: usize = 0; var skip_count: usize = 0; @@ -118,51 +230,17 @@ pub fn log( } } -pub fn main2() anyerror!void { - var skipped: usize = 0; - var failed: usize = 0; - // Simpler main(), exercising fewer language features, so that stage2 can handle it. +/// Simpler main(), exercising fewer language features, so that +/// work-in-progress backends can handle it. 
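Before mainSimple, whose doc comment appears just above, a hedged sketch of the other side of this exchange: what a host process might write to the test runner's stdin (after spawning it with --listen=-) to run a single test and then shut the runner down. Only Client.Message.Header and its tags come from this change; the function name and the little-endian assumption are illustrative.

    const std = @import("std");

    // Ask the test runner to run test `index`, then tell it to exit.
    // Header and body bytes are written in little-endian order, which is what
    // receiveMessage/receiveBody_u32 above decode on little-endian hosts.
    fn requestSingleTest(out: std.fs.File, index: u32) !void {
        const Header = std.zig.Client.Message.Header;
        const run_header = Header{ .tag = .run_test, .bytes_len = @sizeOf(u32) };
        try out.writeAll(std.mem.asBytes(&run_header));
        try out.writeAll(std.mem.asBytes(&index));
        const exit_header = Header{ .tag = .exit, .bytes_len = 0 };
        try out.writeAll(std.mem.asBytes(&exit_header));
    }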
+pub fn mainSimple() anyerror!void { + //const stderr = std.io.getStdErr(); for (builtin.test_functions) |test_fn| { test_fn.func() catch |err| { if (err != error.SkipZigTest) { - failed += 1; - } else { - skipped += 1; + //stderr.writeAll(test_fn.name) catch {}; + //stderr.writeAll("\n") catch {}; + return err; } }; } - if (builtin.zig_backend == .stage2_wasm or - builtin.zig_backend == .stage2_x86_64 or - builtin.zig_backend == .stage2_aarch64 or - builtin.zig_backend == .stage2_llvm or - builtin.zig_backend == .stage2_c) - { - const passed = builtin.test_functions.len - skipped - failed; - const stderr = std.io.getStdErr(); - writeInt(stderr, passed) catch {}; - stderr.writeAll(" passed; ") catch {}; - writeInt(stderr, skipped) catch {}; - stderr.writeAll(" skipped; ") catch {}; - writeInt(stderr, failed) catch {}; - stderr.writeAll(" failed.\n") catch {}; - } - if (failed != 0) { - return error.TestsFailed; - } -} - -fn writeInt(stderr: std.fs.File, int: usize) anyerror!void { - const base = 10; - var buf: [100]u8 = undefined; - var a: usize = int; - var index: usize = buf.len; - while (true) { - const digit = a % base; - index -= 1; - buf[index] = std.fmt.digitToChar(@intCast(u8, digit), .lower); - a /= base; - if (a == 0) break; - } - const slice = buf[index..]; - try stderr.writeAll(slice); } diff --git a/src/AstGen.zig b/src/AstGen.zig index 7b2138a535b4..182a28084f14 100644 --- a/src/AstGen.zig +++ b/src/AstGen.zig @@ -148,18 +148,24 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir { }; defer gz_instructions.deinit(gpa); - if (AstGen.structDeclInner( - &gen_scope, - &gen_scope.base, - 0, - tree.containerDeclRoot(), - .Auto, - 0, - )) |struct_decl_ref| { - assert(refToIndex(struct_decl_ref).? == 0); - } else |err| switch (err) { - error.OutOfMemory => return error.OutOfMemory, - error.AnalysisFail => {}, // Handled via compile_errors below. + // The AST -> ZIR lowering process assumes an AST that does not have any + // parse errors. + if (tree.errors.len == 0) { + if (AstGen.structDeclInner( + &gen_scope, + &gen_scope.base, + 0, + tree.containerDeclRoot(), + .Auto, + 0, + )) |struct_decl_ref| { + assert(refToIndex(struct_decl_ref).? == 0); + } else |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.AnalysisFail => {}, // Handled via compile_errors below. 
+ } + } else { + try lowerAstErrors(&astgen); } const err_index = @enumToInt(Zir.ExtraIndex.compile_errors); @@ -10380,7 +10386,7 @@ fn appendErrorTok( comptime format: []const u8, args: anytype, ) !void { - try astgen.appendErrorTokNotes(token, format, args, &[0]u32{}); + try astgen.appendErrorTokNotesOff(token, 0, format, args, &[0]u32{}); } fn failTokNotes( @@ -10390,7 +10396,7 @@ fn failTokNotes( args: anytype, notes: []const u32, ) InnerError { - try appendErrorTokNotes(astgen, token, format, args, notes); + try appendErrorTokNotesOff(astgen, token, 0, format, args, notes); return error.AnalysisFail; } @@ -10401,27 +10407,11 @@ fn appendErrorTokNotes( args: anytype, notes: []const u32, ) !void { - @setCold(true); - const string_bytes = &astgen.string_bytes; - const msg = @intCast(u32, string_bytes.items.len); - try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args); - const notes_index: u32 = if (notes.len != 0) blk: { - const notes_start = astgen.extra.items.len; - try astgen.extra.ensureTotalCapacity(astgen.gpa, notes_start + 1 + notes.len); - astgen.extra.appendAssumeCapacity(@intCast(u32, notes.len)); - astgen.extra.appendSliceAssumeCapacity(notes); - break :blk @intCast(u32, notes_start); - } else 0; - try astgen.compile_errors.append(astgen.gpa, .{ - .msg = msg, - .node = 0, - .token = token, - .byte_offset = 0, - .notes = notes_index, - }); + return appendErrorTokNotesOff(astgen, token, 0, format, args, notes); } -/// Same as `fail`, except given an absolute byte offset. +/// Same as `fail`, except given a token plus an offset from its starting byte +/// offset. fn failOff( astgen: *AstGen, token: Ast.TokenIndex, @@ -10429,27 +10419,36 @@ fn failOff( comptime format: []const u8, args: anytype, ) InnerError { - try appendErrorOff(astgen, token, byte_offset, format, args); + try appendErrorTokNotesOff(astgen, token, byte_offset, format, args, &.{}); return error.AnalysisFail; } -fn appendErrorOff( +fn appendErrorTokNotesOff( astgen: *AstGen, token: Ast.TokenIndex, byte_offset: u32, comptime format: []const u8, args: anytype, -) Allocator.Error!void { + notes: []const u32, +) !void { @setCold(true); + const gpa = astgen.gpa; const string_bytes = &astgen.string_bytes; const msg = @intCast(u32, string_bytes.items.len); - try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args); - try astgen.compile_errors.append(astgen.gpa, .{ + try string_bytes.writer(gpa).print(format ++ "\x00", args); + const notes_index: u32 = if (notes.len != 0) blk: { + const notes_start = astgen.extra.items.len; + try astgen.extra.ensureTotalCapacity(gpa, notes_start + 1 + notes.len); + astgen.extra.appendAssumeCapacity(@intCast(u32, notes.len)); + astgen.extra.appendSliceAssumeCapacity(notes); + break :blk @intCast(u32, notes_start); + } else 0; + try astgen.compile_errors.append(gpa, .{ .msg = msg, .node = 0, .token = token, .byte_offset = byte_offset, - .notes = 0, + .notes = notes_index, }); } @@ -10458,6 +10457,16 @@ fn errNoteTok( token: Ast.TokenIndex, comptime format: []const u8, args: anytype, +) Allocator.Error!u32 { + return errNoteTokOff(astgen, token, 0, format, args); +} + +fn errNoteTokOff( + astgen: *AstGen, + token: Ast.TokenIndex, + byte_offset: u32, + comptime format: []const u8, + args: anytype, ) Allocator.Error!u32 { @setCold(true); const string_bytes = &astgen.string_bytes; @@ -10467,7 +10476,7 @@ fn errNoteTok( .msg = msg, .node = 0, .token = token, - .byte_offset = 0, + .byte_offset = byte_offset, .notes = 0, }); } @@ -12634,3 +12643,42 @@ fn emitDbgStmt(gz: 
*GenZir, line: u32, column: u32) !void { }, } }); } + +fn lowerAstErrors(astgen: *AstGen) !void { + const tree = astgen.tree; + assert(tree.errors.len > 0); + + const gpa = astgen.gpa; + const parse_err = tree.errors[0]; + + var msg: std.ArrayListUnmanaged(u8) = .{}; + defer msg.deinit(gpa); + + const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); + + var notes: std.ArrayListUnmanaged(u32) = .{}; + defer notes.deinit(gpa); + + if (token_tags[parse_err.token + @boolToInt(parse_err.token_is_prev)] == .invalid) { + const tok = parse_err.token + @boolToInt(parse_err.token_is_prev); + const bad_off = @intCast(u32, tree.tokenSlice(parse_err.token + @boolToInt(parse_err.token_is_prev)).len); + const byte_abs = token_starts[parse_err.token + @boolToInt(parse_err.token_is_prev)] + bad_off; + try notes.append(gpa, try astgen.errNoteTokOff(tok, bad_off, "invalid byte: '{'}'", .{ + std.zig.fmtEscapes(tree.source[byte_abs..][0..1]), + })); + } + + for (tree.errors[1..]) |note| { + if (!note.is_note) break; + + msg.clearRetainingCapacity(); + try tree.renderError(note, msg.writer(gpa)); + try notes.append(gpa, try astgen.errNoteTok(note.token, "{s}", .{msg.items})); + } + + const extra_offset = tree.errorOffset(parse_err); + msg.clearRetainingCapacity(); + try tree.renderError(parse_err, msg.writer(gpa)); + try astgen.appendErrorTokNotesOff(parse_err.token, extra_offset, "{s}", .{msg.items}, notes.items); +} diff --git a/src/Compilation.zig b/src/Compilation.zig index de433a680075..89512ce7444d 100644 --- a/src/Compilation.zig +++ b/src/Compilation.zig @@ -7,6 +7,9 @@ const Allocator = std.mem.Allocator; const assert = std.debug.assert; const log = std.log.scoped(.compilation); const Target = std.Target; +const ThreadPool = std.Thread.Pool; +const WaitGroup = std.Thread.WaitGroup; +const ErrorBundle = std.zig.ErrorBundle; const Value = @import("value.zig").Value; const Type = @import("type.zig").Type; @@ -30,8 +33,6 @@ const Cache = std.Build.Cache; const translate_c = @import("translate_c.zig"); const clang = @import("clang.zig"); const c_codegen = @import("codegen/c.zig"); -const ThreadPool = @import("ThreadPool.zig"); -const WaitGroup = @import("WaitGroup.zig"); const libtsan = @import("libtsan.zig"); const Zir = @import("Zir.zig"); const Autodoc = @import("Autodoc.zig"); @@ -99,6 +100,7 @@ job_queued_compiler_rt_lib: bool = false, job_queued_compiler_rt_obj: bool = false, alloc_failure_occurred: bool = false, formatted_panics: bool = false, +last_update_was_cache_hit: bool = false, c_source_files: []const CSourceFile, clang_argv: []const []const u8, @@ -334,12 +336,41 @@ pub const MiscTask = enum { libssp, zig_libc, analyze_pkg, + + @"musl crti.o", + @"musl crtn.o", + @"musl crt1.o", + @"musl rcrt1.o", + @"musl Scrt1.o", + @"musl libc.a", + @"musl libc.so", + + @"wasi crt1-reactor.o", + @"wasi crt1-command.o", + @"wasi libc.a", + @"libwasi-emulated-process-clocks.a", + @"libwasi-emulated-getpid.a", + @"libwasi-emulated-mman.a", + @"libwasi-emulated-signal.a", + + @"glibc crti.o", + @"glibc crtn.o", + @"glibc Scrt1.o", + @"glibc libc_nonshared.a", + @"glibc shared object", + + @"mingw-w64 crt2.o", + @"mingw-w64 dllcrt2.o", + @"mingw-w64 mingw32.lib", + @"mingw-w64 msvcrt-os.lib", + @"mingw-w64 mingwex.lib", + @"mingw-w64 uuid.lib", }; pub const MiscError = struct { /// Allocated with gpa. 
msg: []u8, - children: ?AllErrors = null, + children: ?ErrorBundle = null, pub fn deinit(misc_err: *MiscError, gpa: Allocator) void { gpa.free(misc_err.msg); @@ -365,448 +396,6 @@ pub const LldError = struct { } }; -/// To support incremental compilation, errors are stored in various places -/// so that they can be created and destroyed appropriately. This structure -/// is used to collect all the errors from the various places into one -/// convenient place for API users to consume. It is allocated into 1 arena -/// and freed all at once. -pub const AllErrors = struct { - arena: std.heap.ArenaAllocator.State, - list: []const Message, - - pub const Message = union(enum) { - src: struct { - msg: []const u8, - src_path: []const u8, - line: u32, - column: u32, - span: Module.SrcLoc.Span, - /// Usually one, but incremented for redundant messages. - count: u32 = 1, - /// Does not include the trailing newline. - source_line: ?[]const u8, - notes: []const Message = &.{}, - reference_trace: []Message = &.{}, - - /// Splits the error message up into lines to properly indent them - /// to allow for long, good-looking error messages. - /// - /// This is used to split the message in `@compileError("hello\nworld")` for example. - fn writeMsg(src: @This(), stderr: anytype, indent: usize) !void { - var lines = mem.split(u8, src.msg, "\n"); - while (lines.next()) |line| { - try stderr.writeAll(line); - if (lines.index == null) break; - try stderr.writeByte('\n'); - try stderr.writeByteNTimes(' ', indent); - } - } - }, - plain: struct { - msg: []const u8, - notes: []Message = &.{}, - /// Usually one, but incremented for redundant messages. - count: u32 = 1, - }, - - pub fn incrementCount(msg: *Message) void { - switch (msg.*) { - .src => |*src| { - src.count += 1; - }, - .plain => |*plain| { - plain.count += 1; - }, - } - } - - pub fn renderToStdErr(msg: Message, ttyconf: std.debug.TTY.Config) void { - std.debug.getStderrMutex().lock(); - defer std.debug.getStderrMutex().unlock(); - const stderr = std.io.getStdErr(); - return msg.renderToWriter(ttyconf, stderr.writer(), "error", .Red, 0) catch return; - } - - pub fn renderToWriter( - msg: Message, - ttyconf: std.debug.TTY.Config, - stderr: anytype, - kind: []const u8, - color: std.debug.TTY.Color, - indent: usize, - ) anyerror!void { - var counting_writer = std.io.countingWriter(stderr); - const counting_stderr = counting_writer.writer(); - switch (msg) { - .src => |src| { - try counting_stderr.writeByteNTimes(' ', indent); - try ttyconf.setColor(stderr, .Bold); - try counting_stderr.print("{s}:{d}:{d}: ", .{ - src.src_path, - src.line + 1, - src.column + 1, - }); - try ttyconf.setColor(stderr, color); - try counting_stderr.writeAll(kind); - try counting_stderr.writeAll(": "); - // This is the length of the part before the error message: - // e.g. 
"file.zig:4:5: error: " - const prefix_len = @intCast(usize, counting_stderr.context.bytes_written); - try ttyconf.setColor(stderr, .Reset); - try ttyconf.setColor(stderr, .Bold); - if (src.count == 1) { - try src.writeMsg(stderr, prefix_len); - try stderr.writeByte('\n'); - } else { - try src.writeMsg(stderr, prefix_len); - try ttyconf.setColor(stderr, .Dim); - try stderr.print(" ({d} times)\n", .{src.count}); - } - try ttyconf.setColor(stderr, .Reset); - if (src.source_line) |line| { - for (line) |b| switch (b) { - '\t' => try stderr.writeByte(' '), - else => try stderr.writeByte(b), - }; - try stderr.writeByte('\n'); - // TODO basic unicode code point monospace width - const before_caret = src.span.main - src.span.start; - // -1 since span.main includes the caret - const after_caret = src.span.end - src.span.main -| 1; - try stderr.writeByteNTimes(' ', src.column - before_caret); - try ttyconf.setColor(stderr, .Green); - try stderr.writeByteNTimes('~', before_caret); - try stderr.writeByte('^'); - try stderr.writeByteNTimes('~', after_caret); - try stderr.writeByte('\n'); - try ttyconf.setColor(stderr, .Reset); - } - for (src.notes) |note| { - try note.renderToWriter(ttyconf, stderr, "note", .Cyan, indent); - } - if (src.reference_trace.len != 0) { - try ttyconf.setColor(stderr, .Reset); - try ttyconf.setColor(stderr, .Dim); - try stderr.print("referenced by:\n", .{}); - for (src.reference_trace) |reference| { - switch (reference) { - .src => |ref_src| try stderr.print(" {s}: {s}:{d}:{d}\n", .{ - ref_src.msg, - ref_src.src_path, - ref_src.line + 1, - ref_src.column + 1, - }), - .plain => |plain| if (plain.count != 0) { - try stderr.print( - " {d} reference(s) hidden; use '-freference-trace={d}' to see all references\n", - .{ plain.count, plain.count + src.reference_trace.len - 1 }, - ); - } else { - try stderr.print( - " remaining reference traces hidden; use '-freference-trace' to see all reference traces\n", - .{}, - ); - }, - } - } - try stderr.writeByte('\n'); - try ttyconf.setColor(stderr, .Reset); - } - }, - .plain => |plain| { - try ttyconf.setColor(stderr, color); - try stderr.writeByteNTimes(' ', indent); - try stderr.writeAll(kind); - try stderr.writeAll(": "); - try ttyconf.setColor(stderr, .Reset); - if (plain.count == 1) { - try stderr.print("{s}\n", .{plain.msg}); - } else { - try stderr.print("{s}", .{plain.msg}); - try ttyconf.setColor(stderr, .Dim); - try stderr.print(" ({d} times)\n", .{plain.count}); - } - try ttyconf.setColor(stderr, .Reset); - for (plain.notes) |note| { - try note.renderToWriter(ttyconf, stderr, "note", .Cyan, indent + 4); - } - }, - } - } - - pub const HashContext = struct { - pub fn hash(ctx: HashContext, key: *Message) u64 { - _ = ctx; - var hasher = std.hash.Wyhash.init(0); - - switch (key.*) { - .src => |src| { - hasher.update(src.msg); - hasher.update(src.src_path); - std.hash.autoHash(&hasher, src.line); - std.hash.autoHash(&hasher, src.column); - std.hash.autoHash(&hasher, src.span.main); - }, - .plain => |plain| { - hasher.update(plain.msg); - }, - } - - return hasher.final(); - } - - pub fn eql(ctx: HashContext, a: *Message, b: *Message) bool { - _ = ctx; - switch (a.*) { - .src => |a_src| switch (b.*) { - .src => |b_src| { - return mem.eql(u8, a_src.msg, b_src.msg) and - mem.eql(u8, a_src.src_path, b_src.src_path) and - a_src.line == b_src.line and - a_src.column == b_src.column and - a_src.span.main == b_src.span.main; - }, - .plain => return false, - }, - .plain => |a_plain| switch (b.*) { - .src => return false, - .plain => |b_plain| { 
- return mem.eql(u8, a_plain.msg, b_plain.msg); - }, - }, - } - } - }; - }; - - pub fn deinit(self: *AllErrors, gpa: Allocator) void { - self.arena.promote(gpa).deinit(); - } - - pub fn add( - module: *Module, - arena: *std.heap.ArenaAllocator, - errors: *std.ArrayList(Message), - module_err_msg: Module.ErrorMsg, - ) !void { - const allocator = arena.allocator(); - - const notes_buf = try allocator.alloc(Message, module_err_msg.notes.len); - var note_i: usize = 0; - - // De-duplicate error notes. The main use case in mind for this is - // too many "note: called from here" notes when eval branch quota is reached. - var seen_notes = std.HashMap( - *Message, - void, - Message.HashContext, - std.hash_map.default_max_load_percentage, - ).init(allocator); - const err_source = module_err_msg.src_loc.file_scope.getSource(module.gpa) catch |err| { - const file_path = try module_err_msg.src_loc.file_scope.fullPath(allocator); - try errors.append(.{ - .plain = .{ - .msg = try std.fmt.allocPrint(allocator, "unable to load '{s}': {s}", .{ - file_path, @errorName(err), - }), - }, - }); - return; - }; - const err_span = try module_err_msg.src_loc.span(module.gpa); - const err_loc = std.zig.findLineColumn(err_source.bytes, err_span.main); - - for (module_err_msg.notes) |module_note| { - const source = try module_note.src_loc.file_scope.getSource(module.gpa); - const span = try module_note.src_loc.span(module.gpa); - const loc = std.zig.findLineColumn(source.bytes, span.main); - const file_path = try module_note.src_loc.file_scope.fullPath(allocator); - const note = ¬es_buf[note_i]; - note.* = .{ - .src = .{ - .src_path = file_path, - .msg = try allocator.dupe(u8, module_note.msg), - .span = span, - .line = @intCast(u32, loc.line), - .column = @intCast(u32, loc.column), - .source_line = if (err_loc.eql(loc)) null else try allocator.dupe(u8, loc.source_line), - }, - }; - const gop = try seen_notes.getOrPut(note); - if (gop.found_existing) { - gop.key_ptr.*.incrementCount(); - } else { - note_i += 1; - } - } - - const reference_trace = try allocator.alloc(Message, module_err_msg.reference_trace.len); - for (reference_trace, 0..) 
|*reference, i| { - const module_reference = module_err_msg.reference_trace[i]; - if (module_reference.hidden != 0) { - reference.* = .{ .plain = .{ .msg = undefined, .count = module_reference.hidden } }; - break; - } else if (module_reference.decl == null) { - reference.* = .{ .plain = .{ .msg = undefined, .count = 0 } }; - break; - } - const source = try module_reference.src_loc.file_scope.getSource(module.gpa); - const span = try module_reference.src_loc.span(module.gpa); - const loc = std.zig.findLineColumn(source.bytes, span.main); - const file_path = try module_reference.src_loc.file_scope.fullPath(allocator); - reference.* = .{ - .src = .{ - .src_path = file_path, - .msg = try allocator.dupe(u8, std.mem.sliceTo(module_reference.decl.?, 0)), - .span = span, - .line = @intCast(u32, loc.line), - .column = @intCast(u32, loc.column), - .source_line = null, - }, - }; - } - const file_path = try module_err_msg.src_loc.file_scope.fullPath(allocator); - try errors.append(.{ - .src = .{ - .src_path = file_path, - .msg = try allocator.dupe(u8, module_err_msg.msg), - .span = err_span, - .line = @intCast(u32, err_loc.line), - .column = @intCast(u32, err_loc.column), - .notes = notes_buf[0..note_i], - .reference_trace = reference_trace, - .source_line = if (module_err_msg.src_loc.lazy == .entire_file) null else try allocator.dupe(u8, err_loc.source_line), - }, - }); - } - - pub fn addZir( - arena: Allocator, - errors: *std.ArrayList(Message), - file: *Module.File, - ) !void { - assert(file.zir_loaded); - assert(file.tree_loaded); - assert(file.source_loaded); - const payload_index = file.zir.extra[@enumToInt(Zir.ExtraIndex.compile_errors)]; - assert(payload_index != 0); - - const header = file.zir.extraData(Zir.Inst.CompileErrors, payload_index); - const items_len = header.data.items_len; - var extra_index = header.end; - var item_i: usize = 0; - while (item_i < items_len) : (item_i += 1) { - const item = file.zir.extraData(Zir.Inst.CompileErrors.Item, extra_index); - extra_index = item.end; - const err_span = blk: { - if (item.data.node != 0) { - break :blk Module.SrcLoc.nodeToSpan(&file.tree, item.data.node); - } - const token_starts = file.tree.tokens.items(.start); - const start = token_starts[item.data.token] + item.data.byte_offset; - const end = start + @intCast(u32, file.tree.tokenSlice(item.data.token).len) - item.data.byte_offset; - break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start }; - }; - const err_loc = std.zig.findLineColumn(file.source, err_span.main); - - var notes: []Message = &[0]Message{}; - if (item.data.notes != 0) { - const block = file.zir.extraData(Zir.Inst.Block, item.data.notes); - const body = file.zir.extra[block.end..][0..block.data.body_len]; - notes = try arena.alloc(Message, body.len); - for (notes, 0..) 
|*note, i| { - const note_item = file.zir.extraData(Zir.Inst.CompileErrors.Item, body[i]); - const msg = file.zir.nullTerminatedString(note_item.data.msg); - const span = blk: { - if (note_item.data.node != 0) { - break :blk Module.SrcLoc.nodeToSpan(&file.tree, note_item.data.node); - } - const token_starts = file.tree.tokens.items(.start); - const start = token_starts[note_item.data.token] + note_item.data.byte_offset; - const end = start + @intCast(u32, file.tree.tokenSlice(note_item.data.token).len) - item.data.byte_offset; - break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start }; - }; - const loc = std.zig.findLineColumn(file.source, span.main); - - note.* = .{ - .src = .{ - .src_path = try file.fullPath(arena), - .msg = try arena.dupe(u8, msg), - .span = span, - .line = @intCast(u32, loc.line), - .column = @intCast(u32, loc.column), - .notes = &.{}, // TODO rework this function to be recursive - .source_line = if (loc.eql(err_loc)) null else try arena.dupe(u8, loc.source_line), - }, - }; - } - } - - const msg = file.zir.nullTerminatedString(item.data.msg); - try errors.append(.{ - .src = .{ - .src_path = try file.fullPath(arena), - .msg = try arena.dupe(u8, msg), - .span = err_span, - .line = @intCast(u32, err_loc.line), - .column = @intCast(u32, err_loc.column), - .notes = notes, - .source_line = try arena.dupe(u8, err_loc.source_line), - }, - }); - } - } - - fn addPlain( - arena: *std.heap.ArenaAllocator, - errors: *std.ArrayList(Message), - msg: []const u8, - ) !void { - _ = arena; - try errors.append(.{ .plain = .{ .msg = msg } }); - } - - fn addPlainWithChildren( - arena: *std.heap.ArenaAllocator, - errors: *std.ArrayList(Message), - msg: []const u8, - optional_children: ?AllErrors, - ) !void { - const allocator = arena.allocator(); - const duped_msg = try allocator.dupe(u8, msg); - if (optional_children) |*children| { - try errors.append(.{ .plain = .{ - .msg = duped_msg, - .notes = try dupeList(children.list, allocator), - } }); - } else { - try errors.append(.{ .plain = .{ .msg = duped_msg } }); - } - } - - fn dupeList(list: []const Message, arena: Allocator) Allocator.Error![]Message { - const duped_list = try arena.alloc(Message, list.len); - for (list, 0..) |item, i| { - duped_list[i] = switch (item) { - .src => |src| .{ .src = .{ - .msg = try arena.dupe(u8, src.msg), - .src_path = try arena.dupe(u8, src.src_path), - .line = src.line, - .column = src.column, - .span = src.span, - .source_line = if (src.source_line) |s| try arena.dupe(u8, s) else null, - .notes = try dupeList(src.notes, arena), - } }, - .plain => |plain| .{ .plain = .{ - .msg = try arena.dupe(u8, plain.msg), - .notes = try dupeList(plain.notes, arena), - } }, - }; - } - return duped_list; - } -}; - pub const Directory = Cache.Directory; pub const EmitLoc = struct { @@ -2259,12 +1848,20 @@ fn cleanupTmpArtifactDirectory( } } +pub fn hotCodeSwap(comp: *Compilation, prog_node: *std.Progress.Node, pid: std.ChildProcess.Id) !void { + comp.bin_file.child_pid = pid; + try comp.makeBinFileWritable(); + try comp.update(prog_node); + try comp.makeBinFileExecutable(); +} + /// Detect changes to source files, perform semantic analysis, and update the output files. 
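The update() signature change just below goes hand in hand with this: the caller now owns the std.Progress instance rather than update() creating one internally. A hedged sketch of the new call site, mirroring the progress setup that this hunk removes from update() further down; the rebuild wrapper and the Compilation import path are illustrative.

    const std = @import("std");
    const Compilation = @import("Compilation.zig");

    fn rebuild(comp: *Compilation) !void {
        // The caller creates the progress tree and hands its root node to update().
        var progress: std.Progress = .{ .dont_print_on_dumb = true };
        const main_progress_node = progress.start("", 0);
        defer main_progress_node.end();
        try comp.update(main_progress_node);
    }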
-pub fn update(comp: *Compilation) !void { +pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void { const tracy_trace = trace(@src()); defer tracy_trace.end(); comp.clearMiscFailures(); + comp.last_update_was_cache_hit = false; var man: Cache.Manifest = undefined; defer if (comp.whole_cache_manifest != null) man.deinit(); @@ -2292,6 +1889,7 @@ pub fn update(comp: *Compilation) !void { return err; }; if (is_hit) { + comp.last_update_was_cache_hit = true; log.debug("CacheMode.whole cache hit for {s}", .{comp.bin_file.options.root_name}); const digest = man.final(); @@ -2407,21 +2005,6 @@ pub fn update(comp: *Compilation) !void { } } - // If the terminal is dumb, we dont want to show the user all the output. - var progress: std.Progress = .{ .dont_print_on_dumb = true }; - const main_progress_node = progress.start("", 0); - defer main_progress_node.end(); - switch (comp.color) { - .off => { - progress.terminal = null; - }, - .on => { - progress.terminal = std.io.getStdErr(); - progress.supports_ansi_escape_codes = true; - }, - .auto => {}, - } - try comp.performAllTheWork(main_progress_node); if (comp.bin_file.options.module) |module| { @@ -2891,7 +2474,7 @@ pub fn makeBinFileWritable(self: *Compilation) !void { } /// This function is temporally single-threaded. -pub fn totalErrorCount(self: *Compilation) usize { +pub fn totalErrorCount(self: *Compilation) u32 { var total: usize = self.failed_c_objects.count() + self.misc_failures.count() + @boolToInt(self.alloc_failure_occurred) + self.lld_errors.items.len; @@ -2951,17 +2534,16 @@ pub fn totalErrorCount(self: *Compilation) usize { } } - return total; + return @intCast(u32, total); } /// This function is temporally single-threaded. -pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors { - var arena = std.heap.ArenaAllocator.init(self.gpa); - errdefer arena.deinit(); - const arena_allocator = arena.allocator(); +pub fn getAllErrorsAlloc(self: *Compilation) !ErrorBundle { + const gpa = self.gpa; - var errors = std.ArrayList(AllErrors.Message).init(self.gpa); - defer errors.deinit(); + var bundle: ErrorBundle.Wip = undefined; + try bundle.init(gpa); + defer bundle.deinit(); { var it = self.failed_c_objects.iterator(); @@ -2970,53 +2552,58 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors { const err_msg = entry.value_ptr.*; // TODO these fields will need to be adjusted when we have proper // C error reporting bubbling up. - try errors.append(.{ - .src = .{ - .src_path = try arena_allocator.dupe(u8, c_object.src.src_path), - .msg = try std.fmt.allocPrint(arena_allocator, "unable to build C object: {s}", .{ - err_msg.msg, - }), - .span = .{ .start = 0, .end = 1, .main = 0 }, + try bundle.addRootErrorMessage(.{ + .msg = try bundle.printString("unable to build C object: {s}", .{err_msg.msg}), + .src_loc = try bundle.addSourceLocation(.{ + .src_path = try bundle.addString(c_object.src.src_path), + .span_start = 0, + .span_main = 0, + .span_end = 1, .line = err_msg.line, .column = err_msg.column, - .source_line = null, // TODO - }, + .source_line = 0, // TODO + }), }); } } + for (self.lld_errors.items) |lld_error| { - const notes = try arena_allocator.alloc(AllErrors.Message, lld_error.context_lines.len); - for (lld_error.context_lines, 0..) 
|context_line, i| { - notes[i] = .{ .plain = .{ - .msg = try arena_allocator.dupe(u8, context_line), - } }; - } + const notes_len = @intCast(u32, lld_error.context_lines.len); - try errors.append(.{ - .plain = .{ - .msg = try arena_allocator.dupe(u8, lld_error.msg), - .notes = notes, - }, + try bundle.addRootErrorMessage(.{ + .msg = try bundle.addString(lld_error.msg), + .notes_len = notes_len, }); + const notes_start = try bundle.reserveNotes(notes_len); + for (notes_start.., lld_error.context_lines) |note, context_line| { + bundle.extra.items[note] = @enumToInt(bundle.addErrorMessageAssumeCapacity(.{ + .msg = try bundle.addString(context_line), + })); + } } for (self.misc_failures.values()) |*value| { - try AllErrors.addPlainWithChildren(&arena, &errors, value.msg, value.children); + try bundle.addRootErrorMessage(.{ + .msg = try bundle.addString(value.msg), + .notes_len = if (value.children) |b| b.errorMessageCount() else 0, + }); + if (value.children) |b| try bundle.addBundle(b); } if (self.alloc_failure_occurred) { - try AllErrors.addPlain(&arena, &errors, "memory allocation failure"); + try bundle.addRootErrorMessage(.{ + .msg = try bundle.addString("memory allocation failure"), + }); } if (self.bin_file.options.module) |module| { { var it = module.failed_files.iterator(); while (it.next()) |entry| { if (entry.value_ptr.*) |msg| { - try AllErrors.add(module, &arena, &errors, msg.*); + try addModuleErrorMsg(&bundle, msg.*); } else { - // Must be ZIR errors. In order for ZIR errors to exist, the parsing - // must have completed successfully. - const tree = try entry.key_ptr.*.getTree(module.gpa); - assert(tree.errors.len == 0); - try AllErrors.addZir(arena_allocator, &errors, entry.key_ptr.*); + // Must be ZIR errors. Note that this may include AST errors. + // addZirErrorMessages asserts that the tree is loaded. + _ = try entry.key_ptr.*.getTree(gpa); + try addZirErrorMessages(&bundle, entry.key_ptr.*); } } } @@ -3024,7 +2611,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors { var it = module.failed_embed_files.iterator(); while (it.next()) |entry| { const msg = entry.value_ptr.*; - try AllErrors.add(module, &arena, &errors, msg.*); + try addModuleErrorMsg(&bundle, msg.*); } } { @@ -3034,23 +2621,20 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors { // Skip errors for Decls within files that had a parse failure. // We'll try again once parsing succeeds. 
if (decl.getFileScope().okToReportErrors()) { - try AllErrors.add(module, &arena, &errors, entry.value_ptr.*.*); + try addModuleErrorMsg(&bundle, entry.value_ptr.*.*); if (module.cimport_errors.get(entry.key_ptr.*)) |cimport_errors| for (cimport_errors) |c_error| { - if (c_error.path) |some| - try errors.append(.{ - .src = .{ - .src_path = try arena_allocator.dupe(u8, std.mem.span(some)), - .span = .{ .start = c_error.offset, .end = c_error.offset + 1, .main = c_error.offset }, - .msg = try arena_allocator.dupe(u8, std.mem.span(c_error.msg)), - .line = c_error.line, - .column = c_error.column, - .source_line = if (c_error.source_line) |line| try arena_allocator.dupe(u8, std.mem.span(line)) else null, - }, - }) - else - try errors.append(.{ - .plain = .{ .msg = try arena_allocator.dupe(u8, std.mem.span(c_error.msg)) }, - }); + try bundle.addRootErrorMessage(.{ + .msg = try bundle.addString(std.mem.span(c_error.msg)), + .src_loc = if (c_error.path) |some| try bundle.addSourceLocation(.{ + .src_path = try bundle.addString(std.mem.span(some)), + .span_start = c_error.offset, + .span_main = c_error.offset, + .span_end = c_error.offset + 1, + .line = c_error.line, + .column = c_error.column, + .source_line = if (c_error.source_line) |line| try bundle.addString(std.mem.span(line)) else 0, + }) else .none, + }); }; } } @@ -3062,45 +2646,39 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors { // Skip errors for Decls within files that had a parse failure. // We'll try again once parsing succeeds. if (decl.getFileScope().okToReportErrors()) { - try AllErrors.add(module, &arena, &errors, entry.value_ptr.*.*); + try addModuleErrorMsg(&bundle, entry.value_ptr.*.*); } } } for (module.failed_exports.values()) |value| { - try AllErrors.add(module, &arena, &errors, value.*); + try addModuleErrorMsg(&bundle, value.*); } } - if (errors.items.len == 0) { + if (bundle.root_list.items.len == 0) { if (self.link_error_flags.no_entry_point_found) { - try errors.append(.{ - .plain = .{ - .msg = try std.fmt.allocPrint(arena_allocator, "no entry point found", .{}), - }, + try bundle.addRootErrorMessage(.{ + .msg = try bundle.addString("no entry point found"), }); } } if (self.link_error_flags.missing_libc) { - const notes = try arena_allocator.create([2]AllErrors.Message); - notes.* = .{ - .{ .plain = .{ - .msg = try arena_allocator.dupe(u8, "run 'zig libc -h' to learn about libc installations"), - } }, - .{ .plain = .{ - .msg = try arena_allocator.dupe(u8, "run 'zig targets' to see the targets for which zig can always provide libc"), - } }, - }; - try errors.append(.{ - .plain = .{ - .msg = try std.fmt.allocPrint(arena_allocator, "libc not available", .{}), - .notes = notes, - }, + try bundle.addRootErrorMessage(.{ + .msg = try bundle.addString("libc not available"), + .notes_len = 2, }); + const notes_start = try bundle.reserveNotes(2); + bundle.extra.items[notes_start + 0] = @enumToInt(try bundle.addErrorMessage(.{ + .msg = try bundle.addString("run 'zig libc -h' to learn about libc installations"), + })); + bundle.extra.items[notes_start + 1] = @enumToInt(try bundle.addErrorMessage(.{ + .msg = try bundle.addString("run 'zig targets' to see the targets for which zig can always provide libc"), + })); } if (self.bin_file.options.module) |module| { - if (errors.items.len == 0 and module.compile_log_decls.count() != 0) { + if (bundle.root_list.items.len == 0 and module.compile_log_decls.count() != 0) { const keys = module.compile_log_decls.keys(); const values = module.compile_log_decls.values(); // First one 
will be the error; subsequent ones will be notes. @@ -3109,9 +2687,9 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors { const err_msg = Module.ErrorMsg{ .src_loc = src_loc, .msg = "found compile log statement", - .notes = try self.gpa.alloc(Module.ErrorMsg, module.compile_log_decls.count() - 1), + .notes = try gpa.alloc(Module.ErrorMsg, module.compile_log_decls.count() - 1), }; - defer self.gpa.free(err_msg.notes); + defer gpa.free(err_msg.notes); for (keys[1..], 0..) |key, i| { const note_decl = module.declPtr(key); @@ -3121,21 +2699,260 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors { }; } - try AllErrors.add(module, &arena, &errors, err_msg); + try addModuleErrorMsg(&bundle, err_msg); + } + } + + assert(self.totalErrorCount() == bundle.root_list.items.len); + + const compile_log_text = if (self.bin_file.options.module) |m| m.compile_log_text.items else ""; + return bundle.toOwnedBundle(compile_log_text); +} + +pub const ErrorNoteHashContext = struct { + eb: *const ErrorBundle.Wip, + + pub fn hash(ctx: ErrorNoteHashContext, key: ErrorBundle.ErrorMessage) u32 { + var hasher = std.hash.Wyhash.init(0); + const eb = ctx.eb.tmpBundle(); + + hasher.update(eb.nullTerminatedString(key.msg)); + if (key.src_loc != .none) { + const src = eb.getSourceLocation(key.src_loc); + hasher.update(eb.nullTerminatedString(src.src_path)); + std.hash.autoHash(&hasher, src.line); + std.hash.autoHash(&hasher, src.column); + std.hash.autoHash(&hasher, src.span_main); } + + return @truncate(u32, hasher.final()); } - assert(errors.items.len == self.totalErrorCount()); + pub fn eql( + ctx: ErrorNoteHashContext, + a: ErrorBundle.ErrorMessage, + b: ErrorBundle.ErrorMessage, + b_index: usize, + ) bool { + _ = b_index; + const eb = ctx.eb.tmpBundle(); + const msg_a = eb.nullTerminatedString(a.msg); + const msg_b = eb.nullTerminatedString(b.msg); + if (!std.mem.eql(u8, msg_a, msg_b)) return false; + + if (a.src_loc == .none and b.src_loc == .none) return true; + if (a.src_loc == .none or b.src_loc == .none) return false; + const src_a = eb.getSourceLocation(a.src_loc); + const src_b = eb.getSourceLocation(b.src_loc); + + const src_path_a = eb.nullTerminatedString(src_a.src_path); + const src_path_b = eb.nullTerminatedString(src_b.src_path); + + return std.mem.eql(u8, src_path_a, src_path_b) and + src_a.line == src_b.line and + src_a.column == src_b.column and + src_a.span_main == src_b.span_main; + } +}; - return AllErrors{ - .list = try arena_allocator.dupe(AllErrors.Message, errors.items), - .arena = arena.state, +pub fn addModuleErrorMsg(eb: *ErrorBundle.Wip, module_err_msg: Module.ErrorMsg) !void { + const gpa = eb.gpa; + const err_source = module_err_msg.src_loc.file_scope.getSource(gpa) catch |err| { + const file_path = try module_err_msg.src_loc.file_scope.fullPath(gpa); + defer gpa.free(file_path); + try eb.addRootErrorMessage(.{ + .msg = try eb.printString("unable to load '{s}': {s}", .{ + file_path, @errorName(err), + }), + }); + return; }; + const err_span = try module_err_msg.src_loc.span(gpa); + const err_loc = std.zig.findLineColumn(err_source.bytes, err_span.main); + const file_path = try module_err_msg.src_loc.file_scope.fullPath(gpa); + defer gpa.free(file_path); + + var ref_traces: std.ArrayListUnmanaged(ErrorBundle.ReferenceTrace) = .{}; + defer ref_traces.deinit(gpa); + + for (module_err_msg.reference_trace) |module_reference| { + if (module_reference.hidden != 0) { + try ref_traces.append(gpa, .{ + .decl_name = module_reference.hidden, + .src_loc = .none, + }); + break; + } else 
if (module_reference.decl == null) { + try ref_traces.append(gpa, .{ + .decl_name = 0, + .src_loc = .none, + }); + break; + } + const source = try module_reference.src_loc.file_scope.getSource(gpa); + const span = try module_reference.src_loc.span(gpa); + const loc = std.zig.findLineColumn(source.bytes, span.main); + const rt_file_path = try module_reference.src_loc.file_scope.fullPath(gpa); + defer gpa.free(rt_file_path); + try ref_traces.append(gpa, .{ + .decl_name = try eb.addString(std.mem.sliceTo(module_reference.decl.?, 0)), + .src_loc = try eb.addSourceLocation(.{ + .src_path = try eb.addString(rt_file_path), + .span_start = span.start, + .span_main = span.main, + .span_end = span.end, + .line = @intCast(u32, loc.line), + .column = @intCast(u32, loc.column), + .source_line = 0, + }), + }); + } + + const src_loc = try eb.addSourceLocation(.{ + .src_path = try eb.addString(file_path), + .span_start = err_span.start, + .span_main = err_span.main, + .span_end = err_span.end, + .line = @intCast(u32, err_loc.line), + .column = @intCast(u32, err_loc.column), + .source_line = if (module_err_msg.src_loc.lazy == .entire_file) + 0 + else + try eb.addString(err_loc.source_line), + .reference_trace_len = @intCast(u32, ref_traces.items.len), + }); + + for (ref_traces.items) |rt| { + try eb.addReferenceTrace(rt); + } + + // De-duplicate error notes. The main use case in mind for this is + // too many "note: called from here" notes when eval branch quota is reached. + var notes: std.ArrayHashMapUnmanaged(ErrorBundle.ErrorMessage, void, ErrorNoteHashContext, true) = .{}; + defer notes.deinit(gpa); + + for (module_err_msg.notes) |module_note| { + const source = try module_note.src_loc.file_scope.getSource(gpa); + const span = try module_note.src_loc.span(gpa); + const loc = std.zig.findLineColumn(source.bytes, span.main); + const note_file_path = try module_note.src_loc.file_scope.fullPath(gpa); + defer gpa.free(note_file_path); + + const gop = try notes.getOrPutContext(gpa, .{ + .msg = try eb.addString(module_note.msg), + .src_loc = try eb.addSourceLocation(.{ + .src_path = try eb.addString(note_file_path), + .span_start = span.start, + .span_main = span.main, + .span_end = span.end, + .line = @intCast(u32, loc.line), + .column = @intCast(u32, loc.column), + .source_line = if (err_loc.eql(loc)) 0 else try eb.addString(loc.source_line), + }), + }, .{ .eb = eb }); + if (gop.found_existing) { + gop.key_ptr.count += 1; + } + } + + const notes_len = @intCast(u32, notes.entries.len); + + try eb.addRootErrorMessage(.{ + .msg = try eb.addString(module_err_msg.msg), + .src_loc = src_loc, + .notes_len = notes_len, + }); + + const notes_start = try eb.reserveNotes(notes_len); + + for (notes_start.., notes.keys()) |i, note| { + eb.extra.items[i] = @enumToInt(try eb.addErrorMessage(note)); + } } -pub fn getCompileLogOutput(self: *Compilation) []const u8 { - const module = self.bin_file.options.module orelse return &[0]u8{}; - return module.compile_log_text.items; +pub fn addZirErrorMessages(eb: *ErrorBundle.Wip, file: *Module.File) !void { + assert(file.zir_loaded); + assert(file.tree_loaded); + assert(file.source_loaded); + const payload_index = file.zir.extra[@enumToInt(Zir.ExtraIndex.compile_errors)]; + assert(payload_index != 0); + const gpa = eb.gpa; + + const header = file.zir.extraData(Zir.Inst.CompileErrors, payload_index); + const items_len = header.data.items_len; + var extra_index = header.end; + for (0..items_len) |_| { + const item = file.zir.extraData(Zir.Inst.CompileErrors.Item, extra_index); + 
extra_index = item.end; + const err_span = blk: { + if (item.data.node != 0) { + break :blk Module.SrcLoc.nodeToSpan(&file.tree, item.data.node); + } + const token_starts = file.tree.tokens.items(.start); + const start = token_starts[item.data.token] + item.data.byte_offset; + const end = start + @intCast(u32, file.tree.tokenSlice(item.data.token).len) - item.data.byte_offset; + break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start }; + }; + const err_loc = std.zig.findLineColumn(file.source, err_span.main); + + { + const msg = file.zir.nullTerminatedString(item.data.msg); + const src_path = try file.fullPath(gpa); + defer gpa.free(src_path); + try eb.addRootErrorMessage(.{ + .msg = try eb.addString(msg), + .src_loc = try eb.addSourceLocation(.{ + .src_path = try eb.addString(src_path), + .span_start = err_span.start, + .span_main = err_span.main, + .span_end = err_span.end, + .line = @intCast(u32, err_loc.line), + .column = @intCast(u32, err_loc.column), + .source_line = try eb.addString(err_loc.source_line), + }), + .notes_len = item.data.notesLen(file.zir), + }); + } + + if (item.data.notes != 0) { + const notes_start = try eb.reserveNotes(item.data.notes); + const block = file.zir.extraData(Zir.Inst.Block, item.data.notes); + const body = file.zir.extra[block.end..][0..block.data.body_len]; + for (notes_start.., body) |note_i, body_elem| { + const note_item = file.zir.extraData(Zir.Inst.CompileErrors.Item, body_elem); + const msg = file.zir.nullTerminatedString(note_item.data.msg); + const span = blk: { + if (note_item.data.node != 0) { + break :blk Module.SrcLoc.nodeToSpan(&file.tree, note_item.data.node); + } + const token_starts = file.tree.tokens.items(.start); + const start = token_starts[note_item.data.token] + note_item.data.byte_offset; + const end = start + @intCast(u32, file.tree.tokenSlice(note_item.data.token).len) - item.data.byte_offset; + break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start }; + }; + const loc = std.zig.findLineColumn(file.source, span.main); + const src_path = try file.fullPath(gpa); + defer gpa.free(src_path); + + eb.extra.items[note_i] = @enumToInt(try eb.addErrorMessage(.{ + .msg = try eb.addString(msg), + .src_loc = try eb.addSourceLocation(.{ + .src_path = try eb.addString(src_path), + .span_start = span.start, + .span_main = span.main, + .span_end = span.end, + .line = @intCast(u32, loc.line), + .column = @intCast(u32, loc.column), + .source_line = if (loc.eql(err_loc)) + 0 + else + try eb.addString(loc.source_line), + }), + .notes_len = 0, // TODO rework this function to be recursive + })); + } + } + } } pub fn performAllTheWork( @@ -3231,11 +3048,11 @@ pub fn performAllTheWork( // backend, preventing anonymous Decls from being prematurely destroyed. 
while (true) { if (comp.work_queue.readItem()) |work_item| { - try processOneJob(comp, work_item); + try processOneJob(comp, work_item, main_progress_node); continue; } if (comp.anon_work_queue.readItem()) |work_item| { - try processOneJob(comp, work_item); + try processOneJob(comp, work_item, main_progress_node); continue; } break; @@ -3243,16 +3060,16 @@ pub fn performAllTheWork( if (comp.job_queued_compiler_rt_lib) { comp.job_queued_compiler_rt_lib = false; - buildCompilerRtOneShot(comp, .Lib, &comp.compiler_rt_lib); + buildCompilerRtOneShot(comp, .Lib, &comp.compiler_rt_lib, main_progress_node); } if (comp.job_queued_compiler_rt_obj) { comp.job_queued_compiler_rt_obj = false; - buildCompilerRtOneShot(comp, .Obj, &comp.compiler_rt_obj); + buildCompilerRtOneShot(comp, .Obj, &comp.compiler_rt_obj, main_progress_node); } } -fn processOneJob(comp: *Compilation, job: Job) !void { +fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !void { switch (job) { .codegen_decl => |decl_index| { const module = comp.bin_file.options.module.?; @@ -3404,7 +3221,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { const named_frame = tracy.namedFrame("glibc_crt_file"); defer named_frame.end(); - glibc.buildCRTFile(comp, crt_file) catch |err| { + glibc.buildCRTFile(comp, crt_file, prog_node) catch |err| { // TODO Surface more error details. comp.lockAndSetMiscFailure(.glibc_crt_file, "unable to build glibc CRT file: {s}", .{ @errorName(err), @@ -3415,7 +3232,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { const named_frame = tracy.namedFrame("glibc_shared_objects"); defer named_frame.end(); - glibc.buildSharedObjects(comp) catch |err| { + glibc.buildSharedObjects(comp, prog_node) catch |err| { // TODO Surface more error details. comp.lockAndSetMiscFailure( .glibc_shared_objects, @@ -3428,7 +3245,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { const named_frame = tracy.namedFrame("musl_crt_file"); defer named_frame.end(); - musl.buildCRTFile(comp, crt_file) catch |err| { + musl.buildCRTFile(comp, crt_file, prog_node) catch |err| { // TODO Surface more error details. comp.lockAndSetMiscFailure( .musl_crt_file, @@ -3441,7 +3258,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { const named_frame = tracy.namedFrame("mingw_crt_file"); defer named_frame.end(); - mingw.buildCRTFile(comp, crt_file) catch |err| { + mingw.buildCRTFile(comp, crt_file, prog_node) catch |err| { // TODO Surface more error details. comp.lockAndSetMiscFailure( .mingw_crt_file, @@ -3468,7 +3285,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { const named_frame = tracy.namedFrame("libunwind"); defer named_frame.end(); - libunwind.buildStaticLib(comp) catch |err| { + libunwind.buildStaticLib(comp, prog_node) catch |err| { // TODO Surface more error details. comp.lockAndSetMiscFailure( .libunwind, @@ -3481,7 +3298,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { const named_frame = tracy.namedFrame("libcxx"); defer named_frame.end(); - libcxx.buildLibCXX(comp) catch |err| { + libcxx.buildLibCXX(comp, prog_node) catch |err| { // TODO Surface more error details. comp.lockAndSetMiscFailure( .libcxx, @@ -3494,7 +3311,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { const named_frame = tracy.namedFrame("libcxxabi"); defer named_frame.end(); - libcxx.buildLibCXXABI(comp) catch |err| { + libcxx.buildLibCXXABI(comp, prog_node) catch |err| { // TODO Surface more error details. 
comp.lockAndSetMiscFailure( .libcxxabi, @@ -3507,7 +3324,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { const named_frame = tracy.namedFrame("libtsan"); defer named_frame.end(); - libtsan.buildTsan(comp) catch |err| { + libtsan.buildTsan(comp, prog_node) catch |err| { // TODO Surface more error details. comp.lockAndSetMiscFailure( .libtsan, @@ -3520,7 +3337,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { const named_frame = tracy.namedFrame("wasi_libc_crt_file"); defer named_frame.end(); - wasi_libc.buildCRTFile(comp, crt_file) catch |err| { + wasi_libc.buildCRTFile(comp, crt_file, prog_node) catch |err| { // TODO Surface more error details. comp.lockAndSetMiscFailure( .wasi_libc_crt_file, @@ -3538,6 +3355,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { .Lib, &comp.libssp_static_lib, .libssp, + prog_node, ) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, error.SubCompilationFailed => return, // error reported already @@ -3557,6 +3375,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void { .Lib, &comp.libc_static_lib, .zig_libc, + prog_node, ) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, error.SubCompilationFailed => return, // error reported already @@ -3897,8 +3716,15 @@ fn buildCompilerRtOneShot( comp: *Compilation, output_mode: std.builtin.OutputMode, out: *?CRTFile, + prog_node: *std.Progress.Node, ) void { - comp.buildOutputFromZig("compiler_rt.zig", output_mode, out, .compiler_rt) catch |err| switch (err) { + comp.buildOutputFromZig( + "compiler_rt.zig", + output_mode, + out, + .compiler_rt, + prog_node, + ) catch |err| switch (err) { error.SubCompilationFailed => return, // error reported already else => comp.lockAndSetMiscFailure( .compiler_rt, @@ -5230,7 +5056,8 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: Allocator) Alloca \\const std = @import("std"); \\/// Zig version. When writing code that supports multiple versions of Zig, prefer \\/// feature detection (i.e. with `@hasDecl` or `@hasField`) over version checks. - \\pub const zig_version = std.SemanticVersion.parse("{s}") catch unreachable; + \\pub const zig_version = std.SemanticVersion.parse(zig_version_string) catch unreachable; + \\pub const zig_version_string = "{s}"; \\pub const zig_backend = std.builtin.CompilerBackend.{}; \\ \\pub const output_mode = std.builtin.OutputMode.{}; @@ -5417,34 +5244,36 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: Allocator) Alloca return buffer.toOwnedSliceSentinel(0); } -pub fn updateSubCompilation(sub_compilation: *Compilation) !void { - try sub_compilation.update(); - - // Look for compilation errors in this sub_compilation - // TODO instead of logging these errors, handle them in the callsites - // of updateSubCompilation and attach them as sub-errors, properly - // surfacing the errors. You can see an example of this already - // done inside buildOutputFromZig. 
- var errors = try sub_compilation.getAllErrorsAlloc(); - defer errors.deinit(sub_compilation.gpa); - - if (errors.list.len != 0) { - for (errors.list) |full_err_msg| { - switch (full_err_msg) { - .src => |src| { - log.err("{s}:{d}:{d}: {s}", .{ - src.src_path, - src.line + 1, - src.column + 1, - src.msg, - }); - }, - .plain => |plain| { - log.err("{s}", .{plain.msg}); - }, - } - } - return error.BuildingLibCObjectFailed; +pub fn updateSubCompilation( + parent_comp: *Compilation, + sub_comp: *Compilation, + misc_task: MiscTask, + prog_node: *std.Progress.Node, +) !void { + { + var sub_node = prog_node.start(@tagName(misc_task), 0); + sub_node.activate(); + defer sub_node.end(); + + try sub_comp.update(prog_node); + } + + // Look for compilation errors in this sub compilation + const gpa = parent_comp.gpa; + var keep_errors = false; + var errors = try sub_comp.getAllErrorsAlloc(); + defer if (!keep_errors) errors.deinit(gpa); + + if (errors.errorMessageCount() > 0) { + try parent_comp.misc_failures.ensureUnusedCapacity(gpa, 1); + parent_comp.misc_failures.putAssumeCapacityNoClobber(misc_task, .{ + .msg = try std.fmt.allocPrint(gpa, "sub-compilation of {s} failed", .{ + @tagName(misc_task), + }), + .children = errors, + }); + keep_errors = true; + return error.SubCompilationFailed; } } @@ -5454,6 +5283,7 @@ fn buildOutputFromZig( output_mode: std.builtin.OutputMode, out: *?CRTFile, misc_task_tag: MiscTask, + prog_node: *std.Progress.Node, ) !void { const tracy_trace = trace(@src()); defer tracy_trace.end(); @@ -5520,23 +5350,7 @@ fn buildOutputFromZig( }); defer sub_compilation.destroy(); - try sub_compilation.update(); - // Look for compilation errors in this sub_compilation. - var keep_errors = false; - var errors = try sub_compilation.getAllErrorsAlloc(); - defer if (!keep_errors) errors.deinit(sub_compilation.gpa); - - if (errors.list.len != 0) { - try comp.misc_failures.ensureUnusedCapacity(comp.gpa, 1); - comp.misc_failures.putAssumeCapacityNoClobber(misc_task_tag, .{ - .msg = try std.fmt.allocPrint(comp.gpa, "sub-compilation of {s} failed", .{ - @tagName(misc_task_tag), - }), - .children = errors, - }); - keep_errors = true; - return error.SubCompilationFailed; - } + try comp.updateSubCompilation(sub_compilation, misc_task_tag, prog_node); assert(out.* == null); out.* = Compilation.CRTFile{ @@ -5551,6 +5365,8 @@ pub fn build_crt_file( comp: *Compilation, root_name: []const u8, output_mode: std.builtin.OutputMode, + misc_task_tag: MiscTask, + prog_node: *std.Progress.Node, c_source_files: []const Compilation.CSourceFile, ) !void { const tracy_trace = trace(@src()); @@ -5611,7 +5427,7 @@ pub fn build_crt_file( }); defer sub_compilation.destroy(); - try sub_compilation.updateSubCompilation(); + try comp.updateSubCompilation(sub_compilation, misc_task_tag, prog_node); try comp.crt_files.ensureUnusedCapacity(comp.gpa, 1); diff --git a/src/Module.zig b/src/Module.zig index 8c52176edd1e..c47e4fc234a1 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -3756,67 +3756,9 @@ pub fn astGenFile(mod: *Module, file: *File) !void { file.source_loaded = true; file.tree = try Ast.parse(gpa, source, .zig); - defer if (!file.tree_loaded) file.tree.deinit(gpa); - - if (file.tree.errors.len != 0) { - const parse_err = file.tree.errors[0]; - - var msg = std.ArrayList(u8).init(gpa); - defer msg.deinit(); - - const token_starts = file.tree.tokens.items(.start); - const token_tags = file.tree.tokens.items(.tag); - - const extra_offset = file.tree.errorOffset(parse_err); - try file.tree.renderError(parse_err, 
msg.writer()); - const err_msg = try gpa.create(ErrorMsg); - err_msg.* = .{ - .src_loc = .{ - .file_scope = file, - .parent_decl_node = 0, - .lazy = if (extra_offset == 0) .{ - .token_abs = parse_err.token, - } else .{ - .byte_abs = token_starts[parse_err.token] + extra_offset, - }, - }, - .msg = try msg.toOwnedSlice(), - }; - if (token_tags[parse_err.token + @boolToInt(parse_err.token_is_prev)] == .invalid) { - const bad_off = @intCast(u32, file.tree.tokenSlice(parse_err.token + @boolToInt(parse_err.token_is_prev)).len); - const byte_abs = token_starts[parse_err.token + @boolToInt(parse_err.token_is_prev)] + bad_off; - try mod.errNoteNonLazy(.{ - .file_scope = file, - .parent_decl_node = 0, - .lazy = .{ .byte_abs = byte_abs }, - }, err_msg, "invalid byte: '{'}'", .{std.zig.fmtEscapes(source[byte_abs..][0..1])}); - } - - for (file.tree.errors[1..]) |note| { - if (!note.is_note) break; - - try file.tree.renderError(note, msg.writer()); - err_msg.notes = try mod.gpa.realloc(err_msg.notes, err_msg.notes.len + 1); - err_msg.notes[err_msg.notes.len - 1] = .{ - .src_loc = .{ - .file_scope = file, - .parent_decl_node = 0, - .lazy = .{ .token_abs = note.token }, - }, - .msg = try msg.toOwnedSlice(), - }; - } - - { - comp.mutex.lock(); - defer comp.mutex.unlock(); - try mod.failed_files.putNoClobber(gpa, file, err_msg); - } - file.status = .parse_failure; - return error.AnalysisFail; - } file.tree_loaded = true; + // Any potential AST errors are converted to ZIR errors here. file.zir = try AstGen.generate(gpa, file.tree); file.zir_loaded = true; file.status = .success_zir; @@ -3925,6 +3867,9 @@ fn updateZirRefs(mod: *Module, file: *File, old_zir: Zir) !void { const gpa = mod.gpa; const new_zir = file.zir; + // The root decl will be null if the previous ZIR had AST errors. + const root_decl = file.root_decl.unwrap() orelse return; + // Maps from old ZIR to new ZIR, struct_decl, enum_decl, etc. Any instruction which // creates a namespace, gets mapped from old to new here. 
var inst_map: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{}; @@ -3942,7 +3887,6 @@ fn updateZirRefs(mod: *Module, file: *File, old_zir: Zir) !void { var decl_stack: ArrayListUnmanaged(Decl.Index) = .{}; defer decl_stack.deinit(gpa); - const root_decl = file.root_decl.unwrap().?; try decl_stack.append(gpa, root_decl); file.deleted_decls.clearRetainingCapacity(); diff --git a/src/Package.zig b/src/Package.zig index c238d3d567d7..d26daf5a0cc4 100644 --- a/src/Package.zig +++ b/src/Package.zig @@ -8,11 +8,11 @@ const Allocator = mem.Allocator; const assert = std.debug.assert; const log = std.log.scoped(.package); const main = @import("main.zig"); +const ThreadPool = std.Thread.Pool; +const WaitGroup = std.Thread.WaitGroup; const Compilation = @import("Compilation.zig"); const Module = @import("Module.zig"); -const ThreadPool = @import("ThreadPool.zig"); -const WaitGroup = @import("WaitGroup.zig"); const Cache = std.Build.Cache; const build_options = @import("build_options"); const Manifest = @import("Manifest.zig"); @@ -225,7 +225,7 @@ pub fn fetchAndAddDependencies( dependencies_source: *std.ArrayList(u8), build_roots_source: *std.ArrayList(u8), name_prefix: []const u8, - color: main.Color, + error_bundle: *std.zig.ErrorBundle.Wip, all_modules: *AllModules, ) !void { const max_bytes = 10 * 1024 * 1024; @@ -250,7 +250,7 @@ pub fn fetchAndAddDependencies( if (ast.errors.len > 0) { const file_path = try directory.join(arena, &.{Manifest.basename}); - try main.printErrsMsgToStdErr(gpa, arena, ast, file_path, color); + try main.putAstErrorsIntoBundle(gpa, ast, file_path, error_bundle); return error.PackageFetchFailed; } @@ -258,14 +258,9 @@ pub fn fetchAndAddDependencies( defer manifest.deinit(gpa); if (manifest.errors.len > 0) { - const ttyconf: std.debug.TTY.Config = switch (color) { - .auto => std.debug.detectTTYConfig(std.io.getStdErr()), - .on => .escape_codes, - .off => .no_color, - }; const file_path = try directory.join(arena, &.{Manifest.basename}); for (manifest.errors) |msg| { - Report.renderErrorMessage(ast, file_path, ttyconf, msg, &.{}); + try Report.addErrorMessage(ast, file_path, error_bundle, 0, msg); } return error.PackageFetchFailed; } @@ -273,8 +268,7 @@ pub fn fetchAndAddDependencies( const report: Report = .{ .ast = &ast, .directory = directory, - .color = color, - .arena = arena, + .error_bundle = error_bundle, }; var any_error = false; @@ -307,7 +301,7 @@ pub fn fetchAndAddDependencies( dependencies_source, build_roots_source, sub_prefix, - color, + error_bundle, all_modules, ); @@ -350,8 +344,7 @@ pub fn createFilePkg( const Report = struct { ast: *const std.zig.Ast, directory: Compilation.Directory, - color: main.Color, - arena: Allocator, + error_bundle: *std.zig.ErrorBundle.Wip, fn fail( report: Report, @@ -359,52 +352,46 @@ const Report = struct { comptime fmt_string: []const u8, fmt_args: anytype, ) error{ PackageFetchFailed, OutOfMemory } { - return failWithNotes(report, &.{}, tok, fmt_string, fmt_args); - } + const gpa = report.error_bundle.gpa; - fn failWithNotes( - report: Report, - notes: []const Compilation.AllErrors.Message, - tok: std.zig.Ast.TokenIndex, - comptime fmt_string: []const u8, - fmt_args: anytype, - ) error{ PackageFetchFailed, OutOfMemory } { - const ttyconf: std.debug.TTY.Config = switch (report.color) { - .auto => std.debug.detectTTYConfig(std.io.getStdErr()), - .on => .escape_codes, - .off => .no_color, - }; - const file_path = try report.directory.join(report.arena, &.{Manifest.basename}); - renderErrorMessage(report.ast.*, 
file_path, ttyconf, .{ + const file_path = try report.directory.join(gpa, &.{Manifest.basename}); + defer gpa.free(file_path); + + const msg = try std.fmt.allocPrint(gpa, fmt_string, fmt_args); + defer gpa.free(msg); + + try addErrorMessage(report.ast.*, file_path, report.error_bundle, 0, .{ .tok = tok, .off = 0, - .msg = try std.fmt.allocPrint(report.arena, fmt_string, fmt_args), - }, notes); + .msg = msg, + }); + return error.PackageFetchFailed; } - fn renderErrorMessage( + fn addErrorMessage( ast: std.zig.Ast, file_path: []const u8, - ttyconf: std.debug.TTY.Config, + eb: *std.zig.ErrorBundle.Wip, + notes_len: u32, msg: Manifest.ErrorMessage, - notes: []const Compilation.AllErrors.Message, - ) void { + ) error{OutOfMemory}!void { const token_starts = ast.tokens.items(.start); const start_loc = ast.tokenLocation(0, msg.tok); - Compilation.AllErrors.Message.renderToStdErr(.{ .src = .{ - .msg = msg.msg, - .src_path = file_path, - .line = @intCast(u32, start_loc.line), - .column = @intCast(u32, start_loc.column), - .span = .{ - .start = token_starts[msg.tok], - .end = @intCast(u32, token_starts[msg.tok] + ast.tokenSlice(msg.tok).len), - .main = token_starts[msg.tok] + msg.off, - }, - .source_line = ast.source[start_loc.line_start..start_loc.line_end], - .notes = notes, - } }, ttyconf); + + try eb.addRootErrorMessage(.{ + .msg = try eb.addString(msg.msg), + .src_loc = try eb.addSourceLocation(.{ + .src_path = try eb.addString(file_path), + .span_start = token_starts[msg.tok], + .span_end = @intCast(u32, token_starts[msg.tok] + ast.tokenSlice(msg.tok).len), + .span_main = token_starts[msg.tok] + msg.off, + .line = @intCast(u32, start_loc.line), + .column = @intCast(u32, start_loc.column), + .source_line = try eb.addString(ast.source[start_loc.line_start..start_loc.line_end]), + }), + .notes_len = notes_len, + }); } }; @@ -504,9 +491,7 @@ fn fetchAndUnpack( // by default, so the same logic applies for buffering the reader as for gzip. try unpackTarball(gpa, &req, tmp_directory.handle, std.compress.xz); } else { - return report.fail(dep.url_tok, "unknown file extension for path '{s}'", .{ - uri.path, - }); + return report.fail(dep.url_tok, "unknown file extension for path '{s}'", .{uri.path}); } // TODO: delete files not included in the package prior to computing the package hash. 
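Note on the recurring API shape in these hunks: every converted call site follows the same ErrorBundle.Wip sequence, where a root message declares its notes_len up front, reserveNotes carves out the note slots in the extra array, and addErrorMessage fills them in afterwards. The following is a minimal sketch assembled only from the calls that appear in this diff (the function name reportExample is illustrative, not part of the change), showing the reserve-then-fill pattern end to end:

const std = @import("std");
const ErrorBundle = std.zig.ErrorBundle;

// Sketch: build one root error with two plain notes, then render it,
// using only Wip calls that appear in the surrounding hunks.
fn reportExample(gpa: std.mem.Allocator) !void {
    var wip: ErrorBundle.Wip = undefined;
    try wip.init(gpa);
    defer wip.deinit();

    // The root message declares how many notes it owns.
    try wip.addRootErrorMessage(.{
        .msg = try wip.addString("libc not available"),
        .notes_len = 2,
    });

    // Reserve the note slots, then fill each slot with an ErrorMessage index.
    const notes_start = try wip.reserveNotes(2);
    wip.extra.items[notes_start + 0] = @enumToInt(try wip.addErrorMessage(.{
        .msg = try wip.addString("run 'zig libc -h' to learn about libc installations"),
    }));
    wip.extra.items[notes_start + 1] = @enumToInt(try wip.addErrorMessage(.{
        .msg = try wip.addString("run 'zig targets' to see the targets for which zig can always provide libc"),
    }));

    // Freeze the in-progress bundle; the argument is the compile log text ("" here).
    var bundle = try wip.toOwnedBundle("");
    defer bundle.deinit(gpa);
    bundle.renderToStdErr(.{ .ttyconf = .no_color });
}

This is the same shape used by addZirErrorMessages, the lld_errors loop, and the missing-hash note in fetchAndUnpack above; it replaces the intermediate AllErrors.Message arrays with indices into a single flat bundle.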
@@ -533,10 +518,21 @@ fn fetchAndUnpack( }); } } else { - const notes: [1]Compilation.AllErrors.Message = .{.{ .plain = .{ - .msg = try std.fmt.allocPrint(report.arena, "expected .hash = \"{s}\",", .{&actual_hex}), - } }}; - return report.failWithNotes(¬es, dep.url_tok, "url field is missing corresponding hash field", .{}); + const file_path = try report.directory.join(gpa, &.{Manifest.basename}); + defer gpa.free(file_path); + + const eb = report.error_bundle; + const notes_len = 1; + try Report.addErrorMessage(report.ast.*, file_path, eb, notes_len, .{ + .tok = dep.url_tok, + .off = 0, + .msg = "url field is missing corresponding hash field", + }); + const notes_start = try eb.reserveNotes(notes_len); + eb.extra.items[notes_start] = @enumToInt(try eb.addErrorMessage(.{ + .msg = try eb.printString("expected .hash = \"{s}\",", .{&actual_hex}), + })); + return error.PackageFetchFailed; } const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path}); diff --git a/src/Sema.zig b/src/Sema.zig index 8b6c269246e7..fc39fbb9fcf8 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -2211,29 +2211,27 @@ pub fn fail( fn failWithOwnedErrorMsg(sema: *Sema, err_msg: *Module.ErrorMsg) CompileError { @setCold(true); + const gpa = sema.gpa; if (crash_report.is_enabled and sema.mod.comp.debug_compile_errors) { if (err_msg.src_loc.lazy == .unneeded) return error.NeededSourceLocation; - var arena = std.heap.ArenaAllocator.init(sema.gpa); - errdefer arena.deinit(); - var errors = std.ArrayList(Compilation.AllErrors.Message).init(sema.gpa); - defer errors.deinit(); - - Compilation.AllErrors.add(sema.mod, &arena, &errors, err_msg.*) catch unreachable; - + var wip_errors: std.zig.ErrorBundle.Wip = undefined; + wip_errors.init(gpa) catch unreachable; + Compilation.addModuleErrorMsg(&wip_errors, err_msg.*) catch unreachable; std.debug.print("compile error during Sema:\n", .{}); - Compilation.AllErrors.Message.renderToStdErr(errors.items[0], .no_color); + var error_bundle = wip_errors.toOwnedBundle("") catch unreachable; + error_bundle.renderToStdErr(.{ .ttyconf = .no_color }); crash_report.compilerPanic("unexpected compile error occurred", null, null); } const mod = sema.mod; ref: { - errdefer err_msg.destroy(mod.gpa); + errdefer err_msg.destroy(gpa); if (err_msg.src_loc.lazy == .unneeded) { return error.NeededSourceLocation; } - try mod.failed_decls.ensureUnusedCapacity(mod.gpa, 1); - try mod.failed_files.ensureUnusedCapacity(mod.gpa, 1); + try mod.failed_decls.ensureUnusedCapacity(gpa, 1); + try mod.failed_files.ensureUnusedCapacity(gpa, 1); const max_references = blk: { if (sema.mod.comp.reference_trace) |num| break :blk num; @@ -2243,11 +2241,11 @@ fn failWithOwnedErrorMsg(sema: *Sema, err_msg: *Module.ErrorMsg) CompileError { }; var referenced_by = if (sema.func) |some| some.owner_decl else sema.owner_decl_index; - var reference_stack = std.ArrayList(Module.ErrorMsg.Trace).init(sema.gpa); + var reference_stack = std.ArrayList(Module.ErrorMsg.Trace).init(gpa); defer reference_stack.deinit(); // Avoid infinite loops. - var seen = std.AutoHashMap(Module.Decl.Index, void).init(sema.gpa); + var seen = std.AutoHashMap(Module.Decl.Index, void).init(gpa); defer seen.deinit(); var cur_reference_trace: u32 = 0; @@ -2288,7 +2286,7 @@ fn failWithOwnedErrorMsg(sema: *Sema, err_msg: *Module.ErrorMsg) CompileError { if (gop.found_existing) { // If there are multiple errors for the same Decl, prefer the first one added. 
sema.err = null; - err_msg.destroy(mod.gpa); + err_msg.destroy(gpa); } else { sema.err = err_msg; gop.value_ptr.* = err_msg; diff --git a/src/Zir.zig b/src/Zir.zig index 65e2f21cc9dc..001c4e81018a 100644 --- a/src/Zir.zig +++ b/src/Zir.zig @@ -3594,6 +3594,12 @@ pub const Inst = struct { /// 0 or a payload index of a `Block`, each is a payload /// index of another `Item`. notes: u32, + + pub fn notesLen(item: Item, zir: Zir) u32 { + if (item.notes == 0) return 0; + const block = zir.extraData(Block, item.notes); + return block.data.body_len; + } }; }; diff --git a/src/glibc.zig b/src/glibc.zig index 3021e7c7ba2d..b37398bffda5 100644 --- a/src/glibc.zig +++ b/src/glibc.zig @@ -161,7 +161,7 @@ pub const CRTFile = enum { libc_nonshared_a, }; -pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { +pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile, prog_node: *std.Progress.Node) !void { if (!build_options.have_llvm) { return error.ZigCompilerNotBuiltWithLLVMExtensions; } @@ -196,7 +196,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { "-DASSEMBLER", "-Wa,--noexecstack", }); - return comp.build_crt_file("crti", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("crti", .Obj, .@"glibc crti.o", prog_node, &.{ .{ .src_path = try start_asm_path(comp, arena, "crti.S"), .cache_exempt_flags = args.items, @@ -215,7 +215,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { "-DASSEMBLER", "-Wa,--noexecstack", }); - return comp.build_crt_file("crtn", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("crtn", .Obj, .@"glibc crtn.o", prog_node, &.{ .{ .src_path = try start_asm_path(comp, arena, "crtn.S"), .cache_exempt_flags = args.items, @@ -265,7 +265,9 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { .cache_exempt_flags = args.items, }; }; - return comp.build_crt_file("Scrt1", .Obj, &[_]Compilation.CSourceFile{ start_o, abi_note_o }); + return comp.build_crt_file("Scrt1", .Obj, .@"glibc Scrt1.o", prog_node, &.{ + start_o, abi_note_o, + }); }, .libc_nonshared_a => { const s = path.sep_str; @@ -366,7 +368,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { files_index += 1; } const files = files_buf[0..files_index]; - return comp.build_crt_file("c_nonshared", .Lib, files); + return comp.build_crt_file("c_nonshared", .Lib, .@"glibc libc_nonshared.a", prog_node, files); }, } } @@ -639,7 +641,7 @@ pub const BuiltSharedObjects = struct { const all_map_basename = "all.map"; -pub fn buildSharedObjects(comp: *Compilation) !void { +pub fn buildSharedObjects(comp: *Compilation, prog_node: *std.Progress.Node) !void { const tracy = trace(@src()); defer tracy.end(); @@ -1023,7 +1025,7 @@ pub fn buildSharedObjects(comp: *Compilation) !void { const asm_file_basename = std.fmt.bufPrint(&lib_name_buf, "{s}.s", .{lib.name}) catch unreachable; try o_directory.handle.writeFile(asm_file_basename, stubs_asm.items); - try buildSharedLib(comp, arena, comp.global_cache_directory, o_directory, asm_file_basename, lib); + try buildSharedLib(comp, arena, comp.global_cache_directory, o_directory, asm_file_basename, lib, prog_node); } man.writeManifest() catch |err| { @@ -1046,6 +1048,7 @@ fn buildSharedLib( bin_directory: Compilation.Directory, asm_file_basename: []const u8, lib: Lib, + prog_node: *std.Progress.Node, ) !void { const tracy = trace(@src()); defer tracy.end(); @@ -1105,7 +1108,7 @@ fn buildSharedLib( }); defer sub_compilation.destroy(); - try sub_compilation.updateSubCompilation(); + try 
comp.updateSubCompilation(sub_compilation, .@"glibc shared object", prog_node); } // Return true if glibc has crti/crtn sources for that architecture. diff --git a/src/libcxx.zig b/src/libcxx.zig index 7ca405cf1567..9c5dc9426fcb 100644 --- a/src/libcxx.zig +++ b/src/libcxx.zig @@ -96,7 +96,7 @@ const libcxx_files = [_][]const u8{ "src/verbose_abort.cpp", }; -pub fn buildLibCXX(comp: *Compilation) !void { +pub fn buildLibCXX(comp: *Compilation, prog_node: *std.Progress.Node) !void { if (!build_options.have_llvm) { return error.ZigCompilerNotBuiltWithLLVMExtensions; } @@ -258,7 +258,7 @@ pub fn buildLibCXX(comp: *Compilation) !void { }); defer sub_compilation.destroy(); - try sub_compilation.updateSubCompilation(); + try comp.updateSubCompilation(sub_compilation, .libcxx, prog_node); assert(comp.libcxx_static_lib == null); comp.libcxx_static_lib = Compilation.CRTFile{ @@ -269,7 +269,7 @@ pub fn buildLibCXX(comp: *Compilation) !void { }; } -pub fn buildLibCXXABI(comp: *Compilation) !void { +pub fn buildLibCXXABI(comp: *Compilation, prog_node: *std.Progress.Node) !void { if (!build_options.have_llvm) { return error.ZigCompilerNotBuiltWithLLVMExtensions; } @@ -418,7 +418,7 @@ pub fn buildLibCXXABI(comp: *Compilation) !void { }); defer sub_compilation.destroy(); - try sub_compilation.updateSubCompilation(); + try comp.updateSubCompilation(sub_compilation, .libcxxabi, prog_node); assert(comp.libcxxabi_static_lib == null); comp.libcxxabi_static_lib = Compilation.CRTFile{ diff --git a/src/libtsan.zig b/src/libtsan.zig index 16e40c16f82c..54bf00e4b630 100644 --- a/src/libtsan.zig +++ b/src/libtsan.zig @@ -5,7 +5,7 @@ const Compilation = @import("Compilation.zig"); const build_options = @import("build_options"); const trace = @import("tracy.zig").trace; -pub fn buildTsan(comp: *Compilation) !void { +pub fn buildTsan(comp: *Compilation, prog_node: *std.Progress.Node) !void { if (!build_options.have_llvm) { return error.ZigCompilerNotBuiltWithLLVMExtensions; } @@ -235,7 +235,7 @@ pub fn buildTsan(comp: *Compilation) !void { }); defer sub_compilation.destroy(); - try sub_compilation.updateSubCompilation(); + try comp.updateSubCompilation(sub_compilation, .libtsan, prog_node); assert(comp.tsan_static_lib == null); comp.tsan_static_lib = Compilation.CRTFile{ diff --git a/src/libunwind.zig b/src/libunwind.zig index a20b5e81f734..aefbfb457d6c 100644 --- a/src/libunwind.zig +++ b/src/libunwind.zig @@ -7,7 +7,7 @@ const Compilation = @import("Compilation.zig"); const build_options = @import("build_options"); const trace = @import("tracy.zig").trace; -pub fn buildStaticLib(comp: *Compilation) !void { +pub fn buildStaticLib(comp: *Compilation, prog_node: *std.Progress.Node) !void { if (!build_options.have_llvm) { return error.ZigCompilerNotBuiltWithLLVMExtensions; } @@ -130,7 +130,7 @@ pub fn buildStaticLib(comp: *Compilation) !void { }); defer sub_compilation.destroy(); - try sub_compilation.updateSubCompilation(); + try comp.updateSubCompilation(sub_compilation, .libunwind, prog_node); assert(comp.libunwind_static_lib == null); diff --git a/src/link.zig b/src/link.zig index 1ecbeadd7f92..e68f9c97d0a7 100644 --- a/src/link.zig +++ b/src/link.zig @@ -264,6 +264,8 @@ pub const File = struct { /// of this linking operation. lock: ?Cache.Lock = null, + child_pid: ?std.ChildProcess.Id = null, + /// Attempts incremental linking, if the file already exists. If /// incremental linking fails, falls back to truncating the file and /// rewriting it. 
A malicious file is detected as incremental link failure @@ -376,6 +378,26 @@ pub const File = struct { if (build_options.only_c) unreachable; if (base.file != null) return; const emit = base.options.emit orelse return; + if (base.child_pid) |pid| { + // If we try to open the output file in write mode while it is running, + // it will return ETXTBSY. So instead, we copy the file, atomically rename it + // over top of the exe path, and then proceed normally. This changes the inode, + // avoiding the error. + const tmp_sub_path = try std.fmt.allocPrint(base.allocator, "{s}-{x}", .{ + emit.sub_path, std.crypto.random.int(u32), + }); + try emit.directory.handle.copyFile(emit.sub_path, emit.directory.handle, tmp_sub_path, .{}); + try emit.directory.handle.rename(tmp_sub_path, emit.sub_path); + switch (builtin.os.tag) { + .linux => { + switch (std.os.errno(std.os.linux.ptrace(std.os.linux.PTRACE.ATTACH, pid, 0, 0, 0))) { + .SUCCESS => {}, + else => |errno| log.warn("ptrace failure: {s}", .{@tagName(errno)}), + } + }, + else => return error.HotSwapUnavailableOnHostOperatingSystem, + } + } base.file = try emit.directory.handle.createFile(emit.sub_path, .{ .truncate = false, .read = true, @@ -424,6 +446,18 @@ pub const File = struct { } f.close(); base.file = null; + + if (base.child_pid) |pid| { + switch (builtin.os.tag) { + .linux => { + switch (std.os.errno(std.os.linux.ptrace(std.os.linux.PTRACE.DETACH, pid, 0, 0, 0))) { + .SUCCESS => {}, + else => |errno| log.warn("ptrace failure: {s}", .{@tagName(errno)}), + } + }, + else => return error.HotSwapUnavailableOnHostOperatingSystem, + } + } }, .c, .spirv, .nvptx => {}, } @@ -462,6 +496,7 @@ pub const File = struct { NetNameDeleted, DeviceBusy, InvalidArgument, + HotSwapUnavailableOnHostOperatingSystem, }; /// Called from within the CodeGen to lower a local variable instantion as an unnamed @@ -1053,9 +1088,11 @@ pub const File = struct { log.warn("failed to save archive hash digest file: {s}", .{@errorName(err)}); }; - man.writeManifest() catch |err| { - log.warn("failed to write cache manifest when archiving: {s}", .{@errorName(err)}); - }; + if (man.have_exclusive_lock) { + man.writeManifest() catch |err| { + log.warn("failed to write cache manifest when archiving: {s}", .{@errorName(err)}); + }; + } base.lock = man.toOwnedLock(); } diff --git a/src/link/Elf.zig b/src/link/Elf.zig index a91722d0726a..f1ab98372e54 100644 --- a/src/link/Elf.zig +++ b/src/link/Elf.zig @@ -467,7 +467,7 @@ pub fn populateMissingMetadata(self: *Elf) !void { .p_paddr = entry_addr, .p_memsz = file_size, .p_align = p_align, - .p_flags = elf.PF_X | elf.PF_R, + .p_flags = elf.PF_X | elf.PF_R | elf.PF_W, }); self.entry_addr = null; self.phdr_table_dirty = true; @@ -493,7 +493,7 @@ pub fn populateMissingMetadata(self: *Elf) !void { .p_paddr = got_addr, .p_memsz = file_size, .p_align = p_align, - .p_flags = elf.PF_R, + .p_flags = elf.PF_R | elf.PF_W, }); self.phdr_table_dirty = true; } @@ -516,7 +516,7 @@ pub fn populateMissingMetadata(self: *Elf) !void { .p_paddr = rodata_addr, .p_memsz = file_size, .p_align = p_align, - .p_flags = elf.PF_R, + .p_flags = elf.PF_R | elf.PF_W, }); self.phdr_table_dirty = true; } @@ -2166,7 +2166,7 @@ fn allocateAtom(self: *Elf, atom_index: Atom.Index, new_block_size: u64, alignme // First we look for an appropriately sized free list node. // The list is unordered. We'll just take the first thing that works. 
const vaddr = blk: { - var i: usize = 0; + var i: usize = if (self.base.child_pid == null) 0 else free_list.items.len; while (i < free_list.items.len) { const big_atom_index = free_list.items[i]; const big_atom = self.getAtom(big_atom_index); @@ -2397,7 +2397,7 @@ fn updateDeclCode(self: *Elf, decl_index: Module.Decl.Index, code: []const u8, s const atom = self.getAtom(atom_index); const shdr_index = decl_metadata.shdr; - if (atom.getSymbol(self).st_size != 0) { + if (atom.getSymbol(self).st_size != 0 and self.base.child_pid == null) { const local_sym = atom.getSymbolPtr(self); local_sym.st_name = try self.shstrtab.insert(gpa, decl_name); local_sym.st_info = (elf.STB_LOCAL << 4) | stt_bits; @@ -2451,6 +2451,28 @@ fn updateDeclCode(self: *Elf, decl_index: Module.Decl.Index, code: []const u8, s const phdr_index = self.sections.items(.phdr_index)[shdr_index]; const section_offset = local_sym.st_value - self.program_headers.items[phdr_index].p_vaddr; const file_offset = self.sections.items(.shdr)[shdr_index].sh_offset + section_offset; + + if (self.base.child_pid) |pid| { + switch (builtin.os.tag) { + .linux => { + var code_vec: [1]std.os.iovec_const = .{.{ + .iov_base = code.ptr, + .iov_len = code.len, + }}; + var remote_vec: [1]std.os.iovec_const = .{.{ + .iov_base = @intToPtr([*]u8, @intCast(usize, local_sym.st_value)), + .iov_len = code.len, + }}; + const rc = std.os.linux.process_vm_writev(pid, &code_vec, &remote_vec, 0); + switch (std.os.errno(rc)) { + .SUCCESS => assert(rc == code.len), + else => |errno| log.warn("process_vm_writev failure: {s}", .{@tagName(errno)}), + } + }, + else => return error.HotSwapUnavailableOnHostOperatingSystem, + } + } + try self.base.file.?.pwriteAll(code, file_offset); return local_sym; @@ -2820,6 +2842,8 @@ fn writeOffsetTableEntry(self: *Elf, index: usize) !void { const endian = self.base.options.target.cpu.arch.endian(); const shdr = &self.sections.items(.shdr)[self.got_section_index.?]; const off = shdr.sh_offset + @as(u64, entry_size) * index; + const phdr = &self.program_headers.items[self.phdr_got_index.?]; + const vaddr = phdr.p_vaddr + @as(u64, entry_size) * index; switch (entry_size) { 2 => { var buf: [2]u8 = undefined; @@ -2835,6 +2859,27 @@ fn writeOffsetTableEntry(self: *Elf, index: usize) !void { var buf: [8]u8 = undefined; mem.writeInt(u64, &buf, self.offset_table.items[index], endian); try self.base.file.?.pwriteAll(&buf, off); + + if (self.base.child_pid) |pid| { + switch (builtin.os.tag) { + .linux => { + var local_vec: [1]std.os.iovec_const = .{.{ + .iov_base = &buf, + .iov_len = buf.len, + }}; + var remote_vec: [1]std.os.iovec_const = .{.{ + .iov_base = @intToPtr([*]u8, @intCast(usize, vaddr)), + .iov_len = buf.len, + }}; + const rc = std.os.linux.process_vm_writev(pid, &local_vec, &remote_vec, 0); + switch (std.os.errno(rc)) { + .SUCCESS => assert(rc == buf.len), + else => |errno| log.warn("process_vm_writev failure: {s}", .{@tagName(errno)}), + } + }, + else => return error.HotSwapUnavailableOnHostOperatingSystem, + } + } }, else => unreachable, } diff --git a/src/link/MachO/CodeSignature.zig b/src/link/MachO/CodeSignature.zig index 8bc00d9181a5..6d1cd7b53676 100644 --- a/src/link/MachO/CodeSignature.zig +++ b/src/link/MachO/CodeSignature.zig @@ -7,12 +7,12 @@ const log = std.log.scoped(.link); const macho = std.macho; const mem = std.mem; const testing = std.testing; +const ThreadPool = std.Thread.Pool; +const WaitGroup = std.Thread.WaitGroup; const Allocator = mem.Allocator; const Compilation = @import("../../Compilation.zig"); const 
Sha256 = std.crypto.hash.sha2.Sha256; -const ThreadPool = @import("../../ThreadPool.zig"); -const WaitGroup = @import("../../WaitGroup.zig"); const hash_size = Sha256.digest_length; diff --git a/src/main.zig b/src/main.zig index 95cfca1463cd..e7d5c647b56a 100644 --- a/src/main.zig +++ b/src/main.zig @@ -9,6 +9,8 @@ const Allocator = mem.Allocator; const ArrayList = std.ArrayList; const Ast = std.zig.Ast; const warn = std.log.warn; +const ThreadPool = std.Thread.Pool; +const cleanExit = std.process.cleanExit; const tracy = @import("tracy.zig"); const Compilation = @import("Compilation.zig"); @@ -22,8 +24,10 @@ const translate_c = @import("translate_c.zig"); const clang = @import("clang.zig"); const Cache = std.Build.Cache; const target_util = @import("target.zig"); -const ThreadPool = @import("ThreadPool.zig"); const crash_report = @import("crash_report.zig"); +const Module = @import("Module.zig"); +const AstGen = @import("AstGen.zig"); +const Server = std.zig.Server; pub const std_options = struct { pub const wasiCwd = wasi_cwd; @@ -361,7 +365,6 @@ const usage_build_generic = \\ \\General Options: \\ -h, --help Print this help and exit - \\ --watch Enable compiler REPL \\ --color [auto|off|on] Enable or disable colored error messages \\ -femit-bin[=path] (default) Output machine code \\ -fno-emit-bin Do not output machine code @@ -666,6 +669,16 @@ const ArgMode = union(enum) { run, }; +/// Avoid dragging networking into zig2.c because it adds dependencies on some +/// linker symbols that are annoying to satisfy while bootstrapping. +const Ip4Address = if (build_options.omit_pkg_fetching_code) void else std.net.Ip4Address; + +const Listen = union(enum) { + none, + ip4: Ip4Address, + stdio, +}; + fn buildOutputType( gpa: Allocator, arena: Allocator, @@ -686,7 +699,7 @@ fn buildOutputType( var formatted_panics: ?bool = null; var function_sections = false; var no_builtin = false; - var watch = false; + var listen: Listen = .none; var debug_compile_errors = false; var verbose_link = (builtin.os.tag != .wasi or builtin.link_libc) and std.process.hasEnvVarConstant("ZIG_VERBOSE_LINK"); var verbose_cc = (builtin.os.tag != .wasi or builtin.link_libc) and std.process.hasEnvVarConstant("ZIG_VERBOSE_CC"); @@ -1144,6 +1157,23 @@ fn buildOutputType( } else { try log_scopes.append(gpa, args_iter.nextOrFatal()); } + } else if (mem.eql(u8, arg, "--listen")) { + const next_arg = args_iter.nextOrFatal(); + if (mem.eql(u8, next_arg, "-")) { + listen = .stdio; + } else { + if (build_options.omit_pkg_fetching_code) unreachable; + // example: --listen 127.0.0.1:9000 + var it = std.mem.split(u8, next_arg, ":"); + const host = it.next().?; + const port_text = it.next() orelse "14735"; + const port = std.fmt.parseInt(u16, port_text, 10) catch |err| + fatal("invalid port number: '{s}': {s}", .{ port_text, @errorName(err) }); + listen = .{ .ip4 = std.net.Ip4Address.parse(host, port) catch |err| + fatal("invalid host: '{s}': {s}", .{ host, @errorName(err) }) }; + } + } else if (mem.eql(u8, arg, "--listen=-")) { + listen = .stdio; } else if (mem.eql(u8, arg, "--debug-link-snapshot")) { if (!build_options.enable_link_snapshots) { std.log.warn("Zig was compiled without linker snapshots enabled (-Dlink-snapshot). 
--debug-link-snapshot has no effect.", .{}); @@ -1172,8 +1202,6 @@ fn buildOutputType( test_evented_io = true; } else if (mem.eql(u8, arg, "--test-no-exec")) { test_no_exec = true; - } else if (mem.eql(u8, arg, "--watch")) { - watch = true; } else if (mem.eql(u8, arg, "-ftime-report")) { time_report = true; } else if (mem.eql(u8, arg, "-fstack-report")) { @@ -2999,7 +3027,7 @@ fn buildOutputType( defer zig_lib_directory.handle.close(); var thread_pool: ThreadPool = undefined; - try thread_pool.init(gpa); + try thread_pool.init(.{ .allocator = gpa }); defer thread_pool.deinit(); var libc_installation: ?LibCInstallation = null; @@ -3259,8 +3287,52 @@ fn buildOutputType( if (show_builtin) { return std.io.getStdOut().writeAll(try comp.generateBuiltinZigSource(arena)); } + switch (listen) { + .none => {}, + .stdio => { + if (build_options.only_c) unreachable; + try serve( + comp, + std.io.getStdIn(), + std.io.getStdOut(), + test_exec_args.items, + self_exe_path, + arg_mode, + all_args, + runtime_args_start, + ); + return cleanExit(); + }, + .ip4 => |ip4_addr| { + if (build_options.omit_pkg_fetching_code) unreachable; + + var server = std.net.StreamServer.init(.{ + .reuse_address = true, + }); + defer server.deinit(); + + try server.listen(.{ .in = ip4_addr }); + + while (true) { + const conn = try server.accept(); + defer conn.stream.close(); + + try serve( + comp, + .{ .handle = conn.stream.handle }, + .{ .handle = conn.stream.handle }, + test_exec_args.items, + self_exe_path, + arg_mode, + all_args, + runtime_args_start, + ); + } + }, + } + if (arg_mode == .translate_c) { - return cmdTranslateC(comp, arena, have_enable_cache); + return cmdTranslateC(comp, arena, null); } const hook: AfterUpdateHook = blk: { @@ -3276,7 +3348,7 @@ fn buildOutputType( }; updateModule(gpa, comp, hook) catch |err| switch (err) { - error.SemanticAnalyzeFail => if (!watch) process.exit(1), + error.SemanticAnalyzeFail => if (listen == .none) process.exit(1), else => |e| return e, }; if (build_options.only_c) return cleanExit(); @@ -3332,7 +3404,6 @@ fn buildOutputType( self_exe_path.?, arg_mode, target_info, - watch, &comp_destroyed, all_args, runtime_args_start, @@ -3340,109 +3411,215 @@ fn buildOutputType( ); } - const stdin = std.io.getStdIn().reader(); - const stderr = std.io.getStdErr().writer(); - var repl_buf: [1024]u8 = undefined; + // Skip resource deallocation in release builds; let the OS do it. 
+ return cleanExit(); +} + +fn serve( + comp: *Compilation, + in: fs.File, + out: fs.File, + test_exec_args: []const ?[]const u8, + self_exe_path: ?[]const u8, + arg_mode: ArgMode, + all_args: []const []const u8, + runtime_args_start: ?usize, +) !void { + const gpa = comp.gpa; - const ReplCmd = enum { - update, - help, - run, - update_and_run, + var server = try Server.init(.{ + .gpa = gpa, + .in = in, + .out = out, + .zig_version = build_options.version, + }); + defer server.deinit(); + + var child_pid: ?std.ChildProcess.Id = null; + + var progress: std.Progress = .{ + .terminal = null, + .root = .{ + .context = undefined, + .parent = null, + .name = "", + .unprotected_estimated_total_items = 0, + .unprotected_completed_items = 0, + }, + .columns_written = 0, + .prev_refresh_timestamp = 0, + .timer = null, + .done = false, }; + const main_progress_node = &progress.root; + main_progress_node.context = &progress; - var last_cmd: ReplCmd = .help; + while (true) { + const hdr = try server.receiveMessage(); - while (watch) { - try stderr.print("(zig) ", .{}); - try comp.makeBinFileExecutable(); - if (stdin.readUntilDelimiterOrEof(&repl_buf, '\n') catch |err| { - try stderr.print("\nUnable to parse command: {s}\n", .{@errorName(err)}); - continue; - }) |line| { - const actual_line = mem.trimRight(u8, line, "\r\n "); - const cmd: ReplCmd = blk: { - if (mem.eql(u8, actual_line, "update")) { - break :blk .update; - } else if (mem.eql(u8, actual_line, "exit")) { - break; - } else if (mem.eql(u8, actual_line, "help")) { - break :blk .help; - } else if (mem.eql(u8, actual_line, "run")) { - break :blk .run; - } else if (mem.eql(u8, actual_line, "update-and-run")) { - break :blk .update_and_run; - } else if (actual_line.len == 0) { - break :blk last_cmd; - } else { - try stderr.print("unknown command: {s}\n", .{actual_line}); + switch (hdr.tag) { + .exit => { + return cleanExit(); + }, + .update => { + assert(main_progress_node.recently_updated_child == null); + tracy.frameMark(); + + if (arg_mode == .translate_c) { + var arena_instance = std.heap.ArenaAllocator.init(gpa); + defer arena_instance.deinit(); + const arena = arena_instance.allocator(); + var output: TranslateCOutput = undefined; + try cmdTranslateC(comp, arena, &output); + try server.serveEmitBinPath(output.path, .{ + .flags = .{ .cache_hit = output.cache_hit }, + }); continue; } - }; - last_cmd = cmd; - switch (cmd) { - .update => { - tracy.frameMark(); - if (output_mode == .Exe) { - try comp.makeBinFileWritable(); + + if (comp.bin_file.options.output_mode == .Exe) { + try comp.makeBinFileWritable(); + } + + { + var reset: std.Thread.ResetEvent = .{}; + + var progress_thread = try std.Thread.spawn(.{}, progressThread, .{ + &progress, &server, &reset, + }); + defer { + reset.set(); + progress_thread.join(); } - updateModule(gpa, comp, hook) catch |err| switch (err) { - error.SemanticAnalyzeFail => continue, - else => |e| return e, - }; - }, - .help => { - try stderr.writeAll(repl_help); - }, - .run => { - tracy.frameMark(); - try runOrTest( - comp, - gpa, - arena, - test_exec_args.items, - self_exe_path.?, - arg_mode, - target_info, - watch, - &comp_destroyed, - all_args, - runtime_args_start, - link_libc, - ); - }, - .update_and_run => { - tracy.frameMark(); - if (output_mode == .Exe) { + + try comp.update(main_progress_node); + } + + try comp.makeBinFileExecutable(); + try serveUpdateResults(&server, comp); + }, + .run => { + if (child_pid != null) { + @panic("TODO block until the child exits"); + } + @panic("TODO call runOrTest"); + 
//try runOrTest( + // comp, + // gpa, + // arena, + // test_exec_args, + // self_exe_path.?, + // arg_mode, + // target_info, + // true, + // &comp_destroyed, + // all_args, + // runtime_args_start, + // link_libc, + //); + }, + .hot_update => { + tracy.frameMark(); + assert(main_progress_node.recently_updated_child == null); + if (child_pid) |pid| { + try comp.hotCodeSwap(main_progress_node, pid); + try serveUpdateResults(&server, comp); + } else { + if (comp.bin_file.options.output_mode == .Exe) { try comp.makeBinFileWritable(); } - updateModule(gpa, comp, hook) catch |err| switch (err) { - error.SemanticAnalyzeFail => continue, - else => |e| return e, - }; + try comp.update(main_progress_node); try comp.makeBinFileExecutable(); - try runOrTest( + try serveUpdateResults(&server, comp); + + child_pid = try runOrTestHotSwap( comp, gpa, - arena, - test_exec_args.items, + test_exec_args, self_exe_path.?, arg_mode, - target_info, - watch, - &comp_destroyed, all_args, runtime_args_start, - link_libc, ); - }, + } + }, + else => { + fatal("unrecognized message from client: 0x{x}", .{@enumToInt(hdr.tag)}); + }, + } + } +} + +fn progressThread(progress: *std.Progress, server: *const Server, reset: *std.Thread.ResetEvent) void { + while (true) { + if (reset.timedWait(500 * std.time.ns_per_ms)) |_| { + // The Compilation update has completed. + return; + } else |err| switch (err) { + error.Timeout => {}, + } + + var buf: std.BoundedArray(u8, 160) = .{}; + + { + progress.update_mutex.lock(); + defer progress.update_mutex.unlock(); + + var need_ellipse = false; + var maybe_node: ?*std.Progress.Node = &progress.root; + while (maybe_node) |node| { + if (need_ellipse) { + buf.appendSlice("... ") catch {}; + } + need_ellipse = false; + const eti = @atomicLoad(usize, &node.unprotected_estimated_total_items, .Monotonic); + const completed_items = @atomicLoad(usize, &node.unprotected_completed_items, .Monotonic); + const current_item = completed_items + 1; + if (node.name.len != 0 or eti > 0) { + if (node.name.len != 0) { + buf.appendSlice(node.name) catch {}; + need_ellipse = true; + } + if (eti > 0) { + if (need_ellipse) buf.appendSlice(" ") catch {}; + buf.writer().print("[{d}/{d}] ", .{ current_item, eti }) catch {}; + need_ellipse = false; + } else if (completed_items != 0) { + if (need_ellipse) buf.appendSlice(" ") catch {}; + buf.writer().print("[{d}] ", .{current_item}) catch {}; + need_ellipse = false; + } + } + maybe_node = @atomicLoad(?*std.Progress.Node, &node.recently_updated_child, .Acquire); } - } else { - break; } + + const progress_string = buf.slice(); + + server.serveMessage(.{ + .tag = .progress, + .bytes_len = @intCast(u32, progress_string.len), + }, &.{ + progress_string, + }) catch |err| { + fatal("unable to write to client: {s}", .{@errorName(err)}); + }; + } +} + +fn serveUpdateResults(s: *Server, comp: *Compilation) !void { + const gpa = comp.gpa; + var error_bundle = try comp.getAllErrorsAlloc(); + defer error_bundle.deinit(gpa); + if (error_bundle.errorMessageCount() > 0) { + try s.serveErrorBundle(error_bundle); + } else if (comp.bin_file.options.emit) |emit| { + const full_path = try emit.directory.join(gpa, &.{emit.sub_path}); + defer gpa.free(full_path); + try s.serveEmitBinPath(full_path, .{ + .flags = .{ .cache_hit = comp.last_update_was_cache_hit }, + }); } - // Skip resource deallocation in release builds; let the OS do it. 
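progressThread above wakes every 500 ms until the main thread sets the ResetEvent, then exits; the update code spawns it and tears it down with set() plus join(). A condensed sketch of that spawn/signal/join pattern, with the actual message encoding elided:

const std = @import("std");

fn progressWorker(reset: *std.Thread.ResetEvent) void {
    while (true) {
        // Wake every 500 ms, or immediately once the main thread calls set().
        if (reset.timedWait(500 * std.time.ns_per_ms)) |_| {
            return; // the compilation update finished
        } else |err| switch (err) {
            error.Timeout => {},
        }
        // ... snapshot std.Progress state and send a .progress message here ...
    }
}

pub fn main() !void {
    var reset: std.Thread.ResetEvent = .{};
    var worker = try std.Thread.spawn(.{}, progressWorker, .{&reset});
    defer {
        reset.set();
        worker.join();
    }
    // Stand-in for the long-running update on the main thread.
    std.time.sleep(50 * std.time.ns_per_ms);
}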
- return cleanExit(); } const ModuleDepIterator = struct { @@ -3530,7 +3707,6 @@ fn runOrTest( self_exe_path: []const u8, arg_mode: ArgMode, target_info: std.zig.system.NativeTargetInfo, - watch: bool, comp_destroyed: *bool, all_args: []const []const u8, runtime_args_start: ?usize, @@ -3561,7 +3737,7 @@ fn runOrTest( // We do not execve for tests because if the test fails we want to print // the error message and invocation below. - if (std.process.can_execv and arg_mode == .run and !watch) { + if (std.process.can_execv and arg_mode == .run) { // execv releases the locks; no need to destroy the Compilation here. const err = std.process.execve(gpa, argv.items, &env_map); try warnAboutForeignBinaries(arena, arg_mode, target_info, link_libc); @@ -3574,12 +3750,10 @@ fn runOrTest( child.stdout_behavior = .Inherit; child.stderr_behavior = .Inherit; - if (!watch) { - // Here we release all the locks associated with the Compilation so - // that whatever this child process wants to do won't deadlock. - comp.destroy(); - comp_destroyed.* = true; - } + // Here we release all the locks associated with the Compilation so + // that whatever this child process wants to do won't deadlock. + comp.destroy(); + comp_destroyed.* = true; const term = child.spawnAndWait() catch |err| { try warnAboutForeignBinaries(arena, arg_mode, target_info, link_libc); @@ -3591,19 +3765,13 @@ fn runOrTest( switch (term) { .Exited => |code| { if (code == 0) { - if (!watch) return cleanExit(); - } else if (watch) { - warn("process exited with code {d}", .{code}); + return cleanExit(); } else { process.exit(code); } }, else => { - if (watch) { - warn("process aborted abnormally", .{}); - } else { - process.exit(1); - } + process.exit(1); }, } }, @@ -3611,7 +3779,7 @@ fn runOrTest( switch (term) { .Exited => |code| { if (code == 0) { - if (!watch) return cleanExit(); + return cleanExit(); } else { const cmd = try std.mem.join(arena, " ", argv.items); fatal("the following test command failed with exit code {d}:\n{s}", .{ code, cmd }); @@ -3631,6 +3799,62 @@ fn runOrTest( } } +fn runOrTestHotSwap( + comp: *Compilation, + gpa: Allocator, + test_exec_args: []const ?[]const u8, + self_exe_path: []const u8, + arg_mode: ArgMode, + all_args: []const []const u8, + runtime_args_start: ?usize, +) !std.ChildProcess.Id { + const exe_emit = comp.bin_file.options.emit.?; + // A naive `directory.join` here will indeed get the correct path to the binary, + // however, in the case of cwd, we actually want `./foo` so that the path can be executed. 
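The comment above motivates the `orelse "."` fallback in the join below: going through "." keeps the result explicitly relative ("./foo" rather than "foo"), so spawning the freshly built binary resolves it in the current directory instead of searching PATH. A small illustration with hypothetical values:

const std = @import("std");

test "cwd-relative exe path" {
    const gpa = std.testing.allocator;
    const exe_path = try std.fs.path.join(gpa, &.{ ".", "foo" });
    defer gpa.free(exe_path);
    try std.testing.expectEqualStrings("." ++ std.fs.path.sep_str ++ "foo", exe_path);
}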
+ const exe_path = try fs.path.join(gpa, &[_][]const u8{ + exe_emit.directory.path orelse ".", exe_emit.sub_path, + }); + defer gpa.free(exe_path); + + var argv = std.ArrayList([]const u8).init(gpa); + defer argv.deinit(); + + if (test_exec_args.len == 0) { + // when testing pass the zig_exe_path to argv + if (arg_mode == .zig_test) + try argv.appendSlice(&[_][]const u8{ + exe_path, self_exe_path, + }) + // when running just pass the current exe + else + try argv.appendSlice(&[_][]const u8{ + exe_path, + }); + } else { + for (test_exec_args) |arg| { + if (arg) |a| { + try argv.append(a); + } else { + try argv.appendSlice(&[_][]const u8{ + exe_path, self_exe_path, + }); + } + } + } + if (runtime_args_start) |i| { + try argv.appendSlice(all_args[i..]); + } + var child = std.ChildProcess.init(argv.items, gpa); + + child.stdin_behavior = .Inherit; + child.stdout_behavior = .Inherit; + child.stderr_behavior = .Inherit; + + try child.spawn(); + + return child.id; +} + const AfterUpdateHook = union(enum) { none, print_emit_bin_dir_path, @@ -3638,24 +3862,30 @@ const AfterUpdateHook = union(enum) { }; fn updateModule(gpa: Allocator, comp: *Compilation, hook: AfterUpdateHook) !void { - try comp.update(); + { + // If the terminal is dumb, we dont want to show the user all the output. + var progress: std.Progress = .{ .dont_print_on_dumb = true }; + const main_progress_node = progress.start("", 0); + defer main_progress_node.end(); + switch (comp.color) { + .off => { + progress.terminal = null; + }, + .on => { + progress.terminal = std.io.getStdErr(); + progress.supports_ansi_escape_codes = true; + }, + .auto => {}, + } + + try comp.update(main_progress_node); + } var errors = try comp.getAllErrorsAlloc(); defer errors.deinit(comp.gpa); - if (errors.list.len != 0) { - const ttyconf: std.debug.TTY.Config = switch (comp.color) { - .auto => std.debug.detectTTYConfig(std.io.getStdErr()), - .on => .escape_codes, - .off => .no_color, - }; - for (errors.list) |full_err_msg| { - full_err_msg.renderToStdErr(ttyconf); - } - const log_text = comp.getCompileLogOutput(); - if (log_text.len != 0) { - std.debug.print("\nCompile Log Output:\n{s}", .{log_text}); - } + if (errors.errorMessageCount() > 0) { + errors.renderToStdErr(renderOptions(comp.color)); return error.SemanticAnalyzeFail; } else switch (hook) { .none => {}, @@ -3697,7 +3927,12 @@ fn updateModule(gpa: Allocator, comp: *Compilation, hook: AfterUpdateHook) !void } } -fn cmdTranslateC(comp: *Compilation, arena: Allocator, enable_cache: bool) !void { +const TranslateCOutput = struct { + path: []const u8, + cache_hit: bool, +}; + +fn cmdTranslateC(comp: *Compilation, arena: Allocator, fancy_output: ?*TranslateCOutput) !void { if (!build_options.have_llvm) fatal("cannot translate-c: compiler built without LLVM extensions", .{}); @@ -3708,14 +3943,16 @@ fn cmdTranslateC(comp: *Compilation, arena: Allocator, enable_cache: bool) !void var man: Cache.Manifest = comp.obtainCObjectCacheManifest(); man.want_shared_lock = false; - defer if (enable_cache) man.deinit(); + defer man.deinit(); man.hash.add(@as(u16, 0xb945)); // Random number to distinguish translate-c from compiling C objects Compilation.cache_helpers.hashCSource(&man, c_source_file) catch |err| { fatal("unable to process '{s}': {s}", .{ c_source_file.src_path, @errorName(err) }); }; + if (fancy_output) |p| p.cache_hit = true; const digest = if (try man.hit()) man.final() else digest: { + if (fancy_output) |p| p.cache_hit = false; var argv = std.ArrayList([]const u8).init(arena); try argv.append(""); 
// argv[0] is program name, actual args start at [1] @@ -3766,6 +4003,7 @@ fn cmdTranslateC(comp: *Compilation, arena: Allocator, enable_cache: bool) !void error.OutOfMemory => return error.OutOfMemory, error.ASTUnitFailure => fatal("clang API returned errors but due to a clang bug, it is not exposing the errors for zig to see. For more details: https://github.com/ziglang/zig/issues/4455", .{}), error.SemanticAnalyzeFail => { + // TODO convert these to zig errors for (clang_errors) |clang_err| { std.debug.print("{s}:{d}:{d}: {s}\n", .{ if (clang_err.filename_ptr) |p| p[0..clang_err.filename_len] else "(no file)", @@ -3810,12 +4048,11 @@ fn cmdTranslateC(comp: *Compilation, arena: Allocator, enable_cache: bool) !void break :digest digest; }; - if (enable_cache) { + if (fancy_output) |p| { const full_zig_path = try comp.local_cache_directory.join(arena, &[_][]const u8{ "o", &digest, translated_zig_basename, }); - try io.getStdOut().writer().print("{s}\n", .{full_zig_path}); - return cleanExit(); + p.path = full_zig_path; } else { const out_zig_path = try fs.path.join(arena, &[_][]const u8{ "o", &digest, translated_zig_basename }); const zig_file = comp.local_cache_directory.handle.openFile(out_zig_path, .{}) catch |err| { @@ -4009,6 +4246,8 @@ pub const usage_build = \\Options: \\ -freference-trace[=num] How many lines of reference trace should be shown per compile error \\ -fno-reference-trace Disable reference trace + \\ -fsummary Print the build summary, even on success + \\ -fno-summary Omit the build summary, even on failure \\ --build-file [file] Override path to build.zig \\ --cache-dir [path] Override path to local Zig cache directory \\ --global-cache-dir [path] Override path to global Zig cache directory @@ -4021,7 +4260,6 @@ pub const usage_build = pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { var color: Color = .auto; - var prominent_compile_errors: bool = false; // We want to release all the locks before executing the child process, so we make a nice // big block here to ensure the cleanup gets run when we extract out our argv. @@ -4082,8 +4320,6 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi i += 1; override_global_cache_dir = args[i]; continue; - } else if (mem.eql(u8, arg, "--prominent-compile-errors")) { - prominent_compile_errors = true; } else if (mem.eql(u8, arg, "-freference-trace")) { try child_argv.append(arg); reference_trace = 256; @@ -4201,7 +4437,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi .basename = exe_basename, }; var thread_pool: ThreadPool = undefined; - try thread_pool.init(gpa); + try thread_pool.init(.{ .allocator = gpa }); defer thread_pool.deinit(); var cleanup_build_runner_dir: ?fs.Dir = null; @@ -4251,9 +4487,13 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi var all_modules: Package.AllModules = .{}; defer all_modules.deinit(gpa); + var wip_errors: std.zig.ErrorBundle.Wip = undefined; + try wip_errors.init(gpa); + defer wip_errors.deinit(); + // Here we borrow main package's table and will replace it with a fresh // one after this process completes. 
- build_pkg.fetchAndAddDependencies( + const fetch_result = build_pkg.fetchAndAddDependencies( &main_pkg, arena, &thread_pool, @@ -4264,12 +4504,16 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi &dependencies_source, &build_roots_source, "", - color, + &wip_errors, &all_modules, - ) catch |err| switch (err) { - error.PackageFetchFailed => process.exit(1), - else => |e| return e, - }; + ); + if (wip_errors.root_list.items.len > 0) { + var errors = try wip_errors.toOwnedBundle(""); + defer errors.deinit(gpa); + errors.renderToStdErr(renderOptions(color)); + process.exit(1); + } + try fetch_result; try dependencies_source.appendSlice("};\npub const build_root = struct {\n"); try dependencies_source.appendSlice(build_roots_source.items); @@ -4312,7 +4556,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi defer comp.destroy(); updateModule(gpa, comp, .none) catch |err| switch (err) { - error.SemanticAnalyzeFail => process.exit(1), + error.SemanticAnalyzeFail => process.exit(2), else => |e| return e, }; try comp.makeBinFileExecutable(); @@ -4336,13 +4580,13 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi switch (term) { .Exited => |code| { if (code == 0) return cleanExit(); + // Indicates that the build runner has reported compile errors + // and this parent process does not need to report any further + // diagnostics. + if (code == 2) process.exit(2); - if (prominent_compile_errors) { - fatal("the build command failed with exit code {d}", .{code}); - } else { - const cmd = try std.mem.join(arena, " ", child_argv); - fatal("the following build command failed with exit code {d}:\n{s}", .{ code, cmd }); - } + const cmd = try std.mem.join(arena, " ", child_argv); + fatal("the following build command failed with exit code {d}:\n{s}", .{ code, cmd }); }, else => { const cmd = try std.mem.join(arena, " ", child_argv); @@ -4356,7 +4600,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi } fn readSourceFileToEndAlloc( - allocator: mem.Allocator, + allocator: Allocator, input: *const fs.File, size_hint: ?usize, ) ![:0]u8 { @@ -4500,12 +4744,7 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void }; defer tree.deinit(gpa); - try printErrsMsgToStdErr(gpa, arena, tree, "", color); - var has_ast_error = false; if (check_ast_flag) { - const Module = @import("Module.zig"); - const AstGen = @import("AstGen.zig"); - var file: Module.File = .{ .status = .never_loaded, .source_loaded = true, @@ -4528,25 +4767,18 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void defer file.zir.deinit(gpa); if (file.zir.hasCompileErrors()) { - var arena_instance = std.heap.ArenaAllocator.init(gpa); - defer arena_instance.deinit(); - var errors = std.ArrayList(Compilation.AllErrors.Message).init(gpa); - defer errors.deinit(); - - try Compilation.AllErrors.addZir(arena_instance.allocator(), &errors, &file); - const ttyconf: std.debug.TTY.Config = switch (color) { - .auto => std.debug.detectTTYConfig(std.io.getStdErr()), - .on => .escape_codes, - .off => .no_color, - }; - for (errors.items) |full_err_msg| { - full_err_msg.renderToStdErr(ttyconf); - } - has_ast_error = true; + var wip_errors: std.zig.ErrorBundle.Wip = undefined; + try wip_errors.init(gpa); + defer wip_errors.deinit(); + try Compilation.addZirErrorMessages(&wip_errors, &file); + var error_bundle = try wip_errors.toOwnedBundle(""); + defer error_bundle.deinit(gpa); + 
error_bundle.renderToStdErr(renderOptions(color)); + process.exit(2); } - } - if (tree.errors.len != 0 or has_ast_error) { - process.exit(1); + } else if (tree.errors.len != 0) { + try printAstErrorsToStderr(gpa, tree, "", color); + process.exit(2); } const formatted = try tree.render(gpa); defer gpa.free(formatted); @@ -4688,12 +4920,13 @@ fn fmtPathFile( if (stat.kind == .Directory) return error.IsDir; + const gpa = fmt.gpa; const source_code = try readSourceFileToEndAlloc( - fmt.gpa, + gpa, &source_file, std.math.cast(usize, stat.size) orelse return error.FileTooBig, ); - defer fmt.gpa.free(source_code); + defer gpa.free(source_code); source_file.close(); file_closed = true; @@ -4701,19 +4934,16 @@ fn fmtPathFile( // Add to set after no longer possible to get error.IsDir. if (try fmt.seen.fetchPut(stat.inode, {})) |_| return; - var tree = try Ast.parse(fmt.gpa, source_code, .zig); - defer tree.deinit(fmt.gpa); + var tree = try Ast.parse(gpa, source_code, .zig); + defer tree.deinit(gpa); - try printErrsMsgToStdErr(fmt.gpa, fmt.arena, tree, file_path, fmt.color); if (tree.errors.len != 0) { + try printAstErrorsToStderr(gpa, tree, file_path, fmt.color); fmt.any_error = true; return; } if (fmt.check_ast) { - const Module = @import("Module.zig"); - const AstGen = @import("AstGen.zig"); - var file: Module.File = .{ .status = .never_loaded, .source_loaded = true, @@ -4732,31 +4962,24 @@ fn fmtPathFile( .root_decl = .none, }; - file.pkg = try Package.create(fmt.gpa, null, file.sub_file_path); - defer file.pkg.destroy(fmt.gpa); + file.pkg = try Package.create(gpa, null, file.sub_file_path); + defer file.pkg.destroy(gpa); if (stat.size > max_src_size) return error.FileTooBig; - file.zir = try AstGen.generate(fmt.gpa, file.tree); + file.zir = try AstGen.generate(gpa, file.tree); file.zir_loaded = true; - defer file.zir.deinit(fmt.gpa); + defer file.zir.deinit(gpa); if (file.zir.hasCompileErrors()) { - var arena_instance = std.heap.ArenaAllocator.init(fmt.gpa); - defer arena_instance.deinit(); - var errors = std.ArrayList(Compilation.AllErrors.Message).init(fmt.gpa); - defer errors.deinit(); - - try Compilation.AllErrors.addZir(arena_instance.allocator(), &errors, &file); - const ttyconf: std.debug.TTY.Config = switch (fmt.color) { - .auto => std.debug.detectTTYConfig(std.io.getStdErr()), - .on => .escape_codes, - .off => .no_color, - }; - for (errors.items) |full_err_msg| { - full_err_msg.renderToStdErr(ttyconf); - } + var wip_errors: std.zig.ErrorBundle.Wip = undefined; + try wip_errors.init(gpa); + defer wip_errors.deinit(); + try Compilation.addZirErrorMessages(&wip_errors, &file); + var error_bundle = try wip_errors.toOwnedBundle(""); + defer error_bundle.deinit(gpa); + error_bundle.renderToStdErr(renderOptions(fmt.color)); fmt.any_error = true; } } @@ -4784,100 +5007,50 @@ fn fmtPathFile( } } -pub fn printErrsMsgToStdErr( - gpa: mem.Allocator, - arena: mem.Allocator, +fn printAstErrorsToStderr(gpa: Allocator, tree: Ast, path: []const u8, color: Color) !void { + var wip_errors: std.zig.ErrorBundle.Wip = undefined; + try wip_errors.init(gpa); + defer wip_errors.deinit(); + + try putAstErrorsIntoBundle(gpa, tree, path, &wip_errors); + + var error_bundle = try wip_errors.toOwnedBundle(""); + defer error_bundle.deinit(gpa); + error_bundle.renderToStdErr(renderOptions(color)); +} + +pub fn putAstErrorsIntoBundle( + gpa: Allocator, tree: Ast, path: []const u8, - color: Color, + wip_errors: *std.zig.ErrorBundle.Wip, ) !void { - const parse_errors: []const Ast.Error = tree.errors; - var i: usize = 0; 
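The hand-rolled rendering removed here is replaced by the same Wip-to-bundle-to-render sequence now used in cmdFmt, fmtPathFile, cmdAstCheck, and cmdChangelist. A minimal sketch of that lifecycle in one place, assuming it sits in main.zig beside the renderOptions helper added at the end of this patch:

fn renderCollectedErrors(gpa: Allocator, color: Color) !void {
    var wip_errors: std.zig.ErrorBundle.Wip = undefined;
    try wip_errors.init(gpa);
    defer wip_errors.deinit();

    // ... add messages here, e.g. `try Compilation.addZirErrorMessages(&wip_errors, &file);` ...

    var error_bundle = try wip_errors.toOwnedBundle("");
    defer error_bundle.deinit(gpa);
    if (error_bundle.errorMessageCount() > 0) {
        error_bundle.renderToStdErr(renderOptions(color));
    }
}

Keeping the messages in one ErrorBundle also lets serveUpdateResults ship the same data to a client unchanged instead of printing it.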
- while (i < parse_errors.len) : (i += 1) { - const parse_error = parse_errors[i]; - const lok_token = parse_error.token; - const token_tags = tree.tokens.items(.tag); - const start_loc = tree.tokenLocation(0, lok_token); - const source_line = tree.source[start_loc.line_start..start_loc.line_end]; - - var text_buf = std.ArrayList(u8).init(gpa); - defer text_buf.deinit(); - const writer = text_buf.writer(); - try tree.renderError(parse_error, writer); - const text = try arena.dupe(u8, text_buf.items); - - var notes_buffer: [2]Compilation.AllErrors.Message = undefined; - var notes_len: usize = 0; - - if (token_tags[parse_error.token + @boolToInt(parse_error.token_is_prev)] == .invalid) { - const bad_off = @intCast(u32, tree.tokenSlice(parse_error.token + @boolToInt(parse_error.token_is_prev)).len); - const byte_offset = @intCast(u32, start_loc.line_start) + @intCast(u32, start_loc.column) + bad_off; - notes_buffer[notes_len] = .{ - .src = .{ - .src_path = path, - .msg = try std.fmt.allocPrint(arena, "invalid byte: '{'}'", .{ - std.zig.fmtEscapes(tree.source[byte_offset..][0..1]), - }), - .span = .{ .start = byte_offset, .end = byte_offset + 1, .main = byte_offset }, - .line = @intCast(u32, start_loc.line), - .column = @intCast(u32, start_loc.column) + bad_off, - .source_line = source_line, - }, - }; - notes_len += 1; - } - - for (parse_errors[i + 1 ..]) |note| { - if (!note.is_note) break; - - text_buf.items.len = 0; - try tree.renderError(note, writer); - const note_loc = tree.tokenLocation(0, note.token); - const byte_offset = @intCast(u32, note_loc.line_start); - notes_buffer[notes_len] = .{ - .src = .{ - .src_path = path, - .msg = try arena.dupe(u8, text_buf.items), - .span = .{ - .start = byte_offset, - .end = byte_offset + @intCast(u32, tree.tokenSlice(note.token).len), - .main = byte_offset, - }, - .line = @intCast(u32, note_loc.line), - .column = @intCast(u32, note_loc.column), - .source_line = tree.source[note_loc.line_start..note_loc.line_end], - }, - }; - i += 1; - notes_len += 1; - } + var file: Module.File = .{ + .status = .never_loaded, + .source_loaded = true, + .zir_loaded = false, + .sub_file_path = path, + .source = tree.source, + .stat = .{ + .size = 0, + .inode = 0, + .mtime = 0, + }, + .tree = tree, + .tree_loaded = true, + .zir = undefined, + .pkg = undefined, + .root_decl = .none, + }; - const extra_offset = tree.errorOffset(parse_error); - const byte_offset = @intCast(u32, start_loc.line_start) + extra_offset; - const message: Compilation.AllErrors.Message = .{ - .src = .{ - .src_path = path, - .msg = text, - .span = .{ - .start = byte_offset, - .end = byte_offset + @intCast(u32, tree.tokenSlice(lok_token).len), - .main = byte_offset, - }, - .line = @intCast(u32, start_loc.line), - .column = @intCast(u32, start_loc.column) + extra_offset, - .source_line = source_line, - .notes = notes_buffer[0..notes_len], - }, - }; + file.pkg = try Package.create(gpa, null, path); + defer file.pkg.destroy(gpa); - const ttyconf: std.debug.TTY.Config = switch (color) { - .auto => std.debug.detectTTYConfig(std.io.getStdErr()), - .on => .escape_codes, - .off => .no_color, - }; + file.zir = try AstGen.generate(gpa, file.tree); + file.zir_loaded = true; + defer file.zir.deinit(gpa); - message.renderToStdErr(ttyconf); - } + try Compilation.addZirErrorMessages(wip_errors, &file); } pub const info_zen = @@ -5325,19 +5498,6 @@ fn detectNativeTargetInfo(cross_target: std.zig.CrossTarget) !std.zig.system.Nat return std.zig.system.NativeTargetInfo.detect(cross_target); } -/// Indicate that we are 
now terminating with a successful exit code. -/// In debug builds, this is a no-op, so that the calling code's -/// cleanup mechanisms are tested and so that external tools that -/// check for resource leaks can be accurate. In release builds, this -/// calls exit(0), and does not return. -pub fn cleanExit() void { - if (builtin.mode == .Debug) { - return; - } else { - process.exit(0); - } -} - const usage_ast_check = \\Usage: zig ast-check [file] \\ @@ -5360,8 +5520,6 @@ pub fn cmdAstCheck( arena: Allocator, args: []const []const u8, ) !void { - const Module = @import("Module.zig"); - const AstGen = @import("AstGen.zig"); const Zir = @import("Zir.zig"); var color: Color = .auto; @@ -5451,26 +5609,18 @@ pub fn cmdAstCheck( file.tree_loaded = true; defer file.tree.deinit(gpa); - try printErrsMsgToStdErr(gpa, arena, file.tree, file.sub_file_path, color); - if (file.tree.errors.len != 0) { - process.exit(1); - } - file.zir = try AstGen.generate(gpa, file.tree); file.zir_loaded = true; defer file.zir.deinit(gpa); if (file.zir.hasCompileErrors()) { - var errors = std.ArrayList(Compilation.AllErrors.Message).init(arena); - try Compilation.AllErrors.addZir(arena, &errors, &file); - const ttyconf: std.debug.TTY.Config = switch (color) { - .auto => std.debug.detectTTYConfig(std.io.getStdErr()), - .on => .escape_codes, - .off => .no_color, - }; - for (errors.items) |full_err_msg| { - full_err_msg.renderToStdErr(ttyconf); - } + var wip_errors: std.zig.ErrorBundle.Wip = undefined; + try wip_errors.init(gpa); + defer wip_errors.deinit(); + try Compilation.addZirErrorMessages(&wip_errors, &file); + var error_bundle = try wip_errors.toOwnedBundle(""); + defer error_bundle.deinit(gpa); + error_bundle.renderToStdErr(renderOptions(color)); process.exit(1); } @@ -5528,8 +5678,7 @@ pub fn cmdChangelist( arena: Allocator, args: []const []const u8, ) !void { - const Module = @import("Module.zig"); - const AstGen = @import("AstGen.zig"); + const color: Color = .auto; const Zir = @import("Zir.zig"); const old_source_file = args[0]; @@ -5577,22 +5726,18 @@ pub fn cmdChangelist( file.tree_loaded = true; defer file.tree.deinit(gpa); - try printErrsMsgToStdErr(gpa, arena, file.tree, old_source_file, .auto); - if (file.tree.errors.len != 0) { - process.exit(1); - } - file.zir = try AstGen.generate(gpa, file.tree); file.zir_loaded = true; defer file.zir.deinit(gpa); if (file.zir.hasCompileErrors()) { - var errors = std.ArrayList(Compilation.AllErrors.Message).init(arena); - try Compilation.AllErrors.addZir(arena, &errors, &file); - const ttyconf = std.debug.detectTTYConfig(std.io.getStdErr()); - for (errors.items) |full_err_msg| { - full_err_msg.renderToStdErr(ttyconf); - } + var wip_errors: std.zig.ErrorBundle.Wip = undefined; + try wip_errors.init(gpa); + defer wip_errors.deinit(); + try Compilation.addZirErrorMessages(&wip_errors, &file); + var error_bundle = try wip_errors.toOwnedBundle(""); + defer error_bundle.deinit(gpa); + error_bundle.renderToStdErr(renderOptions(color)); process.exit(1); } @@ -5614,11 +5759,6 @@ pub fn cmdChangelist( var new_tree = try Ast.parse(gpa, new_source, .zig); defer new_tree.deinit(gpa); - try printErrsMsgToStdErr(gpa, arena, new_tree, new_source_file, .auto); - if (new_tree.errors.len != 0) { - process.exit(1); - } - var old_zir = file.zir; defer old_zir.deinit(gpa); file.zir_loaded = false; @@ -5626,12 +5766,13 @@ pub fn cmdChangelist( file.zir_loaded = true; if (file.zir.hasCompileErrors()) { - var errors = std.ArrayList(Compilation.AllErrors.Message).init(arena); - try 
Compilation.AllErrors.addZir(arena, &errors, &file); - const ttyconf = std.debug.detectTTYConfig(std.io.getStdErr()); - for (errors.items) |full_err_msg| { - full_err_msg.renderToStdErr(ttyconf); - } + var wip_errors: std.zig.ErrorBundle.Wip = undefined; + try wip_errors.init(gpa); + defer wip_errors.deinit(); + try Compilation.addZirErrorMessages(&wip_errors, &file); + var error_bundle = try wip_errors.toOwnedBundle(""); + defer error_bundle.deinit(gpa); + error_bundle.renderToStdErr(renderOptions(color)); process.exit(1); } @@ -5892,3 +6033,20 @@ const ClangSearchSanitizer = struct { iframework: bool = false, }; }; + +fn get_tty_conf(color: Color) std.debug.TTY.Config { + return switch (color) { + .auto => std.debug.detectTTYConfig(std.io.getStdErr()), + .on => .escape_codes, + .off => .no_color, + }; +} + +fn renderOptions(color: Color) std.zig.ErrorBundle.RenderOptions { + const ttyconf = get_tty_conf(color); + return .{ + .ttyconf = ttyconf, + .include_source_line = ttyconf != .no_color, + .include_reference_trace = ttyconf != .no_color, + }; +} diff --git a/src/mingw.zig b/src/mingw.zig index 9e9e180945f6..a85645e80b6c 100644 --- a/src/mingw.zig +++ b/src/mingw.zig @@ -19,7 +19,7 @@ pub const CRTFile = enum { uuid_lib, }; -pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { +pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile, prog_node: *std.Progress.Node) !void { if (!build_options.have_llvm) { return error.ZigCompilerNotBuiltWithLLVMExtensions; } @@ -41,7 +41,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { //"-D_UNICODE", //"-DWPRFLAG=1", }); - return comp.build_crt_file("crt2", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("crt2", .Obj, .@"mingw-w64 crt2.o", prog_node, &.{ .{ .src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{ "libc", "mingw", "crt", "crtexe.c", @@ -60,7 +60,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { "-U__CRTDLL__", "-D__MSVCRT__", }); - return comp.build_crt_file("dllcrt2", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("dllcrt2", .Obj, .@"mingw-w64 dllcrt2.o", prog_node, &.{ .{ .src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{ "libc", "mingw", "crt", "crtdll.c", @@ -100,7 +100,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { .extra_flags = args.items, }; } - return comp.build_crt_file("mingw32", .Lib, &c_source_files); + return comp.build_crt_file("mingw32", .Lib, .@"mingw-w64 mingw32.lib", prog_node, &c_source_files); }, .msvcrt_os_lib => { @@ -148,7 +148,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { }; } } - return comp.build_crt_file("msvcrt-os", .Lib, c_source_files.items); + return comp.build_crt_file("msvcrt-os", .Lib, .@"mingw-w64 msvcrt-os.lib", prog_node, c_source_files.items); }, .mingwex_lib => { @@ -211,7 +211,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { } else { @panic("unsupported arch"); } - return comp.build_crt_file("mingwex", .Lib, c_source_files.items); + return comp.build_crt_file("mingwex", .Lib, .@"mingw-w64 mingwex.lib", prog_node, c_source_files.items); }, .uuid_lib => { @@ -244,7 +244,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { .extra_flags = extra_flags, }; } - return comp.build_crt_file("uuid", .Lib, &c_source_files); + return comp.build_crt_file("uuid", .Lib, .@"mingw-w64 uuid.lib", prog_node, &c_source_files); }, } } diff --git a/src/musl.zig b/src/musl.zig index 18e618df8f17..4a3f1e6dde9a 
100644 --- a/src/musl.zig +++ b/src/musl.zig @@ -17,7 +17,7 @@ pub const CRTFile = enum { libc_so, }; -pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { +pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile, prog_node: *std.Progress.Node) !void { if (!build_options.have_llvm) { return error.ZigCompilerNotBuiltWithLLVMExtensions; } @@ -33,7 +33,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { try args.appendSlice(&[_][]const u8{ "-Qunused-arguments", }); - return comp.build_crt_file("crti", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("crti", .Obj, .@"musl crti.o", prog_node, &.{ .{ .src_path = try start_asm_path(comp, arena, "crti.s"), .extra_flags = args.items, @@ -46,7 +46,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { try args.appendSlice(&[_][]const u8{ "-Qunused-arguments", }); - return comp.build_crt_file("crtn", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("crtn", .Obj, .@"musl crtn.o", prog_node, &.{ .{ .src_path = try start_asm_path(comp, arena, "crtn.s"), .extra_flags = args.items, @@ -60,7 +60,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { "-fno-stack-protector", "-DCRT", }); - return comp.build_crt_file("crt1", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("crt1", .Obj, .@"musl crt1.o", prog_node, &.{ .{ .src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{ "libc", "musl", "crt", "crt1.c", @@ -77,7 +77,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { "-fno-stack-protector", "-DCRT", }); - return comp.build_crt_file("rcrt1", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("rcrt1", .Obj, .@"musl rcrt1.o", prog_node, &.{ .{ .src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{ "libc", "musl", "crt", "rcrt1.c", @@ -94,7 +94,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { "-fno-stack-protector", "-DCRT", }); - return comp.build_crt_file("Scrt1", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("Scrt1", .Obj, .@"musl Scrt1.o", prog_node, &.{ .{ .src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{ "libc", "musl", "crt", "Scrt1.c", @@ -187,7 +187,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { .extra_flags = args.items, }; } - return comp.build_crt_file("c", .Lib, c_source_files.items); + return comp.build_crt_file("c", .Lib, .@"musl libc.a", prog_node, c_source_files.items); }, .libc_so => { const target = comp.getTarget(); @@ -241,7 +241,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { }); defer sub_compilation.destroy(); - try sub_compilation.updateSubCompilation(); + try comp.updateSubCompilation(sub_compilation, .@"musl libc.so", prog_node); try comp.crt_files.ensureUnusedCapacity(comp.gpa, 1); diff --git a/src/objcopy.zig b/src/objcopy.zig index 31e3d60d0ddc..c3305e8c047e 100644 --- a/src/objcopy.zig +++ b/src/objcopy.zig @@ -4,22 +4,25 @@ const fs = std.fs; const elf = std.elf; const Allocator = std.mem.Allocator; const File = std.fs.File; +const assert = std.debug.assert; + const main = @import("main.zig"); const fatal = main.fatal; -const cleanExit = main.cleanExit; +const Server = std.zig.Server; +const build_options = @import("build_options"); pub fn cmdObjCopy( gpa: Allocator, arena: Allocator, args: []const []const u8, ) !void { - _ = gpa; var i: usize = 0; var opt_out_fmt: ?std.Target.ObjectFormat = null; var opt_input: ?[]const u8 = null; var opt_output: 
?[]const u8 = null; var only_section: ?[]const u8 = null; var pad_to: ?u64 = null; + var listen = false; while (i < args.len) : (i += 1) { const arg = args[i]; if (!mem.startsWith(u8, arg, "-")) { @@ -54,6 +57,8 @@ pub fn cmdObjCopy( i += 1; if (i >= args.len) fatal("expected another argument after '{s}'", .{arg}); only_section = args[i]; + } else if (mem.eql(u8, arg, "--listen=-")) { + listen = true; } else if (mem.startsWith(u8, arg, "--only-section=")) { only_section = arg["--output-target=".len..]; } else if (mem.eql(u8, arg, "--pad-to")) { @@ -102,10 +107,45 @@ pub fn cmdObjCopy( .only_section = only_section, .pad_to = pad_to, }); - return cleanExit(); }, else => fatal("unsupported output object format: {s}", .{@tagName(out_fmt)}), } + + if (listen) { + var server = try Server.init(.{ + .gpa = gpa, + .in = std.io.getStdIn(), + .out = std.io.getStdOut(), + .zig_version = build_options.version, + }); + defer server.deinit(); + + var seen_update = false; + while (true) { + const hdr = try server.receiveMessage(); + switch (hdr.tag) { + .exit => { + return std.process.cleanExit(); + }, + .update => { + if (seen_update) { + std.debug.print("zig objcopy only supports 1 update for now\n", .{}); + std.process.exit(1); + } + seen_update = true; + + try server.serveEmitBinPath(output, .{ + .flags = .{ .cache_hit = false }, + }); + }, + else => { + std.debug.print("unsupported message: {s}", .{@tagName(hdr.tag)}); + std.process.exit(1); + }, + } + } + } + return std.process.cleanExit(); } const usage = @@ -417,7 +457,7 @@ const HexWriter = struct { } fn Address(address: u32) Record { - std.debug.assert(address > 0xFFFF); + assert(address > 0xFFFF); const segment = @intCast(u16, address / 0x10000); if (address > 0xFFFFF) { return Record{ @@ -460,7 +500,7 @@ const HexWriter = struct { const BUFSIZE = 1 + (1 + 2 + 1 + MAX_PAYLOAD_LEN + 1) * 2 + linesep.len; var outbuf: [BUFSIZE]u8 = undefined; const payload_bytes = self.getPayloadBytes(); - std.debug.assert(payload_bytes.len <= MAX_PAYLOAD_LEN); + assert(payload_bytes.len <= MAX_PAYLOAD_LEN); const line = try std.fmt.bufPrint(&outbuf, ":{0X:0>2}{1X:0>4}{2X:0>2}{3s}{4X:0>2}" ++ linesep, .{ @intCast(u8, payload_bytes.len), diff --git a/src/test.zig b/src/test.zig deleted file mode 100644 index 61cdb705e3a2..000000000000 --- a/src/test.zig +++ /dev/null @@ -1,1984 +0,0 @@ -const std = @import("std"); -const builtin = @import("builtin"); -const Allocator = std.mem.Allocator; -const CrossTarget = std.zig.CrossTarget; -const print = std.debug.print; -const assert = std.debug.assert; - -const link = @import("link.zig"); -const Compilation = @import("Compilation.zig"); -const Package = @import("Package.zig"); -const introspect = @import("introspect.zig"); -const build_options = @import("build_options"); -const ThreadPool = @import("ThreadPool.zig"); -const WaitGroup = @import("WaitGroup.zig"); -const zig_h = link.File.C.zig_h; - -const enable_qemu: bool = build_options.enable_qemu; -const enable_wine: bool = build_options.enable_wine; -const enable_wasmtime: bool = build_options.enable_wasmtime; -const enable_darling: bool = build_options.enable_darling; -const enable_rosetta: bool = build_options.enable_rosetta; -const glibc_runtimes_dir: ?[]const u8 = build_options.glibc_runtimes_dir; -const skip_stage1 = true; - -const hr = "=" ** 80; - -test { - const use_gpa = build_options.force_gpa or !builtin.link_libc; - const gpa = gpa: { - if (use_gpa) { - break :gpa std.testing.allocator; - } - // We would prefer to use raw libc allocator here, but cannot - // 
use it if it won't support the alignment we need. - if (@alignOf(std.c.max_align_t) < @alignOf(i128)) { - break :gpa std.heap.c_allocator; - } - break :gpa std.heap.raw_c_allocator; - }; - - var arena_allocator = std.heap.ArenaAllocator.init(gpa); - defer arena_allocator.deinit(); - const arena = arena_allocator.allocator(); - - var ctx = TestContext.init(gpa, arena); - defer ctx.deinit(); - - { - const dir_path = try std.fs.path.join(arena, &.{ - std.fs.path.dirname(@src().file).?, "..", "test", "cases", - }); - - var dir = try std.fs.cwd().openIterableDir(dir_path, .{}); - defer dir.close(); - - ctx.addTestCasesFromDir(dir); - } - - try @import("../test/cases.zig").addCases(&ctx); - - try ctx.run(); -} - -const ErrorMsg = union(enum) { - src: struct { - src_path: []const u8, - msg: []const u8, - // maxint means match anything - // this is a workaround for stage1 compiler bug I ran into when making it ?u32 - line: u32, - // maxint means match anything - // this is a workaround for stage1 compiler bug I ran into when making it ?u32 - column: u32, - kind: Kind, - count: u32, - }, - plain: struct { - msg: []const u8, - kind: Kind, - count: u32, - }, - - const Kind = enum { - @"error", - note, - }; - - fn init(other: Compilation.AllErrors.Message, kind: Kind) ErrorMsg { - switch (other) { - .src => |src| return .{ - .src = .{ - .src_path = src.src_path, - .msg = src.msg, - .line = @intCast(u32, src.line), - .column = @intCast(u32, src.column), - .kind = kind, - .count = src.count, - }, - }, - .plain => |plain| return .{ - .plain = .{ - .msg = plain.msg, - .kind = kind, - .count = plain.count, - }, - }, - } - } - - pub fn format( - self: ErrorMsg, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = fmt; - _ = options; - switch (self) { - .src => |src| { - if (!std.mem.eql(u8, src.src_path, "?") or - src.line != std.math.maxInt(u32) or - src.column != std.math.maxInt(u32)) - { - try writer.print("{s}:", .{src.src_path}); - if (src.line != std.math.maxInt(u32)) { - try writer.print("{d}:", .{src.line + 1}); - } else { - try writer.writeAll("?:"); - } - if (src.column != std.math.maxInt(u32)) { - try writer.print("{d}: ", .{src.column + 1}); - } else { - try writer.writeAll("?: "); - } - } - try writer.print("{s}: {s}", .{ @tagName(src.kind), src.msg }); - if (src.count != 1) { - try writer.print(" ({d} times)", .{src.count}); - } - }, - .plain => |plain| { - try writer.print("{s}: {s}", .{ @tagName(plain.kind), plain.msg }); - if (plain.count != 1) { - try writer.print(" ({d} times)", .{plain.count}); - } - }, - } - } -}; - -/// Default config values for known test manifest key-value pairings. -/// Currently handled defaults are: -/// * backend -/// * target -/// * output_mode -/// * is_test -const TestManifestConfigDefaults = struct { - /// Asserts if the key doesn't exist - yep, it's an oversight alright. - fn get(@"type": TestManifest.Type, key: []const u8) []const u8 { - if (std.mem.eql(u8, key, "backend")) { - return "stage2"; - } else if (std.mem.eql(u8, key, "target")) { - comptime { - var defaults: []const u8 = ""; - // TODO should we only return "mainstream" targets by default here? 
- // TODO we should also specify ABIs explicitly as the backends are - // getting more and more complete - // Linux - inline for (&[_][]const u8{ "x86_64", "arm", "aarch64" }) |arch| { - defaults = defaults ++ arch ++ "-linux" ++ ","; - } - // macOS - inline for (&[_][]const u8{ "x86_64", "aarch64" }) |arch| { - defaults = defaults ++ arch ++ "-macos" ++ ","; - } - // Windows - defaults = defaults ++ "x86_64-windows" ++ ","; - // Wasm - defaults = defaults ++ "wasm32-wasi"; - return defaults; - } - } else if (std.mem.eql(u8, key, "output_mode")) { - return switch (@"type") { - .@"error" => "Obj", - .run => "Exe", - .cli => @panic("TODO test harness for CLI tests"), - }; - } else if (std.mem.eql(u8, key, "is_test")) { - return "0"; - } else unreachable; - } -}; - -/// Manifest syntax example: -/// (see https://github.com/ziglang/zig/issues/11288) -/// -/// error -/// backend=stage1,stage2 -/// output_mode=exe -/// -/// :3:19: error: foo -/// -/// run -/// target=x86_64-linux,aarch64-macos -/// -/// I am expected stdout! Hello! -/// -/// cli -/// -/// build test -const TestManifest = struct { - type: Type, - config_map: std.StringHashMap([]const u8), - trailing_bytes: []const u8 = "", - - const Type = enum { - @"error", - run, - cli, - }; - - const TrailingIterator = struct { - inner: std.mem.TokenIterator(u8), - - fn next(self: *TrailingIterator) ?[]const u8 { - const next_inner = self.inner.next() orelse return null; - return std.mem.trim(u8, next_inner[2..], " \t"); - } - }; - - fn ConfigValueIterator(comptime T: type) type { - return struct { - inner: std.mem.SplitIterator(u8), - - fn next(self: *@This()) !?T { - const next_raw = self.inner.next() orelse return null; - const parseFn = getDefaultParser(T); - return try parseFn(next_raw); - } - }; - } - - fn parse(arena: Allocator, bytes: []const u8) !TestManifest { - // The manifest is the last contiguous block of comments in the file - // We scan for the beginning by searching backward for the first non-empty line that does not start with "//" - var start: ?usize = null; - var end: usize = bytes.len; - if (bytes.len > 0) { - var cursor: usize = bytes.len - 1; - while (true) { - // Move to beginning of line - while (cursor > 0 and bytes[cursor - 1] != '\n') cursor -= 1; - - if (std.mem.startsWith(u8, bytes[cursor..], "//")) { - start = cursor; // Contiguous comment line, include in manifest - } else { - if (start != null) break; // Encountered non-comment line, end of manifest - - // We ignore all-whitespace lines following the comment block, but anything else - // means that there is no manifest present. 
- if (std.mem.trim(u8, bytes[cursor..end], " \r\n\t").len == 0) { - end = cursor; - } else break; // If it's not whitespace, there is no manifest - } - - // Move to previous line - if (cursor != 0) cursor -= 1 else break; - } - } - - const actual_start = start orelse return error.MissingTestManifest; - const manifest_bytes = bytes[actual_start..end]; - - var it = std.mem.tokenize(u8, manifest_bytes, "\r\n"); - - // First line is the test type - const tt: Type = blk: { - const line = it.next() orelse return error.MissingTestCaseType; - const raw = std.mem.trim(u8, line[2..], " \t"); - if (std.mem.eql(u8, raw, "error")) { - break :blk .@"error"; - } else if (std.mem.eql(u8, raw, "run")) { - break :blk .run; - } else if (std.mem.eql(u8, raw, "cli")) { - break :blk .cli; - } else { - std.log.warn("unknown test case type requested: {s}", .{raw}); - return error.UnknownTestCaseType; - } - }; - - var manifest: TestManifest = .{ - .type = tt, - .config_map = std.StringHashMap([]const u8).init(arena), - }; - - // Any subsequent line until a blank comment line is key=value(s) pair - while (it.next()) |line| { - const trimmed = std.mem.trim(u8, line[2..], " \t"); - if (trimmed.len == 0) break; - - // Parse key=value(s) - var kv_it = std.mem.split(u8, trimmed, "="); - const key = kv_it.first(); - try manifest.config_map.putNoClobber(key, kv_it.next() orelse return error.MissingValuesForConfig); - } - - // Finally, trailing is expected output - manifest.trailing_bytes = manifest_bytes[it.index..]; - - return manifest; - } - - fn getConfigForKey( - self: TestManifest, - key: []const u8, - comptime T: type, - ) ConfigValueIterator(T) { - const bytes = self.config_map.get(key) orelse TestManifestConfigDefaults.get(self.type, key); - return ConfigValueIterator(T){ - .inner = std.mem.split(u8, bytes, ","), - }; - } - - fn getConfigForKeyAlloc( - self: TestManifest, - allocator: Allocator, - key: []const u8, - comptime T: type, - ) ![]const T { - var out = std.ArrayList(T).init(allocator); - defer out.deinit(); - var it = self.getConfigForKey(key, T); - while (try it.next()) |item| { - try out.append(item); - } - return try out.toOwnedSlice(); - } - - fn getConfigForKeyAssertSingle(self: TestManifest, key: []const u8, comptime T: type) !T { - var it = self.getConfigForKey(key, T); - const res = (try it.next()) orelse unreachable; - assert((try it.next()) == null); - return res; - } - - fn trailing(self: TestManifest) TrailingIterator { - return .{ - .inner = std.mem.tokenize(u8, self.trailing_bytes, "\r\n"), - }; - } - - fn trailingAlloc(self: TestManifest, allocator: Allocator) error{OutOfMemory}![]const []const u8 { - var out = std.ArrayList([]const u8).init(allocator); - defer out.deinit(); - var it = self.trailing(); - while (it.next()) |line| { - try out.append(line); - } - return try out.toOwnedSlice(); - } - - fn ParseFn(comptime T: type) type { - return fn ([]const u8) anyerror!T; - } - - fn getDefaultParser(comptime T: type) ParseFn(T) { - if (T == CrossTarget) return struct { - fn parse(str: []const u8) anyerror!T { - var opts = CrossTarget.ParseOptions{ - .arch_os_abi = str, - }; - return try CrossTarget.parse(opts); - } - }.parse; - - switch (@typeInfo(T)) { - .Int => return struct { - fn parse(str: []const u8) anyerror!T { - return try std.fmt.parseInt(T, str, 0); - } - }.parse, - .Bool => return struct { - fn parse(str: []const u8) anyerror!T { - const as_int = try std.fmt.parseInt(u1, str, 0); - return as_int > 0; - } - }.parse, - .Enum => return struct { - fn parse(str: []const u8) anyerror!T 
{ - return std.meta.stringToEnum(T, str) orelse { - std.log.err("unknown enum variant for {s}: {s}", .{ @typeName(T), str }); - return error.UnknownEnumVariant; - }; - } - }.parse, - .Struct => @compileError("no default parser for " ++ @typeName(T)), - else => @compileError("no default parser for " ++ @typeName(T)), - } - } -}; - -const TestStrategy = enum { - /// Execute tests as independent compilations, unless they are explicitly - /// incremental ("foo.0.zig", "foo.1.zig", etc.) - independent, - /// Execute all tests as incremental updates to a single compilation. Explicitly - /// incremental tests ("foo.0.zig", "foo.1.zig", etc.) still execute in order - incremental, -}; - -/// Iterates a set of filenames extracting batches that are either incremental -/// ("foo.0.zig", "foo.1.zig", etc.) or independent ("foo.zig", "bar.zig", etc.). -/// Assumes filenames are sorted. -const TestIterator = struct { - start: usize = 0, - end: usize = 0, - filenames: []const []const u8, - /// reset on each call to `next` - index: usize = 0, - - const Error = error{InvalidIncrementalTestIndex}; - - fn next(it: *TestIterator) Error!?[]const []const u8 { - try it.nextInner(); - if (it.start == it.end) return null; - return it.filenames[it.start..it.end]; - } - - fn nextInner(it: *TestIterator) Error!void { - it.start = it.end; - if (it.end == it.filenames.len) return; - if (it.end + 1 == it.filenames.len) { - it.end += 1; - return; - } - - const remaining = it.filenames[it.end..]; - it.index = 0; - while (it.index < remaining.len - 1) : (it.index += 1) { - // First, check if this file is part of an incremental update sequence - // Split filename into ".." - const prev_parts = getTestFileNameParts(remaining[it.index]); - const new_parts = getTestFileNameParts(remaining[it.index + 1]); - - // If base_name and file_ext match, these files are in the same test sequence - // and the new one should be the incremented version of the previous test - if (std.mem.eql(u8, prev_parts.base_name, new_parts.base_name) and - std.mem.eql(u8, prev_parts.file_ext, new_parts.file_ext)) - { - // This is "foo.X.zig" followed by "foo.Y.zig". Make sure that X = Y + 1 - if (prev_parts.test_index == null) - return error.InvalidIncrementalTestIndex; - if (new_parts.test_index == null) - return error.InvalidIncrementalTestIndex; - if (new_parts.test_index.? != prev_parts.test_index.? + 1) - return error.InvalidIncrementalTestIndex; - } else { - // This is not the same test sequence, so the new file must be the first file - // in a new sequence ("*.0.zig") or an independent test file ("*.zig") - if (new_parts.test_index != null and new_parts.test_index.? != 0) - return error.InvalidIncrementalTestIndex; - - it.end += it.index + 1; - break; - } - } else { - it.end += remaining.len; - } - } - - /// In the event of an `error.InvalidIncrementalTestIndex`, this function can - /// be used to find the current filename that was being processed. - /// Asserts the iterator hasn't reached the end. - fn currentFilename(it: TestIterator) []const u8 { - assert(it.end != it.filenames.len); - const remaining = it.filenames[it.end..]; - return remaining[it.index + 1]; - } -}; - -/// For a filename in the format ".X." or ".", returns -/// "", "" and X parsed as a decimal number. 
If X is not present, or -/// cannot be parsed as a decimal number, it is treated as part of -fn getTestFileNameParts(name: []const u8) struct { - base_name: []const u8, - file_ext: []const u8, - test_index: ?usize, -} { - const file_ext = std.fs.path.extension(name); - const trimmed = name[0 .. name.len - file_ext.len]; // Trim off "." - const maybe_index = std.fs.path.extension(trimmed); // Extract ".X" - - // Attempt to parse index - const index: ?usize = if (maybe_index.len > 0) - std.fmt.parseInt(usize, maybe_index[1..], 10) catch null - else - null; - - // Adjust "" extent based on parsing success - const base_name_end = trimmed.len - if (index != null) maybe_index.len else 0; - return .{ - .base_name = name[0..base_name_end], - .file_ext = if (file_ext.len > 0) file_ext[1..] else file_ext, - .test_index = index, - }; -} - -/// Sort test filenames in-place, so that incremental test cases ("foo.0.zig", -/// "foo.1.zig", etc.) are contiguous and appear in numerical order. -fn sortTestFilenames(filenames: [][]const u8) void { - const Context = struct { - pub fn lessThan(_: @This(), a: []const u8, b: []const u8) bool { - const a_parts = getTestFileNameParts(a); - const b_parts = getTestFileNameParts(b); - - // Sort ".X." based on "" and "" first - return switch (std.mem.order(u8, a_parts.base_name, b_parts.base_name)) { - .lt => true, - .gt => false, - .eq => switch (std.mem.order(u8, a_parts.file_ext, b_parts.file_ext)) { - .lt => true, - .gt => false, - .eq => { - // a and b differ only in their ".X" part - - // Sort "." before any ".X." - if (a_parts.test_index) |a_index| { - if (b_parts.test_index) |b_index| { - // Make sure that incremental tests appear in linear order - return a_index < b_index; - } else { - return false; - } - } else { - return b_parts.test_index != null; - } - }, - }, - }; - } - }; - std.sort.sort([]const u8, filenames, Context{}, Context.lessThan); -} - -pub const TestContext = struct { - gpa: Allocator, - arena: Allocator, - cases: std.ArrayList(Case), - - pub const Update = struct { - /// The input to the current update. We simulate an incremental update - /// with the file's contents changed to this value each update. - /// - /// This value can change entirely between updates, which would be akin - /// to deleting the source file and creating a new one from scratch; or - /// you can keep it mostly consistent, with small changes, testing the - /// effects of the incremental compilation. - src: [:0]const u8, - name: []const u8, - case: union(enum) { - /// Check the main binary output file against an expected set of bytes. - /// This is most useful with, for example, `-ofmt=c`. - CompareObjectFile: []const u8, - /// An error update attempts to compile bad code, and ensures that it - /// fails to compile, and for the expected reasons. - /// A slice containing the expected errors *in sequential order*. - Error: []const ErrorMsg, - /// An execution update compiles and runs the input, testing the - /// stdout against the expected results - /// This is a slice containing the expected message. - Execution: []const u8, - /// A header update compiles the input with the equivalent of - /// `-femit-h` and tests the produced header against the - /// expected result - Header: []const u8, - }, - }; - - pub const File = struct { - /// Contents of the importable file. Doesn't yet support incremental updates. 
- src: [:0]const u8, - path: []const u8, - }; - - pub const DepModule = struct { - name: []const u8, - path: []const u8, - }; - - pub const Backend = enum { - stage1, - stage2, - llvm, - }; - - /// A `Case` consists of a list of `Update`. The same `Compilation` is used for each - /// update, so each update's source is treated as a single file being - /// updated by the test harness and incrementally compiled. - pub const Case = struct { - /// The name of the test case. This is shown if a test fails, and - /// otherwise ignored. - name: []const u8, - /// The platform the test targets. For non-native platforms, an emulator - /// such as QEMU is required for tests to complete. - target: CrossTarget, - /// In order to be able to run e.g. Execution updates, this must be set - /// to Executable. - output_mode: std.builtin.OutputMode, - optimize_mode: std.builtin.Mode = .Debug, - updates: std.ArrayList(Update), - emit_h: bool = false, - is_test: bool = false, - expect_exact: bool = false, - backend: Backend = .stage2, - link_libc: bool = false, - - files: std.ArrayList(File), - deps: std.ArrayList(DepModule), - - result: anyerror!void = {}, - - pub fn addSourceFile(case: *Case, name: []const u8, src: [:0]const u8) void { - case.files.append(.{ .path = name, .src = src }) catch @panic("out of memory"); - } - - pub fn addDepModule(case: *Case, name: []const u8, path: []const u8) void { - case.deps.append(.{ - .name = name, - .path = path, - }) catch @panic("out of memory"); - } - - /// Adds a subcase in which the module is updated with `src`, and a C - /// header is generated. - pub fn addHeader(self: *Case, src: [:0]const u8, result: [:0]const u8) void { - self.emit_h = true; - self.updates.append(.{ - .src = src, - .name = "update", - .case = .{ .Header = result }, - }) catch @panic("out of memory"); - } - - /// Adds a subcase in which the module is updated with `src`, compiled, - /// run, and the output is tested against `result`. - pub fn addCompareOutput(self: *Case, src: [:0]const u8, result: []const u8) void { - self.updates.append(.{ - .src = src, - .name = "update", - .case = .{ .Execution = result }, - }) catch @panic("out of memory"); - } - - /// Adds a subcase in which the module is updated with `src`, compiled, - /// and the object file data is compared against `result`. - pub fn addCompareObjectFile(self: *Case, src: [:0]const u8, result: []const u8) void { - self.updates.append(.{ - .src = src, - .name = "update", - .case = .{ .CompareObjectFile = result }, - }) catch @panic("out of memory"); - } - - pub fn addError(self: *Case, src: [:0]const u8, errors: []const []const u8) void { - return self.addErrorNamed("update", src, errors); - } - - /// Adds a subcase in which the module is updated with `src`, which - /// should contain invalid input, and ensures that compilation fails - /// for the expected reasons, given in sequential order in `errors` in - /// the form `:line:column: error: message`. - pub fn addErrorNamed( - self: *Case, - name: []const u8, - src: [:0]const u8, - errors: []const []const u8, - ) void { - var array = self.updates.allocator.alloc(ErrorMsg, errors.len) catch @panic("out of memory"); - for (errors, 0..) 
|err_msg_line, i| { - if (std.mem.startsWith(u8, err_msg_line, "error: ")) { - array[i] = .{ - .plain = .{ - .msg = err_msg_line["error: ".len..], - .kind = .@"error", - .count = 1, - }, - }; - continue; - } else if (std.mem.startsWith(u8, err_msg_line, "note: ")) { - array[i] = .{ - .plain = .{ - .msg = err_msg_line["note: ".len..], - .kind = .note, - .count = 1, - }, - }; - continue; - } - // example: "file.zig:1:2: error: bad thing happened" - var it = std.mem.split(u8, err_msg_line, ":"); - const src_path = it.first(); - const line_text = it.next() orelse @panic("missing line"); - const col_text = it.next() orelse @panic("missing column"); - const kind_text = it.next() orelse @panic("missing 'error'/'note'"); - var msg = it.rest()[1..]; // skip over the space at end of "error: " - - const line: ?u32 = if (std.mem.eql(u8, line_text, "?")) - null - else - std.fmt.parseInt(u32, line_text, 10) catch @panic("bad line number"); - const column: ?u32 = if (std.mem.eql(u8, line_text, "?")) - null - else - std.fmt.parseInt(u32, col_text, 10) catch @panic("bad column number"); - const kind: ErrorMsg.Kind = if (std.mem.eql(u8, kind_text, " error")) - .@"error" - else if (std.mem.eql(u8, kind_text, " note")) - .note - else - @panic("expected 'error'/'note'"); - - const line_0based: u32 = if (line) |n| blk: { - if (n == 0) { - print("{s}: line must be specified starting at one\n", .{self.name}); - return; - } - break :blk n - 1; - } else std.math.maxInt(u32); - - const column_0based: u32 = if (column) |n| blk: { - if (n == 0) { - print("{s}: line must be specified starting at one\n", .{self.name}); - return; - } - break :blk n - 1; - } else std.math.maxInt(u32); - - const suffix = " times)"; - const count = if (std.mem.endsWith(u8, msg, suffix)) count: { - const lparen = std.mem.lastIndexOfScalar(u8, msg, '(').?; - const count = std.fmt.parseInt(u32, msg[lparen + 1 .. msg.len - suffix.len], 10) catch @panic("bad error note count number"); - msg = msg[0 .. 
lparen - 1]; - break :count count; - } else 1; - - array[i] = .{ - .src = .{ - .src_path = src_path, - .msg = msg, - .line = line_0based, - .column = column_0based, - .kind = kind, - .count = count, - }, - }; - } - self.updates.append(.{ - .src = src, - .name = name, - .case = .{ .Error = array }, - }) catch @panic("out of memory"); - } - - /// Adds a subcase in which the module is updated with `src`, and - /// asserts that it compiles without issue - pub fn compiles(self: *Case, src: [:0]const u8) void { - self.addError(src, &[_][]const u8{}); - } - }; - - pub fn addExe( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - ) *Case { - ctx.cases.append(Case{ - .name = name, - .target = target, - .updates = std.ArrayList(Update).init(ctx.cases.allocator), - .output_mode = .Exe, - .files = std.ArrayList(File).init(ctx.arena), - .deps = std.ArrayList(DepModule).init(ctx.arena), - }) catch @panic("out of memory"); - return &ctx.cases.items[ctx.cases.items.len - 1]; - } - - /// Adds a test case for Zig input, producing an executable - pub fn exe(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case { - return ctx.addExe(name, target); - } - - pub fn exeFromCompiledC(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case { - const prefixed_name = std.fmt.allocPrint(ctx.arena, "CBE: {s}", .{name}) catch - @panic("out of memory"); - var target_adjusted = target; - target_adjusted.ofmt = std.Target.ObjectFormat.c; - ctx.cases.append(Case{ - .name = prefixed_name, - .target = target_adjusted, - .updates = std.ArrayList(Update).init(ctx.cases.allocator), - .output_mode = .Exe, - .files = std.ArrayList(File).init(ctx.arena), - .deps = std.ArrayList(DepModule).init(ctx.arena), - .link_libc = true, - }) catch @panic("out of memory"); - return &ctx.cases.items[ctx.cases.items.len - 1]; - } - - /// Adds a test case that uses the LLVM backend to emit an executable. - /// Currently this implies linking libc, because only then we can generate a testable executable. - pub fn exeUsingLlvmBackend(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case { - ctx.cases.append(Case{ - .name = name, - .target = target, - .updates = std.ArrayList(Update).init(ctx.cases.allocator), - .output_mode = .Exe, - .files = std.ArrayList(File).init(ctx.arena), - .deps = std.ArrayList(DepModule).init(ctx.arena), - .backend = .llvm, - .link_libc = true, - }) catch @panic("out of memory"); - return &ctx.cases.items[ctx.cases.items.len - 1]; - } - - pub fn addObj( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - ) *Case { - ctx.cases.append(Case{ - .name = name, - .target = target, - .updates = std.ArrayList(Update).init(ctx.cases.allocator), - .output_mode = .Obj, - .files = std.ArrayList(File).init(ctx.arena), - .deps = std.ArrayList(DepModule).init(ctx.arena), - }) catch @panic("out of memory"); - return &ctx.cases.items[ctx.cases.items.len - 1]; - } - - pub fn addTest( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - ) *Case { - ctx.cases.append(Case{ - .name = name, - .target = target, - .updates = std.ArrayList(Update).init(ctx.cases.allocator), - .output_mode = .Exe, - .is_test = true, - .files = std.ArrayList(File).init(ctx.arena), - .deps = std.ArrayList(DepModule).init(ctx.arena), - }) catch @panic("out of memory"); - return &ctx.cases.items[ctx.cases.items.len - 1]; - } - - /// Adds a test case for Zig input, producing an object file. 
- pub fn obj(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case { - return ctx.addObj(name, target); - } - - /// Adds a test case for ZIR input, producing an object file. - pub fn objZIR(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case { - return ctx.addObj(name, target, .ZIR); - } - - /// Adds a test case for Zig or ZIR input, producing C code. - pub fn addC(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case { - var target_adjusted = target; - target_adjusted.ofmt = std.Target.ObjectFormat.c; - ctx.cases.append(Case{ - .name = name, - .target = target_adjusted, - .updates = std.ArrayList(Update).init(ctx.cases.allocator), - .output_mode = .Obj, - .files = std.ArrayList(File).init(ctx.arena), - .deps = std.ArrayList(DepModule).init(ctx.arena), - }) catch @panic("out of memory"); - return &ctx.cases.items[ctx.cases.items.len - 1]; - } - - pub fn c(ctx: *TestContext, name: []const u8, target: CrossTarget, src: [:0]const u8, comptime out: [:0]const u8) void { - ctx.addC(name, target).addCompareObjectFile(src, zig_h ++ out); - } - - pub fn h(ctx: *TestContext, name: []const u8, target: CrossTarget, src: [:0]const u8, comptime out: [:0]const u8) void { - ctx.addC(name, target).addHeader(src, zig_h ++ out); - } - - pub fn objErrStage1( - ctx: *TestContext, - name: []const u8, - src: [:0]const u8, - expected_errors: []const []const u8, - ) void { - const case = ctx.addObj(name, .{}); - case.backend = .stage1; - case.addError(src, expected_errors); - } - - pub fn testErrStage1( - ctx: *TestContext, - name: []const u8, - src: [:0]const u8, - expected_errors: []const []const u8, - ) void { - const case = ctx.addTest(name, .{}); - case.backend = .stage1; - case.addError(src, expected_errors); - } - - pub fn exeErrStage1( - ctx: *TestContext, - name: []const u8, - src: [:0]const u8, - expected_errors: []const []const u8, - ) void { - const case = ctx.addExe(name, .{}); - case.backend = .stage1; - case.addError(src, expected_errors); - } - - pub fn addCompareOutput( - ctx: *TestContext, - name: []const u8, - src: [:0]const u8, - expected_stdout: []const u8, - ) void { - ctx.addExe(name, .{}).addCompareOutput(src, expected_stdout); - } - - /// Adds a test case that compiles the Zig source given in `src`, executes - /// it, runs it, and tests the output against `expected_stdout` - pub fn compareOutput( - ctx: *TestContext, - name: []const u8, - src: [:0]const u8, - expected_stdout: []const u8, - ) void { - return ctx.addCompareOutput(name, src, expected_stdout); - } - - /// Adds a test case that compiles the ZIR source given in `src`, executes - /// it, runs it, and tests the output against `expected_stdout` - pub fn compareOutputZIR( - ctx: *TestContext, - name: []const u8, - src: [:0]const u8, - expected_stdout: []const u8, - ) void { - ctx.addCompareOutput(name, .ZIR, src, expected_stdout); - } - - pub fn addTransform( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - src: [:0]const u8, - result: [:0]const u8, - ) void { - ctx.addObj(name, target).addTransform(src, result); - } - - /// Adds a test case that compiles the Zig given in `src` to ZIR and tests - /// the ZIR against `result` - pub fn transform( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - src: [:0]const u8, - result: [:0]const u8, - ) void { - ctx.addTransform(name, target, src, result); - } - - pub fn addError( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - src: [:0]const u8, - expected_errors: []const []const u8, - ) void { - ctx.addObj(name, 
target).addError(src, expected_errors); - } - - /// Adds a test case that ensures that the Zig given in `src` fails to - /// compile for the expected reasons, given in sequential order in - /// `expected_errors` in the form `:line:column: error: message`. - pub fn compileError( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - src: [:0]const u8, - expected_errors: []const []const u8, - ) void { - ctx.addError(name, target, src, expected_errors); - } - - /// Adds a test case that ensures that the ZIR given in `src` fails to - /// compile for the expected reasons, given in sequential order in - /// `expected_errors` in the form `:line:column: error: message`. - pub fn compileErrorZIR( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - src: [:0]const u8, - expected_errors: []const []const u8, - ) void { - ctx.addError(name, target, .ZIR, src, expected_errors); - } - - pub fn addCompiles( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - src: [:0]const u8, - ) void { - ctx.addObj(name, target).compiles(src); - } - - /// Adds a test case that asserts that the Zig given in `src` compiles - /// without any errors. - pub fn compiles( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - src: [:0]const u8, - ) void { - ctx.addCompiles(name, target, src); - } - - /// Adds a test case that asserts that the ZIR given in `src` compiles - /// without any errors. - pub fn compilesZIR( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - src: [:0]const u8, - ) void { - ctx.addCompiles(name, target, .ZIR, src); - } - - /// Adds a test case that first ensures that the Zig given in `src` fails - /// to compile for the reasons given in sequential order in - /// `expected_errors` in the form `:line:column: error: message`, then - /// asserts that fixing the source (updating with `fixed_src`) isn't broken - /// by incremental compilation. - pub fn incrementalFailure( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - src: [:0]const u8, - expected_errors: []const []const u8, - fixed_src: [:0]const u8, - ) void { - var case = ctx.addObj(name, target); - case.addError(src, expected_errors); - case.compiles(fixed_src); - } - - /// Adds a test case that first ensures that the ZIR given in `src` fails - /// to compile for the reasons given in sequential order in - /// `expected_errors` in the form `:line:column: error: message`, then - /// asserts that fixing the source (updating with `fixed_src`) isn't broken - /// by incremental compilation. - pub fn incrementalFailureZIR( - ctx: *TestContext, - name: []const u8, - target: CrossTarget, - src: [:0]const u8, - expected_errors: []const []const u8, - fixed_src: [:0]const u8, - ) void { - var case = ctx.addObj(name, target, .ZIR); - case.addError(src, expected_errors); - case.compiles(fixed_src); - } - - /// Adds a test for each file in the provided directory. - /// Testing strategy (TestStrategy) is inferred automatically from filenames. - /// Recurses nested directories. - /// - /// Each file should include a test manifest as a contiguous block of comments at - /// the end of the file. The first line should be the test type, followed by a set of - /// key-value config values, followed by a blank line, then the expected output. 
- pub fn addTestCasesFromDir(ctx: *TestContext, dir: std.fs.IterableDir) void { - var current_file: []const u8 = "none"; - ctx.addTestCasesFromDirInner(dir, &current_file) catch |err| { - std.debug.panic("test harness failed to process file '{s}': {s}\n", .{ - current_file, @errorName(err), - }); - }; - } - - fn addTestCasesFromDirInner( - ctx: *TestContext, - iterable_dir: std.fs.IterableDir, - /// This is kept up to date with the currently being processed file so - /// that if any errors occur the caller knows it happened during this file. - current_file: *[]const u8, - ) !void { - var it = try iterable_dir.walk(ctx.arena); - var filenames = std.ArrayList([]const u8).init(ctx.arena); - - while (try it.next()) |entry| { - if (entry.kind != .File) continue; - - // Ignore stuff such as .swp files - switch (Compilation.classifyFileExt(entry.basename)) { - .unknown => continue, - else => {}, - } - try filenames.append(try ctx.arena.dupe(u8, entry.path)); - } - - // Sort filenames, so that incremental tests are contiguous and in-order - sortTestFilenames(filenames.items); - - var test_it = TestIterator{ .filenames = filenames.items }; - while (test_it.next()) |maybe_batch| { - const batch = maybe_batch orelse break; - const strategy: TestStrategy = if (batch.len > 1) .incremental else .independent; - var cases = std.ArrayList(usize).init(ctx.arena); - - for (batch) |filename| { - current_file.* = filename; - - const max_file_size = 10 * 1024 * 1024; - const src = try iterable_dir.dir.readFileAllocOptions(ctx.arena, filename, max_file_size, null, 1, 0); - - // Parse the manifest - var manifest = try TestManifest.parse(ctx.arena, src); - - if (cases.items.len == 0) { - const backends = try manifest.getConfigForKeyAlloc(ctx.arena, "backend", Backend); - const targets = try manifest.getConfigForKeyAlloc(ctx.arena, "target", CrossTarget); - const is_test = try manifest.getConfigForKeyAssertSingle("is_test", bool); - const output_mode = try manifest.getConfigForKeyAssertSingle("output_mode", std.builtin.OutputMode); - - const name_prefix = blk: { - const ext_index = std.mem.lastIndexOfScalar(u8, current_file.*, '.') orelse - return error.InvalidFilename; - const index = std.mem.lastIndexOfScalar(u8, current_file.*[0..ext_index], '.') orelse ext_index; - break :blk current_file.*[0..index]; - }; - - // Cross-product to get all possible test combinations - for (backends) |backend| { - for (targets) |target| { - const name = try std.fmt.allocPrint(ctx.arena, "{s} ({s}, {s})", .{ - name_prefix, - @tagName(backend), - try target.zigTriple(ctx.arena), - }); - const next = ctx.cases.items.len; - try ctx.cases.append(.{ - .name = name, - .target = target, - .backend = backend, - .updates = std.ArrayList(TestContext.Update).init(ctx.cases.allocator), - .is_test = is_test, - .output_mode = output_mode, - .link_libc = backend == .llvm, - .files = std.ArrayList(TestContext.File).init(ctx.cases.allocator), - .deps = std.ArrayList(DepModule).init(ctx.cases.allocator), - }); - try cases.append(next); - } - } - } - - for (cases.items) |case_index| { - const case = &ctx.cases.items[case_index]; - switch (manifest.type) { - .@"error" => { - const errors = try manifest.trailingAlloc(ctx.arena); - switch (strategy) { - .independent => { - case.addError(src, errors); - }, - .incremental => { - case.addErrorNamed("update", src, errors); - }, - } - }, - .run => { - var output = std.ArrayList(u8).init(ctx.arena); - var trailing_it = manifest.trailing(); - while (trailing_it.next()) |line| { - try output.appendSlice(line); - try 
output.append('\n'); - } - if (output.items.len > 0) { - try output.resize(output.items.len - 1); - } - case.addCompareOutput(src, try output.toOwnedSlice()); - }, - .cli => @panic("TODO cli tests"), - } - } - } - } else |err| { - // make sure the current file is set to the file that produced an error - current_file.* = test_it.currentFilename(); - return err; - } - } - - fn init(gpa: Allocator, arena: Allocator) TestContext { - return .{ - .gpa = gpa, - .cases = std.ArrayList(Case).init(gpa), - .arena = arena, - }; - } - - fn deinit(self: *TestContext) void { - for (self.cases.items) |case| { - for (case.updates.items) |u| { - if (u.case == .Error) { - case.updates.allocator.free(u.case.Error); - } - } - case.updates.deinit(); - } - self.cases.deinit(); - self.* = undefined; - } - - fn run(self: *TestContext) !void { - const host = try std.zig.system.NativeTargetInfo.detect(.{}); - const zig_exe_path = try std.process.getEnvVarOwned(self.arena, "ZIG_EXE"); - - var progress = std.Progress{}; - const root_node = progress.start("compiler", self.cases.items.len); - defer root_node.end(); - - var zig_lib_directory = try introspect.findZigLibDir(self.gpa); - defer zig_lib_directory.handle.close(); - defer self.gpa.free(zig_lib_directory.path.?); - - var aux_thread_pool: ThreadPool = undefined; - try aux_thread_pool.init(self.gpa); - defer aux_thread_pool.deinit(); - - // Use the same global cache dir for all the tests, such that we for example don't have to - // rebuild musl libc for every case (when LLVM backend is enabled). - var global_tmp = std.testing.tmpDir(.{}); - defer global_tmp.cleanup(); - - var cache_dir = try global_tmp.dir.makeOpenPath("zig-cache", .{}); - defer cache_dir.close(); - const tmp_dir_path = try std.fs.path.join(self.gpa, &[_][]const u8{ ".", "zig-cache", "tmp", &global_tmp.sub_path }); - defer self.gpa.free(tmp_dir_path); - - const global_cache_directory: Compilation.Directory = .{ - .handle = cache_dir, - .path = try std.fs.path.join(self.gpa, &[_][]const u8{ tmp_dir_path, "zig-cache" }), - }; - defer self.gpa.free(global_cache_directory.path.?); - - { - for (self.cases.items) |*case| { - if (build_options.skip_non_native) { - if (case.target.getCpuArch() != builtin.cpu.arch) - continue; - if (case.target.getObjectFormat() != builtin.object_format) - continue; - } - - // Skip tests that require LLVM backend when it is not available - if (!build_options.have_llvm and case.backend == .llvm) - continue; - - if (skip_stage1 and case.backend == .stage1) - continue; - - if (build_options.test_filter) |test_filter| { - if (std.mem.indexOf(u8, case.name, test_filter) == null) continue; - } - - var prg_node = root_node.start(case.name, case.updates.items.len); - prg_node.activate(); - defer prg_node.end(); - - case.result = runOneCase( - self.gpa, - &prg_node, - case.*, - zig_lib_directory, - zig_exe_path, - &aux_thread_pool, - global_cache_directory, - host, - ); - } - } - - var fail_count: usize = 0; - for (self.cases.items) |*case| { - case.result catch |err| { - fail_count += 1; - print("{s} failed: {s}\n", .{ case.name, @errorName(err) }); - }; - } - - if (fail_count != 0) { - print("{d} tests failed\n", .{fail_count}); - return error.TestFailed; - } - } - - fn runOneCase( - allocator: Allocator, - root_node: *std.Progress.Node, - case: Case, - zig_lib_directory: Compilation.Directory, - zig_exe_path: []const u8, - thread_pool: *ThreadPool, - global_cache_directory: Compilation.Directory, - host: std.zig.system.NativeTargetInfo, - ) !void { - const target_info = try 
std.zig.system.NativeTargetInfo.detect(case.target); - const target = target_info.target; - - var arena_allocator = std.heap.ArenaAllocator.init(allocator); - defer arena_allocator.deinit(); - const arena = arena_allocator.allocator(); - - var tmp = std.testing.tmpDir(.{}); - defer tmp.cleanup(); - - var cache_dir = try tmp.dir.makeOpenPath("zig-cache", .{}); - defer cache_dir.close(); - - const tmp_dir_path = try std.fs.path.join( - arena, - &[_][]const u8{ ".", "zig-cache", "tmp", &tmp.sub_path }, - ); - const tmp_dir_path_plus_slash = try std.fmt.allocPrint( - arena, - "{s}" ++ std.fs.path.sep_str, - .{tmp_dir_path}, - ); - const local_cache_path = try std.fs.path.join( - arena, - &[_][]const u8{ tmp_dir_path, "zig-cache" }, - ); - - for (case.files.items) |file| { - try tmp.dir.writeFile(file.path, file.src); - } - - if (case.backend == .stage1) { - // stage1 backend has limitations: - // * leaks memory - // * calls exit() when a compile error happens - // * cannot handle updates - // because of this we must spawn a child process rather than - // using Compilation directly. - - if (!std.process.can_spawn) { - print("Unable to spawn child processes on {s}, skipping test.\n", .{@tagName(builtin.os.tag)}); - return; // Pass test. - } - - assert(case.updates.items.len == 1); - const update = case.updates.items[0]; - try tmp.dir.writeFile(tmp_src_path, update.src); - - var zig_args = std.ArrayList([]const u8).init(arena); - try zig_args.append(zig_exe_path); - - if (case.is_test) { - try zig_args.append("test"); - } else if (update.case == .Execution) { - try zig_args.append("run"); - } else switch (case.output_mode) { - .Obj => try zig_args.append("build-obj"), - .Exe => try zig_args.append("build-exe"), - .Lib => try zig_args.append("build-lib"), - } - - try zig_args.append(try std.fs.path.join(arena, &.{ tmp_dir_path, tmp_src_path })); - - try zig_args.append("--name"); - try zig_args.append("test"); - - try zig_args.append("--cache-dir"); - try zig_args.append(local_cache_path); - - try zig_args.append("--global-cache-dir"); - try zig_args.append(global_cache_directory.path orelse "."); - - if (!case.target.isNative()) { - try zig_args.append("-target"); - try zig_args.append(try target.zigTriple(arena)); - } - - try zig_args.append("-O"); - try zig_args.append(@tagName(case.optimize_mode)); - - // Prevent sub-process progress bar from interfering with the - // one in this parent process. 
- try zig_args.append("--color"); - try zig_args.append("off"); - - const result = try std.ChildProcess.exec(.{ - .allocator = arena, - .argv = zig_args.items, - }); - switch (update.case) { - .Error => |case_error_list| { - switch (result.term) { - .Exited => |code| { - if (code == 0) { - dumpArgs(zig_args.items); - return error.CompilationIncorrectlySucceeded; - } - }, - else => { - std.debug.print("{s}", .{result.stderr}); - dumpArgs(zig_args.items); - return error.CompilationCrashed; - }, - } - var ok = true; - if (case.expect_exact) { - var err_iter = std.mem.split(u8, result.stderr, "\n"); - var i: usize = 0; - ok = while (err_iter.next()) |line| : (i += 1) { - if (i >= case_error_list.len) break false; - const expected = try std.mem.replaceOwned( - u8, - arena, - try std.fmt.allocPrint(arena, "{s}", .{case_error_list[i]}), - "${DIR}", - tmp_dir_path_plus_slash, - ); - - if (std.mem.indexOf(u8, line, expected) == null) break false; - continue; - } else true; - - ok = ok and i == case_error_list.len; - - if (!ok) { - print("\n======== Expected these compile errors: ========\n", .{}); - for (case_error_list) |msg| { - const expected = try std.fmt.allocPrint(arena, "{s}", .{msg}); - print("{s}\n", .{expected}); - } - } - } else { - for (case_error_list) |msg| { - const expected = try std.mem.replaceOwned( - u8, - arena, - try std.fmt.allocPrint(arena, "{s}", .{msg}), - "${DIR}", - tmp_dir_path_plus_slash, - ); - if (std.mem.indexOf(u8, result.stderr, expected) == null) { - print( - \\ - \\=========== Expected compile error: ============ - \\{s} - \\ - , .{expected}); - ok = false; - break; - } - } - } - - if (!ok) { - print( - \\================= Full output: ================= - \\{s} - \\================================================ - \\ - , .{result.stderr}); - return error.TestFailed; - } - }, - .CompareObjectFile => @panic("TODO implement in the test harness"), - .Execution => |expected_stdout| { - switch (result.term) { - .Exited => |code| { - if (code != 0) { - std.debug.print("{s}", .{result.stderr}); - dumpArgs(zig_args.items); - return error.CompilationFailed; - } - }, - else => { - std.debug.print("{s}", .{result.stderr}); - dumpArgs(zig_args.items); - return error.CompilationCrashed; - }, - } - try std.testing.expectEqualStrings("", result.stderr); - try std.testing.expectEqualStrings(expected_stdout, result.stdout); - }, - .Header => @panic("TODO implement in the test harness"), - } - return; - } - - const zig_cache_directory: Compilation.Directory = .{ - .handle = cache_dir, - .path = local_cache_path, - }; - - var main_pkg: Package = .{ - .root_src_directory = .{ .path = tmp_dir_path, .handle = tmp.dir }, - .root_src_path = tmp_src_path, - }; - defer { - var it = main_pkg.table.iterator(); - while (it.next()) |kv| { - allocator.free(kv.key_ptr.*); - kv.value_ptr.*.destroy(allocator); - } - main_pkg.table.deinit(allocator); - } - - for (case.deps.items) |dep| { - var pkg = try Package.create( - allocator, - tmp_dir_path, - dep.path, - ); - errdefer pkg.destroy(allocator); - try main_pkg.add(allocator, dep.name, pkg); - } - - const bin_name = try std.zig.binNameAlloc(arena, .{ - .root_name = "test_case", - .target = target, - .output_mode = case.output_mode, - }); - - const emit_directory: Compilation.Directory = .{ - .path = tmp_dir_path, - .handle = tmp.dir, - }; - const emit_bin: Compilation.EmitLoc = .{ - .directory = emit_directory, - .basename = bin_name, - }; - const emit_h: ?Compilation.EmitLoc = if (case.emit_h) .{ - .directory = emit_directory, - .basename = 
"test_case.h", - } else null; - const use_llvm: bool = switch (case.backend) { - .llvm => true, - else => false, - }; - const comp = try Compilation.create(allocator, .{ - .local_cache_directory = zig_cache_directory, - .global_cache_directory = global_cache_directory, - .zig_lib_directory = zig_lib_directory, - .thread_pool = thread_pool, - .root_name = "test_case", - .target = target, - // TODO: support tests for object file building, and library builds - // and linking. This will require a rework to support multi-file - // tests. - .output_mode = case.output_mode, - .is_test = case.is_test, - .optimize_mode = case.optimize_mode, - .emit_bin = emit_bin, - .emit_h = emit_h, - .main_pkg = &main_pkg, - .keep_source_files_loaded = true, - .is_native_os = case.target.isNativeOs(), - .is_native_abi = case.target.isNativeAbi(), - .dynamic_linker = target_info.dynamic_linker.get(), - .link_libc = case.link_libc, - .use_llvm = use_llvm, - .self_exe_path = zig_exe_path, - // TODO instead of turning off color, pass in a std.Progress.Node - .color = .off, - .reference_trace = 0, - // TODO: force self-hosted linkers with stage2 backend to avoid LLD creeping in - // until the auto-select mechanism deems them worthy - .use_lld = switch (case.backend) { - .stage2 => false, - else => null, - }, - }); - defer comp.destroy(); - - update: for (case.updates.items, 0..) |update, update_index| { - var update_node = root_node.start(update.name, 3); - update_node.activate(); - defer update_node.end(); - - var sync_node = update_node.start("write", 0); - sync_node.activate(); - try tmp.dir.writeFile(tmp_src_path, update.src); - sync_node.end(); - - var module_node = update_node.start("parse/analysis/codegen", 0); - module_node.activate(); - module_node.context.refresh(); - try comp.makeBinFileWritable(); - try comp.update(); - module_node.end(); - - if (update.case != .Error) { - var all_errors = try comp.getAllErrorsAlloc(); - defer all_errors.deinit(allocator); - if (all_errors.list.len != 0) { - print( - "\nCase '{s}': unexpected errors at update_index={d}:\n{s}\n", - .{ case.name, update_index, hr }, - ); - for (all_errors.list) |err_msg| { - switch (err_msg) { - .src => |src| { - print("{s}:{d}:{d}: error: {s}\n{s}\n", .{ - src.src_path, src.line + 1, src.column + 1, src.msg, hr, - }); - }, - .plain => |plain| { - print("error: {s}\n{s}\n", .{ plain.msg, hr }); - }, - } - } - // TODO print generated C code - return error.UnexpectedCompileErrors; - } - } - - switch (update.case) { - .Header => |expected_output| { - var file = try tmp.dir.openFile("test_case.h", .{ .mode = .read_only }); - defer file.close(); - const out = try file.reader().readAllAlloc(arena, 5 * 1024 * 1024); - - try std.testing.expectEqualStrings(expected_output, out); - }, - .CompareObjectFile => |expected_output| { - var file = try tmp.dir.openFile(bin_name, .{ .mode = .read_only }); - defer file.close(); - const out = try file.reader().readAllAlloc(arena, 5 * 1024 * 1024); - - try std.testing.expectEqualStrings(expected_output, out); - }, - .Error => |case_error_list| { - var test_node = update_node.start("assert", 0); - test_node.activate(); - defer test_node.end(); - - const handled_errors = try arena.alloc(bool, case_error_list.len); - std.mem.set(bool, handled_errors, false); - - var actual_errors = try comp.getAllErrorsAlloc(); - defer actual_errors.deinit(allocator); - - var any_failed = false; - var notes_to_check = std.ArrayList(*const Compilation.AllErrors.Message).init(allocator); - defer notes_to_check.deinit(); - - for 
(actual_errors.list) |actual_error| { - for (case_error_list, 0..) |case_msg, i| { - if (handled_errors[i]) continue; - - const ex_tag: std.meta.Tag(@TypeOf(case_msg)) = case_msg; - switch (actual_error) { - .src => |actual_msg| { - for (actual_msg.notes) |*note| { - try notes_to_check.append(note); - } - - if (ex_tag != .src) continue; - - const src_path_ok = case_msg.src.src_path.len == 0 or - std.mem.eql(u8, case_msg.src.src_path, actual_msg.src_path); - - const expected_msg = try std.mem.replaceOwned( - u8, - arena, - case_msg.src.msg, - "${DIR}", - tmp_dir_path_plus_slash, - ); - - var buf: [1024]u8 = undefined; - const rendered_msg = blk: { - var msg: Compilation.AllErrors.Message = actual_error; - msg.src.src_path = case_msg.src.src_path; - msg.src.notes = &.{}; - msg.src.source_line = null; - var fib = std.io.fixedBufferStream(&buf); - try msg.renderToWriter(.no_color, fib.writer(), "error", .Red, 0); - var it = std.mem.split(u8, fib.getWritten(), "error: "); - _ = it.first(); - const rendered = it.rest(); - break :blk rendered[0 .. rendered.len - 1]; // trim final newline - }; - - if (src_path_ok and - (case_msg.src.line == std.math.maxInt(u32) or - actual_msg.line == case_msg.src.line) and - (case_msg.src.column == std.math.maxInt(u32) or - actual_msg.column == case_msg.src.column) and - std.mem.eql(u8, expected_msg, rendered_msg) and - case_msg.src.kind == .@"error" and - actual_msg.count == case_msg.src.count) - { - handled_errors[i] = true; - break; - } - }, - .plain => |plain| { - if (ex_tag != .plain) continue; - - if (std.mem.eql(u8, case_msg.plain.msg, plain.msg) and - case_msg.plain.kind == .@"error" and - case_msg.plain.count == plain.count) - { - handled_errors[i] = true; - break; - } - }, - } - } else { - print( - "\nUnexpected error:\n{s}\n{}\n{s}", - .{ hr, ErrorMsg.init(actual_error, .@"error"), hr }, - ); - any_failed = true; - } - } - while (notes_to_check.popOrNull()) |note| { - for (case_error_list, 0..) |case_msg, i| { - const ex_tag: std.meta.Tag(@TypeOf(case_msg)) = case_msg; - switch (note.*) { - .src => |actual_msg| { - for (actual_msg.notes) |*sub_note| { - try notes_to_check.append(sub_note); - } - if (ex_tag != .src) continue; - - const expected_msg = try std.mem.replaceOwned( - u8, - arena, - case_msg.src.msg, - "${DIR}", - tmp_dir_path_plus_slash, - ); - - if ((case_msg.src.line == std.math.maxInt(u32) or - actual_msg.line == case_msg.src.line) and - (case_msg.src.column == std.math.maxInt(u32) or - actual_msg.column == case_msg.src.column) and - std.mem.eql(u8, expected_msg, actual_msg.msg) and - case_msg.src.kind == .note and - actual_msg.count == case_msg.src.count) - { - handled_errors[i] = true; - break; - } - }, - .plain => |plain| { - if (ex_tag != .plain) continue; - - if (std.mem.eql(u8, case_msg.plain.msg, plain.msg) and - case_msg.plain.kind == .note and - case_msg.plain.count == plain.count) - { - handled_errors[i] = true; - break; - } - }, - } - } else { - print( - "\nUnexpected note:\n{s}\n{}\n{s}", - .{ hr, ErrorMsg.init(note.*, .note), hr }, - ); - any_failed = true; - } - } - - for (handled_errors, 0..) 
|handled, i| { - if (!handled) { - print( - "\nExpected error not found:\n{s}\n{}\n{s}", - .{ hr, case_error_list[i], hr }, - ); - any_failed = true; - } - } - - if (any_failed) { - print("\nupdate_index={d}\n", .{update_index}); - return error.WrongCompileErrors; - } - }, - .Execution => |expected_stdout| { - if (!std.process.can_spawn) { - print("Unable to spawn child processes on {s}, skipping test.\n", .{@tagName(builtin.os.tag)}); - continue :update; // Pass test. - } - - update_node.setEstimatedTotalItems(4); - - var argv = std.ArrayList([]const u8).init(allocator); - defer argv.deinit(); - - var exec_result = x: { - var exec_node = update_node.start("execute", 0); - exec_node.activate(); - defer exec_node.end(); - - // We go out of our way here to use the unique temporary directory name in - // the exe_path so that it makes its way into the cache hash, avoiding - // cache collisions from multiple threads doing `zig run` at the same time - // on the same test_case.c input filename. - const ss = std.fs.path.sep_str; - const exe_path = try std.fmt.allocPrint( - arena, - ".." ++ ss ++ "{s}" ++ ss ++ "{s}", - .{ &tmp.sub_path, bin_name }, - ); - if (case.target.ofmt != null and case.target.ofmt.? == .c) { - if (host.getExternalExecutor(target_info, .{ .link_libc = true }) != .native) { - // We wouldn't be able to run the compiled C code. - continue :update; // Pass test. - } - try argv.appendSlice(&[_][]const u8{ - zig_exe_path, - "run", - "-cflags", - "-std=c99", - "-pedantic", - "-Werror", - "-Wno-incompatible-library-redeclaration", // https://github.com/ziglang/zig/issues/875 - "--", - "-lc", - exe_path, - }); - if (zig_lib_directory.path) |p| { - try argv.appendSlice(&.{ "-I", p }); - } - } else switch (host.getExternalExecutor(target_info, .{ .link_libc = case.link_libc })) { - .native => { - if (case.backend == .stage2 and case.target.getCpuArch() == .arm) { - // https://github.com/ziglang/zig/issues/13623 - continue :update; // Pass test. - } - try argv.append(exe_path); - }, - .bad_dl, .bad_os_or_cpu => continue :update, // Pass test. - - .rosetta => if (enable_rosetta) { - try argv.append(exe_path); - } else { - continue :update; // Rosetta not available, pass test. - }, - - .qemu => |qemu_bin_name| if (enable_qemu) { - const need_cross_glibc = target.isGnuLibC() and case.link_libc; - const glibc_dir_arg: ?[]const u8 = if (need_cross_glibc) - glibc_runtimes_dir orelse continue :update // glibc dir not available; pass test - else - null; - try argv.append(qemu_bin_name); - if (glibc_dir_arg) |dir| { - const linux_triple = try target.linuxTriple(arena); - const full_dir = try std.fs.path.join(arena, &[_][]const u8{ - dir, - linux_triple, - }); - - try argv.append("-L"); - try argv.append(full_dir); - } - try argv.append(exe_path); - } else { - continue :update; // QEMU not available; pass test. - }, - - .wine => |wine_bin_name| if (enable_wine) { - try argv.append(wine_bin_name); - try argv.append(exe_path); - } else { - continue :update; // Wine not available; pass test. - }, - - .wasmtime => |wasmtime_bin_name| if (enable_wasmtime) { - try argv.append(wasmtime_bin_name); - try argv.append("--dir=."); - try argv.append(exe_path); - } else { - continue :update; // wasmtime not available; pass test. - }, - - .darling => |darling_bin_name| if (enable_darling) { - try argv.append(darling_bin_name); - // Since we use relative to cwd here, we invoke darling with - // "shell" subcommand. 
- try argv.append("shell"); - try argv.append(exe_path); - } else { - continue :update; // Darling not available; pass test. - }, - } - - try comp.makeBinFileExecutable(); - - while (true) { - break :x std.ChildProcess.exec(.{ - .allocator = allocator, - .argv = argv.items, - .cwd_dir = tmp.dir, - .cwd = tmp_dir_path, - }) catch |err| switch (err) { - error.FileBusy => { - // There is a fundamental design flaw in Unix systems with how - // ETXTBSY interacts with fork+exec. - // https://github.com/golang/go/issues/22315 - // https://bugs.openjdk.org/browse/JDK-8068370 - // Unfortunately, this could be a real error, but we can't - // tell the difference here. - continue; - }, - else => { - print("\n{s}.{d} The following command failed with {s}:\n", .{ - case.name, update_index, @errorName(err), - }); - dumpArgs(argv.items); - return error.ChildProcessExecution; - }, - }; - } - }; - var test_node = update_node.start("test", 0); - test_node.activate(); - defer test_node.end(); - defer allocator.free(exec_result.stdout); - defer allocator.free(exec_result.stderr); - switch (exec_result.term) { - .Exited => |code| { - if (code != 0) { - print("\n{s}\n{s}: execution exited with code {d}:\n", .{ - exec_result.stderr, case.name, code, - }); - dumpArgs(argv.items); - return error.ChildProcessExecution; - } - }, - else => { - print("\n{s}\n{s}: execution crashed:\n", .{ - exec_result.stderr, case.name, - }); - dumpArgs(argv.items); - return error.ChildProcessExecution; - }, - } - try std.testing.expectEqualStrings(expected_stdout, exec_result.stdout); - // We allow stderr to have garbage in it because wasmtime prints a - // warning about --invoke even though we don't pass it. - //std.testing.expectEqualStrings("", exec_result.stderr); - }, - } - } - } -}; - -fn dumpArgs(argv: []const []const u8) void { - for (argv) |arg| { - print("{s} ", .{arg}); - } - print("\n", .{}); -} - -const tmp_src_path = "tmp.zig"; diff --git a/src/wasi_libc.zig b/src/wasi_libc.zig index fc8c81d5afe1..38a4f17190d0 100644 --- a/src/wasi_libc.zig +++ b/src/wasi_libc.zig @@ -59,7 +59,7 @@ pub fn execModelCrtFileFullName(wasi_exec_model: std.builtin.WasiExecModel) []co }; } -pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { +pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile, prog_node: *std.Progress.Node) !void { if (!build_options.have_llvm) { return error.ZigCompilerNotBuiltWithLLVMExtensions; } @@ -74,7 +74,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { var args = std.ArrayList([]const u8).init(arena); try addCCArgs(comp, arena, &args, false); try addLibcBottomHalfIncludes(comp, arena, &args); - return comp.build_crt_file("crt1-reactor", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("crt1-reactor", .Obj, .@"wasi crt1-reactor.o", prog_node, &.{ .{ .src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{ "libc", try sanitize(arena, crt1_reactor_src_file), @@ -87,7 +87,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { var args = std.ArrayList([]const u8).init(arena); try addCCArgs(comp, arena, &args, false); try addLibcBottomHalfIncludes(comp, arena, &args); - return comp.build_crt_file("crt1-command", .Obj, &[1]Compilation.CSourceFile{ + return comp.build_crt_file("crt1-command", .Obj, .@"wasi crt1-command.o", prog_node, &.{ .{ .src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{ "libc", try sanitize(arena, crt1_command_src_file), @@ -145,7 +145,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void 
{ } } - try comp.build_crt_file("c", .Lib, libc_sources.items); + try comp.build_crt_file("c", .Lib, .@"wasi libc.a", prog_node, libc_sources.items); }, .libwasi_emulated_process_clocks_a => { var args = std.ArrayList([]const u8).init(arena); @@ -161,7 +161,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { .extra_flags = args.items, }); } - try comp.build_crt_file("wasi-emulated-process-clocks", .Lib, emu_clocks_sources.items); + try comp.build_crt_file("wasi-emulated-process-clocks", .Lib, .@"libwasi-emulated-process-clocks.a", prog_node, emu_clocks_sources.items); }, .libwasi_emulated_getpid_a => { var args = std.ArrayList([]const u8).init(arena); @@ -177,7 +177,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { .extra_flags = args.items, }); } - try comp.build_crt_file("wasi-emulated-getpid", .Lib, emu_getpid_sources.items); + try comp.build_crt_file("wasi-emulated-getpid", .Lib, .@"libwasi-emulated-getpid.a", prog_node, emu_getpid_sources.items); }, .libwasi_emulated_mman_a => { var args = std.ArrayList([]const u8).init(arena); @@ -193,7 +193,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { .extra_flags = args.items, }); } - try comp.build_crt_file("wasi-emulated-mman", .Lib, emu_mman_sources.items); + try comp.build_crt_file("wasi-emulated-mman", .Lib, .@"libwasi-emulated-mman.a", prog_node, emu_mman_sources.items); }, .libwasi_emulated_signal_a => { var emu_signal_sources = std.ArrayList(Compilation.CSourceFile).init(arena); @@ -228,7 +228,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void { } } - try comp.build_crt_file("wasi-emulated-signal", .Lib, emu_signal_sources.items); + try comp.build_crt_file("wasi-emulated-signal", .Lib, .@"libwasi-emulated-signal.a", prog_node, emu_signal_sources.items); }, } } diff --git a/test/behavior/array.zig b/test/behavior/array.zig index 6c68d50fda56..c78bf4ab8565 100644 --- a/test/behavior/array.zig +++ b/test/behavior/array.zig @@ -84,6 +84,7 @@ test "array concat with tuple" { } test "array init with concat" { + if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO const a = 'a'; var i: [4]u8 = [2]u8{ a, 'b' } ++ [2]u8{ 'c', 'd' }; try expect(std.mem.eql(u8, &i, "abcd")); diff --git a/test/behavior/ptrcast.zig b/test/behavior/ptrcast.zig index 599d13be1d98..845ea3751eb6 100644 --- a/test/behavior/ptrcast.zig +++ b/test/behavior/ptrcast.zig @@ -170,6 +170,7 @@ test "lower reinterpreted comptime field ptr" { test "reinterpret struct field at comptime" { if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO + if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO const numNative = comptime Bytes.init(0x12345678); if (native_endian != .Little) { @@ -232,6 +233,7 @@ test "ptrcast of const integer has the correct object size" { test "implicit optional pointer to optional anyopaque pointer" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO + if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO var buf: [4]u8 = "aoeu".*; var x: ?[*]u8 = &buf; diff --git a/test/behavior/slice.zig b/test/behavior/slice.zig index d749697ec570..2a0944a5b697 100644 --- a/test/behavior/slice.zig +++ b/test/behavior/slice.zig @@ -227,6 +227,7 @@ fn sliceFromLenToLen(a_slice: []u8, start: usize, end: usize) []u8 { test "C pointer" { if (builtin.zig_backend == .stage2_sparc64) return 
error.SkipZigTest; // TODO + if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO var buf: [*c]const u8 = "kjdhfkjdhfdkjhfkfjhdfkjdhfkdjhfdkjhf"; var len: u32 = 10; diff --git a/test/cases.zig b/test/cases.zig index 412b4cb5e280..ffe046c70ede 100644 --- a/test/cases.zig +++ b/test/cases.zig @@ -1,8 +1,8 @@ const std = @import("std"); -const TestContext = @import("../src/test.zig").TestContext; +const Cases = @import("src/Cases.zig"); -pub fn addCases(ctx: *TestContext) !void { - try @import("compile_errors.zig").addCases(ctx); - try @import("stage2/cbe.zig").addCases(ctx); - try @import("stage2/nvptx.zig").addCases(ctx); +pub fn addCases(cases: *Cases) !void { + try @import("compile_errors.zig").addCases(cases); + try @import("cbe.zig").addCases(cases); + try @import("nvptx.zig").addCases(cases); } diff --git a/test/cases/compile_errors/access_inactive_union_field_comptime.zig b/test/cases/compile_errors/access_inactive_union_field_comptime.zig index d990a85f9ee1..2098b19d1450 100644 --- a/test/cases/compile_errors/access_inactive_union_field_comptime.zig +++ b/test/cases/compile_errors/access_inactive_union_field_comptime.zig @@ -21,3 +21,4 @@ pub export fn entry1() void { // :9:15: error: access of union field 'a' while field 'b' is active // :2:21: note: union declared here // :14:16: error: access of union field 'a' while field 'b' is active +// :2:21: note: union declared here diff --git a/test/cases/compile_errors/bad_import.zig b/test/cases/compile_errors/bad_import.zig index 49e78a4be430..e624d7104c68 100644 --- a/test/cases/compile_errors/bad_import.zig +++ b/test/cases/compile_errors/bad_import.zig @@ -4,4 +4,4 @@ const bogus = @import("bogus-does-not-exist.zig",); // backend=stage2 // target=native // -// :1:23: error: unable to load '${DIR}bogus-does-not-exist.zig': FileNotFound +// bogus-does-not-exist.zig': FileNotFound diff --git a/test/cases/compile_errors/compileLog_of_tagged_enum_doesnt_crash_the_compiler.zig b/test/cases/compile_errors/compileLog_of_tagged_enum_doesnt_crash_the_compiler.zig index 9189eeb48d48..55676f923064 100644 --- a/test/cases/compile_errors/compileLog_of_tagged_enum_doesnt_crash_the_compiler.zig +++ b/test/cases/compile_errors/compileLog_of_tagged_enum_doesnt_crash_the_compiler.zig @@ -15,3 +15,7 @@ pub export fn entry() void { // target=native // // :6:5: error: found compile log statement +// +// Compile Log Output: +// @as(tmp.Bar, .{ .X = 123 }) +// @as(tmp.Bar, [runtime value]) diff --git a/test/cases/compile_errors/compile_log.zig b/test/cases/compile_errors/compile_log.zig index 772853b02327..e1ea460dc352 100644 --- a/test/cases/compile_errors/compile_log.zig +++ b/test/cases/compile_errors/compile_log.zig @@ -17,3 +17,12 @@ export fn baz() void { // // :5:5: error: found compile log statement // :11:5: note: also here +// +// Compile Log Output: +// @as(*const [5:0]u8, "begin") +// @as(*const [1:0]u8, "a"), @as(i32, 12), @as(*const [1:0]u8, "b"), @as([]const u8, "hi") +// @as(*const [3:0]u8, "end") +// @as(comptime_int, 4) +// @as(*const [5:0]u8, "begin") +// @as(*const [1:0]u8, "a"), @as(i32, [runtime value]), @as(*const [1:0]u8, "b"), @as([]const u8, [runtime value]) +// @as(*const [3:0]u8, "end") diff --git a/test/cases/compile_errors/compile_log_a_pointer_to_an_opaque_value.zig b/test/cases/compile_errors/compile_log_a_pointer_to_an_opaque_value.zig index 252b6e5f14f2..73de52fc97f1 100644 --- a/test/cases/compile_errors/compile_log_a_pointer_to_an_opaque_value.zig +++ 
b/test/cases/compile_errors/compile_log_a_pointer_to_an_opaque_value.zig @@ -1,5 +1,5 @@ export fn entry() void { - @compileLog(@ptrCast(*const anyopaque, &entry)); + @compileLog(@as(*align(1) const anyopaque, @ptrCast(*const anyopaque, &entry))); } // error @@ -7,3 +7,6 @@ export fn entry() void { // target=native // // :2:5: error: found compile log statement +// +// Compile Log Output: +// @as(*const anyopaque, (function 'entry')) diff --git a/test/cases/compile_errors/compile_log_statement_inside_function_which_must_be_comptime_evaluated.zig b/test/cases/compile_errors/compile_log_statement_inside_function_which_must_be_comptime_evaluated.zig index 0bc45eae0a75..8a39fdec462b 100644 --- a/test/cases/compile_errors/compile_log_statement_inside_function_which_must_be_comptime_evaluated.zig +++ b/test/cases/compile_errors/compile_log_statement_inside_function_which_must_be_comptime_evaluated.zig @@ -12,3 +12,6 @@ export fn entry() void { // target=native // // :2:5: error: found compile log statement +// +// Compile Log Output: +// @as(*const [3:0]u8, "i32\x00") diff --git a/test/cases/compile_errors/compile_log_statement_warning_deduplication_in_generic_fn.zig b/test/cases/compile_errors/compile_log_statement_warning_deduplication_in_generic_fn.zig index 76e1c80cf94e..4b31d9924a5c 100644 --- a/test/cases/compile_errors/compile_log_statement_warning_deduplication_in_generic_fn.zig +++ b/test/cases/compile_errors/compile_log_statement_warning_deduplication_in_generic_fn.zig @@ -13,3 +13,8 @@ fn inner(comptime n: usize) void { // // :7:39: error: found compile log statement // :7:39: note: also here +// +// Compile Log Output: +// @as(*const [4:0]u8, "!@#$") +// @as(*const [4:0]u8, "!@#$") +// @as(*const [4:0]u8, "!@#$") diff --git a/test/cases/compile_errors/condition_comptime_reason_explained.zig b/test/cases/compile_errors/condition_comptime_reason_explained.zig index 332ae8afc8c4..d0193986a8dd 100644 --- a/test/cases/compile_errors/condition_comptime_reason_explained.zig +++ b/test/cases/compile_errors/condition_comptime_reason_explained.zig @@ -45,4 +45,6 @@ pub export fn entry2() void { // :22:13: error: unable to resolve comptime value // :22:13: note: condition in comptime switch must be comptime-known // :21:17: note: expression is evaluated at comptime because the function returns a comptime-only type 'tmp.S' +// :2:12: note: struct requires comptime because of this field +// :2:12: note: use '*const fn() void' for a function pointer type // :32:19: note: called from here diff --git a/test/cases/compile_errors/directly_embedding_opaque_type_in_struct_and_union.zig b/test/cases/compile_errors/directly_embedding_opaque_type_in_struct_and_union.zig index 2a6432609310..ace90bccfc1a 100644 --- a/test/cases/compile_errors/directly_embedding_opaque_type_in_struct_and_union.zig +++ b/test/cases/compile_errors/directly_embedding_opaque_type_in_struct_and_union.zig @@ -32,6 +32,7 @@ export fn d() void { // :3:8: error: opaque types have unknown size and therefore cannot be directly embedded in structs // :1:11: note: opaque declared here // :7:10: error: opaque types have unknown size and therefore cannot be directly embedded in unions +// :1:11: note: opaque declared here // :19:18: error: opaque types have unknown size and therefore cannot be directly embedded in structs // :18:22: note: opaque declared here // :24:23: error: opaque types have unknown size and therefore cannot be directly embedded in structs diff --git a/test/cases/compile_errors/extern_function_with_comptime_parameter.zig 
b/test/cases/compile_errors/extern_function_with_comptime_parameter.zig index de69fa409fb5..58f15f7fab53 100644 --- a/test/cases/compile_errors/extern_function_with_comptime_parameter.zig +++ b/test/cases/compile_errors/extern_function_with_comptime_parameter.zig @@ -12,6 +12,6 @@ comptime { _ = entry2; } // backend=stage2 // target=native // -// :1:15: error: comptime parameters not allowed in function with calling convention 'C' // :5:30: error: comptime parameters not allowed in function with calling convention 'C' // :6:30: error: generic parameters not allowed in function with calling convention 'C' +// :1:15: error: comptime parameters not allowed in function with calling convention 'C' diff --git a/test/cases/compile_errors/function_parameter_is_opaque.zig b/test/cases/compile_errors/function_parameter_is_opaque.zig index 1f92274577d8..57c89bd7f47f 100644 --- a/test/cases/compile_errors/function_parameter_is_opaque.zig +++ b/test/cases/compile_errors/function_parameter_is_opaque.zig @@ -27,4 +27,5 @@ export fn entry4() void { // :1:17: note: opaque declared here // :8:28: error: parameter of type '@TypeOf(null)' not allowed // :12:8: error: parameter of opaque type 'tmp.FooType' not allowed +// :1:17: note: opaque declared here // :17:8: error: parameter of type '@TypeOf(null)' not allowed diff --git a/test/cases/compile_errors/helpful_return_type_error_message.zig b/test/cases/compile_errors/helpful_return_type_error_message.zig index 871e9485372c..83342c7ec382 100644 --- a/test/cases/compile_errors/helpful_return_type_error_message.zig +++ b/test/cases/compile_errors/helpful_return_type_error_message.zig @@ -24,9 +24,9 @@ export fn quux() u32 { // :8:5: error: expected type 'void', found '@typeInfo(@typeInfo(@TypeOf(tmp.bar)).Fn.return_type.?).ErrorUnion.error_set' // :7:17: note: function cannot return an error // :11:15: error: expected type 'u32', found '@typeInfo(@typeInfo(@TypeOf(tmp.bar)).Fn.return_type.?).ErrorUnion.error_set!u32' -// :10:17: note: function cannot return an error // :11:15: note: cannot convert error union to payload type // :11:15: note: consider using 'try', 'catch', or 'if' +// :10:17: note: function cannot return an error // :15:14: error: expected type 'u32', found '@typeInfo(@typeInfo(@TypeOf(tmp.bar)).Fn.return_type.?).ErrorUnion.error_set!u32' // :15:14: note: cannot convert error union to payload type // :15:14: note: consider using 'try', 'catch', or 'if' diff --git a/test/cases/compile_errors/implicit_semicolon-block_expr.zig b/test/cases/compile_errors/implicit_semicolon-block_expr.zig index 7dd82b897ba3..bab8ec29c02c 100644 --- a/test/cases/compile_errors/implicit_semicolon-block_expr.zig +++ b/test/cases/compile_errors/implicit_semicolon-block_expr.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; _ = {} var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-block_statement.zig b/test/cases/compile_errors/implicit_semicolon-block_statement.zig index 189ba84d980a..912ccbc79004 100644 --- a/test/cases/compile_errors/implicit_semicolon-block_statement.zig +++ b/test/cases/compile_errors/implicit_semicolon-block_statement.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; ({}) var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-comptime_expression.zig b/test/cases/compile_errors/implicit_semicolon-comptime_expression.zig index decbc352e85a..e8dc8bb534a2 100644 --- 
a/test/cases/compile_errors/implicit_semicolon-comptime_expression.zig +++ b/test/cases/compile_errors/implicit_semicolon-comptime_expression.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; _ = comptime {} var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-comptime_statement.zig b/test/cases/compile_errors/implicit_semicolon-comptime_statement.zig index d17db1592453..afc179866940 100644 --- a/test/cases/compile_errors/implicit_semicolon-comptime_statement.zig +++ b/test/cases/compile_errors/implicit_semicolon-comptime_statement.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; comptime ({}) var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-defer.zig b/test/cases/compile_errors/implicit_semicolon-defer.zig index 57fd3a262600..e91dbae7f817 100644 --- a/test/cases/compile_errors/implicit_semicolon-defer.zig +++ b/test/cases/compile_errors/implicit_semicolon-defer.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; defer ({}) var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-for_expression.zig b/test/cases/compile_errors/implicit_semicolon-for_expression.zig index c751384e11b2..1fbe4dd3adf0 100644 --- a/test/cases/compile_errors/implicit_semicolon-for_expression.zig +++ b/test/cases/compile_errors/implicit_semicolon-for_expression.zig @@ -3,7 +3,10 @@ export fn entry() void { var good = {}; _ = for(foo()) |_| {} var bad = {}; + _ = good; + _ = bad; } +fn foo() void {} // error // backend=stage2 diff --git a/test/cases/compile_errors/implicit_semicolon-for_statement.zig b/test/cases/compile_errors/implicit_semicolon-for_statement.zig index 14709cef4ccd..2830293b7044 100644 --- a/test/cases/compile_errors/implicit_semicolon-for_statement.zig +++ b/test/cases/compile_errors/implicit_semicolon-for_statement.zig @@ -3,7 +3,10 @@ export fn entry() void { var good = {}; for(foo()) |_| ({}) var bad = {}; + _ = good; + _ = bad; } +fn foo() void {} // error // backend=stage2 diff --git a/test/cases/compile_errors/implicit_semicolon-if-else-if-else_expression.zig b/test/cases/compile_errors/implicit_semicolon-if-else-if-else_expression.zig index 72a4fa7d3e01..9e99421cd19a 100644 --- a/test/cases/compile_errors/implicit_semicolon-if-else-if-else_expression.zig +++ b/test/cases/compile_errors/implicit_semicolon-if-else-if-else_expression.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; _ = if(true) {} else if(true) {} else {} var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-if-else-if-else_statement.zig b/test/cases/compile_errors/implicit_semicolon-if-else-if-else_statement.zig index 95135006bae7..e2e7b7e3b337 100644 --- a/test/cases/compile_errors/implicit_semicolon-if-else-if-else_statement.zig +++ b/test/cases/compile_errors/implicit_semicolon-if-else-if-else_statement.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; if(true) ({}) else if(true) ({}) else ({}) var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-if-else-if_expression.zig b/test/cases/compile_errors/implicit_semicolon-if-else-if_expression.zig index a29636bd1d22..33ca6ab60086 100644 --- a/test/cases/compile_errors/implicit_semicolon-if-else-if_expression.zig +++ b/test/cases/compile_errors/implicit_semicolon-if-else-if_expression.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; _ = if(true) {} else 
if(true) {} var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-if-else-if_statement.zig b/test/cases/compile_errors/implicit_semicolon-if-else-if_statement.zig index c62430a0a207..e3d004fee1d3 100644 --- a/test/cases/compile_errors/implicit_semicolon-if-else-if_statement.zig +++ b/test/cases/compile_errors/implicit_semicolon-if-else-if_statement.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; if(true) ({}) else if(true) ({}) var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-if-else_expression.zig b/test/cases/compile_errors/implicit_semicolon-if-else_expression.zig index d5bee6e52be6..a23809528be0 100644 --- a/test/cases/compile_errors/implicit_semicolon-if-else_expression.zig +++ b/test/cases/compile_errors/implicit_semicolon-if-else_expression.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; _ = if(true) {} else {} var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-if-else_statement.zig b/test/cases/compile_errors/implicit_semicolon-if-else_statement.zig index 94df12862600..ed01aa7df20e 100644 --- a/test/cases/compile_errors/implicit_semicolon-if-else_statement.zig +++ b/test/cases/compile_errors/implicit_semicolon-if-else_statement.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; if(true) ({}) else ({}) var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-if_expression.zig b/test/cases/compile_errors/implicit_semicolon-if_expression.zig index 339a5378cfdc..e28f8616e290 100644 --- a/test/cases/compile_errors/implicit_semicolon-if_expression.zig +++ b/test/cases/compile_errors/implicit_semicolon-if_expression.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; _ = if(true) {} var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-if_statement.zig b/test/cases/compile_errors/implicit_semicolon-if_statement.zig index b8ccb5e4016e..3067c0776785 100644 --- a/test/cases/compile_errors/implicit_semicolon-if_statement.zig +++ b/test/cases/compile_errors/implicit_semicolon-if_statement.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; if(true) ({}) var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-test_expression.zig b/test/cases/compile_errors/implicit_semicolon-test_expression.zig index 2a37c0aa0e0f..0bb345b38775 100644 --- a/test/cases/compile_errors/implicit_semicolon-test_expression.zig +++ b/test/cases/compile_errors/implicit_semicolon-test_expression.zig @@ -3,7 +3,10 @@ export fn entry() void { var good = {}; _ = if (foo()) |_| {} var bad = {}; + _ = good; + _ = bad; } +fn foo() void {} // error // backend=stage2 diff --git a/test/cases/compile_errors/implicit_semicolon-test_statement.zig b/test/cases/compile_errors/implicit_semicolon-test_statement.zig index afe00eba758e..3a4eb8b5ba17 100644 --- a/test/cases/compile_errors/implicit_semicolon-test_statement.zig +++ b/test/cases/compile_errors/implicit_semicolon-test_statement.zig @@ -3,7 +3,10 @@ export fn entry() void { var good = {}; if (foo()) |_| ({}) var bad = {}; + _ = good; + _ = bad; } +fn foo() void {} // error // backend=stage2 diff --git a/test/cases/compile_errors/implicit_semicolon-while-continue_expression.zig b/test/cases/compile_errors/implicit_semicolon-while-continue_expression.zig index 5587627597a0..f05c70bc14e2 100644 --- 
a/test/cases/compile_errors/implicit_semicolon-while-continue_expression.zig +++ b/test/cases/compile_errors/implicit_semicolon-while-continue_expression.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; _ = while(true):({}) {} var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-while-continue_statement.zig b/test/cases/compile_errors/implicit_semicolon-while-continue_statement.zig index 9bebe3861ea6..2d27824f6baa 100644 --- a/test/cases/compile_errors/implicit_semicolon-while-continue_statement.zig +++ b/test/cases/compile_errors/implicit_semicolon-while-continue_statement.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; while(true):({}) ({}) var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-while_expression.zig b/test/cases/compile_errors/implicit_semicolon-while_expression.zig index df388a7c39c4..4b39ed7c16af 100644 --- a/test/cases/compile_errors/implicit_semicolon-while_expression.zig +++ b/test/cases/compile_errors/implicit_semicolon-while_expression.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; _ = while(true) {} var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/implicit_semicolon-while_statement.zig b/test/cases/compile_errors/implicit_semicolon-while_statement.zig index d9ed3d1e2a7e..538a56faf15f 100644 --- a/test/cases/compile_errors/implicit_semicolon-while_statement.zig +++ b/test/cases/compile_errors/implicit_semicolon-while_statement.zig @@ -3,6 +3,8 @@ export fn entry() void { var good = {}; while(true) 1 var bad = {}; + _ = good; + _ = bad; } // error diff --git a/test/cases/compile_errors/invalid_member_of_builtin_enum.zig b/test/cases/compile_errors/invalid_member_of_builtin_enum.zig index b0a176d792ba..f3ea66ae1c60 100644 --- a/test/cases/compile_errors/invalid_member_of_builtin_enum.zig +++ b/test/cases/compile_errors/invalid_member_of_builtin_enum.zig @@ -9,4 +9,4 @@ export fn entry() void { // target=native // // :3:38: error: enum 'builtin.OptimizeMode' has no member named 'x86' -// :?:18: note: enum declared here +// : note: enum declared here diff --git a/test/cases/compile_errors/invalid_store_to_comptime_field.zig b/test/cases/compile_errors/invalid_store_to_comptime_field.zig index 0f444ba78c50..fd6fff5e177e 100644 --- a/test/cases/compile_errors/invalid_store_to_comptime_field.zig +++ b/test/cases/compile_errors/invalid_store_to_comptime_field.zig @@ -73,11 +73,11 @@ pub export fn entry8() void { // // :6:19: error: value stored in comptime field does not match the default value of the field // :14:19: error: value stored in comptime field does not match the default value of the field -// :53:16: error: value stored in comptime field does not match the default value of the field // :19:38: error: value stored in comptime field does not match the default value of the field // :31:19: error: value stored in comptime field does not match the default value of the field // :25:29: note: default value set here // :41:16: error: value stored in comptime field does not match the default value of the field // :45:12: error: value stored in comptime field does not match the default value of the field +// :53:16: error: value stored in comptime field does not match the default value of the field // :66:43: error: value stored in comptime field does not match the default value of the field // :59:35: error: value stored in comptime field does not match the default value of the field diff --git 
a/test/cases/compile_errors/invalid_struct_field.zig b/test/cases/compile_errors/invalid_struct_field.zig index 4450375cb8dc..ff8c96a0b6ac 100644 --- a/test/cases/compile_errors/invalid_struct_field.zig +++ b/test/cases/compile_errors/invalid_struct_field.zig @@ -25,5 +25,6 @@ export fn e() void { // :4:7: error: no field named 'foo' in struct 'tmp.A' // :1:11: note: struct declared here // :10:17: error: no field named 'bar' in struct 'tmp.A' +// :1:11: note: struct declared here // :18:45: error: no field named 'f' in struct 'tmp.e.B' // :14:15: note: struct declared here diff --git a/test/cases/compile_errors/missing_main_fn_in_executable.zig b/test/cases/compile_errors/missing_main_fn_in_executable.zig index 2d608ad2b8be..3c1ae631ac40 100644 --- a/test/cases/compile_errors/missing_main_fn_in_executable.zig +++ b/test/cases/compile_errors/missing_main_fn_in_executable.zig @@ -5,5 +5,7 @@ // target=x86_64-linux // output_mode=Exe // -// :?:?: error: root struct of file 'tmp' has no member named 'main' -// :?:?: note: called from here +// : error: root struct of file 'tmp' has no member named 'main' +// : note: called from here +// : note: called from here +// : note: called from here diff --git a/test/cases/compile_errors/private_main_fn.zig b/test/cases/compile_errors/private_main_fn.zig index 26ad3d22db88..6e53fbdce207 100644 --- a/test/cases/compile_errors/private_main_fn.zig +++ b/test/cases/compile_errors/private_main_fn.zig @@ -5,6 +5,8 @@ fn main() void {} // target=x86_64-linux // output_mode=Exe // -// :?:?: error: 'main' is not marked 'pub' +// : error: 'main' is not marked 'pub' // :1:1: note: declared here -// :?:?: note: called from here +// : note: called from here +// : note: called from here +// : note: called from here diff --git a/test/cases/compile_errors/runtime_index_into_comptime_type_slice.zig b/test/cases/compile_errors/runtime_index_into_comptime_type_slice.zig index 4c235ed94bc3..9cd0fe1798a5 100644 --- a/test/cases/compile_errors/runtime_index_into_comptime_type_slice.zig +++ b/test/cases/compile_errors/runtime_index_into_comptime_type_slice.zig @@ -15,5 +15,6 @@ export fn entry() void { // target=native // // :9:51: error: values of type '[]const builtin.Type.StructField' must be comptime-known, but index value is runtime-known -// :?:21: note: struct requires comptime because of this field -// :?:21: note: types are not available at runtime +// : note: struct requires comptime because of this field +// : note: types are not available at runtime +// : struct requires comptime because of this field diff --git a/test/cases/compile_errors/struct_type_mismatch_in_arg.zig b/test/cases/compile_errors/struct_type_mismatch_in_arg.zig index a52bdfab6c59..d051966c523d 100644 --- a/test/cases/compile_errors/struct_type_mismatch_in_arg.zig +++ b/test/cases/compile_errors/struct_type_mismatch_in_arg.zig @@ -13,6 +13,6 @@ comptime { // target=native // // :7:16: error: expected type 'tmp.Foo', found 'tmp.Bar' -// :1:13: note: struct declared here // :2:13: note: struct declared here +// :1:13: note: struct declared here // :4:18: note: parameter type declared here diff --git a/test/cases/compile_errors/undefined_as_field_type_is_rejected.zig b/test/cases/compile_errors/undefined_as_field_type_is_rejected.zig index e78cadc87818..b6eb059661c1 100644 --- a/test/cases/compile_errors/undefined_as_field_type_is_rejected.zig +++ b/test/cases/compile_errors/undefined_as_field_type_is_rejected.zig @@ -1,9 +1,13 @@ -export fn a() void { - b(); +const Foo = struct { + a: undefined, 
+}; +export fn entry1() void { + const foo: Foo = undefined; + _ = foo; } // error -// backend=stage2 +// backend=stage1 // target=native // -// :2:5: error: use of undeclared identifier 'b' +// tmp.zig:2:8: error: use of undefined value here causes undefined behavior diff --git a/test/cases/compile_errors/union_init_with_none_or_multiple_fields.zig b/test/cases/compile_errors/union_init_with_none_or_multiple_fields.zig index 5f486bf2b7f3..a700f0d0f29a 100644 --- a/test/cases/compile_errors/union_init_with_none_or_multiple_fields.zig +++ b/test/cases/compile_errors/union_init_with_none_or_multiple_fields.zig @@ -28,10 +28,11 @@ export fn u2m() void { // target=native // // :9:1: error: union initializer must initialize one field +// :1:12: note: union declared here // :14:20: error: cannot initialize multiple union fields at once, unions can only have one active field // :14:31: note: additional initializer here +// :1:12: note: union declared here // :18:21: error: union initializer must initialize one field // :22:20: error: cannot initialize multiple union fields at once, unions can only have one active field // :22:31: note: additional initializer here -// :1:12: note: union declared here // :5:12: note: union declared here diff --git a/test/cases/compile_log.0.zig b/test/cases/compile_log.0.zig index 161a6f37ca51..27dfbc706d42 100644 --- a/test/cases/compile_log.0.zig +++ b/test/cases/compile_log.0.zig @@ -15,3 +15,8 @@ fn x() void {} // error // // :6:23: error: expected type 'usize', found 'bool' +// +// Compile Log Output: +// @as(bool, true), @as(comptime_int, 20), @as(u32, [runtime value]), @as(fn() void, (function 'x')) +// @as(comptime_int, 1000) +// @as(comptime_int, 1234) diff --git a/test/cases/compile_log.1.zig b/test/cases/compile_log.1.zig index 12e664154208..286722653cac 100644 --- a/test/cases/compile_log.1.zig +++ b/test/cases/compile_log.1.zig @@ -14,3 +14,7 @@ fn x() void {} // // :9:5: error: found compile log statement // :4:5: note: also here +// +// Compile Log Output: +// @as(bool, true), @as(comptime_int, 20), @as(u32, [runtime value]), @as(fn() void, (function 'x')) +// @as(comptime_int, 1000) diff --git a/test/cases/f32_passed_to_variadic_fn.zig b/test/cases/f32_passed_to_variadic_fn.zig index c029b4b69f20..4ae1d2cf0842 100644 --- a/test/cases/f32_passed_to_variadic_fn.zig +++ b/test/cases/f32_passed_to_variadic_fn.zig @@ -9,7 +9,8 @@ pub fn main() void { // run // backend=llvm // target=x86_64-linux-gnu +// link_libc=1 // // f64: 2.000000 // f32: 10.000000 -// \ No newline at end of file +// diff --git a/test/cases/fn_typeinfo_passed_to_comptime_fn.zig b/test/cases/fn_typeinfo_passed_to_comptime_fn.zig index 31673e5b8104..fb64788126da 100644 --- a/test/cases/fn_typeinfo_passed_to_comptime_fn.zig +++ b/test/cases/fn_typeinfo_passed_to_comptime_fn.zig @@ -14,4 +14,5 @@ fn foo(comptime info: std.builtin.Type) !void { // run // is_test=1 +// backend=llvm // diff --git a/test/cases/llvm/address_space_pointer_access_chaining_pointer_to_optional_array.zig b/test/cases/llvm/address_space_pointer_access_chaining_pointer_to_optional_array.zig index cf43513159d1..00d4a7ecc944 100644 --- a/test/cases/llvm/address_space_pointer_access_chaining_pointer_to_optional_array.zig +++ b/test/cases/llvm/address_space_pointer_access_chaining_pointer_to_optional_array.zig @@ -5,7 +5,7 @@ pub fn main() void { _ = entry; } -// error +// compile // output_mode=Exe // backend=llvm // target=x86_64-linux,x86_64-macos diff --git 
a/test/cases/llvm/address_spaces_pointer_access_chaining_array_pointer.zig b/test/cases/llvm/address_spaces_pointer_access_chaining_array_pointer.zig index 5907c1dad5e9..f23498e955f3 100644 --- a/test/cases/llvm/address_spaces_pointer_access_chaining_array_pointer.zig +++ b/test/cases/llvm/address_spaces_pointer_access_chaining_array_pointer.zig @@ -5,7 +5,7 @@ pub fn main() void { _ = entry; } -// error +// compile // output_mode=Exe // backend=stage2,llvm // target=x86_64-linux,x86_64-macos diff --git a/test/cases/llvm/address_spaces_pointer_access_chaining_complex.zig b/test/cases/llvm/address_spaces_pointer_access_chaining_complex.zig index ece0614f7304..4f54f38e6b67 100644 --- a/test/cases/llvm/address_spaces_pointer_access_chaining_complex.zig +++ b/test/cases/llvm/address_spaces_pointer_access_chaining_complex.zig @@ -6,7 +6,7 @@ pub fn main() void { _ = entry; } -// error +// compile // output_mode=Exe // backend=llvm // target=x86_64-linux,x86_64-macos diff --git a/test/cases/llvm/address_spaces_pointer_access_chaining_struct_pointer.zig b/test/cases/llvm/address_spaces_pointer_access_chaining_struct_pointer.zig index 9175bcbc0ea9..84695cb35bca 100644 --- a/test/cases/llvm/address_spaces_pointer_access_chaining_struct_pointer.zig +++ b/test/cases/llvm/address_spaces_pointer_access_chaining_struct_pointer.zig @@ -6,7 +6,7 @@ pub fn main() void { _ = entry; } -// error +// compile // output_mode=Exe // backend=stage2,llvm // target=x86_64-linux,x86_64-macos diff --git a/test/cases/llvm/dereferencing_though_multiple_pointers_with_address_spaces.zig b/test/cases/llvm/dereferencing_though_multiple_pointers_with_address_spaces.zig index 8f3670075717..badab821d342 100644 --- a/test/cases/llvm/dereferencing_though_multiple_pointers_with_address_spaces.zig +++ b/test/cases/llvm/dereferencing_though_multiple_pointers_with_address_spaces.zig @@ -5,7 +5,7 @@ pub fn main() void { _ = entry; } -// error +// compile // output_mode=Exe // backend=stage2,llvm // target=x86_64-linux,x86_64-macos diff --git a/test/cases/llvm/hello_world.zig b/test/cases/llvm/hello_world.zig index 4243191b0fb7..0f75f624ecf4 100644 --- a/test/cases/llvm/hello_world.zig +++ b/test/cases/llvm/hello_world.zig @@ -7,6 +7,7 @@ pub fn main() void { // run // backend=llvm // target=x86_64-linux,x86_64-macos +// link_libc=1 // // hello world! 
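For context on the link_libc=1 lines added above: each case file ends with a trailing comment manifest that the test harness parses (run/compile/error, backend=, target=, and the expected output after a blank "//" line), and these cases need libc because they call C's printf. A minimal sketch of such a case file, using only manifest keys that appear in this diff; the file body and expected output below are illustrative, not part of this change:

extern "c" fn printf(format: [*:0]const u8, ...) c_int;

pub fn main() void {
    // printf comes from libc, hence the link_libc=1 manifest line below.
    _ = printf("hello from libc\n");
}

// run
// backend=llvm
// target=x86_64-linux-gnu
// link_libc=1
//
// hello from libc
//
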
// diff --git a/test/cases/llvm/pointer_keeps_address_space.zig b/test/cases/llvm/pointer_keeps_address_space.zig index bfd40566f840..f894c96d7b5f 100644 --- a/test/cases/llvm/pointer_keeps_address_space.zig +++ b/test/cases/llvm/pointer_keeps_address_space.zig @@ -5,7 +5,7 @@ pub fn main() void { _ = entry; } -// error +// compile // output_mode=Exe // backend=stage2,llvm // target=x86_64-linux,x86_64-macos diff --git a/test/cases/llvm/pointer_keeps_address_space_when_taking_address_of_dereference.zig b/test/cases/llvm/pointer_keeps_address_space_when_taking_address_of_dereference.zig index 8114e86c5da0..b5803a30764a 100644 --- a/test/cases/llvm/pointer_keeps_address_space_when_taking_address_of_dereference.zig +++ b/test/cases/llvm/pointer_keeps_address_space_when_taking_address_of_dereference.zig @@ -5,7 +5,7 @@ pub fn main() void { _ = entry; } -// error +// compile // output_mode=Exe // backend=stage2,llvm // target=x86_64-linux,x86_64-macos diff --git a/test/cases/llvm/pointer_to_explicit_generic_address_space_coerces_to_implicit_pointer.zig b/test/cases/llvm/pointer_to_explicit_generic_address_space_coerces_to_implicit_pointer.zig index 78bc3e4bd6f4..b3c011698338 100644 --- a/test/cases/llvm/pointer_to_explicit_generic_address_space_coerces_to_implicit_pointer.zig +++ b/test/cases/llvm/pointer_to_explicit_generic_address_space_coerces_to_implicit_pointer.zig @@ -5,7 +5,7 @@ pub fn main() void { _ = entry; } -// error +// compile // output_mode=Exe // backend=stage2,llvm // target=x86_64-linux,x86_64-macos diff --git a/test/stage2/cbe.zig b/test/cbe.zig similarity index 92% rename from test/stage2/cbe.zig rename to test/cbe.zig index e9750853a6b5..25ac3cb1372d 100644 --- a/test/stage2/cbe.zig +++ b/test/cbe.zig @@ -1,5 +1,5 @@ const std = @import("std"); -const TestContext = @import("../../src/test.zig").TestContext; +const Cases = @import("src/Cases.zig"); // These tests should work with all platforms, but we're using linux_x64 for // now for consistency. Will be expanded eventually. 
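The rename above moves test/stage2/cbe.zig to test/cbe.zig and switches it from the old src/test.zig TestContext to the relocated harness at test/src/Cases.zig. For orientation, a file like test/cbe.zig registers cases roughly as follows; the case name and source here are illustrative, while the import path and the exeFromCompiledC and addCompareOutput usage mirror this file's hunks:

const std = @import("std");
const Cases = @import("src/Cases.zig");

// Mirrors the target declared near the top of test/cbe.zig.
const linux_x64 = std.zig.CrossTarget{
    .cpu_arch = .x86_64,
    .os_tag = .linux,
};

pub fn addCases(ctx: *Cases) !void {
    // Each case compiles the source with the C backend and compares
    // the resulting program's output against the expected string.
    var case = ctx.exeFromCompiledC("illustrative case", linux_x64);
    case.addCompareOutput(
        \\pub export fn main() c_int {
        \\    return 0;
        \\}
    , "");
}
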
@@ -8,7 +8,7 @@ const linux_x64 = std.zig.CrossTarget{ .os_tag = .linux, }; -pub fn addCases(ctx: *TestContext) !void { +pub fn addCases(ctx: *Cases) !void { { var case = ctx.exeFromCompiledC("hello world with updates", .{}); @@ -71,7 +71,7 @@ pub fn addCases(ctx: *TestContext) !void { } { - var case = ctx.exeFromCompiledC("@intToError", .{}); + var case = ctx.exeFromCompiledC("intToError", .{}); case.addCompareOutput( \\pub export fn main() c_int { @@ -837,7 +837,7 @@ pub fn addCases(ctx: *TestContext) !void { } { - var case = ctx.exeFromCompiledC("shift right + left", .{}); + var case = ctx.exeFromCompiledC("shift right and left", .{}); case.addCompareOutput( \\pub export fn main() c_int { \\ var i: u32 = 16; @@ -883,7 +883,7 @@ pub fn addCases(ctx: *TestContext) !void { { // TODO: add u64 tests, ran into issues with the literal generated for std.math.maxInt(u64) - var case = ctx.exeFromCompiledC("add/sub wrapping operations", .{}); + var case = ctx.exeFromCompiledC("add and sub wrapping operations", .{}); case.addCompareOutput( \\pub export fn main() c_int { \\ // Addition @@ -932,7 +932,7 @@ pub fn addCases(ctx: *TestContext) !void { } { - var case = ctx.exeFromCompiledC("@rem", linux_x64); + var case = ctx.exeFromCompiledC("rem", linux_x64); case.addCompareOutput( \\fn assert(ok: bool) void { \\ if (!ok) unreachable; @@ -947,69 +947,4 @@ pub fn addCases(ctx: *TestContext) !void { \\} , ""); } - - ctx.h("simple header", linux_x64, - \\export fn start() void{} - , - \\zig_extern void start(void); - \\ - ); - ctx.h("header with single param function", linux_x64, - \\export fn start(a: u8) void{ - \\ _ = a; - \\} - , - \\zig_extern void start(uint8_t const a0); - \\ - ); - ctx.h("header with multiple param function", linux_x64, - \\export fn start(a: u8, b: u8, c: u8) void{ - \\ _ = a; _ = b; _ = c; - \\} - , - \\zig_extern void start(uint8_t const a0, uint8_t const a1, uint8_t const a2); - \\ - ); - ctx.h("header with u32 param function", linux_x64, - \\export fn start(a: u32) void{ _ = a; } - , - \\zig_extern void start(uint32_t const a0); - \\ - ); - ctx.h("header with usize param function", linux_x64, - \\export fn start(a: usize) void{ _ = a; } - , - \\zig_extern void start(uintptr_t const a0); - \\ - ); - ctx.h("header with bool param function", linux_x64, - \\export fn start(a: bool) void{_ = a;} - , - \\zig_extern void start(bool const a0); - \\ - ); - ctx.h("header with noreturn function", linux_x64, - \\export fn start() noreturn { - \\ unreachable; - \\} - , - \\zig_extern zig_noreturn void start(void); - \\ - ); - ctx.h("header with multiple functions", linux_x64, - \\export fn a() void{} - \\export fn b() void{} - \\export fn c() void{} - , - \\zig_extern void a(void); - \\zig_extern void b(void); - \\zig_extern void c(void); - \\ - ); - ctx.h("header with multiple includes", linux_x64, - \\export fn start(a: u32, b: usize) void{ _ = a; _ = b; } - , - \\zig_extern void start(uint32_t const a0, uintptr_t const a1); - \\ - ); } diff --git a/test/cli.zig b/test/cli.zig deleted file mode 100644 index 57f26f73d78b..000000000000 --- a/test/cli.zig +++ /dev/null @@ -1,195 +0,0 @@ -const std = @import("std"); -const builtin = @import("builtin"); -const testing = std.testing; -const process = std.process; -const fs = std.fs; -const ChildProcess = std.ChildProcess; - -var a: std.mem.Allocator = undefined; - -pub fn main() !void { - var gpa = std.heap.GeneralPurposeAllocator(.{}){}; - defer _ = gpa.deinit(); - var arena = std.heap.ArenaAllocator.init(gpa.allocator()); - defer 
arena.deinit(); - - a = arena.allocator(); - var arg_it = try process.argsWithAllocator(a); - - // skip my own exe name - _ = arg_it.skip(); - - const zig_exe_rel = arg_it.next() orelse { - std.debug.print("Expected first argument to be path to zig compiler\n", .{}); - return error.InvalidArgs; - }; - const cache_root = arg_it.next() orelse { - std.debug.print("Expected second argument to be cache root directory path\n", .{}); - return error.InvalidArgs; - }; - const zig_exe = try fs.path.resolve(a, &[_][]const u8{zig_exe_rel}); - - const dir_path = try fs.path.join(a, &[_][]const u8{ cache_root, "clitest" }); - defer fs.cwd().deleteTree(dir_path) catch {}; - - const TestFn = fn ([]const u8, []const u8) anyerror!void; - const Test = struct { - func: TestFn, - name: []const u8, - }; - const tests = [_]Test{ - .{ .func = testZigInitLib, .name = "zig init-lib" }, - .{ .func = testZigInitExe, .name = "zig init-exe" }, - .{ .func = testGodboltApi, .name = "godbolt API" }, - .{ .func = testMissingOutputPath, .name = "missing output path" }, - .{ .func = testZigFmt, .name = "zig fmt" }, - }; - inline for (tests) |t| { - try fs.cwd().deleteTree(dir_path); - try fs.cwd().makeDir(dir_path); - t.func(zig_exe, dir_path) catch |err| { - std.debug.print("test '{s}' failed: {s}\n", .{ - t.name, @errorName(err), - }); - return err; - }; - } -} - -fn printCmd(cwd: []const u8, argv: []const []const u8) void { - std.debug.print("cd {s} && ", .{cwd}); - for (argv) |arg| { - std.debug.print("{s} ", .{arg}); - } - std.debug.print("\n", .{}); -} - -fn exec(cwd: []const u8, expect_0: bool, argv: []const []const u8) !ChildProcess.ExecResult { - const max_output_size = 100 * 1024; - const result = ChildProcess.exec(.{ - .allocator = a, - .argv = argv, - .cwd = cwd, - .max_output_bytes = max_output_size, - }) catch |err| { - std.debug.print("The following command failed:\n", .{}); - printCmd(cwd, argv); - return err; - }; - switch (result.term) { - .Exited => |code| { - if ((code != 0) == expect_0) { - std.debug.print("The following command exited with error code {}:\n", .{code}); - printCmd(cwd, argv); - std.debug.print("stderr:\n{s}\n", .{result.stderr}); - return error.CommandFailed; - } - }, - else => { - std.debug.print("The following command terminated unexpectedly:\n", .{}); - printCmd(cwd, argv); - std.debug.print("stderr:\n{s}\n", .{result.stderr}); - return error.CommandFailed; - }, - } - return result; -} - -fn testZigInitLib(zig_exe: []const u8, dir_path: []const u8) !void { - _ = try exec(dir_path, true, &[_][]const u8{ zig_exe, "init-lib" }); - const test_result = try exec(dir_path, true, &[_][]const u8{ zig_exe, "build", "test" }); - try testing.expectStringEndsWith(test_result.stderr, "All 1 tests passed.\n"); -} - -fn testZigInitExe(zig_exe: []const u8, dir_path: []const u8) !void { - _ = try exec(dir_path, true, &[_][]const u8{ zig_exe, "init-exe" }); - const run_result = try exec(dir_path, true, &[_][]const u8{ zig_exe, "build", "run" }); - try testing.expectEqualStrings("All your codebase are belong to us.\n", run_result.stderr); - try testing.expectEqualStrings("Run `zig build test` to run the tests.\n", run_result.stdout); -} - -fn testGodboltApi(zig_exe: []const u8, dir_path: []const u8) anyerror!void { - if (builtin.os.tag != .linux or builtin.cpu.arch != .x86_64) return; - - const example_zig_path = try fs.path.join(a, &[_][]const u8{ dir_path, "example.zig" }); - const example_s_path = try fs.path.join(a, &[_][]const u8{ dir_path, "example.s" }); - - try fs.cwd().writeFile(example_zig_path, 
- \\// Type your code here, or load an example. - \\export fn square(num: i32) i32 { - \\ return num * num; - \\} - \\extern fn zig_panic() noreturn; - \\pub fn panic(msg: []const u8, error_return_trace: ?*@import("std").builtin.StackTrace, _: ?usize) noreturn { - \\ _ = msg; - \\ _ = error_return_trace; - \\ zig_panic(); - \\} - ); - - var args = std.ArrayList([]const u8).init(a); - try args.appendSlice(&[_][]const u8{ - zig_exe, "build-obj", - "--cache-dir", dir_path, - "--name", "example", - "-fno-emit-bin", "-fno-emit-h", - "-fstrip", "-OReleaseFast", - example_zig_path, - }); - - const emit_asm_arg = try std.fmt.allocPrint(a, "-femit-asm={s}", .{example_s_path}); - try args.append(emit_asm_arg); - - _ = try exec(dir_path, true, args.items); - - const out_asm = try std.fs.cwd().readFileAlloc(a, example_s_path, std.math.maxInt(usize)); - try testing.expect(std.mem.indexOf(u8, out_asm, "square:") != null); - try testing.expect(std.mem.indexOf(u8, out_asm, "mov\teax, edi") != null); - try testing.expect(std.mem.indexOf(u8, out_asm, "imul\teax, edi") != null); -} - -fn testMissingOutputPath(zig_exe: []const u8, dir_path: []const u8) !void { - _ = try exec(dir_path, true, &[_][]const u8{ zig_exe, "init-exe" }); - const output_path = try fs.path.join(a, &[_][]const u8{ "does", "not", "exist", "foo.exe" }); - const output_arg = try std.fmt.allocPrint(a, "-femit-bin={s}", .{output_path}); - const source_path = try fs.path.join(a, &[_][]const u8{ "src", "main.zig" }); - const result = try exec(dir_path, false, &[_][]const u8{ zig_exe, "build-exe", source_path, output_arg }); - const s = std.fs.path.sep_str; - const expected: []const u8 = "error: unable to open output directory 'does" ++ s ++ "not" ++ s ++ "exist': FileNotFound\n"; - try testing.expectEqualStrings(expected, result.stderr); -} - -fn testZigFmt(zig_exe: []const u8, dir_path: []const u8) !void { - _ = try exec(dir_path, true, &[_][]const u8{ zig_exe, "init-exe" }); - - const unformatted_code = " // no reason for indent"; - - const fmt1_zig_path = try fs.path.join(a, &[_][]const u8{ dir_path, "fmt1.zig" }); - try fs.cwd().writeFile(fmt1_zig_path, unformatted_code); - - const run_result1 = try exec(dir_path, true, &[_][]const u8{ zig_exe, "fmt", fmt1_zig_path }); - // stderr should be file path + \n - try testing.expect(std.mem.startsWith(u8, run_result1.stdout, fmt1_zig_path)); - try testing.expect(run_result1.stdout.len == fmt1_zig_path.len + 1 and run_result1.stdout[run_result1.stdout.len - 1] == '\n'); - - const fmt2_zig_path = try fs.path.join(a, &[_][]const u8{ dir_path, "fmt2.zig" }); - try fs.cwd().writeFile(fmt2_zig_path, unformatted_code); - - const run_result2 = try exec(dir_path, true, &[_][]const u8{ zig_exe, "fmt", dir_path }); - // running it on the dir, only the new file should be changed - try testing.expect(std.mem.startsWith(u8, run_result2.stdout, fmt2_zig_path)); - try testing.expect(run_result2.stdout.len == fmt2_zig_path.len + 1 and run_result2.stdout[run_result2.stdout.len - 1] == '\n'); - - const run_result3 = try exec(dir_path, true, &[_][]const u8{ zig_exe, "fmt", dir_path }); - // both files have been formatted, nothing should change now - try testing.expect(run_result3.stdout.len == 0); - - // Check UTF-16 decoding - const fmt4_zig_path = try fs.path.join(a, &[_][]const u8{ dir_path, "fmt4.zig" }); - var unformatted_code_utf16 = "\xff\xfe \x00 \x00 \x00 \x00/\x00/\x00 \x00n\x00o\x00 \x00r\x00e\x00a\x00s\x00o\x00n\x00"; - try fs.cwd().writeFile(fmt4_zig_path, unformatted_code_utf16); - - const run_result4 
= try exec(dir_path, true, &[_][]const u8{ zig_exe, "fmt", dir_path }); - try testing.expect(std.mem.startsWith(u8, run_result4.stdout, fmt4_zig_path)); - try testing.expect(run_result4.stdout.len == fmt4_zig_path.len + 1 and run_result4.stdout[run_result4.stdout.len - 1] == '\n'); -} diff --git a/test/compile_errors.zig b/test/compile_errors.zig index e0b78b300046..2d796b94639b 100644 --- a/test/compile_errors.zig +++ b/test/compile_errors.zig @@ -1,146 +1,10 @@ const std = @import("std"); const builtin = @import("builtin"); -const TestContext = @import("../src/test.zig").TestContext; - -pub fn addCases(ctx: *TestContext) !void { - { - const case = ctx.obj("wrong same named struct", .{}); - case.backend = .stage1; - - case.addSourceFile("a.zig", - \\pub const Foo = struct { - \\ x: i32, - \\}; - ); - - case.addSourceFile("b.zig", - \\pub const Foo = struct { - \\ z: f64, - \\}; - ); - - case.addError( - \\const a = @import("a.zig"); - \\const b = @import("b.zig"); - \\ - \\export fn entry() void { - \\ var a1: a.Foo = undefined; - \\ bar(&a1); - \\} - \\ - \\fn bar(x: *b.Foo) void {_ = x;} - , &[_][]const u8{ - "tmp.zig:6:10: error: expected type '*b.Foo', found '*a.Foo'", - "tmp.zig:6:10: note: pointer type child 'a.Foo' cannot cast into pointer type child 'b.Foo'", - "a.zig:1:17: note: a.Foo declared here", - "b.zig:1:17: note: b.Foo declared here", - }); - } - - { - const case = ctx.obj("multiple files with private function error", .{}); - case.backend = .stage1; - - case.addSourceFile("foo.zig", - \\fn privateFunction() void { } - ); - - case.addError( - \\const foo = @import("foo.zig",); - \\ - \\export fn callPrivFunction() void { - \\ foo.privateFunction(); - \\} - , &[_][]const u8{ - "tmp.zig:4:8: error: 'privateFunction' is private", - "foo.zig:1:1: note: declared here", - }); - } - - { - const case = ctx.obj("multiple files with private member instance function (canonical invocation) error", .{}); - case.backend = .stage1; - - case.addSourceFile("foo.zig", - \\pub const Foo = struct { - \\ fn privateFunction(self: *Foo) void { _ = self; } - \\}; - ); - - case.addError( - \\const Foo = @import("foo.zig",).Foo; - \\ - \\export fn callPrivFunction() void { - \\ var foo = Foo{}; - \\ Foo.privateFunction(foo); - \\} - , &[_][]const u8{ - "tmp.zig:5:8: error: 'privateFunction' is private", - "foo.zig:2:5: note: declared here", - }); - } - - { - const case = ctx.obj("multiple files with private member instance function error", .{}); - case.backend = .stage1; - - case.addSourceFile("foo.zig", - \\pub const Foo = struct { - \\ fn privateFunction(self: *Foo) void { _ = self; } - \\}; - ); - - case.addError( - \\const Foo = @import("foo.zig",).Foo; - \\ - \\export fn callPrivFunction() void { - \\ var foo = Foo{}; - \\ foo.privateFunction(); - \\} - , &[_][]const u8{ - "tmp.zig:5:8: error: 'privateFunction' is private", - "foo.zig:2:5: note: declared here", - }); - } - - { - const case = ctx.obj("export collision", .{}); - case.backend = .stage1; - - case.addSourceFile("foo.zig", - \\export fn bar() void {} - \\pub const baz = 1234; - ); - - case.addError( - \\const foo = @import("foo.zig",); - \\ - \\export fn bar() usize { - \\ return foo.baz; - \\} - , &[_][]const u8{ - "foo.zig:1:1: error: exported symbol collision: 'bar'", - "tmp.zig:3:1: note: other symbol here", - }); - } - - ctx.objErrStage1("non-printable invalid character", "\xff\xfe" ++ - "fn foo() bool {\r\n" ++ - " return true;\r\n" ++ - "}\r\n", &[_][]const u8{ - "tmp.zig:1:1: error: expected test, comptime, var decl, or 
container field, found 'invalid bytes'", - "tmp.zig:1:1: note: invalid byte: '\\xff'", - }); - - ctx.objErrStage1("non-printable invalid character with escape alternative", "fn foo() bool {\n" ++ - "\treturn true;\n" ++ - "}\n", &[_][]const u8{ - "tmp.zig:2:1: error: invalid character: '\\t'", - }); +const Cases = @import("src/Cases.zig"); +pub fn addCases(ctx: *Cases) !void { { const case = ctx.obj("multiline error messages", .{}); - case.backend = .stage2; case.addError( \\comptime { @@ -176,7 +40,6 @@ pub fn addCases(ctx: *TestContext) !void { { const case = ctx.obj("isolated carriage return in multiline string literal", .{}); - case.backend = .stage2; case.addError("const foo = \\\\\test\r\r rogue carriage return\n;", &[_][]const u8{ ":1:19: error: expected ';' after declaration", @@ -195,16 +58,6 @@ pub fn addCases(ctx: *TestContext) !void { { const case = ctx.obj("argument causes error", .{}); - case.backend = .stage2; - - case.addSourceFile("b.zig", - \\pub const ElfDynLib = struct { - \\ pub fn lookup(self: *ElfDynLib, comptime T: type) ?T { - \\ _ = self; - \\ return undefined; - \\ } - \\}; - ); case.addError( \\pub export fn entry() void { @@ -216,15 +69,18 @@ pub fn addCases(ctx: *TestContext) !void { ":3:12: note: argument to function being called at comptime must be comptime-known", ":2:55: note: expression is evaluated at comptime because the generic function was instantiated with a comptime-only return type", }); + case.addSourceFile("b.zig", + \\pub const ElfDynLib = struct { + \\ pub fn lookup(self: *ElfDynLib, comptime T: type) ?T { + \\ _ = self; + \\ return undefined; + \\ } + \\}; + ); } { const case = ctx.obj("astgen failure in file struct", .{}); - case.backend = .stage2; - - case.addSourceFile("b.zig", - \\+ - ); case.addError( \\pub export fn entry() void { @@ -233,21 +89,13 @@ pub fn addCases(ctx: *TestContext) !void { , &[_][]const u8{ ":1:1: error: expected type expression, found '+'", }); + case.addSourceFile("b.zig", + \\+ + ); } { const case = ctx.obj("invalid store to comptime field", .{}); - case.backend = .stage2; - - case.addSourceFile("a.zig", - \\pub const S = struct { - \\ comptime foo: u32 = 1, - \\ bar: u32, - \\ pub fn foo(x: @This()) void { - \\ _ = x; - \\ } - \\}; - ); case.addError( \\const a = @import("a.zig"); @@ -259,44 +107,19 @@ pub fn addCases(ctx: *TestContext) !void { ":4:23: error: value stored in comptime field does not match the default value of the field", ":2:25: note: default value set here", }); + case.addSourceFile("a.zig", + \\pub const S = struct { + \\ comptime foo: u32 = 1, + \\ bar: u32, + \\ pub fn foo(x: @This()) void { + \\ _ = x; + \\ } + \\}; + ); } - // TODO test this in stage2, but we won't even try in stage1 - //ctx.objErrStage1("inline fn calls itself indirectly", - // \\export fn foo() void { - // \\ bar(); - // \\} - // \\fn bar() callconv(.Inline) void { - // \\ baz(); - // \\ quux(); - // \\} - // \\fn baz() callconv(.Inline) void { - // \\ bar(); - // \\ quux(); - // \\} - // \\extern fn quux() void; - //, &[_][]const u8{ - // "tmp.zig:4:1: error: unable to inline function", - //}); - - //ctx.objErrStage1("save reference to inline function", - // \\export fn foo() void { - // \\ quux(@ptrToInt(bar)); - // \\} - // \\fn bar() callconv(.Inline) void { } - // \\extern fn quux(usize) void; - //, &[_][]const u8{ - // "tmp.zig:4:1: error: unable to inline function", - //}); - { const case = ctx.obj("file in multiple modules", .{}); - case.backend = .stage2; - - case.addSourceFile("foo.zig", - \\const dummy = 0; - ); 
- case.addDepModule("foo", "foo.zig"); case.addError( @@ -309,5 +132,8 @@ pub fn addCases(ctx: *TestContext) !void { ":1:1: note: root of module root.foo", ":3:17: note: imported from module root", }); + case.addSourceFile("foo.zig", + \\const dummy = 0; + ); } } diff --git a/test/link.zig b/test/link.zig index c787e8b1aedd..aa0ed4817e33 100644 --- a/test/link.zig +++ b/test/link.zig @@ -1,213 +1,172 @@ -const std = @import("std"); -const builtin = @import("builtin"); -const tests = @import("tests.zig"); - -pub fn addCases(cases: *tests.StandaloneContext) void { - cases.addBuildFile("test/link/bss/build.zig", .{ - .build_modes = false, // we only guarantee zerofill for undefined in Debug - }); - - cases.addBuildFile("test/link/common_symbols/build.zig", .{ - .build_modes = true, - }); - - cases.addBuildFile("test/link/common_symbols_alignment/build.zig", .{ - .build_modes = true, - }); - - cases.addBuildFile("test/link/interdependent_static_c_libs/build.zig", .{ - .build_modes = true, - }); - - cases.addBuildFile("test/link/static_lib_as_system_lib/build.zig", .{ - .build_modes = true, - }); - - addWasmCases(cases); - addMachOCases(cases); -} - -fn addWasmCases(cases: *tests.StandaloneContext) void { - cases.addBuildFile("test/link/wasm/archive/build.zig", .{ - .build_modes = true, - .requires_stage2 = true, - }); - - cases.addBuildFile("test/link/wasm/basic-features/build.zig", .{ - .requires_stage2 = true, - }); - - cases.addBuildFile("test/link/wasm/bss/build.zig", .{ - .build_modes = false, - .requires_stage2 = true, - }); - - cases.addBuildFile("test/link/wasm/export/build.zig", .{ - .build_modes = true, - .requires_stage2 = true, - }); - - // TODO: Fix open handle in wasm-linker refraining rename from working on Windows. - if (builtin.os.tag != .windows) { - cases.addBuildFile("test/link/wasm/export-data/build.zig", .{}); - } - - cases.addBuildFile("test/link/wasm/extern/build.zig", .{ - .build_modes = true, - .requires_stage2 = true, - .use_emulation = true, - }); - - cases.addBuildFile("test/link/wasm/extern-mangle/build.zig", .{ - .build_modes = true, - .requires_stage2 = true, - }); - - cases.addBuildFile("test/link/wasm/function-table/build.zig", .{ - .build_modes = true, - .requires_stage2 = true, - }); - - cases.addBuildFile("test/link/wasm/infer-features/build.zig", .{ - .requires_stage2 = true, - }); - - cases.addBuildFile("test/link/wasm/producers/build.zig", .{ - .build_modes = true, - .requires_stage2 = true, - }); - - cases.addBuildFile("test/link/wasm/segments/build.zig", .{ - .build_modes = true, - .requires_stage2 = true, - }); - - cases.addBuildFile("test/link/wasm/stack_pointer/build.zig", .{ - .build_modes = true, - .requires_stage2 = true, - }); - - cases.addBuildFile("test/link/wasm/type/build.zig", .{ - .build_modes = true, - .requires_stage2 = true, - }); -} - -fn addMachOCases(cases: *tests.StandaloneContext) void { - cases.addBuildFile("test/link/macho/bugs/13056/build.zig", .{ - .build_modes = true, - .requires_macos_sdk = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/bugs/13457/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/dead_strip/build.zig", .{ - .build_modes = false, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/dead_strip_dylibs/build.zig", .{ - .build_modes = true, - .requires_macos_sdk = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/dylib/build.zig", .{ - .build_modes = true, - 
.requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/empty/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/entry/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/headerpad/build.zig", .{ - .build_modes = true, - .requires_macos_sdk = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/linksection/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/needed_framework/build.zig", .{ - .build_modes = true, - .requires_macos_sdk = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/needed_library/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/objc/build.zig", .{ - .build_modes = true, - .requires_macos_sdk = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/objcpp/build.zig", .{ - .build_modes = true, - .requires_macos_sdk = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/pagezero/build.zig", .{ - .build_modes = false, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/search_strategy/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/stack_size/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/strict_validation/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/tls/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/unwind_info/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/uuid/build.zig", .{ - .build_modes = false, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/weak_library/build.zig", .{ - .build_modes = true, - .requires_symlinks = true, - }); - - cases.addBuildFile("test/link/macho/weak_framework/build.zig", .{ - .build_modes = true, - .requires_macos_sdk = true, - .requires_symlinks = true, - }); -} +pub const Case = struct { + build_root: []const u8, + import: type, +}; + +pub const cases = [_]Case{ + .{ + .build_root = "test/link/bss", + .import = @import("link/bss/build.zig"), + }, + .{ + .build_root = "test/link/common_symbols", + .import = @import("link/common_symbols/build.zig"), + }, + .{ + .build_root = "test/link/common_symbols_alignment", + .import = @import("link/common_symbols_alignment/build.zig"), + }, + .{ + .build_root = "test/link/interdependent_static_c_libs", + .import = @import("link/interdependent_static_c_libs/build.zig"), + }, + + // WASM Cases + .{ + .build_root = "test/link/wasm/archive", + .import = @import("link/wasm/archive/build.zig"), + }, + .{ + .build_root = "test/link/wasm/basic-features", + .import = @import("link/wasm/basic-features/build.zig"), + }, + .{ + .build_root = "test/link/wasm/bss", + .import = @import("link/wasm/bss/build.zig"), + }, + .{ + .build_root = "test/link/wasm/export", + .import = @import("link/wasm/export/build.zig"), + }, + .{ + .build_root = "test/link/wasm/export-data", + .import = @import("link/wasm/export-data/build.zig"), + }, + .{ + .build_root = "test/link/wasm/extern", + .import = @import("link/wasm/extern/build.zig"), + }, + .{ + .build_root = 
"test/link/wasm/extern-mangle", + .import = @import("link/wasm/extern-mangle/build.zig"), + }, + .{ + .build_root = "test/link/wasm/function-table", + .import = @import("link/wasm/function-table/build.zig"), + }, + .{ + .build_root = "test/link/wasm/infer-features", + .import = @import("link/wasm/infer-features/build.zig"), + }, + .{ + .build_root = "test/link/wasm/producers", + .import = @import("link/wasm/producers/build.zig"), + }, + .{ + .build_root = "test/link/wasm/segments", + .import = @import("link/wasm/segments/build.zig"), + }, + .{ + .build_root = "test/link/wasm/stack_pointer", + .import = @import("link/wasm/stack_pointer/build.zig"), + }, + .{ + .build_root = "test/link/wasm/type", + .import = @import("link/wasm/type/build.zig"), + }, + + // Mach-O Cases + .{ + .build_root = "test/link/macho/bugs/13056", + .import = @import("link/macho/bugs/13056/build.zig"), + }, + .{ + .build_root = "test/link/macho/bugs/13457", + .import = @import("link/macho/bugs/13457/build.zig"), + }, + .{ + .build_root = "test/link/macho/dead_strip", + .import = @import("link/macho/dead_strip/build.zig"), + }, + .{ + .build_root = "test/link/macho/dead_strip_dylibs", + .import = @import("link/macho/dead_strip_dylibs/build.zig"), + }, + .{ + .build_root = "test/link/macho/dylib", + .import = @import("link/macho/dylib/build.zig"), + }, + .{ + .build_root = "test/link/macho/empty", + .import = @import("link/macho/empty/build.zig"), + }, + .{ + .build_root = "test/link/macho/entry", + .import = @import("link/macho/entry/build.zig"), + }, + .{ + .build_root = "test/link/macho/headerpad", + .import = @import("link/macho/headerpad/build.zig"), + }, + .{ + .build_root = "test/link/macho/linksection", + .import = @import("link/macho/linksection/build.zig"), + }, + .{ + .build_root = "test/link/macho/needed_framework", + .import = @import("link/macho/needed_framework/build.zig"), + }, + .{ + .build_root = "test/link/macho/needed_library", + .import = @import("link/macho/needed_library/build.zig"), + }, + .{ + .build_root = "test/link/macho/objc", + .import = @import("link/macho/objc/build.zig"), + }, + .{ + .build_root = "test/link/macho/objcpp", + .import = @import("link/macho/objcpp/build.zig"), + }, + .{ + .build_root = "test/link/macho/pagezero", + .import = @import("link/macho/pagezero/build.zig"), + }, + .{ + .build_root = "test/link/macho/search_strategy", + .import = @import("link/macho/search_strategy/build.zig"), + }, + .{ + .build_root = "test/link/macho/stack_size", + .import = @import("link/macho/stack_size/build.zig"), + }, + .{ + .build_root = "test/link/macho/strict_validation", + .import = @import("link/macho/strict_validation/build.zig"), + }, + .{ + .build_root = "test/link/macho/tls", + .import = @import("link/macho/tls/build.zig"), + }, + .{ + .build_root = "test/link/macho/unwind_info", + .import = @import("link/macho/unwind_info/build.zig"), + }, + // TODO: re-enable this test. It currently has some incompatibilities with + // the new build system API. In particular, it depends on installing the build + // artifacts, which should be unnecessary, and it has a custom build step that + // prints directly to stderr instead of failing the step with an error message. 
+ //.{ + // .build_root = "test/link/macho/uuid", + // .import = @import("link/macho/uuid/build.zig"), + //}, + + .{ + .build_root = "test/link/macho/weak_library", + .import = @import("link/macho/weak_library/build.zig"), + }, + .{ + .build_root = "test/link/macho/weak_framework", + .import = @import("link/macho/weak_framework/build.zig"), + }, +}; diff --git a/test/link/bss/build.zig b/test/link/bss/build.zig index 0df9c1d323ea..86600b58f51c 100644 --- a/test/link/bss/build.zig +++ b/test/link/bss/build.zig @@ -1,17 +1,17 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); const test_step = b.step("test", "Test"); + b.default_step = test_step; const exe = b.addExecutable(.{ .name = "bss", .root_source_file = .{ .path = "main.zig" }, - .optimize = optimize, + .optimize = .Debug, }); - b.default_step.dependOn(&exe.step); const run = exe.run(); run.expectStdOutEqual("0, 1, 0\n"); + test_step.dependOn(&run.step); } diff --git a/test/link/bss/main.zig b/test/link/bss/main.zig index c901f0bb2736..d2ecffe9822b 100644 --- a/test/link/bss/main.zig +++ b/test/link/bss/main.zig @@ -1,7 +1,7 @@ const std = @import("std"); // Stress test zerofill layout -var buffer: [0x1000000]u64 = undefined; +var buffer: [0x1000000]u64 = [1]u64{0} ** 0x1000000; pub fn main() anyerror!void { buffer[0x10] = 1; diff --git a/test/link/common_symbols/build.zig b/test/link/common_symbols/build.zig index ee9dd94ebd12..a8c276d1f396 100644 --- a/test/link/common_symbols/build.zig +++ b/test/link/common_symbols/build.zig @@ -1,8 +1,16 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const lib_a = b.addStaticLibrary(.{ .name = "a", .optimize = optimize, @@ -16,6 +24,5 @@ pub fn build(b: *std.Build) void { }); test_exe.linkLibrary(lib_a); - const test_step = b.step("test", "Test it"); - test_step.dependOn(&test_exe.step); + test_step.dependOn(&test_exe.run().step); } diff --git a/test/link/common_symbols_alignment/build.zig b/test/link/common_symbols_alignment/build.zig index f6efdc784b15..83548f2d8a42 100644 --- a/test/link/common_symbols_alignment/build.zig +++ b/test/link/common_symbols_alignment/build.zig @@ -1,23 +1,28 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = b.standardTargetOptions(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const lib_a = b.addStaticLibrary(.{ .name = "a", .optimize = optimize, - .target = target, + .target = .{}, }); lib_a.addCSourceFiles(&.{"a.c"}, &.{"-fcommon"}); const test_exe = b.addTest(.{ .root_source_file = .{ .path = "main.zig" }, .optimize = optimize, - .target = target, }); test_exe.linkLibrary(lib_a); - const test_step = b.step("test", "Test it"); - test_step.dependOn(&test_exe.step); + test_step.dependOn(&test_exe.run().step); } diff --git a/test/link/interdependent_static_c_libs/build.zig 
b/test/link/interdependent_static_c_libs/build.zig index d8962a8e0842..0d06410a797b 100644 --- a/test/link/interdependent_static_c_libs/build.zig +++ b/test/link/interdependent_static_c_libs/build.zig @@ -1,13 +1,20 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = b.standardTargetOptions(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const lib_a = b.addStaticLibrary(.{ .name = "a", .optimize = optimize, - .target = target, + .target = .{}, }); lib_a.addCSourceFile("a.c", &[_][]const u8{}); lib_a.addIncludePath("."); @@ -15,7 +22,7 @@ pub fn build(b: *std.Build) void { const lib_b = b.addStaticLibrary(.{ .name = "b", .optimize = optimize, - .target = target, + .target = .{}, }); lib_b.addCSourceFile("b.c", &[_][]const u8{}); lib_b.addIncludePath("."); @@ -23,12 +30,10 @@ pub fn build(b: *std.Build) void { const test_exe = b.addTest(.{ .root_source_file = .{ .path = "main.zig" }, .optimize = optimize, - .target = target, }); test_exe.linkLibrary(lib_a); test_exe.linkLibrary(lib_b); test_exe.addIncludePath("."); - const test_step = b.step("test", "Test it"); - test_step.dependOn(&test_exe.step); + test_step.dependOn(&test_exe.run().step); } diff --git a/test/link/macho/bugs/13056/build.zig b/test/link/macho/bugs/13056/build.zig index 662fd25c92ba..db7c129cbfd2 100644 --- a/test/link/macho/bugs/13056/build.zig +++ b/test/link/macho/bugs/13056/build.zig @@ -1,20 +1,28 @@ const std = @import("std"); +pub const requires_macos_sdk = true; +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const target: std.zig.CrossTarget = .{ .os_tag = .macos }; const target_info = std.zig.system.NativeTargetInfo.detect(target) catch unreachable; const sdk = std.zig.system.darwin.getDarwinSDK(b.allocator, target_info.target) orelse @panic("macOS SDK is required to run the test"); - const test_step = b.step("test", "Test the program"); - const exe = b.addExecutable(.{ .name = "test", .optimize = optimize, }); - b.default_step.dependOn(&exe.step); exe.addIncludePath(std.fs.path.join(b.allocator, &.{ sdk.path, "/usr/include" }) catch unreachable); exe.addIncludePath(std.fs.path.join(b.allocator, &.{ sdk.path, "/usr/include/c++/v1" }) catch unreachable); exe.addCSourceFile("test.cpp", &.{ diff --git a/test/link/macho/bugs/13457/build.zig b/test/link/macho/bugs/13457/build.zig index 3560b4a168c9..89096bba387d 100644 --- a/test/link/macho/bugs/13457/build.zig +++ b/test/link/macho/bugs/13457/build.zig @@ -1,10 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target: std.zig.CrossTarget = .{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, 
.ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} - const test_step = b.step("test", "Test the program"); +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target: std.zig.CrossTarget = .{ .os_tag = .macos }; const exe = b.addExecutable(.{ .name = "test", @@ -13,6 +22,9 @@ pub fn build(b: *std.Build) void { .target = target, }); - const run = exe.runEmulatable(); + const run = b.addRunArtifact(exe); + run.skip_foreign_checks = true; + run.expectStdOutEqual(""); + test_step.dependOn(&run.step); } diff --git a/test/link/macho/dead_strip/build.zig b/test/link/macho/dead_strip/build.zig index d82c81edca42..4c739b3d8c6c 100644 --- a/test/link/macho/dead_strip/build.zig +++ b/test/link/macho/dead_strip/build.zig @@ -1,17 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const optimize: std.builtin.OptimizeMode = .Debug; const target: std.zig.CrossTarget = .{ .os_tag = .macos }; const test_step = b.step("test", "Test the program"); - test_step.dependOn(b.getInstallStep()); + b.default_step = test_step; { // Without -dead_strip, we expect `iAmUnused` symbol present - const exe = createScenario(b, optimize, target); + const exe = createScenario(b, optimize, target, "no-gc"); - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkInSymtab(); check.checkNext("{*} (__TEXT,__text) external _iAmUnused"); @@ -22,10 +24,10 @@ pub fn build(b: *std.Build) void { { // With -dead_strip, no `iAmUnused` symbol should be present - const exe = createScenario(b, optimize, target); + const exe = createScenario(b, optimize, target, "yes-gc"); exe.link_gc_sections = true; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkInSymtab(); check.checkNotPresent("{*} (__TEXT,__text) external _iAmUnused"); @@ -39,9 +41,10 @@ fn createScenario( b: *std.Build, optimize: std.builtin.OptimizeMode, target: std.zig.CrossTarget, + name: []const u8, ) *std.Build.CompileStep { const exe = b.addExecutable(.{ - .name = "test", + .name = name, .optimize = optimize, .target = target, }); diff --git a/test/link/macho/dead_strip_dylibs/build.zig b/test/link/macho/dead_strip_dylibs/build.zig index af2f5cf0dcb4..b9b97949c13e 100644 --- a/test/link/macho/dead_strip_dylibs/build.zig +++ b/test/link/macho/dead_strip_dylibs/build.zig @@ -1,16 +1,24 @@ const std = @import("std"); +pub const requires_macos_sdk = true; +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test the program"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { { // Without -dead_strip_dylibs we expect `-la` to include liba.dylib in the final executable - const exe = createScenario(b, optimize); + const exe = createScenario(b, optimize, "no-dead-strip"); - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("cmd LOAD_DYLIB"); check.checkNext("name {*}Cocoa"); @@ -25,18 +33,22 @@ pub fn build(b: *std.Build) void { { // With -dead_strip_dylibs, we should include liba.dylib as it's unreachable - const 
exe = createScenario(b, optimize); + const exe = createScenario(b, optimize, "yes-dead-strip"); exe.dead_strip_dylibs = true; - const run_cmd = exe.run(); - run_cmd.expected_term = .{ .Exited = @bitCast(u8, @as(i8, -2)) }; // should fail + const run_cmd = b.addRunArtifact(exe); + run_cmd.expectExitCode(@bitCast(u8, @as(i8, -2))); // should fail test_step.dependOn(&run_cmd.step); } } -fn createScenario(b: *std.Build, optimize: std.builtin.OptimizeMode) *std.Build.CompileStep { +fn createScenario( + b: *std.Build, + optimize: std.builtin.OptimizeMode, + name: []const u8, +) *std.Build.CompileStep { const exe = b.addExecutable(.{ - .name = "test", + .name = name, .optimize = optimize, }); exe.addCSourceFile("main.c", &[0][]const u8{}); diff --git a/test/link/macho/dylib/build.zig b/test/link/macho/dylib/build.zig index 7a1e2d862c7a..2d775aa23fbe 100644 --- a/test/link/macho/dylib/build.zig +++ b/test/link/macho/dylib/build.zig @@ -1,11 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target: std.zig.CrossTarget = .{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target: std.zig.CrossTarget = .{ .os_tag = .macos }; const dylib = b.addSharedLibrary(.{ .name = "a", @@ -15,9 +23,8 @@ pub fn build(b: *std.Build) void { }); dylib.addCSourceFile("a.c", &.{}); dylib.linkLibC(); - dylib.install(); - const check_dylib = dylib.checkObject(.macho); + const check_dylib = dylib.checkObject(); check_dylib.checkStart("cmd ID_DYLIB"); check_dylib.checkNext("name @rpath/liba.dylib"); check_dylib.checkNext("timestamp 2"); @@ -33,11 +40,11 @@ pub fn build(b: *std.Build) void { }); exe.addCSourceFile("main.c", &.{}); exe.linkSystemLibrary("a"); + exe.addLibraryPathDirectorySource(dylib.getOutputDirectorySource()); + exe.addRPathDirectorySource(dylib.getOutputDirectorySource()); exe.linkLibC(); - exe.addLibraryPath(b.pathFromRoot("zig-out/lib/")); - exe.addRPath(b.pathFromRoot("zig-out/lib")); - const check_exe = exe.checkObject(.macho); + const check_exe = exe.checkObject(); check_exe.checkStart("cmd LOAD_DYLIB"); check_exe.checkNext("name @rpath/liba.dylib"); check_exe.checkNext("timestamp 2"); @@ -45,10 +52,12 @@ pub fn build(b: *std.Build) void { check_exe.checkNext("compatibility version 10000"); check_exe.checkStart("cmd RPATH"); - check_exe.checkNext(std.fmt.allocPrint(b.allocator, "path {s}", .{b.pathFromRoot("zig-out/lib")}) catch unreachable); + // TODO check this (perhaps with `checkNextFileSource(dylib.getOutputDirectorySource())`) + //check_exe.checkNext(std.fmt.allocPrint(b.allocator, "path {s}", .{ + // b.pathFromRoot("zig-out/lib"), + //}) catch unreachable); const run = check_exe.runAndCompare(); - run.cwd = b.pathFromRoot("."); run.expectStdOutEqual("Hello world"); test_step.dependOn(&run.step); } diff --git a/test/link/macho/empty/build.zig b/test/link/macho/empty/build.zig index 586da1511b0c..9933746d533a 100644 --- a/test/link/macho/empty/build.zig +++ b/test/link/macho/empty/build.zig @@ -1,11 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - 
const optimize = b.standardOptimizeOption(.{}); - const target: std.zig.CrossTarget = .{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test the program"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target: std.zig.CrossTarget = .{ .os_tag = .macos }; const exe = b.addExecutable(.{ .name = "test", @@ -16,7 +24,8 @@ pub fn build(b: *std.Build) void { exe.addCSourceFile("empty.c", &[0][]const u8{}); exe.linkLibC(); - const run_cmd = std.Build.EmulatableRunStep.create(b, "run", exe); + const run_cmd = b.addRunArtifact(exe); + run_cmd.skip_foreign_checks = true; run_cmd.expectStdOutEqual("Hello!\n"); test_step.dependOn(&run_cmd.step); } diff --git a/test/link/macho/entry/build.zig b/test/link/macho/entry/build.zig index 4504da9c6ca1..e983bc9391cf 100644 --- a/test/link/macho/entry/build.zig +++ b/test/link/macho/entry/build.zig @@ -1,11 +1,18 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const exe = b.addExecutable(.{ .name = "main", .optimize = optimize, @@ -15,7 +22,7 @@ pub fn build(b: *std.Build) void { exe.linkLibC(); exe.entry_symbol_name = "_non_main"; - const check_exe = exe.checkObject(.macho); + const check_exe = exe.checkObject(); check_exe.checkStart("segname __TEXT"); check_exe.checkNext("vmaddr {vmaddr}"); diff --git a/test/link/macho/headerpad/build.zig b/test/link/macho/headerpad/build.zig index 3ef17573f881..0c9275b8d8d8 100644 --- a/test/link/macho/headerpad/build.zig +++ b/test/link/macho/headerpad/build.zig @@ -1,18 +1,26 @@ const std = @import("std"); const builtin = @import("builtin"); +pub const requires_symlinks = true; +pub const requires_macos_sdk = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { { // Test -headerpad_max_install_names - const exe = simpleExe(b, optimize); + const exe = simpleExe(b, optimize, "headerpad_max_install_names"); exe.headerpad_max_install_names = true; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("sectname __text"); check.checkNext("offset {offset}"); @@ -34,10 +42,10 @@ pub fn build(b: *std.Build) void { { // Test -headerpad - const exe = simpleExe(b, optimize); + const exe = simpleExe(b, optimize, "headerpad"); exe.headerpad_size = 0x10000; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("sectname 
__text"); check.checkNext("offset {offset}"); check.checkComputeCompare("offset", .{ .op = .gte, .value = .{ .literal = 0x10000 } }); @@ -50,11 +58,11 @@ pub fn build(b: *std.Build) void { { // Test both flags with -headerpad overriding -headerpad_max_install_names - const exe = simpleExe(b, optimize); + const exe = simpleExe(b, optimize, "headerpad_overriding"); exe.headerpad_max_install_names = true; exe.headerpad_size = 0x10000; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("sectname __text"); check.checkNext("offset {offset}"); check.checkComputeCompare("offset", .{ .op = .gte, .value = .{ .literal = 0x10000 } }); @@ -67,11 +75,11 @@ pub fn build(b: *std.Build) void { { // Test both flags with -headerpad_max_install_names overriding -headerpad - const exe = simpleExe(b, optimize); + const exe = simpleExe(b, optimize, "headerpad_max_install_names_overriding"); exe.headerpad_size = 0x1000; exe.headerpad_max_install_names = true; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("sectname __text"); check.checkNext("offset {offset}"); @@ -92,9 +100,13 @@ pub fn build(b: *std.Build) void { } } -fn simpleExe(b: *std.Build, optimize: std.builtin.OptimizeMode) *std.Build.CompileStep { +fn simpleExe( + b: *std.Build, + optimize: std.builtin.OptimizeMode, + name: []const u8, +) *std.Build.CompileStep { const exe = b.addExecutable(.{ - .name = "main", + .name = name, .optimize = optimize, }); exe.addCSourceFile("main.c", &.{}); diff --git a/test/link/macho/linksection/build.zig b/test/link/macho/linksection/build.zig index 227d4eeb63f1..b8b3a59f35ba 100644 --- a/test/link/macho/linksection/build.zig +++ b/test/link/macho/linksection/build.zig @@ -1,11 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = std.zig.CrossTarget{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target = std.zig.CrossTarget{ .os_tag = .macos }; const obj = b.addObject(.{ .name = "test", @@ -14,7 +22,7 @@ pub fn build(b: *std.Build) void { .target = target, }); - const check = obj.checkObject(.macho); + const check = obj.checkObject(); check.checkInSymtab(); check.checkNext("{*} (__DATA,__TestGlobal) external _test_global"); diff --git a/test/link/macho/needed_framework/build.zig b/test/link/macho/needed_framework/build.zig index 8b6e3dd87f6f..3de96efbc73c 100644 --- a/test/link/macho/needed_framework/build.zig +++ b/test/link/macho/needed_framework/build.zig @@ -1,11 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; +pub const requires_macos_sdk = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test the program"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: 
std.builtin.OptimizeMode) void { // -dead_strip_dylibs // -needed_framework Cocoa const exe = b.addExecutable(.{ @@ -17,7 +25,7 @@ pub fn build(b: *std.Build) void { exe.linkFrameworkNeeded("Cocoa"); exe.dead_strip_dylibs = true; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("cmd LOAD_DYLIB"); check.checkNext("name {*}Cocoa"); test_step.dependOn(&check.step); diff --git a/test/link/macho/needed_library/build.zig b/test/link/macho/needed_library/build.zig index 92a73d22b795..cb9ea38d4bcc 100644 --- a/test/link/macho/needed_library/build.zig +++ b/test/link/macho/needed_library/build.zig @@ -1,11 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target: std.zig.CrossTarget = .{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test the program"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target: std.zig.CrossTarget = .{ .os_tag = .macos }; const dylib = b.addSharedLibrary(.{ .name = "a", @@ -15,7 +23,6 @@ pub fn build(b: *std.Build) void { }); dylib.addCSourceFile("a.c", &.{}); dylib.linkLibC(); - dylib.install(); // -dead_strip_dylibs // -needed-la @@ -27,14 +34,15 @@ pub fn build(b: *std.Build) void { exe.addCSourceFile("main.c", &[0][]const u8{}); exe.linkLibC(); exe.linkSystemLibraryNeeded("a"); - exe.addLibraryPath(b.pathFromRoot("zig-out/lib")); - exe.addRPath(b.pathFromRoot("zig-out/lib")); + exe.addLibraryPathDirectorySource(dylib.getOutputDirectorySource()); + exe.addRPathDirectorySource(dylib.getOutputDirectorySource()); exe.dead_strip_dylibs = true; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("cmd LOAD_DYLIB"); check.checkNext("name @rpath/liba.dylib"); const run_cmd = check.runAndCompare(); + run_cmd.expectStdOutEqual(""); test_step.dependOn(&run_cmd.step); } diff --git a/test/link/macho/objc/build.zig b/test/link/macho/objc/build.zig index 10d293baab72..4cd12f786fe1 100644 --- a/test/link/macho/objc/build.zig +++ b/test/link/macho/objc/build.zig @@ -1,10 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; +pub const requires_macos_sdk = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test the program"); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const exe = b.addExecutable(.{ .name = "test", .optimize = optimize, @@ -17,6 +26,8 @@ pub fn build(b: *std.Build) void { // populate paths to the sysroot here. 
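// Aside: a minimal sketch (not part of the patch) of the per-optimize-mode pattern these test
// build scripts are being migrated to. The "test" step becomes the default step and a private
// add() helper is invoked once per OptimizeMode, replacing b.standardOptimizeOption(). The
// artifact name and sources below are placeholders.
const std = @import("std");

pub fn build(b: *std.Build) void {
    const test_step = b.step("test", "Test it");
    b.default_step = test_step;

    add(b, test_step, .Debug);
    add(b, test_step, .ReleaseFast);
    add(b, test_step, .ReleaseSmall);
    add(b, test_step, .ReleaseSafe);
}

fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void {
    const exe = b.addExecutable(.{
        .name = "example", // placeholder
        .optimize = optimize,
    });
    exe.addCSourceFile("main.c", &.{});
    exe.linkLibC();

    const run = b.addRunArtifact(exe);
    run.skip_foreign_checks = true; // skip rather than fail when the host cannot run the target
    run.expectStdOutEqual("Hello world");
    test_step.dependOn(&run.step);
}
// One "test" step then covers all four optimize modes, which is why the old
// standardOptimizeOption() call disappears from each of these scripts.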
exe.linkFramework("Foundation"); - const run_cmd = std.Build.EmulatableRunStep.create(b, "run", exe); + const run_cmd = b.addRunArtifact(exe); + run_cmd.skip_foreign_checks = true; + run_cmd.expectStdOutEqual(""); test_step.dependOn(&run_cmd.step); } diff --git a/test/link/macho/objcpp/build.zig b/test/link/macho/objcpp/build.zig index 2a3459be5051..06876247a9e9 100644 --- a/test/link/macho/objcpp/build.zig +++ b/test/link/macho/objcpp/build.zig @@ -1,10 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; +pub const requires_macos_sdk = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test the program"); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const exe = b.addExecutable(.{ .name = "test", .optimize = optimize, diff --git a/test/link/macho/pagezero/build.zig b/test/link/macho/pagezero/build.zig index 0a8471b919f4..b467df2b20b7 100644 --- a/test/link/macho/pagezero/build.zig +++ b/test/link/macho/pagezero/build.zig @@ -1,11 +1,13 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target: std.zig.CrossTarget = .{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{ .os_tag = .macos }; { const exe = b.addExecutable(.{ @@ -17,7 +19,7 @@ pub fn build(b: *std.Build) void { exe.linkLibC(); exe.pagezero_size = 0x4000; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("LC 0"); check.checkNext("segname __PAGEZERO"); check.checkNext("vmaddr 0"); @@ -39,7 +41,7 @@ pub fn build(b: *std.Build) void { exe.linkLibC(); exe.pagezero_size = 0; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("LC 0"); check.checkNext("segname __TEXT"); check.checkNext("vmaddr 0"); diff --git a/test/link/macho/search_strategy/build.zig b/test/link/macho/search_strategy/build.zig index 62757f885bd1..4777629c8b9a 100644 --- a/test/link/macho/search_strategy/build.zig +++ b/test/link/macho/search_strategy/build.zig @@ -1,34 +1,41 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target: std.zig.CrossTarget = .{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target: std.zig.CrossTarget = .{ .os_tag = .macos }; { // -search_dylibs_first - const exe = createScenario(b, optimize, target); + const exe = createScenario(b, optimize, target, "search_dylibs_first"); exe.search_strategy = .dylibs_first; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); 
check.checkStart("cmd LOAD_DYLIB"); - check.checkNext("name @rpath/liba.dylib"); + check.checkNext("name @rpath/libsearch_dylibs_first.dylib"); const run = check.runAndCompare(); - run.cwd = b.pathFromRoot("."); run.expectStdOutEqual("Hello world"); test_step.dependOn(&run.step); } { // -search_paths_first - const exe = createScenario(b, optimize, target); + const exe = createScenario(b, optimize, target, "search_paths_first"); exe.search_strategy = .paths_first; - const run = std.Build.EmulatableRunStep.create(b, "run", exe); - run.cwd = b.pathFromRoot("."); + const run = b.addRunArtifact(exe); + run.skip_foreign_checks = true; run.expectStdOutEqual("Hello world"); test_step.dependOn(&run.step); } @@ -38,9 +45,10 @@ fn createScenario( b: *std.Build, optimize: std.builtin.OptimizeMode, target: std.zig.CrossTarget, + name: []const u8, ) *std.Build.CompileStep { const static = b.addStaticLibrary(.{ - .name = "a", + .name = name, .optimize = optimize, .target = target, }); @@ -49,10 +57,9 @@ fn createScenario( static.override_dest_dir = std.Build.InstallDir{ .custom = "static", }; - static.install(); const dylib = b.addSharedLibrary(.{ - .name = "a", + .name = name, .version = .{ .major = 1, .minor = 0 }, .optimize = optimize, .target = target, @@ -62,18 +69,17 @@ fn createScenario( dylib.override_dest_dir = std.Build.InstallDir{ .custom = "dynamic", }; - dylib.install(); const exe = b.addExecutable(.{ - .name = "main", + .name = name, .optimize = optimize, .target = target, }); exe.addCSourceFile("main.c", &.{}); - exe.linkSystemLibraryName("a"); + exe.linkSystemLibraryName(name); exe.linkLibC(); - exe.addLibraryPath(b.pathFromRoot("zig-out/static")); - exe.addLibraryPath(b.pathFromRoot("zig-out/dynamic")); - exe.addRPath(b.pathFromRoot("zig-out/dynamic")); + exe.addLibraryPathDirectorySource(static.getOutputDirectorySource()); + exe.addLibraryPathDirectorySource(dylib.getOutputDirectorySource()); + exe.addRPathDirectorySource(dylib.getOutputDirectorySource()); return exe; } diff --git a/test/link/macho/stack_size/build.zig b/test/link/macho/stack_size/build.zig index 3529a134ebd1..c7d308d004e4 100644 --- a/test/link/macho/stack_size/build.zig +++ b/test/link/macho/stack_size/build.zig @@ -1,11 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target: std.zig.CrossTarget = .{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target: std.zig.CrossTarget = .{ .os_tag = .macos }; const exe = b.addExecutable(.{ .name = "main", @@ -16,10 +24,11 @@ pub fn build(b: *std.Build) void { exe.linkLibC(); exe.stack_size = 0x100000000; - const check_exe = exe.checkObject(.macho); + const check_exe = exe.checkObject(); check_exe.checkStart("cmd MAIN"); check_exe.checkNext("stacksize 100000000"); const run = check_exe.runAndCompare(); + run.expectStdOutEqual(""); test_step.dependOn(&run.step); } diff --git a/test/link/macho/strict_validation/build.zig b/test/link/macho/strict_validation/build.zig index 408076657b13..34a0cd73fc1c 100644 --- a/test/link/macho/strict_validation/build.zig +++ 
b/test/link/macho/strict_validation/build.zig @@ -1,12 +1,20 @@ const std = @import("std"); const builtin = @import("builtin"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target: std.zig.CrossTarget = .{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target: std.zig.CrossTarget = .{ .os_tag = .macos }; const exe = b.addExecutable(.{ .name = "main", @@ -16,7 +24,7 @@ pub fn build(b: *std.Build) void { }); exe.linkLibC(); - const check_exe = exe.checkObject(.macho); + const check_exe = exe.checkObject(); check_exe.checkStart("cmd SEGMENT_64"); check_exe.checkNext("segname __LINKEDIT"); diff --git a/test/link/macho/tls/build.zig b/test/link/macho/tls/build.zig index c77588cb5d3f..5981fea19401 100644 --- a/test/link/macho/tls/build.zig +++ b/test/link/macho/tls/build.zig @@ -1,7 +1,18 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const target: std.zig.CrossTarget = .{ .os_tag = .macos }; const lib = b.addSharedLibrary(.{ @@ -21,6 +32,8 @@ pub fn build(b: *std.Build) void { test_exe.linkLibrary(lib); test_exe.linkLibC(); - const test_step = b.step("test", "Test it"); - test_step.dependOn(&test_exe.step); + const run = test_exe.run(); + run.skip_foreign_checks = true; + + test_step.dependOn(&run.step); } diff --git a/test/link/macho/unwind_info/build.zig b/test/link/macho/unwind_info/build.zig index 408f762f5db2..4ace2a4e96d3 100644 --- a/test/link/macho/unwind_info/build.zig +++ b/test/link/macho/unwind_info/build.zig @@ -1,14 +1,23 @@ const std = @import("std"); const builtin = @import("builtin"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target: std.zig.CrossTarget = .{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test the program"); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target: std.zig.CrossTarget = .{ .os_tag = .macos }; - testUnwindInfo(b, test_step, optimize, target, false); - testUnwindInfo(b, test_step, optimize, target, true); + testUnwindInfo(b, test_step, optimize, target, false, "no-dead-strip"); + testUnwindInfo(b, test_step, optimize, target, true, "yes-dead-strip"); } fn testUnwindInfo( @@ -17,11 +26,12 @@ fn testUnwindInfo( optimize: std.builtin.OptimizeMode, target: std.zig.CrossTarget, dead_strip: bool, + name: []const u8, ) void { - const exe = createScenario(b, optimize, target); + const exe = createScenario(b, optimize, target, 
name); exe.link_gc_sections = dead_strip; - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("segname __TEXT"); check.checkNext("sectname __gcc_except_tab"); check.checkNext("sectname __unwind_info"); @@ -54,9 +64,10 @@ fn createScenario( b: *std.Build, optimize: std.builtin.OptimizeMode, target: std.zig.CrossTarget, + name: []const u8, ) *std.Build.CompileStep { const exe = b.addExecutable(.{ - .name = "test", + .name = name, .optimize = optimize, .target = target, }); diff --git a/test/link/macho/uuid/build.zig b/test/link/macho/uuid/build.zig index 5a8c14ae37e2..df58aeacb76b 100644 --- a/test/link/macho/uuid/build.zig +++ b/test/link/macho/uuid/build.zig @@ -1,14 +1,16 @@ const std = @import("std"); -const Builder = std.Build.Builder; const CompileStep = std.Build.CompileStep; const FileSource = std.Build.FileSource; const Step = std.Build.Step; +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + b.default_step = test_step; - // We force cross-compilation to ensure we always pick a generic CPU with constant set of CPU features. + // We force cross-compilation to ensure we always pick a generic CPU with + // constant set of CPU features. const aarch64_macos = std.zig.CrossTarget{ .cpu_arch = .aarch64, .os_tag = .macos, @@ -38,13 +40,15 @@ fn testUuid( // stay the same across builds. { const dylib = simpleDylib(b, optimize, target); - const install_step = installWithRename(dylib, "test1.dylib"); + const install_step = b.addInstallArtifact(dylib); + install_step.dest_sub_path = "test1.dylib"; install_step.step.dependOn(&dylib.step); } { const dylib = simpleDylib(b, optimize, target); dylib.strip = true; - const install_step = installWithRename(dylib, "test2.dylib"); + const install_step = b.addInstallArtifact(dylib); + install_step.dest_sub_path = "test2.dylib"; install_step.step.dependOn(&dylib.step); } @@ -68,86 +72,43 @@ fn simpleDylib( return dylib; } -fn installWithRename(cs: *CompileStep, name: []const u8) *InstallWithRename { - const step = InstallWithRename.create(cs.builder, cs.getOutputSource(), name); - cs.builder.getInstallStep().dependOn(&step.step); - return step; -} - -const InstallWithRename = struct { - pub const base_id = .custom; - - step: Step, - builder: *Builder, - source: FileSource, - name: []const u8, - - pub fn create( - builder: *Builder, - source: FileSource, - name: []const u8, - ) *InstallWithRename { - const self = builder.allocator.create(InstallWithRename) catch @panic("OOM"); - self.* = InstallWithRename{ - .builder = builder, - .step = Step.init(.custom, builder.fmt("install and rename: {s} -> {s}", .{ - source.getDisplayName(), - name, - }), builder.allocator, make), - .source = source, - .name = builder.dupe(name), - }; - return self; - } - - fn make(step: *Step) anyerror!void { - const self = @fieldParentPtr(InstallWithRename, "step", step); - const source_path = self.source.getPath(self.builder); - const target_path = self.builder.getInstallPath(.lib, self.name); - self.builder.updateFile(source_path, target_path) catch |err| { - std.log.err("Unable to rename: {s} -> {s}", .{ source_path, target_path }); - return err; - }; - } -}; - const CompareUuid = struct { pub const base_id = .custom; step: Step, - builder: *Builder, lhs: []const u8, rhs: []const u8, - pub fn create(builder: *Builder, lhs: []const u8, rhs: []const u8) *CompareUuid { - const self = builder.allocator.create(CompareUuid) catch 
@panic("OOM"); + pub fn create(owner: *std.Build, lhs: []const u8, rhs: []const u8) *CompareUuid { + const self = owner.allocator.create(CompareUuid) catch @panic("OOM"); self.* = CompareUuid{ - .builder = builder, - .step = Step.init( - .custom, - builder.fmt("compare uuid: {s} and {s}", .{ + .step = Step.init(.{ + .id = base_id, + .name = owner.fmt("compare uuid: {s} and {s}", .{ lhs, rhs, }), - builder.allocator, - make, - ), + .owner = owner, + .makeFn = make, + }), .lhs = lhs, .rhs = rhs, }; return self; } - fn make(step: *Step) anyerror!void { + fn make(step: *Step, prog_node: *std.Progress.Node) anyerror!void { + _ = prog_node; + const b = step.owner; const self = @fieldParentPtr(CompareUuid, "step", step); - const gpa = self.builder.allocator; + const gpa = b.allocator; var lhs_uuid: [16]u8 = undefined; - const lhs_path = self.builder.getInstallPath(.lib, self.lhs); + const lhs_path = b.getInstallPath(.lib, self.lhs); try parseUuid(gpa, lhs_path, &lhs_uuid); var rhs_uuid: [16]u8 = undefined; - const rhs_path = self.builder.getInstallPath(.lib, self.rhs); + const rhs_path = b.getInstallPath(.lib, self.rhs); try parseUuid(gpa, rhs_path, &rhs_uuid); try std.testing.expectEqualStrings(&lhs_uuid, &rhs_uuid); diff --git a/test/link/macho/weak_framework/build.zig b/test/link/macho/weak_framework/build.zig index ca28458d7754..7cc08f5b9d96 100644 --- a/test/link/macho/weak_framework/build.zig +++ b/test/link/macho/weak_framework/build.zig @@ -1,11 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; +pub const requires_macos_sdk = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test the program"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const exe = b.addExecutable(.{ .name = "test", .optimize = optimize, @@ -14,7 +22,7 @@ pub fn build(b: *std.Build) void { exe.linkLibC(); exe.linkFrameworkWeak("Cocoa"); - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("cmd LOAD_WEAK_DYLIB"); check.checkNext("name {*}Cocoa"); test_step.dependOn(&check.step); diff --git a/test/link/macho/weak_library/build.zig b/test/link/macho/weak_library/build.zig index de5aa45e30de..b12ec087f560 100644 --- a/test/link/macho/weak_library/build.zig +++ b/test/link/macho/weak_library/build.zig @@ -1,11 +1,19 @@ const std = @import("std"); +pub const requires_symlinks = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target: std.zig.CrossTarget = .{ .os_tag = .macos }; + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test the program"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target: std.zig.CrossTarget = .{ .os_tag = .macos }; const dylib = b.addSharedLibrary(.{ .name = "a", @@ -25,10 +33,10 @@ pub fn build(b: *std.Build) void { exe.addCSourceFile("main.c", &[0][]const u8{}); exe.linkLibC(); exe.linkSystemLibraryWeak("a"); 
- exe.addLibraryPath(b.pathFromRoot("zig-out/lib")); - exe.addRPath(b.pathFromRoot("zig-out/lib")); + exe.addLibraryPathDirectorySource(dylib.getOutputDirectorySource()); + exe.addRPathDirectorySource(dylib.getOutputDirectorySource()); - const check = exe.checkObject(.macho); + const check = exe.checkObject(); check.checkStart("cmd LOAD_WEAK_DYLIB"); check.checkNext("name @rpath/liba.dylib"); diff --git a/test/link/static_lib_as_system_lib/a.c b/test/link/static_lib_as_system_lib/a.c deleted file mode 100644 index ee9da97a3a8f..000000000000 --- a/test/link/static_lib_as_system_lib/a.c +++ /dev/null @@ -1,4 +0,0 @@ -#include "a.h" -int32_t add(int32_t a, int32_t b) { - return a + b; -} diff --git a/test/link/static_lib_as_system_lib/a.h b/test/link/static_lib_as_system_lib/a.h deleted file mode 100644 index 7b45d54d5675..000000000000 --- a/test/link/static_lib_as_system_lib/a.h +++ /dev/null @@ -1,2 +0,0 @@ -#include -int32_t add(int32_t a, int32_t b); diff --git a/test/link/static_lib_as_system_lib/build.zig b/test/link/static_lib_as_system_lib/build.zig deleted file mode 100644 index b6cf32d71138..000000000000 --- a/test/link/static_lib_as_system_lib/build.zig +++ /dev/null @@ -1,29 +0,0 @@ -const std = @import("std"); - -pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = b.standardTargetOptions(.{}); - - const lib_a = b.addStaticLibrary(.{ - .name = "a", - .optimize = optimize, - .target = target, - }); - lib_a.addCSourceFile("a.c", &[_][]const u8{}); - lib_a.addIncludePath("."); - lib_a.install(); - - const test_exe = b.addTest(.{ - .root_source_file = .{ .path = "main.zig" }, - .optimize = optimize, - .target = target, - }); - test_exe.linkSystemLibrary("a"); // force linking liba.a as -la - test_exe.addSystemIncludePath("."); - const search_path = std.fs.path.join(b.allocator, &[_][]const u8{ b.install_path, "lib" }) catch unreachable; - test_exe.addLibraryPath(search_path); - - const test_step = b.step("test", "Test it"); - test_step.dependOn(b.getInstallStep()); - test_step.dependOn(&test_exe.step); -} diff --git a/test/link/static_lib_as_system_lib/main.zig b/test/link/static_lib_as_system_lib/main.zig deleted file mode 100644 index 0b9c46217fa0..000000000000 --- a/test/link/static_lib_as_system_lib/main.zig +++ /dev/null @@ -1,8 +0,0 @@ -const std = @import("std"); -const expect = std.testing.expect; -const c = @cImport(@cInclude("a.h")); - -test "import C add" { - const result = c.add(2, 1); - try expect(result == 3); -} diff --git a/test/link/wasm/archive/build.zig b/test/link/wasm/archive/build.zig index 342c4c08d19d..6c242a6bf15e 100644 --- a/test/link/wasm/archive/build.zig +++ b/test/link/wasm/archive/build.zig @@ -1,22 +1,31 @@ const std = @import("std"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { // The code in question will pull-in compiler-rt, // and therefore link with its archive file. 
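// Aside: the wasm archive test's main.zig is not shown in this diff, but code along these lines
// is the kind that pulls in compiler-rt on wasm32: narrowing f32 to f16 lowers to a call to
// __truncsfhf2, which is exactly the symbol the checkObject assertions below expect to have been
// imported and resolved from the compiler-rt archive. Hypothetical source, illustrative only:
export fn toHalf(x: f32) f16 {
    return @floatCast(f16, x); // lowered to a __truncsfhf2 libcall on targets without native f16 support
}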
const lib = b.addSharedLibrary(.{ .name = "main", .root_source_file = .{ .path = "main.zig" }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = optimize, .target = .{ .cpu_arch = .wasm32, .os_tag = .freestanding }, }); lib.use_llvm = false; lib.use_lld = false; lib.strip = false; - const check = lib.checkObject(.wasm); + const check = lib.checkObject(); check.checkStart("Section custom"); check.checkNext("name __truncsfhf2"); // Ensure it was imported and resolved diff --git a/test/link/wasm/basic-features/build.zig b/test/link/wasm/basic-features/build.zig index 9f570665183c..be709a698fa2 100644 --- a/test/link/wasm/basic-features/build.zig +++ b/test/link/wasm/basic-features/build.zig @@ -1,11 +1,13 @@ const std = @import("std"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { // Library with explicitly set cpu features const lib = b.addSharedLibrary(.{ .name = "lib", .root_source_file = .{ .path = "main.zig" }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = .Debug, .target = .{ .cpu_arch = .wasm32, .cpu_model = .{ .explicit = &std.Target.wasm.cpu.mvp }, @@ -17,11 +19,12 @@ pub fn build(b: *std.Build) void { lib.use_lld = false; // Verify the result contains the features explicitly set on the target for the library. - const check = lib.checkObject(.wasm); + const check = lib.checkObject(); check.checkStart("name target_features"); check.checkNext("features 1"); check.checkNext("+ atomics"); const test_step = b.step("test", "Run linker test"); test_step.dependOn(&check.step); + b.default_step = test_step; } diff --git a/test/link/wasm/bss/build.zig b/test/link/wasm/bss/build.zig index 1017e70a7192..bba2e7c60252 100644 --- a/test/link/wasm/bss/build.zig +++ b/test/link/wasm/bss/build.zig @@ -1,14 +1,16 @@ const std = @import("std"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + b.default_step = test_step; const lib = b.addSharedLibrary(.{ .name = "lib", .root_source_file = .{ .path = "lib.zig" }, .target = .{ .cpu_arch = .wasm32, .os_tag = .freestanding }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = .Debug, }); lib.use_llvm = false; lib.use_lld = false; @@ -17,7 +19,7 @@ pub fn build(b: *std.Build) void { lib.import_memory = true; lib.install(); - const check_lib = lib.checkObject(.wasm); + const check_lib = lib.checkObject(); // since we import memory, make sure it exists with the correct naming check_lib.checkStart("Section import"); @@ -36,5 +38,6 @@ pub fn build(b: *std.Build) void { check_lib.checkNext("name .rodata"); check_lib.checkNext("index 1"); // bss section always last check_lib.checkNext("name .bss"); + test_step.dependOn(&check_lib.step); } diff --git a/test/link/wasm/export-data/build.zig b/test/link/wasm/export-data/build.zig index c989153e47b0..38b8c3e19e82 100644 --- a/test/link/wasm/export-data/build.zig +++ b/test/link/wasm/export-data/build.zig @@ -2,7 +2,12 @@ const std = @import("std"); pub fn build(b: *std.Build) void { const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + b.default_step = test_step; + + if (@import("builtin").os.tag == .windows) { + // TODO: Fix open handle in wasm-linker refraining rename from working on Windows. 
+ return; + } const lib = b.addSharedLibrary(.{ .name = "lib", @@ -14,7 +19,7 @@ pub fn build(b: *std.Build) void { lib.export_symbol_names = &.{ "foo", "bar" }; lib.global_base = 0; // put data section at address 0 to make data symbols easier to parse - const check_lib = lib.checkObject(.wasm); + const check_lib = lib.checkObject(); check_lib.checkStart("Section global"); check_lib.checkNext("entries 3"); diff --git a/test/link/wasm/export/build.zig b/test/link/wasm/export/build.zig index 69c34a320eee..794201dbf6c9 100644 --- a/test/link/wasm/export/build.zig +++ b/test/link/wasm/export/build.zig @@ -1,8 +1,18 @@ const std = @import("std"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const no_export = b.addSharedLibrary(.{ .name = "no-export", .root_source_file = .{ .path = "main.zig" }, @@ -32,25 +42,24 @@ pub fn build(b: *std.Build) void { force_export.use_llvm = false; force_export.use_lld = false; - const check_no_export = no_export.checkObject(.wasm); + const check_no_export = no_export.checkObject(); check_no_export.checkStart("Section export"); check_no_export.checkNext("entries 1"); check_no_export.checkNext("name memory"); check_no_export.checkNext("kind memory"); - const check_dynamic_export = dynamic_export.checkObject(.wasm); + const check_dynamic_export = dynamic_export.checkObject(); check_dynamic_export.checkStart("Section export"); check_dynamic_export.checkNext("entries 2"); check_dynamic_export.checkNext("name foo"); check_dynamic_export.checkNext("kind function"); - const check_force_export = force_export.checkObject(.wasm); + const check_force_export = force_export.checkObject(); check_force_export.checkStart("Section export"); check_force_export.checkNext("entries 2"); check_force_export.checkNext("name foo"); check_force_export.checkNext("kind function"); - const test_step = b.step("test", "Run linker test"); test_step.dependOn(&check_no_export.step); test_step.dependOn(&check_dynamic_export.step); test_step.dependOn(&check_force_export.step); diff --git a/test/link/wasm/extern-mangle/build.zig b/test/link/wasm/extern-mangle/build.zig index 19913e6eca3d..6c292acbab6e 100644 --- a/test/link/wasm/extern-mangle/build.zig +++ b/test/link/wasm/extern-mangle/build.zig @@ -1,20 +1,26 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const lib = b.addSharedLibrary(.{ .name = "lib", .root_source_file = .{ .path = "lib.zig" }, .target = .{ .cpu_arch = .wasm32, .os_tag = .freestanding }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = optimize, }); lib.import_symbols = true; // import `a` and `b` lib.rdynamic = true; // export `foo` - lib.install(); - const check_lib = lib.checkObject(.wasm); + const check_lib = lib.checkObject(); check_lib.checkStart("Section import"); 
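// Aside (hypothetical, the extern-mangle test's lib.zig is not shown here): the import checks
// that follow fit a source file along these lines, where the quoted library name on an extern
// declaration is used as the wasm import module, import_symbols = true lets the undefined externs
// remain as imports instead of link errors, and rdynamic exports `foo`. Illustrative only; the
// actual declaration names in the test may differ:
extern "a" fn hello() void;
extern "b" fn world() void;
export fn foo() void {
    hello();
    world();
}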
check_lib.checkNext("entries 2"); // a.hello & b.hello check_lib.checkNext("module a"); diff --git a/test/link/wasm/extern/build.zig b/test/link/wasm/extern/build.zig index 569d94091add..55562143c2f0 100644 --- a/test/link/wasm/extern/build.zig +++ b/test/link/wasm/extern/build.zig @@ -1,19 +1,31 @@ const std = @import("std"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const exe = b.addExecutable(.{ .name = "extern", .root_source_file = .{ .path = "main.zig" }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = optimize, .target = .{ .cpu_arch = .wasm32, .os_tag = .wasi }, }); exe.addCSourceFile("foo.c", &.{}); exe.use_llvm = false; exe.use_lld = false; - const run = exe.runEmulatable(); + const run = b.addRunArtifact(exe); + run.skip_foreign_checks = true; run.expectStdOutEqual("Result: 30"); - const test_step = b.step("test", "Run linker test"); test_step.dependOn(&run.step); } diff --git a/test/link/wasm/function-table/build.zig b/test/link/wasm/function-table/build.zig index 4c25d0d8609a..4ce62947277b 100644 --- a/test/link/wasm/function-table/build.zig +++ b/test/link/wasm/function-table/build.zig @@ -1,13 +1,20 @@ const std = @import("std"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const import_table = b.addSharedLibrary(.{ - .name = "lib", + .name = "import_table", .root_source_file = .{ .path = "lib.zig" }, .target = .{ .cpu_arch = .wasm32, .os_tag = .freestanding }, .optimize = optimize, @@ -17,7 +24,7 @@ pub fn build(b: *std.Build) void { import_table.import_table = true; const export_table = b.addSharedLibrary(.{ - .name = "lib", + .name = "export_table", .root_source_file = .{ .path = "lib.zig" }, .target = .{ .cpu_arch = .wasm32, .os_tag = .freestanding }, .optimize = optimize, @@ -27,7 +34,7 @@ pub fn build(b: *std.Build) void { export_table.export_table = true; const regular_table = b.addSharedLibrary(.{ - .name = "lib", + .name = "regular_table", .root_source_file = .{ .path = "lib.zig" }, .target = .{ .cpu_arch = .wasm32, .os_tag = .freestanding }, .optimize = optimize, @@ -35,9 +42,9 @@ pub fn build(b: *std.Build) void { regular_table.use_llvm = false; regular_table.use_lld = false; - const check_import = import_table.checkObject(.wasm); - const check_export = export_table.checkObject(.wasm); - const check_regular = regular_table.checkObject(.wasm); + const check_import = import_table.checkObject(); + const check_export = export_table.checkObject(); + const check_regular = regular_table.checkObject(); check_import.checkStart("Section import"); check_import.checkNext("entries 1"); diff --git a/test/link/wasm/infer-features/build.zig b/test/link/wasm/infer-features/build.zig index d6d706a33d1f..00fb48651b03 100644 --- a/test/link/wasm/infer-features/build.zig +++ 
b/test/link/wasm/infer-features/build.zig @@ -1,12 +1,12 @@ const std = @import("std"); -pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); +pub const requires_stage2 = true; +pub fn build(b: *std.Build) void { // Wasm Object file which we will use to infer the features from const c_obj = b.addObject(.{ .name = "c_obj", - .optimize = optimize, + .optimize = .Debug, .target = .{ .cpu_arch = .wasm32, .cpu_model = .{ .explicit = &std.Target.wasm.cpu.bleeding_edge }, @@ -20,7 +20,7 @@ pub fn build(b: *std.Build) void { const lib = b.addSharedLibrary(.{ .name = "lib", .root_source_file = .{ .path = "main.zig" }, - .optimize = optimize, + .optimize = .Debug, .target = .{ .cpu_arch = .wasm32, .cpu_model = .{ .explicit = &std.Target.wasm.cpu.mvp }, @@ -32,7 +32,7 @@ pub fn build(b: *std.Build) void { lib.addObject(c_obj); // Verify the result contains the features from the C Object file. - const check = lib.checkObject(.wasm); + const check = lib.checkObject(); check.checkStart("name target_features"); check.checkNext("features 7"); check.checkNext("+ atomics"); @@ -45,4 +45,5 @@ pub fn build(b: *std.Build) void { const test_step = b.step("test", "Run linker test"); test_step.dependOn(&check.step); + b.default_step = test_step; } diff --git a/test/link/wasm/producers/build.zig b/test/link/wasm/producers/build.zig index 2589b0dfcf3b..7b7cefd7e06d 100644 --- a/test/link/wasm/producers/build.zig +++ b/test/link/wasm/producers/build.zig @@ -1,26 +1,33 @@ const std = @import("std"); const builtin = @import("builtin"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const lib = b.addSharedLibrary(.{ .name = "lib", .root_source_file = .{ .path = "lib.zig" }, .target = .{ .cpu_arch = .wasm32, .os_tag = .freestanding }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = optimize, }); lib.use_llvm = false; lib.use_lld = false; lib.strip = false; lib.install(); - const zig_version = builtin.zig_version; - var version_buf: [100]u8 = undefined; - const version_fmt = std.fmt.bufPrint(&version_buf, "version {}", .{zig_version}) catch unreachable; + const version_fmt = "version " ++ builtin.zig_version_string; - const check_lib = lib.checkObject(.wasm); + const check_lib = lib.checkObject(); check_lib.checkStart("name producers"); check_lib.checkNext("fields 2"); check_lib.checkNext("field_name language"); diff --git a/test/link/wasm/segments/build.zig b/test/link/wasm/segments/build.zig index 76160e905f8d..281d8ae32b9d 100644 --- a/test/link/wasm/segments/build.zig +++ b/test/link/wasm/segments/build.zig @@ -1,21 +1,30 @@ const std = @import("std"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const lib = b.addSharedLibrary(.{ .name = "lib", 
.root_source_file = .{ .path = "lib.zig" }, .target = .{ .cpu_arch = .wasm32, .os_tag = .freestanding }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = optimize, }); lib.use_llvm = false; lib.use_lld = false; lib.strip = false; lib.install(); - const check_lib = lib.checkObject(.wasm); + const check_lib = lib.checkObject(); check_lib.checkStart("Section data"); check_lib.checkNext("entries 2"); // rodata & data, no bss because we're exporting memory diff --git a/test/link/wasm/stack_pointer/build.zig b/test/link/wasm/stack_pointer/build.zig index 95c764388005..794b7d27fbb3 100644 --- a/test/link/wasm/stack_pointer/build.zig +++ b/test/link/wasm/stack_pointer/build.zig @@ -1,14 +1,23 @@ const std = @import("std"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const lib = b.addSharedLibrary(.{ .name = "lib", .root_source_file = .{ .path = "lib.zig" }, .target = .{ .cpu_arch = .wasm32, .os_tag = .freestanding }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = optimize, }); lib.use_llvm = false; lib.use_lld = false; @@ -16,7 +25,7 @@ pub fn build(b: *std.Build) void { lib.stack_size = std.wasm.page_size * 2; // set an explicit stack size lib.install(); - const check_lib = lib.checkObject(.wasm); + const check_lib = lib.checkObject(); // ensure global exists and its initial value is equal to explicit stack size check_lib.checkStart("Section global"); diff --git a/test/link/wasm/type/build.zig b/test/link/wasm/type/build.zig index 816b57ccabed..4a8395645f39 100644 --- a/test/link/wasm/type/build.zig +++ b/test/link/wasm/type/build.zig @@ -1,21 +1,30 @@ const std = @import("std"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { - const test_step = b.step("test", "Test"); - test_step.dependOn(b.getInstallStep()); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const lib = b.addSharedLibrary(.{ .name = "lib", .root_source_file = .{ .path = "lib.zig" }, .target = .{ .cpu_arch = .wasm32, .os_tag = .freestanding }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = optimize, }); lib.use_llvm = false; lib.use_lld = false; lib.strip = false; lib.install(); - const check_lib = lib.checkObject(.wasm); + const check_lib = lib.checkObject(); check_lib.checkStart("Section type"); // only 2 entries, although we have 3 functions.
// This is to test functions with the same function signature diff --git a/test/stage2/nvptx.zig b/test/nvptx.zig similarity index 76% rename from test/stage2/nvptx.zig rename to test/nvptx.zig index f08aa9fca463..57853a657dc9 100644 --- a/test/stage2/nvptx.zig +++ b/test/nvptx.zig @@ -1,11 +1,11 @@ const std = @import("std"); -const TestContext = @import("../../src/test.zig").TestContext; +const Cases = @import("src/Cases.zig"); -pub fn addCases(ctx: *TestContext) !void { +pub fn addCases(ctx: *Cases) !void { { - var case = addPtx(ctx, "nvptx: simple addition and subtraction"); + var case = addPtx(ctx, "simple addition and subtraction"); - case.compiles( + case.addCompile( \\fn add(a: i32, b: i32) i32 { \\ return a + b; \\} @@ -20,9 +20,9 @@ pub fn addCases(ctx: *TestContext) !void { } { - var case = addPtx(ctx, "nvptx: read special registers"); + var case = addPtx(ctx, "read special registers"); - case.compiles( + case.addCompile( \\fn threadIdX() u32 { \\ return asm ("mov.u32 \t%[r], %tid.x;" \\ : [r] "=r" (-> u32), @@ -37,9 +37,9 @@ pub fn addCases(ctx: *TestContext) !void { } { - var case = addPtx(ctx, "nvptx: address spaces"); + var case = addPtx(ctx, "address spaces"); - case.compiles( + case.addCompile( \\var x: i32 addrspace(.global) = 0; \\ \\pub export fn increment(out: *i32) callconv(.PtxKernel) void { @@ -50,8 +50,8 @@ pub fn addCases(ctx: *TestContext) !void { } { - var case = addPtx(ctx, "nvptx: reduce in shared mem"); - case.compiles( + var case = addPtx(ctx, "reduce in shared mem"); + case.addCompile( \\fn threadIdX() u32 { \\ return asm ("mov.u32 \t%[r], %tid.x;" \\ : [r] "=r" (-> u32), @@ -88,16 +88,15 @@ const nvptx_target = std.zig.CrossTarget{ }; pub fn addPtx( - ctx: *TestContext, + ctx: *Cases, name: []const u8, -) *TestContext.Case { - ctx.cases.append(TestContext.Case{ +) *Cases.Case { + ctx.cases.append(.{ .name = name, .target = nvptx_target, - .updates = std.ArrayList(TestContext.Update).init(ctx.cases.allocator), + .updates = std.ArrayList(Cases.Update).init(ctx.cases.allocator), .output_mode = .Obj, - .files = std.ArrayList(TestContext.File).init(ctx.cases.allocator), - .deps = std.ArrayList(TestContext.DepModule).init(ctx.cases.allocator), + .deps = std.ArrayList(Cases.DepModule).init(ctx.cases.allocator), .link_libc = false, .backend = .llvm, // Bug in Debug mode diff --git a/test/src/Cases.zig b/test/src/Cases.zig new file mode 100644 index 000000000000..c3a4c1df4737 --- /dev/null +++ b/test/src/Cases.zig @@ -0,0 +1,1587 @@ +gpa: Allocator, +arena: Allocator, +cases: std.ArrayList(Case), +incremental_cases: std.ArrayList(IncrementalCase), + +pub const IncrementalCase = struct { + base_path: []const u8, +}; + +pub const Update = struct { + /// The input to the current update. We simulate an incremental update + /// with the file's contents changed to this value each update. + /// + /// This value can change entirely between updates, which would be akin + /// to deleting the source file and creating a new one from scratch; or + /// you can keep it mostly consistent, with small changes, testing the + /// effects of the incremental compilation. + files: std.ArrayList(File), + /// This is a description of what happens with the update, for debugging + /// purposes. + name: []const u8, + case: union(enum) { + /// Check that it compiles with no errors. + Compile: void, + /// Check the main binary output file against an expected set of bytes. + /// This is most useful with, for example, `-ofmt=c`. 
+ CompareObjectFile: []const u8, + /// An error update attempts to compile bad code, and ensures that it + /// fails to compile, and for the expected reasons. + /// A slice containing the expected stderr template, which + /// gets some values substituted. + Error: []const []const u8, + /// An execution update compiles and runs the input, testing the + /// stdout against the expected results + /// This is a slice containing the expected message. + Execution: []const u8, + /// A header update compiles the input with the equivalent of + /// `-femit-h` and tests the produced header against the + /// expected result + Header: []const u8, + }, + + pub fn addSourceFile(update: *Update, name: []const u8, src: [:0]const u8) void { + update.files.append(.{ .path = name, .src = src }) catch @panic("out of memory"); + } +}; + +pub const File = struct { + src: [:0]const u8, + path: []const u8, +}; + +pub const DepModule = struct { + name: []const u8, + path: []const u8, +}; + +pub const Backend = enum { + stage1, + stage2, + llvm, +}; + +/// A `Case` consists of a list of `Update`. The same `Compilation` is used for each +/// update, so each update's source is treated as a single file being +/// updated by the test harness and incrementally compiled. +pub const Case = struct { + /// The name of the test case. This is shown if a test fails, and + /// otherwise ignored. + name: []const u8, + /// The platform the test targets. For non-native platforms, an emulator + /// such as QEMU is required for tests to complete. + target: CrossTarget, + /// In order to be able to run e.g. Execution updates, this must be set + /// to Executable. + output_mode: std.builtin.OutputMode, + optimize_mode: std.builtin.Mode = .Debug, + updates: std.ArrayList(Update), + emit_h: bool = false, + is_test: bool = false, + expect_exact: bool = false, + backend: Backend = .stage2, + link_libc: bool = false, + + deps: std.ArrayList(DepModule), + + pub fn addSourceFile(case: *Case, name: []const u8, src: [:0]const u8) void { + const update = &case.updates.items[case.updates.items.len - 1]; + update.files.append(.{ .path = name, .src = src }) catch @panic("OOM"); + } + + pub fn addDepModule(case: *Case, name: []const u8, path: []const u8) void { + case.deps.append(.{ + .name = name, + .path = path, + }) catch @panic("out of memory"); + } + + /// Adds a subcase in which the module is updated with `src`, compiled, + /// run, and the output is tested against `result`. + pub fn addCompareOutput(self: *Case, src: [:0]const u8, result: []const u8) void { + self.updates.append(.{ + .files = std.ArrayList(File).init(self.updates.allocator), + .name = "update", + .case = .{ .Execution = result }, + }) catch @panic("out of memory"); + addSourceFile(self, "tmp.zig", src); + } + + pub fn addError(self: *Case, src: [:0]const u8, errors: []const []const u8) void { + return self.addErrorNamed("update", src, errors); + } + + /// Adds a subcase in which the module is updated with `src`, which + /// should contain invalid input, and ensures that compilation fails + /// for the expected reasons, given in sequential order in `errors` in + /// the form `:line:column: error: message`. 
+ pub fn addErrorNamed( + self: *Case, + name: []const u8, + src: [:0]const u8, + errors: []const []const u8, + ) void { + assert(errors.len != 0); + self.updates.append(.{ + .files = std.ArrayList(File).init(self.updates.allocator), + .name = name, + .case = .{ .Error = errors }, + }) catch @panic("out of memory"); + addSourceFile(self, "tmp.zig", src); + } + + /// Adds a subcase in which the module is updated with `src`, and + /// asserts that it compiles without issue + pub fn addCompile(self: *Case, src: [:0]const u8) void { + self.updates.append(.{ + .files = std.ArrayList(File).init(self.updates.allocator), + .name = "compile", + .case = .{ .Compile = {} }, + }) catch @panic("out of memory"); + addSourceFile(self, "tmp.zig", src); + } +}; + +pub fn addExe( + ctx: *Cases, + name: []const u8, + target: CrossTarget, +) *Case { + ctx.cases.append(Case{ + .name = name, + .target = target, + .updates = std.ArrayList(Update).init(ctx.cases.allocator), + .output_mode = .Exe, + .deps = std.ArrayList(DepModule).init(ctx.arena), + }) catch @panic("out of memory"); + return &ctx.cases.items[ctx.cases.items.len - 1]; +} + +/// Adds a test case for Zig input, producing an executable +pub fn exe(ctx: *Cases, name: []const u8, target: CrossTarget) *Case { + return ctx.addExe(name, target); +} + +pub fn exeFromCompiledC(ctx: *Cases, name: []const u8, target: CrossTarget) *Case { + var target_adjusted = target; + target_adjusted.ofmt = .c; + ctx.cases.append(Case{ + .name = name, + .target = target_adjusted, + .updates = std.ArrayList(Update).init(ctx.cases.allocator), + .output_mode = .Exe, + .deps = std.ArrayList(DepModule).init(ctx.arena), + .link_libc = true, + }) catch @panic("out of memory"); + return &ctx.cases.items[ctx.cases.items.len - 1]; +} + +/// Adds a test case that uses the LLVM backend to emit an executable. +/// Currently this implies linking libc, because only then we can generate a testable executable. +pub fn exeUsingLlvmBackend(ctx: *Cases, name: []const u8, target: CrossTarget) *Case { + ctx.cases.append(Case{ + .name = name, + .target = target, + .updates = std.ArrayList(Update).init(ctx.cases.allocator), + .output_mode = .Exe, + .deps = std.ArrayList(DepModule).init(ctx.arena), + .backend = .llvm, + .link_libc = true, + }) catch @panic("out of memory"); + return &ctx.cases.items[ctx.cases.items.len - 1]; +} + +pub fn addObj( + ctx: *Cases, + name: []const u8, + target: CrossTarget, +) *Case { + ctx.cases.append(Case{ + .name = name, + .target = target, + .updates = std.ArrayList(Update).init(ctx.cases.allocator), + .output_mode = .Obj, + .deps = std.ArrayList(DepModule).init(ctx.arena), + }) catch @panic("out of memory"); + return &ctx.cases.items[ctx.cases.items.len - 1]; +} + +pub fn addTest( + ctx: *Cases, + name: []const u8, + target: CrossTarget, +) *Case { + ctx.cases.append(Case{ + .name = name, + .target = target, + .updates = std.ArrayList(Update).init(ctx.cases.allocator), + .output_mode = .Exe, + .is_test = true, + .deps = std.ArrayList(DepModule).init(ctx.arena), + }) catch @panic("out of memory"); + return &ctx.cases.items[ctx.cases.items.len - 1]; +} + +/// Adds a test case for Zig input, producing an object file. +pub fn obj(ctx: *Cases, name: []const u8, target: CrossTarget) *Case { + return ctx.addObj(name, target); +} + +/// Adds a test case for ZIR input, producing an object file. 
+pub fn objZIR(ctx: *Cases, name: []const u8, target: CrossTarget) *Case {
+    return ctx.addObj(name, target, .ZIR);
+}
+
+/// Adds a test case for Zig or ZIR input, producing C code.
+pub fn addC(ctx: *Cases, name: []const u8, target: CrossTarget) *Case {
+    var target_adjusted = target;
+    target_adjusted.ofmt = std.Target.ObjectFormat.c;
+    ctx.cases.append(Case{
+        .name = name,
+        .target = target_adjusted,
+        .updates = std.ArrayList(Update).init(ctx.cases.allocator),
+        .output_mode = .Obj,
+        .deps = std.ArrayList(DepModule).init(ctx.arena),
+    }) catch @panic("out of memory");
+    return &ctx.cases.items[ctx.cases.items.len - 1];
+}
+
+pub fn addCompareOutput(
+    ctx: *Cases,
+    name: []const u8,
+    src: [:0]const u8,
+    expected_stdout: []const u8,
+) void {
+    ctx.addExe(name, .{}).addCompareOutput(src, expected_stdout);
+}
+
+/// Adds a test case that compiles the Zig source given in `src`, runs it,
+/// and tests the output against `expected_stdout`.
+pub fn compareOutput(
+    ctx: *Cases,
+    name: []const u8,
+    src: [:0]const u8,
+    expected_stdout: []const u8,
+) void {
+    return ctx.addCompareOutput(name, src, expected_stdout);
+}
+
+pub fn addTransform(
+    ctx: *Cases,
+    name: []const u8,
+    target: CrossTarget,
+    src: [:0]const u8,
+    result: [:0]const u8,
+) void {
+    ctx.addObj(name, target).addTransform(src, result);
+}
+
+/// Adds a test case that compiles the Zig given in `src` to ZIR and tests
+/// the ZIR against `result`.
+pub fn transform(
+    ctx: *Cases,
+    name: []const u8,
+    target: CrossTarget,
+    src: [:0]const u8,
+    result: [:0]const u8,
+) void {
+    ctx.addTransform(name, target, src, result);
+}
+
+pub fn addError(
+    ctx: *Cases,
+    name: []const u8,
+    target: CrossTarget,
+    src: [:0]const u8,
+    expected_errors: []const []const u8,
+) void {
+    ctx.addObj(name, target).addError(src, expected_errors);
+}
+
+/// Adds a test case that ensures that the Zig given in `src` fails to
+/// compile for the expected reasons, given in sequential order in
+/// `expected_errors` in the form `:line:column: error: message`.
+pub fn compileError(
+    ctx: *Cases,
+    name: []const u8,
+    target: CrossTarget,
+    src: [:0]const u8,
+    expected_errors: []const []const u8,
+) void {
+    ctx.addError(name, target, src, expected_errors);
+}
+
+/// Adds a test case that asserts that the Zig given in `src` compiles
+/// without any errors.
+pub fn addCompile(
+    ctx: *Cases,
+    name: []const u8,
+    target: CrossTarget,
+    src: [:0]const u8,
+) void {
+    ctx.addObj(name, target).addCompile(src);
+}
+
+/// Adds a test for each file in the provided directory.
+/// The testing strategy (TestStrategy) is inferred automatically from filenames.
+/// Recurses into nested directories.
+///
+/// Each file should include a test manifest as a contiguous block of comments at
+/// the end of the file. The first line should be the test type, followed by a set of
+/// key-value config values, followed by a blank line, then the expected output.
+pub fn addFromDir(ctx: *Cases, dir: std.fs.IterableDir) void {
+    var current_file: []const u8 = "none";
+    ctx.addFromDirInner(dir, &current_file) catch |err| {
+        std.debug.panic("test harness failed to process file '{s}': {s}\n", .{
+            current_file, @errorName(err),
+        });
+    };
+}
+
+fn addFromDirInner(
+    ctx: *Cases,
+    iterable_dir: std.fs.IterableDir,
+    /// This is kept up to date with the currently being processed file so
+    /// that if any errors occur the caller knows it happened during this file.
+ current_file: *[]const u8, +) !void { + var it = try iterable_dir.walk(ctx.arena); + var filenames = std.ArrayList([]const u8).init(ctx.arena); + + while (try it.next()) |entry| { + if (entry.kind != .File) continue; + + // Ignore stuff such as .swp files + switch (Compilation.classifyFileExt(entry.basename)) { + .unknown => continue, + else => {}, + } + try filenames.append(try ctx.arena.dupe(u8, entry.path)); + } + + // Sort filenames, so that incremental tests are contiguous and in-order + sortTestFilenames(filenames.items); + + var test_it = TestIterator{ .filenames = filenames.items }; + while (test_it.next()) |maybe_batch| { + const batch = maybe_batch orelse break; + const strategy: TestStrategy = if (batch.len > 1) .incremental else .independent; + const filename = batch[0]; + current_file.* = filename; + if (strategy == .incremental) { + try ctx.incremental_cases.append(.{ .base_path = filename }); + continue; + } + + const max_file_size = 10 * 1024 * 1024; + const src = try iterable_dir.dir.readFileAllocOptions(ctx.arena, filename, max_file_size, null, 1, 0); + + // Parse the manifest + var manifest = try TestManifest.parse(ctx.arena, src); + + const backends = try manifest.getConfigForKeyAlloc(ctx.arena, "backend", Backend); + const targets = try manifest.getConfigForKeyAlloc(ctx.arena, "target", CrossTarget); + const is_test = try manifest.getConfigForKeyAssertSingle("is_test", bool); + const link_libc = try manifest.getConfigForKeyAssertSingle("link_libc", bool); + const output_mode = try manifest.getConfigForKeyAssertSingle("output_mode", std.builtin.OutputMode); + + var cases = std.ArrayList(usize).init(ctx.arena); + + // Cross-product to get all possible test combinations + for (backends) |backend| { + for (targets) |target| { + const next = ctx.cases.items.len; + try ctx.cases.append(.{ + .name = std.fs.path.stem(filename), + .target = target, + .backend = backend, + .updates = std.ArrayList(Cases.Update).init(ctx.cases.allocator), + .is_test = is_test, + .output_mode = output_mode, + .link_libc = link_libc, + .deps = std.ArrayList(DepModule).init(ctx.cases.allocator), + }); + try cases.append(next); + } + } + + for (cases.items) |case_index| { + const case = &ctx.cases.items[case_index]; + switch (manifest.type) { + .compile => { + case.addCompile(src); + }, + .@"error" => { + const errors = try manifest.trailingAlloc(ctx.arena); + case.addError(src, errors); + }, + .run => { + var output = std.ArrayList(u8).init(ctx.arena); + var trailing_it = manifest.trailing(); + while (trailing_it.next()) |line| { + try output.appendSlice(line); + try output.append('\n'); + } + if (output.items.len > 0) { + try output.resize(output.items.len - 1); + } + case.addCompareOutput(src, try output.toOwnedSlice()); + }, + .cli => @panic("TODO cli tests"), + } + } + } else |err| { + // make sure the current file is set to the file that produced an error + current_file.* = test_it.currentFilename(); + return err; + } +} + +pub fn init(gpa: Allocator, arena: Allocator) Cases { + return .{ + .gpa = gpa, + .cases = std.ArrayList(Case).init(gpa), + .incremental_cases = std.ArrayList(IncrementalCase).init(gpa), + .arena = arena, + }; +} + +pub fn lowerToBuildSteps( + self: *Cases, + b: *std.Build, + parent_step: *std.Build.Step, + opt_test_filter: ?[]const u8, + cases_dir_path: []const u8, + incremental_exe: *std.Build.CompileStep, +) void { + for (self.incremental_cases.items) |incr_case| { + if (opt_test_filter) |test_filter| { + if (std.mem.indexOf(u8, incr_case.base_path, test_filter) == 
null) continue; + } + const case_base_path_with_dir = std.fs.path.join(b.allocator, &.{ + cases_dir_path, incr_case.base_path, + }) catch @panic("OOM"); + const run = b.addRunArtifact(incremental_exe); + run.setName(incr_case.base_path); + run.addArgs(&.{ + case_base_path_with_dir, + b.zig_exe, + }); + run.expectStdOutEqual(""); + parent_step.dependOn(&run.step); + } + + for (self.cases.items) |case| { + if (case.updates.items.len != 1) continue; // handled with incremental_cases above + assert(case.updates.items.len == 1); + const update = case.updates.items[0]; + + if (opt_test_filter) |test_filter| { + if (std.mem.indexOf(u8, case.name, test_filter) == null) continue; + } + + const writefiles = b.addWriteFiles(); + for (update.files.items) |file| { + writefiles.add(file.path, file.src); + } + const root_source_file = writefiles.getFileSource(update.files.items[0].path).?; + + const artifact = if (case.is_test) b.addTest(.{ + .root_source_file = root_source_file, + .name = case.name, + .target = case.target, + .optimize = case.optimize_mode, + }) else switch (case.output_mode) { + .Obj => b.addObject(.{ + .root_source_file = root_source_file, + .name = case.name, + .target = case.target, + .optimize = case.optimize_mode, + }), + .Lib => b.addStaticLibrary(.{ + .root_source_file = root_source_file, + .name = case.name, + .target = case.target, + .optimize = case.optimize_mode, + }), + .Exe => b.addExecutable(.{ + .root_source_file = root_source_file, + .name = case.name, + .target = case.target, + .optimize = case.optimize_mode, + }), + }; + + if (case.link_libc) artifact.linkLibC(); + + switch (case.backend) { + .stage1 => continue, + .stage2 => { + artifact.use_llvm = false; + artifact.use_lld = false; + }, + .llvm => { + artifact.use_llvm = true; + }, + } + + for (case.deps.items) |dep| { + artifact.addAnonymousModule(dep.name, .{ + .source_file = writefiles.getFileSource(dep.path).?, + }); + } + + switch (update.case) { + .Compile => { + parent_step.dependOn(&artifact.step); + }, + .CompareObjectFile => |expected_output| { + const check = b.addCheckFile(artifact.getOutputSource(), .{ + .expected_exact = expected_output, + }); + + parent_step.dependOn(&check.step); + }, + .Error => |expected_msgs| { + assert(expected_msgs.len != 0); + artifact.expect_errors = expected_msgs; + parent_step.dependOn(&artifact.step); + }, + .Execution => |expected_stdout| { + const run = b.addRunArtifact(artifact); + run.skip_foreign_checks = true; + if (!case.is_test) { + run.expectStdOutEqual(expected_stdout); + } + parent_step.dependOn(&run.step); + }, + .Header => @panic("TODO"), + } + } +} + +/// Sort test filenames in-place, so that incremental test cases ("foo.0.zig", +/// "foo.1.zig", etc.) are contiguous and appear in numerical order. +fn sortTestFilenames(filenames: [][]const u8) void { + const Context = struct { + pub fn lessThan(_: @This(), a: []const u8, b: []const u8) bool { + const a_parts = getTestFileNameParts(a); + const b_parts = getTestFileNameParts(b); + + // Sort ".X." based on "" and "" first + return switch (std.mem.order(u8, a_parts.base_name, b_parts.base_name)) { + .lt => true, + .gt => false, + .eq => switch (std.mem.order(u8, a_parts.file_ext, b_parts.file_ext)) { + .lt => true, + .gt => false, + .eq => { + // a and b differ only in their ".X" part + + // Sort "." before any ".X." 
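+                        // For example, "foo.zig" sorts before "foo.0.zig", which in turn
+                        // sorts before "foo.1.zig" (illustrative filenames).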
+                        if (a_parts.test_index) |a_index| {
+                            if (b_parts.test_index) |b_index| {
+                                // Make sure that incremental tests appear in linear order
+                                return a_index < b_index;
+                            } else {
+                                return false;
+                            }
+                        } else {
+                            return b_parts.test_index != null;
+                        }
+                    },
+                },
+            };
+        }
+    };
+    std.sort.sort([]const u8, filenames, Context{}, Context.lessThan);
+}
+
+/// Iterates a set of filenames extracting batches that are either incremental
+/// ("foo.0.zig", "foo.1.zig", etc.) or independent ("foo.zig", "bar.zig", etc.).
+/// Assumes filenames are sorted.
+const TestIterator = struct {
+    start: usize = 0,
+    end: usize = 0,
+    filenames: []const []const u8,
+    /// reset on each call to `next`
+    index: usize = 0,
+
+    const Error = error{InvalidIncrementalTestIndex};
+
+    fn next(it: *TestIterator) Error!?[]const []const u8 {
+        try it.nextInner();
+        if (it.start == it.end) return null;
+        return it.filenames[it.start..it.end];
+    }
+
+    fn nextInner(it: *TestIterator) Error!void {
+        it.start = it.end;
+        if (it.end == it.filenames.len) return;
+        if (it.end + 1 == it.filenames.len) {
+            it.end += 1;
+            return;
+        }
+
+        const remaining = it.filenames[it.end..];
+        it.index = 0;
+        while (it.index < remaining.len - 1) : (it.index += 1) {
+            // First, check if this file is part of an incremental update sequence
+            // Split filename into "<base_name>.<test_index>.<file_ext>"
+            const prev_parts = getTestFileNameParts(remaining[it.index]);
+            const new_parts = getTestFileNameParts(remaining[it.index + 1]);
+
+            // If base_name and file_ext match, these files are in the same test sequence
+            // and the new one should be the incremented version of the previous test
+            if (std.mem.eql(u8, prev_parts.base_name, new_parts.base_name) and
+                std.mem.eql(u8, prev_parts.file_ext, new_parts.file_ext))
+            {
+                // This is "foo.X.zig" followed by "foo.Y.zig". Make sure that Y = X + 1
+                if (prev_parts.test_index == null)
+                    return error.InvalidIncrementalTestIndex;
+                if (new_parts.test_index == null)
+                    return error.InvalidIncrementalTestIndex;
+                if (new_parts.test_index.? != prev_parts.test_index.? + 1)
+                    return error.InvalidIncrementalTestIndex;
+            } else {
+                // This is not the same test sequence, so the new file must be the first file
+                // in a new sequence ("*.0.zig") or an independent test file ("*.zig")
+                if (new_parts.test_index != null and new_parts.test_index.? != 0)
+                    return error.InvalidIncrementalTestIndex;
+
+                it.end += it.index + 1;
+                break;
+            }
+        } else {
+            it.end += remaining.len;
+        }
+    }
+
+    /// In the event of an `error.InvalidIncrementalTestIndex`, this function can
+    /// be used to find the current filename that was being processed.
+    /// Asserts the iterator hasn't reached the end.
+    fn currentFilename(it: TestIterator) []const u8 {
+        assert(it.end != it.filenames.len);
+        const remaining = it.filenames[it.end..];
+        return remaining[it.index + 1];
+    }
+};
+
+/// For a filename in the format "<base_name>.X.<file_ext>" or "<base_name>.<file_ext>",
+/// returns "<base_name>", "<file_ext>" and X parsed as a decimal number. If X is not
+/// present, or cannot be parsed as a decimal number, it is treated as part of
+/// "<base_name>".
+fn getTestFileNameParts(name: []const u8) struct {
+    base_name: []const u8,
+    file_ext: []const u8,
+    test_index: ?usize,
+} {
+    const file_ext = std.fs.path.extension(name);
+    const trimmed = name[0 .. name.len - file_ext.len]; // Trim off ".<file_ext>"
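+    // Worked example (illustrative): for name = "foo.2.zig", file_ext is ".zig" and
+    // trimmed is "foo.2"; the ".2" extracted below parses to test_index 2, leaving
+    // base_name "foo". For plain "foo.zig" no index is found and test_index is null.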
+ const maybe_index = std.fs.path.extension(trimmed); // Extract ".X" + + // Attempt to parse index + const index: ?usize = if (maybe_index.len > 0) + std.fmt.parseInt(usize, maybe_index[1..], 10) catch null + else + null; + + // Adjust "" extent based on parsing success + const base_name_end = trimmed.len - if (index != null) maybe_index.len else 0; + return .{ + .base_name = name[0..base_name_end], + .file_ext = if (file_ext.len > 0) file_ext[1..] else file_ext, + .test_index = index, + }; +} + +const TestStrategy = enum { + /// Execute tests as independent compilations, unless they are explicitly + /// incremental ("foo.0.zig", "foo.1.zig", etc.) + independent, + /// Execute all tests as incremental updates to a single compilation. Explicitly + /// incremental tests ("foo.0.zig", "foo.1.zig", etc.) still execute in order + incremental, +}; + +/// Default config values for known test manifest key-value pairings. +/// Currently handled defaults are: +/// * backend +/// * target +/// * output_mode +/// * is_test +const TestManifestConfigDefaults = struct { + /// Asserts if the key doesn't exist - yep, it's an oversight alright. + fn get(@"type": TestManifest.Type, key: []const u8) []const u8 { + if (std.mem.eql(u8, key, "backend")) { + return "stage2"; + } else if (std.mem.eql(u8, key, "target")) { + if (@"type" == .@"error") { + return "native"; + } + comptime { + var defaults: []const u8 = ""; + // TODO should we only return "mainstream" targets by default here? + // TODO we should also specify ABIs explicitly as the backends are + // getting more and more complete + // Linux + inline for (&[_][]const u8{ "x86_64", "arm", "aarch64" }) |arch| { + defaults = defaults ++ arch ++ "-linux" ++ ","; + } + // macOS + inline for (&[_][]const u8{ "x86_64", "aarch64" }) |arch| { + defaults = defaults ++ arch ++ "-macos" ++ ","; + } + // Windows + defaults = defaults ++ "x86_64-windows" ++ ","; + // Wasm + defaults = defaults ++ "wasm32-wasi"; + return defaults; + } + } else if (std.mem.eql(u8, key, "output_mode")) { + return switch (@"type") { + .@"error" => "Obj", + .run => "Exe", + .compile => "Obj", + .cli => @panic("TODO test harness for CLI tests"), + }; + } else if (std.mem.eql(u8, key, "is_test")) { + return "0"; + } else if (std.mem.eql(u8, key, "link_libc")) { + return "0"; + } else unreachable; + } +}; + +/// Manifest syntax example: +/// (see https://github.com/ziglang/zig/issues/11288) +/// +/// error +/// backend=stage1,stage2 +/// output_mode=exe +/// +/// :3:19: error: foo +/// +/// run +/// target=x86_64-linux,aarch64-macos +/// +/// I am expected stdout! Hello! 
+/// +/// cli +/// +/// build test +const TestManifest = struct { + type: Type, + config_map: std.StringHashMap([]const u8), + trailing_bytes: []const u8 = "", + + const Type = enum { + @"error", + run, + cli, + compile, + }; + + const TrailingIterator = struct { + inner: std.mem.TokenIterator(u8), + + fn next(self: *TrailingIterator) ?[]const u8 { + const next_inner = self.inner.next() orelse return null; + return std.mem.trim(u8, next_inner[2..], " \t"); + } + }; + + fn ConfigValueIterator(comptime T: type) type { + return struct { + inner: std.mem.SplitIterator(u8), + + fn next(self: *@This()) !?T { + const next_raw = self.inner.next() orelse return null; + const parseFn = getDefaultParser(T); + return try parseFn(next_raw); + } + }; + } + + fn parse(arena: Allocator, bytes: []const u8) !TestManifest { + // The manifest is the last contiguous block of comments in the file + // We scan for the beginning by searching backward for the first non-empty line that does not start with "//" + var start: ?usize = null; + var end: usize = bytes.len; + if (bytes.len > 0) { + var cursor: usize = bytes.len - 1; + while (true) { + // Move to beginning of line + while (cursor > 0 and bytes[cursor - 1] != '\n') cursor -= 1; + + if (std.mem.startsWith(u8, bytes[cursor..], "//")) { + start = cursor; // Contiguous comment line, include in manifest + } else { + if (start != null) break; // Encountered non-comment line, end of manifest + + // We ignore all-whitespace lines following the comment block, but anything else + // means that there is no manifest present. + if (std.mem.trim(u8, bytes[cursor..end], " \r\n\t").len == 0) { + end = cursor; + } else break; // If it's not whitespace, there is no manifest + } + + // Move to previous line + if (cursor != 0) cursor -= 1 else break; + } + } + + const actual_start = start orelse return error.MissingTestManifest; + const manifest_bytes = bytes[actual_start..end]; + + var it = std.mem.tokenize(u8, manifest_bytes, "\r\n"); + + // First line is the test type + const tt: Type = blk: { + const line = it.next() orelse return error.MissingTestCaseType; + const raw = std.mem.trim(u8, line[2..], " \t"); + if (std.mem.eql(u8, raw, "error")) { + break :blk .@"error"; + } else if (std.mem.eql(u8, raw, "run")) { + break :blk .run; + } else if (std.mem.eql(u8, raw, "cli")) { + break :blk .cli; + } else if (std.mem.eql(u8, raw, "compile")) { + break :blk .compile; + } else { + std.log.warn("unknown test case type requested: {s}", .{raw}); + return error.UnknownTestCaseType; + } + }; + + var manifest: TestManifest = .{ + .type = tt, + .config_map = std.StringHashMap([]const u8).init(arena), + }; + + // Any subsequent line until a blank comment line is key=value(s) pair + while (it.next()) |line| { + const trimmed = std.mem.trim(u8, line[2..], " \t"); + if (trimmed.len == 0) break; + + // Parse key=value(s) + var kv_it = std.mem.split(u8, trimmed, "="); + const key = kv_it.first(); + try manifest.config_map.putNoClobber(key, kv_it.next() orelse return error.MissingValuesForConfig); + } + + // Finally, trailing is expected output + manifest.trailing_bytes = manifest_bytes[it.index..]; + + return manifest; + } + + fn getConfigForKey( + self: TestManifest, + key: []const u8, + comptime T: type, + ) ConfigValueIterator(T) { + const bytes = self.config_map.get(key) orelse TestManifestConfigDefaults.get(self.type, key); + return ConfigValueIterator(T){ + .inner = std.mem.split(u8, bytes, ","), + }; + } + + fn getConfigForKeyAlloc( + self: TestManifest, + allocator: Allocator, + key: 
[]const u8, + comptime T: type, + ) ![]const T { + var out = std.ArrayList(T).init(allocator); + defer out.deinit(); + var it = self.getConfigForKey(key, T); + while (try it.next()) |item| { + try out.append(item); + } + return try out.toOwnedSlice(); + } + + fn getConfigForKeyAssertSingle(self: TestManifest, key: []const u8, comptime T: type) !T { + var it = self.getConfigForKey(key, T); + const res = (try it.next()) orelse unreachable; + assert((try it.next()) == null); + return res; + } + + fn trailing(self: TestManifest) TrailingIterator { + return .{ + .inner = std.mem.tokenize(u8, self.trailing_bytes, "\r\n"), + }; + } + + fn trailingAlloc(self: TestManifest, allocator: Allocator) error{OutOfMemory}![]const []const u8 { + var out = std.ArrayList([]const u8).init(allocator); + defer out.deinit(); + var it = self.trailing(); + while (it.next()) |line| { + try out.append(line); + } + return try out.toOwnedSlice(); + } + + fn ParseFn(comptime T: type) type { + return fn ([]const u8) anyerror!T; + } + + fn getDefaultParser(comptime T: type) ParseFn(T) { + if (T == CrossTarget) return struct { + fn parse(str: []const u8) anyerror!T { + var opts = CrossTarget.ParseOptions{ + .arch_os_abi = str, + }; + return try CrossTarget.parse(opts); + } + }.parse; + + switch (@typeInfo(T)) { + .Int => return struct { + fn parse(str: []const u8) anyerror!T { + return try std.fmt.parseInt(T, str, 0); + } + }.parse, + .Bool => return struct { + fn parse(str: []const u8) anyerror!T { + const as_int = try std.fmt.parseInt(u1, str, 0); + return as_int > 0; + } + }.parse, + .Enum => return struct { + fn parse(str: []const u8) anyerror!T { + return std.meta.stringToEnum(T, str) orelse { + std.log.err("unknown enum variant for {s}: {s}", .{ @typeName(T), str }); + return error.UnknownEnumVariant; + }; + } + }.parse, + .Struct => @compileError("no default parser for " ++ @typeName(T)), + else => @compileError("no default parser for " ++ @typeName(T)), + } + } +}; + +const Cases = @This(); +const builtin = @import("builtin"); +const std = @import("std"); +const assert = std.debug.assert; +const Allocator = std.mem.Allocator; +const CrossTarget = std.zig.CrossTarget; +const Compilation = @import("../../src/Compilation.zig"); +const zig_h = @import("../../src/link.zig").File.C.zig_h; +const introspect = @import("../../src/introspect.zig"); +const ThreadPool = std.Thread.Pool; +const WaitGroup = std.Thread.WaitGroup; +const build_options = @import("build_options"); +const Package = @import("../../src/Package.zig"); + +pub const std_options = struct { + pub const log_level: std.log.Level = .err; +}; + +var general_purpose_allocator = std.heap.GeneralPurposeAllocator(.{ + .stack_trace_frames = build_options.mem_leak_frames, +}){}; + +// TODO: instead of embedding the compiler in this process, spawn the compiler +// as a sub-process and communicate the updates using the compiler protocol. +pub fn main() !void { + const use_gpa = build_options.force_gpa or !builtin.link_libc; + const gpa = gpa: { + if (use_gpa) { + break :gpa general_purpose_allocator.allocator(); + } + // We would prefer to use raw libc allocator here, but cannot + // use it if it won't support the alignment we need. 
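+        // std.heap.raw_c_allocator returns malloc memory directly, which is only
+        // guaranteed to be aligned to max_align_t; when that is smaller than the
+        // alignment of i128, fall back to std.heap.c_allocator, which can over-align.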
+ if (@alignOf(std.c.max_align_t) < @alignOf(i128)) { + break :gpa std.heap.c_allocator; + } + break :gpa std.heap.raw_c_allocator; + }; + + var single_threaded_arena = std.heap.ArenaAllocator.init(gpa); + defer single_threaded_arena.deinit(); + + var thread_safe_arena: std.heap.ThreadSafeAllocator = .{ + .child_allocator = single_threaded_arena.allocator(), + }; + const arena = thread_safe_arena.allocator(); + + const args = try std.process.argsAlloc(arena); + const case_file_path = args[1]; + const zig_exe_path = args[2]; + + var filenames = std.ArrayList([]const u8).init(arena); + + const case_dirname = std.fs.path.dirname(case_file_path).?; + var iterable_dir = try std.fs.cwd().openIterableDir(case_dirname, .{}); + defer iterable_dir.close(); + + if (std.mem.endsWith(u8, case_file_path, ".0.zig")) { + const stem = case_file_path[case_dirname.len + 1 .. case_file_path.len - "0.zig".len]; + var it = iterable_dir.iterate(); + while (try it.next()) |entry| { + if (entry.kind != .File) continue; + if (!std.mem.startsWith(u8, entry.name, stem)) continue; + try filenames.append(try std.fs.path.join(arena, &.{ case_dirname, entry.name })); + } + } else { + try filenames.append(case_file_path); + } + + if (filenames.items.len == 0) { + std.debug.print("failed to find the input source file(s) from '{s}'\n", .{ + case_file_path, + }); + std.process.exit(1); + } + + // Sort filenames, so that incremental tests are contiguous and in-order + sortTestFilenames(filenames.items); + + var ctx = Cases.init(gpa, arena); + + var test_it = TestIterator{ .filenames = filenames.items }; + while (test_it.next()) |maybe_batch| { + const batch = maybe_batch orelse break; + const strategy: TestStrategy = if (batch.len > 1) .incremental else .independent; + var cases = std.ArrayList(usize).init(arena); + + for (batch) |filename| { + const max_file_size = 10 * 1024 * 1024; + const src = try iterable_dir.dir.readFileAllocOptions(arena, filename, max_file_size, null, 1, 0); + + // Parse the manifest + var manifest = try TestManifest.parse(arena, src); + + if (cases.items.len == 0) { + const backends = try manifest.getConfigForKeyAlloc(arena, "backend", Backend); + const targets = try manifest.getConfigForKeyAlloc(arena, "target", CrossTarget); + const is_test = try manifest.getConfigForKeyAssertSingle("is_test", bool); + const output_mode = try manifest.getConfigForKeyAssertSingle("output_mode", std.builtin.OutputMode); + + // Cross-product to get all possible test combinations + for (backends) |backend| { + for (targets) |target| { + const next = ctx.cases.items.len; + try ctx.cases.append(.{ + .name = std.fs.path.stem(filename), + .target = target, + .backend = backend, + .updates = std.ArrayList(Cases.Update).init(ctx.cases.allocator), + .is_test = is_test, + .output_mode = output_mode, + .link_libc = backend == .llvm, + .deps = std.ArrayList(DepModule).init(ctx.cases.allocator), + }); + try cases.append(next); + } + } + } + + for (cases.items) |case_index| { + const case = &ctx.cases.items[case_index]; + switch (manifest.type) { + .compile => { + case.addCompile(src); + }, + .@"error" => { + const errors = try manifest.trailingAlloc(arena); + switch (strategy) { + .independent => { + case.addError(src, errors); + }, + .incremental => { + case.addErrorNamed("update", src, errors); + }, + } + }, + .run => { + var output = std.ArrayList(u8).init(arena); + var trailing_it = manifest.trailing(); + while (trailing_it.next()) |line| { + try output.appendSlice(line); + try output.append('\n'); + } + if (output.items.len 
> 0) { + try output.resize(output.items.len - 1); + } + case.addCompareOutput(src, try output.toOwnedSlice()); + }, + .cli => @panic("TODO cli tests"), + } + } + } + } else |err| { + return err; + } + + return runCases(&ctx, zig_exe_path); +} + +fn runCases(self: *Cases, zig_exe_path: []const u8) !void { + const host = try std.zig.system.NativeTargetInfo.detect(.{}); + + var progress = std.Progress{}; + const root_node = progress.start("compiler", self.cases.items.len); + progress.terminal = null; + defer root_node.end(); + + var zig_lib_directory = try introspect.findZigLibDir(self.gpa); + defer zig_lib_directory.handle.close(); + defer self.gpa.free(zig_lib_directory.path.?); + + var aux_thread_pool: ThreadPool = undefined; + try aux_thread_pool.init(.{ .allocator = self.gpa }); + defer aux_thread_pool.deinit(); + + // Use the same global cache dir for all the tests, such that we for example don't have to + // rebuild musl libc for every case (when LLVM backend is enabled). + var global_tmp = std.testing.tmpDir(.{}); + defer global_tmp.cleanup(); + + var cache_dir = try global_tmp.dir.makeOpenPath("zig-cache", .{}); + defer cache_dir.close(); + const tmp_dir_path = try std.fs.path.join(self.gpa, &[_][]const u8{ ".", "zig-cache", "tmp", &global_tmp.sub_path }); + defer self.gpa.free(tmp_dir_path); + + const global_cache_directory: Compilation.Directory = .{ + .handle = cache_dir, + .path = try std.fs.path.join(self.gpa, &[_][]const u8{ tmp_dir_path, "zig-cache" }), + }; + defer self.gpa.free(global_cache_directory.path.?); + + { + for (self.cases.items) |*case| { + if (build_options.skip_non_native) { + if (case.target.getCpuArch() != builtin.cpu.arch) + continue; + if (case.target.getObjectFormat() != builtin.object_format) + continue; + } + + // Skip tests that require LLVM backend when it is not available + if (!build_options.have_llvm and case.backend == .llvm) + continue; + + assert(case.backend != .stage1); + + if (build_options.test_filter) |test_filter| { + if (std.mem.indexOf(u8, case.name, test_filter) == null) continue; + } + + var prg_node = root_node.start(case.name, case.updates.items.len); + prg_node.activate(); + defer prg_node.end(); + + try runOneCase( + self.gpa, + &prg_node, + case.*, + zig_lib_directory, + zig_exe_path, + &aux_thread_pool, + global_cache_directory, + host, + ); + } + } +} + +fn runOneCase( + allocator: Allocator, + root_node: *std.Progress.Node, + case: Case, + zig_lib_directory: Compilation.Directory, + zig_exe_path: []const u8, + thread_pool: *ThreadPool, + global_cache_directory: Compilation.Directory, + host: std.zig.system.NativeTargetInfo, +) !void { + const tmp_src_path = "tmp.zig"; + const enable_rosetta = build_options.enable_rosetta; + const enable_qemu = build_options.enable_qemu; + const enable_wine = build_options.enable_wine; + const enable_wasmtime = build_options.enable_wasmtime; + const enable_darling = build_options.enable_darling; + const glibc_runtimes_dir: ?[]const u8 = build_options.glibc_runtimes_dir; + + const target_info = try std.zig.system.NativeTargetInfo.detect(case.target); + const target = target_info.target; + + var arena_allocator = std.heap.ArenaAllocator.init(allocator); + defer arena_allocator.deinit(); + const arena = arena_allocator.allocator(); + + var tmp = std.testing.tmpDir(.{}); + defer tmp.cleanup(); + + var cache_dir = try tmp.dir.makeOpenPath("zig-cache", .{}); + defer cache_dir.close(); + + const tmp_dir_path = try std.fs.path.join( + arena, + &[_][]const u8{ ".", "zig-cache", "tmp", &tmp.sub_path }, + ); 
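+    // For example (illustrative), tmp_dir_path ends up looking like
+    // "./zig-cache/tmp/<random sub path>", and the local cache below is placed in a
+    // "zig-cache" directory inside it.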
+ const local_cache_path = try std.fs.path.join( + arena, + &[_][]const u8{ tmp_dir_path, "zig-cache" }, + ); + + const zig_cache_directory: Compilation.Directory = .{ + .handle = cache_dir, + .path = local_cache_path, + }; + + var main_pkg: Package = .{ + .root_src_directory = .{ .path = tmp_dir_path, .handle = tmp.dir }, + .root_src_path = tmp_src_path, + }; + defer { + var it = main_pkg.table.iterator(); + while (it.next()) |kv| { + allocator.free(kv.key_ptr.*); + kv.value_ptr.*.destroy(allocator); + } + main_pkg.table.deinit(allocator); + } + + for (case.deps.items) |dep| { + var pkg = try Package.create( + allocator, + tmp_dir_path, + dep.path, + ); + errdefer pkg.destroy(allocator); + try main_pkg.add(allocator, dep.name, pkg); + } + + const bin_name = try std.zig.binNameAlloc(arena, .{ + .root_name = "test_case", + .target = target, + .output_mode = case.output_mode, + }); + + const emit_directory: Compilation.Directory = .{ + .path = tmp_dir_path, + .handle = tmp.dir, + }; + const emit_bin: Compilation.EmitLoc = .{ + .directory = emit_directory, + .basename = bin_name, + }; + const emit_h: ?Compilation.EmitLoc = if (case.emit_h) .{ + .directory = emit_directory, + .basename = "test_case.h", + } else null; + const use_llvm: bool = switch (case.backend) { + .llvm => true, + else => false, + }; + const comp = try Compilation.create(allocator, .{ + .local_cache_directory = zig_cache_directory, + .global_cache_directory = global_cache_directory, + .zig_lib_directory = zig_lib_directory, + .thread_pool = thread_pool, + .root_name = "test_case", + .target = target, + // TODO: support tests for object file building, and library builds + // and linking. This will require a rework to support multi-file + // tests. + .output_mode = case.output_mode, + .is_test = case.is_test, + .optimize_mode = case.optimize_mode, + .emit_bin = emit_bin, + .emit_h = emit_h, + .main_pkg = &main_pkg, + .keep_source_files_loaded = true, + .is_native_os = case.target.isNativeOs(), + .is_native_abi = case.target.isNativeAbi(), + .dynamic_linker = target_info.dynamic_linker.get(), + .link_libc = case.link_libc, + .use_llvm = use_llvm, + .self_exe_path = zig_exe_path, + // TODO instead of turning off color, pass in a std.Progress.Node + .color = .off, + .reference_trace = 0, + // TODO: force self-hosted linkers with stage2 backend to avoid LLD creeping in + // until the auto-select mechanism deems them worthy + .use_lld = switch (case.backend) { + .stage2 => false, + else => null, + }, + }); + defer comp.destroy(); + + update: for (case.updates.items, 0..) 
|update, update_index| { + var update_node = root_node.start(update.name, 3); + update_node.activate(); + defer update_node.end(); + + var sync_node = update_node.start("write", 0); + sync_node.activate(); + for (update.files.items) |file| { + try tmp.dir.writeFile(file.path, file.src); + } + sync_node.end(); + + var module_node = update_node.start("parse/analysis/codegen", 0); + module_node.activate(); + try comp.makeBinFileWritable(); + try comp.update(&module_node); + module_node.end(); + + if (update.case != .Error) { + var all_errors = try comp.getAllErrorsAlloc(); + defer all_errors.deinit(allocator); + if (all_errors.errorMessageCount() > 0) { + all_errors.renderToStdErr(.{ + .ttyconf = std.debug.detectTTYConfig(std.io.getStdErr()), + }); + // TODO print generated C code + return error.UnexpectedCompileErrors; + } + } + + switch (update.case) { + .Header => |expected_output| { + var file = try tmp.dir.openFile("test_case.h", .{ .mode = .read_only }); + defer file.close(); + const out = try file.reader().readAllAlloc(arena, 5 * 1024 * 1024); + + try std.testing.expectEqualStrings(expected_output, out); + }, + .CompareObjectFile => |expected_output| { + var file = try tmp.dir.openFile(bin_name, .{ .mode = .read_only }); + defer file.close(); + const out = try file.reader().readAllAlloc(arena, 5 * 1024 * 1024); + + try std.testing.expectEqualStrings(expected_output, out); + }, + .Compile => {}, + .Error => |expected_errors| { + var test_node = update_node.start("assert", 0); + test_node.activate(); + defer test_node.end(); + + var error_bundle = try comp.getAllErrorsAlloc(); + defer error_bundle.deinit(allocator); + + if (error_bundle.errorMessageCount() == 0) { + return error.ExpectedCompilationErrors; + } + + var actual_stderr = std.ArrayList(u8).init(arena); + try error_bundle.renderToWriter(.{ + .ttyconf = .no_color, + .include_reference_trace = false, + .include_source_line = false, + }, actual_stderr.writer()); + + // Render the expected lines into a string that we can compare verbatim. + var expected_generated = std.ArrayList(u8).init(arena); + + var actual_line_it = std.mem.split(u8, actual_stderr.items, "\n"); + for (expected_errors) |expect_line| { + const actual_line = actual_line_it.next() orelse { + try expected_generated.appendSlice(expect_line); + try expected_generated.append('\n'); + continue; + }; + if (std.mem.endsWith(u8, actual_line, expect_line)) { + try expected_generated.appendSlice(actual_line); + try expected_generated.append('\n'); + continue; + } + if (std.mem.startsWith(u8, expect_line, ":?:?: ")) { + if (std.mem.endsWith(u8, actual_line, expect_line[":?:?: ".len..])) { + try expected_generated.appendSlice(actual_line); + try expected_generated.append('\n'); + continue; + } + } + try expected_generated.appendSlice(expect_line); + try expected_generated.append('\n'); + } + + try std.testing.expectEqualStrings(expected_generated.items, actual_stderr.items); + }, + .Execution => |expected_stdout| { + if (!std.process.can_spawn) { + std.debug.print("Unable to spawn child processes on {s}, skipping test.\n", .{@tagName(builtin.os.tag)}); + continue :update; // Pass test. 
+ } + + update_node.setEstimatedTotalItems(4); + + var argv = std.ArrayList([]const u8).init(allocator); + defer argv.deinit(); + + var exec_result = x: { + var exec_node = update_node.start("execute", 0); + exec_node.activate(); + defer exec_node.end(); + + // We go out of our way here to use the unique temporary directory name in + // the exe_path so that it makes its way into the cache hash, avoiding + // cache collisions from multiple threads doing `zig run` at the same time + // on the same test_case.c input filename. + const ss = std.fs.path.sep_str; + const exe_path = try std.fmt.allocPrint( + arena, + ".." ++ ss ++ "{s}" ++ ss ++ "{s}", + .{ &tmp.sub_path, bin_name }, + ); + if (case.target.ofmt != null and case.target.ofmt.? == .c) { + if (host.getExternalExecutor(target_info, .{ .link_libc = true }) != .native) { + // We wouldn't be able to run the compiled C code. + continue :update; // Pass test. + } + try argv.appendSlice(&[_][]const u8{ + zig_exe_path, + "run", + "-cflags", + "-std=c99", + "-pedantic", + "-Werror", + "-Wno-incompatible-library-redeclaration", // https://github.com/ziglang/zig/issues/875 + "--", + "-lc", + exe_path, + }); + if (zig_lib_directory.path) |p| { + try argv.appendSlice(&.{ "-I", p }); + } + } else switch (host.getExternalExecutor(target_info, .{ .link_libc = case.link_libc })) { + .native => { + if (case.backend == .stage2 and case.target.getCpuArch() == .arm) { + // https://github.com/ziglang/zig/issues/13623 + continue :update; // Pass test. + } + try argv.append(exe_path); + }, + .bad_dl, .bad_os_or_cpu => continue :update, // Pass test. + + .rosetta => if (enable_rosetta) { + try argv.append(exe_path); + } else { + continue :update; // Rosetta not available, pass test. + }, + + .qemu => |qemu_bin_name| if (enable_qemu) { + const need_cross_glibc = target.isGnuLibC() and case.link_libc; + const glibc_dir_arg: ?[]const u8 = if (need_cross_glibc) + glibc_runtimes_dir orelse continue :update // glibc dir not available; pass test + else + null; + try argv.append(qemu_bin_name); + if (glibc_dir_arg) |dir| { + const linux_triple = try target.linuxTriple(arena); + const full_dir = try std.fs.path.join(arena, &[_][]const u8{ + dir, + linux_triple, + }); + + try argv.append("-L"); + try argv.append(full_dir); + } + try argv.append(exe_path); + } else { + continue :update; // QEMU not available; pass test. + }, + + .wine => |wine_bin_name| if (enable_wine) { + try argv.append(wine_bin_name); + try argv.append(exe_path); + } else { + continue :update; // Wine not available; pass test. + }, + + .wasmtime => |wasmtime_bin_name| if (enable_wasmtime) { + try argv.append(wasmtime_bin_name); + try argv.append("--dir=."); + try argv.append(exe_path); + } else { + continue :update; // wasmtime not available; pass test. + }, + + .darling => |darling_bin_name| if (enable_darling) { + try argv.append(darling_bin_name); + // Since we use relative to cwd here, we invoke darling with + // "shell" subcommand. + try argv.append("shell"); + try argv.append(exe_path); + } else { + continue :update; // Darling not available; pass test. + }, + } + + try comp.makeBinFileExecutable(); + + while (true) { + break :x std.ChildProcess.exec(.{ + .allocator = allocator, + .argv = argv.items, + .cwd_dir = tmp.dir, + .cwd = tmp_dir_path, + }) catch |err| switch (err) { + error.FileBusy => { + // There is a fundamental design flaw in Unix systems with how + // ETXTBSY interacts with fork+exec. 
+ // https://github.com/golang/go/issues/22315 + // https://bugs.openjdk.org/browse/JDK-8068370 + // Unfortunately, this could be a real error, but we can't + // tell the difference here. + continue; + }, + else => { + std.debug.print("\n{s}.{d} The following command failed with {s}:\n", .{ + case.name, update_index, @errorName(err), + }); + dumpArgs(argv.items); + return error.ChildProcessExecution; + }, + }; + } + }; + var test_node = update_node.start("test", 0); + test_node.activate(); + defer test_node.end(); + defer allocator.free(exec_result.stdout); + defer allocator.free(exec_result.stderr); + switch (exec_result.term) { + .Exited => |code| { + if (code != 0) { + std.debug.print("\n{s}\n{s}: execution exited with code {d}:\n", .{ + exec_result.stderr, case.name, code, + }); + dumpArgs(argv.items); + return error.ChildProcessExecution; + } + }, + else => { + std.debug.print("\n{s}\n{s}: execution crashed:\n", .{ + exec_result.stderr, case.name, + }); + dumpArgs(argv.items); + return error.ChildProcessExecution; + }, + } + try std.testing.expectEqualStrings(expected_stdout, exec_result.stdout); + // We allow stderr to have garbage in it because wasmtime prints a + // warning about --invoke even though we don't pass it. + //std.testing.expectEqualStrings("", exec_result.stderr); + }, + } + } +} + +fn dumpArgs(argv: []const []const u8) void { + for (argv) |arg| { + std.debug.print("{s} ", .{arg}); + } + std.debug.print("\n", .{}); +} diff --git a/test/src/CompareOutput.zig b/test/src/CompareOutput.zig new file mode 100644 index 000000000000..854bd11f9cad --- /dev/null +++ b/test/src/CompareOutput.zig @@ -0,0 +1,174 @@ +//! This is the implementation of the test harness. +//! For the actual test cases, see test/compare_output.zig. + +b: *std.Build, +step: *std.Build.Step, +test_index: usize, +test_filter: ?[]const u8, +optimize_modes: []const OptimizeMode, + +const Special = enum { + None, + Asm, + RuntimeSafety, +}; + +const TestCase = struct { + name: []const u8, + sources: ArrayList(SourceFile), + expected_output: []const u8, + link_libc: bool, + special: Special, + cli_args: []const []const u8, + + const SourceFile = struct { + filename: []const u8, + source: []const u8, + }; + + pub fn addSourceFile(self: *TestCase, filename: []const u8, source: []const u8) void { + self.sources.append(SourceFile{ + .filename = filename, + .source = source, + }) catch @panic("OOM"); + } + + pub fn setCommandLineArgs(self: *TestCase, args: []const []const u8) void { + self.cli_args = args; + } +}; + +pub fn createExtra(self: *CompareOutput, name: []const u8, source: []const u8, expected_output: []const u8, special: Special) TestCase { + var tc = TestCase{ + .name = name, + .sources = ArrayList(TestCase.SourceFile).init(self.b.allocator), + .expected_output = expected_output, + .link_libc = false, + .special = special, + .cli_args = &[_][]const u8{}, + }; + const root_src_name = if (special == Special.Asm) "source.s" else "source.zig"; + tc.addSourceFile(root_src_name, source); + return tc; +} + +pub fn create(self: *CompareOutput, name: []const u8, source: []const u8, expected_output: []const u8) TestCase { + return createExtra(self, name, source, expected_output, Special.None); +} + +pub fn addC(self: *CompareOutput, name: []const u8, source: []const u8, expected_output: []const u8) void { + var tc = self.create(name, source, expected_output); + tc.link_libc = true; + self.addCase(tc); +} + +pub fn add(self: *CompareOutput, name: []const u8, source: []const u8, expected_output: []const u8) void { 
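+    // A typical call site in a test collection looks like (hypothetical case):
+    //     cases.add("hello world",
+    //         \\const std = @import("std");
+    //         \\pub fn main() !void {
+    //         \\    try std.io.getStdOut().writeAll("Hello, world!\n");
+    //         \\}
+    //     , "Hello, world!\n");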
+ const tc = self.create(name, source, expected_output); + self.addCase(tc); +} + +pub fn addAsm(self: *CompareOutput, name: []const u8, source: []const u8, expected_output: []const u8) void { + const tc = self.createExtra(name, source, expected_output, Special.Asm); + self.addCase(tc); +} + +pub fn addRuntimeSafety(self: *CompareOutput, name: []const u8, source: []const u8) void { + const tc = self.createExtra(name, source, undefined, Special.RuntimeSafety); + self.addCase(tc); +} + +pub fn addCase(self: *CompareOutput, case: TestCase) void { + const b = self.b; + + const write_src = b.addWriteFiles(); + for (case.sources.items) |src_file| { + write_src.add(src_file.filename, src_file.source); + } + + switch (case.special) { + Special.Asm => { + const annotated_case_name = fmt.allocPrint(self.b.allocator, "run assemble-and-link {s}", .{ + case.name, + }) catch @panic("OOM"); + if (self.test_filter) |filter| { + if (mem.indexOf(u8, annotated_case_name, filter) == null) return; + } + + const exe = b.addExecutable(.{ + .name = "test", + .target = .{}, + .optimize = .Debug, + }); + exe.addAssemblyFileSource(write_src.getFileSource(case.sources.items[0].filename).?); + + const run = exe.run(); + run.setName(annotated_case_name); + run.addArgs(case.cli_args); + run.expectStdOutEqual(case.expected_output); + + self.step.dependOn(&run.step); + }, + Special.None => { + for (self.optimize_modes) |optimize| { + const annotated_case_name = fmt.allocPrint(self.b.allocator, "run compare-output {s} ({s})", .{ + case.name, @tagName(optimize), + }) catch @panic("OOM"); + if (self.test_filter) |filter| { + if (mem.indexOf(u8, annotated_case_name, filter) == null) continue; + } + + const basename = case.sources.items[0].filename; + const exe = b.addExecutable(.{ + .name = "test", + .root_source_file = write_src.getFileSource(basename).?, + .optimize = optimize, + .target = .{}, + }); + if (case.link_libc) { + exe.linkSystemLibrary("c"); + } + + const run = exe.run(); + run.setName(annotated_case_name); + run.addArgs(case.cli_args); + run.expectStdOutEqual(case.expected_output); + + self.step.dependOn(&run.step); + } + }, + Special.RuntimeSafety => { + // TODO iterate over self.optimize_modes and test this in both + // debug and release safe mode + const annotated_case_name = fmt.allocPrint(self.b.allocator, "run safety {s}", .{case.name}) catch @panic("OOM"); + if (self.test_filter) |filter| { + if (mem.indexOf(u8, annotated_case_name, filter) == null) return; + } + + const basename = case.sources.items[0].filename; + const exe = b.addExecutable(.{ + .name = "test", + .root_source_file = write_src.getFileSource(basename).?, + .target = .{}, + .optimize = .Debug, + }); + if (case.link_libc) { + exe.linkSystemLibrary("c"); + } + + const run = exe.run(); + run.setName(annotated_case_name); + run.addArgs(case.cli_args); + run.expectExitCode(126); + + self.step.dependOn(&run.step); + }, + } +} + +const CompareOutput = @This(); +const std = @import("std"); +const ArrayList = std.ArrayList; +const fmt = std.fmt; +const mem = std.mem; +const fs = std.fs; +const OptimizeMode = std.builtin.OptimizeMode; diff --git a/test/src/StackTrace.zig b/test/src/StackTrace.zig new file mode 100644 index 000000000000..c32720a210b3 --- /dev/null +++ b/test/src/StackTrace.zig @@ -0,0 +1,107 @@ +b: *std.Build, +step: *Step, +test_index: usize, +test_filter: ?[]const u8, +optimize_modes: []const OptimizeMode, +check_exe: *std.Build.CompileStep, + +const Expect = [@typeInfo(OptimizeMode).Enum.fields.len][]const u8; + +pub fn 
addCase(self: *StackTrace, config: anytype) void { + if (@hasField(@TypeOf(config), "exclude")) { + if (config.exclude.exclude()) return; + } + if (@hasField(@TypeOf(config), "exclude_arch")) { + const exclude_arch: []const std.Target.Cpu.Arch = &config.exclude_arch; + for (exclude_arch) |arch| if (arch == builtin.cpu.arch) return; + } + if (@hasField(@TypeOf(config), "exclude_os")) { + const exclude_os: []const std.Target.Os.Tag = &config.exclude_os; + for (exclude_os) |os| if (os == builtin.os.tag) return; + } + for (self.optimize_modes) |optimize_mode| { + switch (optimize_mode) { + .Debug => { + if (@hasField(@TypeOf(config), "Debug")) { + self.addExpect(config.name, config.source, optimize_mode, config.Debug); + } + }, + .ReleaseSafe => { + if (@hasField(@TypeOf(config), "ReleaseSafe")) { + self.addExpect(config.name, config.source, optimize_mode, config.ReleaseSafe); + } + }, + .ReleaseFast => { + if (@hasField(@TypeOf(config), "ReleaseFast")) { + self.addExpect(config.name, config.source, optimize_mode, config.ReleaseFast); + } + }, + .ReleaseSmall => { + if (@hasField(@TypeOf(config), "ReleaseSmall")) { + self.addExpect(config.name, config.source, optimize_mode, config.ReleaseSmall); + } + }, + } + } +} + +fn addExpect( + self: *StackTrace, + name: []const u8, + source: []const u8, + optimize_mode: OptimizeMode, + mode_config: anytype, +) void { + if (@hasField(@TypeOf(mode_config), "exclude")) { + if (mode_config.exclude.exclude()) return; + } + if (@hasField(@TypeOf(mode_config), "exclude_arch")) { + const exclude_arch: []const std.Target.Cpu.Arch = &mode_config.exclude_arch; + for (exclude_arch) |arch| if (arch == builtin.cpu.arch) return; + } + if (@hasField(@TypeOf(mode_config), "exclude_os")) { + const exclude_os: []const std.Target.Os.Tag = &mode_config.exclude_os; + for (exclude_os) |os| if (os == builtin.os.tag) return; + } + + const b = self.b; + const annotated_case_name = fmt.allocPrint(b.allocator, "check {s} ({s})", .{ + name, @tagName(optimize_mode), + }) catch @panic("OOM"); + if (self.test_filter) |filter| { + if (mem.indexOf(u8, annotated_case_name, filter) == null) return; + } + + const src_basename = "source.zig"; + const write_src = b.addWriteFile(src_basename, source); + const exe = b.addExecutable(.{ + .name = "test", + .root_source_file = write_src.getFileSource(src_basename).?, + .optimize = optimize_mode, + .target = .{}, + }); + + const run = b.addRunArtifact(exe); + run.removeEnvironmentVariable("ZIG_DEBUG_COLOR"); + run.setEnvironmentVariable("NO_COLOR", "1"); + run.expectExitCode(1); + run.expectStdOutEqual(""); + + const check_run = b.addRunArtifact(self.check_exe); + check_run.setName(annotated_case_name); + check_run.addFileSourceArg(run.captureStdErr()); + check_run.addArgs(&.{ + @tagName(optimize_mode), + }); + check_run.expectStdOutEqual(mode_config.expect); + + self.step.dependOn(&check_run.step); +} + +const StackTrace = @This(); +const std = @import("std"); +const builtin = @import("builtin"); +const Step = std.Build.Step; +const OptimizeMode = std.builtin.OptimizeMode; +const fmt = std.fmt; +const mem = std.mem; diff --git a/test/src/check-stack-trace.zig b/test/src/check-stack-trace.zig new file mode 100644 index 000000000000..bb1db55076e0 --- /dev/null +++ b/test/src/check-stack-trace.zig @@ -0,0 +1,79 @@ +const builtin = @import("builtin"); +const std = @import("std"); +const mem = std.mem; +const fs = std.fs; + +pub fn main() !void { + var arena_instance = std.heap.ArenaAllocator.init(std.heap.page_allocator); + defer 
arena_instance.deinit(); + const arena = arena_instance.allocator(); + + const args = try std.process.argsAlloc(arena); + + const input_path = args[1]; + const optimize_mode_text = args[2]; + + const input_bytes = try std.fs.cwd().readFileAlloc(arena, input_path, 5 * 1024 * 1024); + const optimize_mode = std.meta.stringToEnum(std.builtin.OptimizeMode, optimize_mode_text).?; + + var stderr = input_bytes; + + // process result + // - keep only basename of source file path + // - replace address with symbolic string + // - replace function name with symbolic string when optimize_mode != .Debug + // - skip empty lines + const got: []const u8 = got_result: { + var buf = std.ArrayList(u8).init(arena); + defer buf.deinit(); + if (stderr.len != 0 and stderr[stderr.len - 1] == '\n') stderr = stderr[0 .. stderr.len - 1]; + var it = mem.split(u8, stderr, "\n"); + process_lines: while (it.next()) |line| { + if (line.len == 0) continue; + + // offset search past `[drive]:` on windows + var pos: usize = if (builtin.os.tag == .windows) 2 else 0; + // locate delims/anchor + const delims = [_][]const u8{ ":", ":", ":", " in ", "(", ")" }; + var marks = [_]usize{0} ** delims.len; + for (delims, 0..) |delim, i| { + marks[i] = mem.indexOfPos(u8, line, pos, delim) orelse { + // unexpected pattern: emit raw line and cont + try buf.appendSlice(line); + try buf.appendSlice("\n"); + continue :process_lines; + }; + pos = marks[i] + delim.len; + } + // locate source basename + pos = mem.lastIndexOfScalar(u8, line[0..marks[0]], fs.path.sep) orelse { + // unexpected pattern: emit raw line and cont + try buf.appendSlice(line); + try buf.appendSlice("\n"); + continue :process_lines; + }; + // end processing if source basename changes + if (!mem.eql(u8, "source.zig", line[pos + 1 .. marks[0]])) break; + // emit substituted line + try buf.appendSlice(line[pos + 1 .. marks[2] + delims[2].len]); + try buf.appendSlice(" [address]"); + if (optimize_mode == .Debug) { + // On certain platforms (windows) or possibly depending on how we choose to link main + // the object file extension may be present so we simply strip any extension. + if (mem.indexOfScalar(u8, line[marks[4]..marks[5]], '.')) |idot| { + try buf.appendSlice(line[marks[3] .. marks[4] + idot]); + try buf.appendSlice(line[marks[5]..]); + } else { + try buf.appendSlice(line[marks[3]..]); + } + } else { + try buf.appendSlice(line[marks[3] .. marks[3] + delims[3].len]); + try buf.appendSlice("[function]"); + } + try buf.appendSlice("\n"); + } + break :got_result try buf.toOwnedSlice(); + }; + + try std.io.getStdOut().writeAll(got); +} diff --git a/test/src/compare_output.zig b/test/src/compare_output.zig deleted file mode 100644 index 3bda3bdacd73..000000000000 --- a/test/src/compare_output.zig +++ /dev/null @@ -1,177 +0,0 @@ -// This is the implementation of the test harness. -// For the actual test cases, see test/compare_output.zig. 
-const std = @import("std"); -const ArrayList = std.ArrayList; -const fmt = std.fmt; -const mem = std.mem; -const fs = std.fs; -const OptimizeMode = std.builtin.OptimizeMode; - -pub const CompareOutputContext = struct { - b: *std.Build, - step: *std.Build.Step, - test_index: usize, - test_filter: ?[]const u8, - optimize_modes: []const OptimizeMode, - - const Special = enum { - None, - Asm, - RuntimeSafety, - }; - - const TestCase = struct { - name: []const u8, - sources: ArrayList(SourceFile), - expected_output: []const u8, - link_libc: bool, - special: Special, - cli_args: []const []const u8, - - const SourceFile = struct { - filename: []const u8, - source: []const u8, - }; - - pub fn addSourceFile(self: *TestCase, filename: []const u8, source: []const u8) void { - self.sources.append(SourceFile{ - .filename = filename, - .source = source, - }) catch unreachable; - } - - pub fn setCommandLineArgs(self: *TestCase, args: []const []const u8) void { - self.cli_args = args; - } - }; - - pub fn createExtra(self: *CompareOutputContext, name: []const u8, source: []const u8, expected_output: []const u8, special: Special) TestCase { - var tc = TestCase{ - .name = name, - .sources = ArrayList(TestCase.SourceFile).init(self.b.allocator), - .expected_output = expected_output, - .link_libc = false, - .special = special, - .cli_args = &[_][]const u8{}, - }; - const root_src_name = if (special == Special.Asm) "source.s" else "source.zig"; - tc.addSourceFile(root_src_name, source); - return tc; - } - - pub fn create(self: *CompareOutputContext, name: []const u8, source: []const u8, expected_output: []const u8) TestCase { - return createExtra(self, name, source, expected_output, Special.None); - } - - pub fn addC(self: *CompareOutputContext, name: []const u8, source: []const u8, expected_output: []const u8) void { - var tc = self.create(name, source, expected_output); - tc.link_libc = true; - self.addCase(tc); - } - - pub fn add(self: *CompareOutputContext, name: []const u8, source: []const u8, expected_output: []const u8) void { - const tc = self.create(name, source, expected_output); - self.addCase(tc); - } - - pub fn addAsm(self: *CompareOutputContext, name: []const u8, source: []const u8, expected_output: []const u8) void { - const tc = self.createExtra(name, source, expected_output, Special.Asm); - self.addCase(tc); - } - - pub fn addRuntimeSafety(self: *CompareOutputContext, name: []const u8, source: []const u8) void { - const tc = self.createExtra(name, source, undefined, Special.RuntimeSafety); - self.addCase(tc); - } - - pub fn addCase(self: *CompareOutputContext, case: TestCase) void { - const b = self.b; - - const write_src = b.addWriteFiles(); - for (case.sources.items) |src_file| { - write_src.add(src_file.filename, src_file.source); - } - - switch (case.special) { - Special.Asm => { - const annotated_case_name = fmt.allocPrint(self.b.allocator, "assemble-and-link {s}", .{ - case.name, - }) catch unreachable; - if (self.test_filter) |filter| { - if (mem.indexOf(u8, annotated_case_name, filter) == null) return; - } - - const exe = b.addExecutable(.{ - .name = "test", - .target = .{}, - .optimize = .Debug, - }); - exe.addAssemblyFileSource(write_src.getFileSource(case.sources.items[0].filename).?); - - const run = exe.run(); - run.addArgs(case.cli_args); - run.expectStdErrEqual(""); - run.expectStdOutEqual(case.expected_output); - - self.step.dependOn(&run.step); - }, - Special.None => { - for (self.optimize_modes) |optimize| { - const annotated_case_name = fmt.allocPrint(self.b.allocator, "{s} 
{s} ({s})", .{ - "compare-output", - case.name, - @tagName(optimize), - }) catch unreachable; - if (self.test_filter) |filter| { - if (mem.indexOf(u8, annotated_case_name, filter) == null) continue; - } - - const basename = case.sources.items[0].filename; - const exe = b.addExecutable(.{ - .name = "test", - .root_source_file = write_src.getFileSource(basename).?, - .optimize = optimize, - .target = .{}, - }); - if (case.link_libc) { - exe.linkSystemLibrary("c"); - } - - const run = exe.run(); - run.addArgs(case.cli_args); - run.expectStdErrEqual(""); - run.expectStdOutEqual(case.expected_output); - - self.step.dependOn(&run.step); - } - }, - Special.RuntimeSafety => { - // TODO iterate over self.optimize_modes and test this in both - // debug and release safe mode - const annotated_case_name = fmt.allocPrint(self.b.allocator, "safety {s}", .{case.name}) catch unreachable; - if (self.test_filter) |filter| { - if (mem.indexOf(u8, annotated_case_name, filter) == null) return; - } - - const basename = case.sources.items[0].filename; - const exe = b.addExecutable(.{ - .name = "test", - .root_source_file = write_src.getFileSource(basename).?, - .target = .{}, - .optimize = .Debug, - }); - if (case.link_libc) { - exe.linkSystemLibrary("c"); - } - - const run = exe.run(); - run.addArgs(case.cli_args); - run.stderr_action = .ignore; - run.stdout_action = .ignore; - run.expected_term = .{ .Exited = 126 }; - - self.step.dependOn(&run.step); - }, - } - } -}; diff --git a/test/standalone.zig b/test/standalone.zig index 7aa4d81f97ab..4cf795a85f00 100644 --- a/test/standalone.zig +++ b/test/standalone.zig @@ -1,117 +1,218 @@ -const std = @import("std"); -const builtin = @import("builtin"); -const tests = @import("tests.zig"); +pub const SimpleCase = struct { + src_path: []const u8, + link_libc: bool = false, + all_modes: bool = false, + target: std.zig.CrossTarget = .{}, + is_test: bool = false, + is_exe: bool = true, + /// Run only on this OS. 
+ os_filter: ?std.Target.Os.Tag = null, +}; -pub fn addCases(cases: *tests.StandaloneContext) void { - cases.add("test/standalone/hello_world/hello.zig"); - cases.addC("test/standalone/hello_world/hello_libc.zig"); +pub const BuildCase = struct { + build_root: []const u8, + import: type, +}; - cases.addBuildFile("test/standalone/options/build.zig", .{ - .extra_argv = &.{ - "-Dbool_true", - "-Dbool_false=false", - "-Dint=1234", - "-De=two", - "-Dstring=hello", +pub const simple_cases = [_]SimpleCase{ + .{ + .src_path = "test/standalone/hello_world/hello.zig", + .all_modes = true, + }, + .{ + .src_path = "test/standalone/hello_world/hello_libc.zig", + .link_libc = true, + .all_modes = true, + }, + .{ + .src_path = "test/standalone/cat/main.zig", + }, + // https://github.com/ziglang/zig/issues/6025 + //.{ + // .src_path = "test/standalone/issue_9693/main.zig", + //}, + .{ + .src_path = "test/standalone/brace_expansion.zig", + .is_test = true, + }, + .{ + .src_path = "test/standalone/issue_7030.zig", + .target = .{ + .cpu_arch = .wasm32, + .os_tag = .freestanding, }, - }); - - cases.add("test/standalone/cat/main.zig"); - if (builtin.zig_backend == .stage1) { // https://github.com/ziglang/zig/issues/6025 - cases.add("test/standalone/issue_9693/main.zig"); - } - cases.add("test/standalone/issue_12471/main.zig"); - cases.add("test/standalone/guess_number/main.zig"); - cases.add("test/standalone/main_return_error/error_u8.zig"); - cases.add("test/standalone/main_return_error/error_u8_non_zero.zig"); - cases.add("test/standalone/noreturn_call/inline.zig"); - cases.add("test/standalone/noreturn_call/as_arg.zig"); - cases.addBuildFile("test/standalone/test_runner_path/build.zig", .{ .requires_stage2 = true }); - cases.addBuildFile("test/standalone/issue_13970/build.zig", .{}); - cases.addBuildFile("test/standalone/main_pkg_path/build.zig", .{}); - cases.addBuildFile("test/standalone/shared_library/build.zig", .{}); - cases.addBuildFile("test/standalone/mix_o_files/build.zig", .{}); - cases.addBuildFile("test/standalone/mix_c_files/build.zig", .{ - .build_modes = true, - .cross_targets = true, - }); - cases.addBuildFile("test/standalone/global_linkage/build.zig", .{}); - cases.addBuildFile("test/standalone/static_c_lib/build.zig", .{}); - cases.addBuildFile("test/standalone/issue_339/build.zig", .{}); - cases.addBuildFile("test/standalone/issue_8550/build.zig", .{}); - cases.addBuildFile("test/standalone/issue_794/build.zig", .{}); - cases.addBuildFile("test/standalone/issue_5825/build.zig", .{}); - cases.addBuildFile("test/standalone/pkg_import/build.zig", .{}); - cases.addBuildFile("test/standalone/use_alias/build.zig", .{}); - cases.addBuildFile("test/standalone/brace_expansion/build.zig", .{}); - if (builtin.os.tag != .windows or builtin.cpu.arch != .aarch64) { - // https://github.com/ziglang/zig/issues/13685 - cases.addBuildFile("test/standalone/empty_env/build.zig", .{}); - } - cases.addBuildFile("test/standalone/issue_7030/build.zig", .{}); - cases.addBuildFile("test/standalone/install_raw_hex/build.zig", .{}); - if (builtin.zig_backend == .stage1) { // https://github.com/ziglang/zig/issues/12194 - cases.addBuildFile("test/standalone/issue_9812/build.zig", .{}); - } - if (builtin.os.tag != .windows) { - // https://github.com/ziglang/zig/issues/12419 - cases.addBuildFile("test/standalone/issue_11595/build.zig", .{}); - } - - if (builtin.os.tag != .wasi and - // https://github.com/ziglang/zig/issues/13550 - (builtin.os.tag != .macos or builtin.cpu.arch != .aarch64) and - // 
https://github.com/ziglang/zig/issues/13686 - (builtin.os.tag != .windows or builtin.cpu.arch != .aarch64)) - { - cases.addBuildFile("test/standalone/load_dynamic_library/build.zig", .{}); - } + }, - if (builtin.os.tag == .windows) { - cases.addBuildFile("test/standalone/windows_spawn/build.zig", .{}); - } + .{ .src_path = "test/standalone/issue_12471/main.zig" }, + .{ .src_path = "test/standalone/guess_number/main.zig" }, + .{ .src_path = "test/standalone/main_return_error/error_u8.zig" }, + .{ .src_path = "test/standalone/main_return_error/error_u8_non_zero.zig" }, + .{ .src_path = "test/standalone/noreturn_call/inline.zig" }, + .{ .src_path = "test/standalone/noreturn_call/as_arg.zig" }, - cases.addBuildFile("test/standalone/c_compiler/build.zig", .{ - .build_modes = true, - .cross_targets = true, - }); - - if (builtin.os.tag == .windows) { - cases.addC("test/standalone/issue_9402/main.zig"); - } - // Try to build and run a PIE executable. - if (builtin.os.tag == .linux) { - cases.addBuildFile("test/standalone/pie/build.zig", .{}); - } - cases.addBuildFile("test/standalone/issue_12706/build.zig", .{}); - if (std.os.have_sigpipe_support) { - cases.addBuildFile("test/standalone/sigpipe/build.zig", .{}); - } + .{ + .src_path = "test/standalone/issue_9402/main.zig", + .os_filter = .windows, + .link_libc = true, + }, // Ensure the development tools are buildable. Alphabetically sorted. // No need to build `tools/spirv/grammar.zig`. - cases.add("tools/extract-grammar.zig"); - cases.add("tools/gen_outline_atomics.zig"); - cases.add("tools/gen_spirv_spec.zig"); - cases.add("tools/gen_stubs.zig"); - cases.add("tools/generate_linux_syscalls.zig"); - cases.add("tools/process_headers.zig"); - cases.add("tools/update-license-headers.zig"); - cases.add("tools/update-linux-headers.zig"); - cases.add("tools/update_clang_options.zig"); - cases.add("tools/update_cpu_features.zig"); - cases.add("tools/update_glibc.zig"); - cases.add("tools/update_spirv_features.zig"); + .{ .src_path = "tools/extract-grammar.zig" }, + .{ .src_path = "tools/gen_outline_atomics.zig" }, + .{ .src_path = "tools/gen_spirv_spec.zig" }, + .{ .src_path = "tools/gen_stubs.zig" }, + .{ .src_path = "tools/generate_linux_syscalls.zig" }, + .{ .src_path = "tools/process_headers.zig" }, + .{ .src_path = "tools/update-license-headers.zig" }, + .{ .src_path = "tools/update-linux-headers.zig" }, + .{ .src_path = "tools/update_clang_options.zig" }, + .{ .src_path = "tools/update_cpu_features.zig" }, + .{ .src_path = "tools/update_glibc.zig" }, + .{ .src_path = "tools/update_spirv_features.zig" }, +}; - cases.addBuildFile("test/standalone/issue_13030/build.zig", .{ .build_modes = true }); - cases.addBuildFile("test/standalone/emit_asm_and_bin/build.zig", .{}); - cases.addBuildFile("test/standalone/issue_12588/build.zig", .{}); - cases.addBuildFile("test/standalone/embed_generated_file/build.zig", .{}); - cases.addBuildFile("test/standalone/extern/build.zig", .{}); +pub const build_cases = [_]BuildCase{ + .{ + .build_root = "test/standalone/test_runner_path", + .import = @import("standalone/test_runner_path/build.zig"), + }, + .{ + .build_root = "test/standalone/issue_13970", + .import = @import("standalone/issue_13970/build.zig"), + }, + .{ + .build_root = "test/standalone/main_pkg_path", + .import = @import("standalone/main_pkg_path/build.zig"), + }, + .{ + .build_root = "test/standalone/shared_library", + .import = @import("standalone/shared_library/build.zig"), + }, + .{ + .build_root = "test/standalone/mix_o_files", + .import = 
@import("standalone/mix_o_files/build.zig"), + }, + .{ + .build_root = "test/standalone/mix_c_files", + .import = @import("standalone/mix_c_files/build.zig"), + }, + .{ + .build_root = "test/standalone/global_linkage", + .import = @import("standalone/global_linkage/build.zig"), + }, + .{ + .build_root = "test/standalone/static_c_lib", + .import = @import("standalone/static_c_lib/build.zig"), + }, + .{ + .build_root = "test/standalone/issue_339", + .import = @import("standalone/issue_339/build.zig"), + }, + .{ + .build_root = "test/standalone/issue_8550", + .import = @import("standalone/issue_8550/build.zig"), + }, + .{ + .build_root = "test/standalone/issue_794", + .import = @import("standalone/issue_794/build.zig"), + }, + .{ + .build_root = "test/standalone/issue_5825", + .import = @import("standalone/issue_5825/build.zig"), + }, + .{ + .build_root = "test/standalone/pkg_import", + .import = @import("standalone/pkg_import/build.zig"), + }, + .{ + .build_root = "test/standalone/use_alias", + .import = @import("standalone/use_alias/build.zig"), + }, + .{ + .build_root = "test/standalone/install_raw_hex", + .import = @import("standalone/install_raw_hex/build.zig"), + }, + // TODO take away EmitOption.emit_to option and make it give a FileSource + //.{ + // .build_root = "test/standalone/emit_asm_and_bin", + // .import = @import("standalone/emit_asm_and_bin/build.zig"), + //}, + // TODO take away EmitOption.emit_to option and make it give a FileSource + //.{ + // .build_root = "test/standalone/issue_12588", + // .import = @import("standalone/issue_12588/build.zig"), + //}, + .{ + .build_root = "test/standalone/embed_generated_file", + .import = @import("standalone/embed_generated_file/build.zig"), + }, + .{ + .build_root = "test/standalone/extern", + .import = @import("standalone/extern/build.zig"), + }, + .{ + .build_root = "test/standalone/dep_diamond", + .import = @import("standalone/dep_diamond/build.zig"), + }, + .{ + .build_root = "test/standalone/dep_triangle", + .import = @import("standalone/dep_triangle/build.zig"), + }, + .{ + .build_root = "test/standalone/dep_recursive", + .import = @import("standalone/dep_recursive/build.zig"), + }, + .{ + .build_root = "test/standalone/dep_mutually_recursive", + .import = @import("standalone/dep_mutually_recursive/build.zig"), + }, + .{ + .build_root = "test/standalone/dep_shared_builtin", + .import = @import("standalone/dep_shared_builtin/build.zig"), + }, + .{ + .build_root = "test/standalone/empty_env", + .import = @import("standalone/empty_env/build.zig"), + }, + .{ + .build_root = "test/standalone/issue_9812", + .import = @import("standalone/issue_9812/build.zig"), + }, + .{ + .build_root = "test/standalone/issue_11595", + .import = @import("standalone/issue_11595/build.zig"), + }, + .{ + .build_root = "test/standalone/load_dynamic_library", + .import = @import("standalone/load_dynamic_library/build.zig"), + }, + .{ + .build_root = "test/standalone/windows_spawn", + .import = @import("standalone/windows_spawn/build.zig"), + }, + .{ + .build_root = "test/standalone/c_compiler", + .import = @import("standalone/c_compiler/build.zig"), + }, + .{ + .build_root = "test/standalone/pie", + .import = @import("standalone/pie/build.zig"), + }, + .{ + .build_root = "test/standalone/issue_12706", + .import = @import("standalone/issue_12706/build.zig"), + }, + // TODO This test is disabled for doing naughty things in the build script. + // The logic needs to get moved to a child process instead of build.zig. 
+ //.{ + // .build_root = "test/standalone/sigpipe", + // .import = @import("standalone/sigpipe/build.zig"), + //}, + .{ + .build_root = "test/standalone/issue_13030", + .import = @import("standalone/issue_13030/build.zig"), + }, +}; - cases.addBuildFile("test/standalone/dep_diamond/build.zig", .{}); - cases.addBuildFile("test/standalone/dep_triangle/build.zig", .{}); - cases.addBuildFile("test/standalone/dep_recursive/build.zig", .{}); - cases.addBuildFile("test/standalone/dep_mutually_recursive/build.zig", .{}); - cases.addBuildFile("test/standalone/dep_shared_builtin/build.zig", .{}); -} +const std = @import("std"); diff --git a/test/standalone/brace_expansion/main.zig b/test/standalone/brace_expansion.zig similarity index 98% rename from test/standalone/brace_expansion/main.zig rename to test/standalone/brace_expansion.zig index dcdcad3865d2..7a769f6af743 100644 --- a/test/standalone/brace_expansion/main.zig +++ b/test/standalone/brace_expansion.zig @@ -234,8 +234,8 @@ pub fn main() !void { var result_buf = ArrayList(u8).init(global_allocator); defer result_buf.deinit(); - try expandString(stdin.items, &result_buf); - try stdout_file.write(result_buf.items); + try expandString(stdin, &result_buf); + try stdout_file.writeAll(result_buf.items); } test "invalid inputs" { diff --git a/test/standalone/brace_expansion/build.zig b/test/standalone/brace_expansion/build.zig deleted file mode 100644 index 7c32a09befcb..000000000000 --- a/test/standalone/brace_expansion/build.zig +++ /dev/null @@ -1,11 +0,0 @@ -const std = @import("std"); - -pub fn build(b: *std.Build) void { - const main = b.addTest(.{ - .root_source_file = .{ .path = "main.zig" }, - .optimize = b.standardOptimizeOption(.{}), - }); - - const test_step = b.step("test", "Test it"); - test_step.dependOn(&main.step); -} diff --git a/test/standalone/c_compiler/build.zig b/test/standalone/c_compiler/build.zig index dce999d4a22e..6c5f2b4db6b9 100644 --- a/test/standalone/c_compiler/build.zig +++ b/test/standalone/c_compiler/build.zig @@ -1,27 +1,24 @@ const std = @import("std"); const builtin = @import("builtin"); -const CrossTarget = std.zig.CrossTarget; -// TODO integrate this with the std.Build executor API -fn isRunnableTarget(t: CrossTarget) bool { - if (t.isNative()) return true; +pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - return (t.getOsTag() == builtin.os.tag and - t.getCpuArch() == builtin.cpu.arch); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); } -pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = b.standardTargetOptions(.{}); - - const test_step = b.step("test", "Test the program"); +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { + const target: std.zig.CrossTarget = .{}; const exe_c = b.addExecutable(.{ .name = "test_c", .optimize = optimize, .target = target, }); - b.default_step.dependOn(&exe_c.step); exe_c.addCSourceFile("test.c", &[0][]const u8{}); exe_c.linkLibC(); @@ -47,13 +44,13 @@ pub fn build(b: *std.Build) void { else => {}, } - if (isRunnableTarget(target)) { - const run_c_cmd = exe_c.run(); - test_step.dependOn(&run_c_cmd.step); - const run_cpp_cmd = exe_cpp.run(); - test_step.dependOn(&run_cpp_cmd.step); - } else { - test_step.dependOn(&exe_c.step); - test_step.dependOn(&exe_cpp.step); - } + const run_c_cmd = b.addRunArtifact(exe_c); + 
run_c_cmd.expectExitCode(0); + run_c_cmd.skip_foreign_checks = true; + test_step.dependOn(&run_c_cmd.step); + + const run_cpp_cmd = b.addRunArtifact(exe_cpp); + run_cpp_cmd.expectExitCode(0); + run_cpp_cmd.skip_foreign_checks = true; + test_step.dependOn(&run_cpp_cmd.step); } diff --git a/test/standalone/dep_diamond/build.zig b/test/standalone/dep_diamond/build.zig index b60f898f0b34..12eda4ec5d72 100644 --- a/test/standalone/dep_diamond/build.zig +++ b/test/standalone/dep_diamond/build.zig @@ -1,7 +1,10 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; const shared = b.createModule(.{ .source_file = .{ .path = "shared.zig" }, @@ -23,6 +26,5 @@ pub fn build(b: *std.Build) void { const run = exe.run(); - const test_step = b.step("test", "Test it"); test_step.dependOn(&run.step); } diff --git a/test/standalone/dep_mutually_recursive/build.zig b/test/standalone/dep_mutually_recursive/build.zig index 0123646a9a7d..1a6bff85010f 100644 --- a/test/standalone/dep_mutually_recursive/build.zig +++ b/test/standalone/dep_mutually_recursive/build.zig @@ -1,7 +1,10 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; const foo = b.createModule(.{ .source_file = .{ .path = "foo.zig" }, @@ -21,6 +24,5 @@ pub fn build(b: *std.Build) void { const run = exe.run(); - const test_step = b.step("test", "Test it"); test_step.dependOn(&run.step); } diff --git a/test/standalone/dep_recursive/build.zig b/test/standalone/dep_recursive/build.zig index 32d546e283d6..35b9f3cc474f 100644 --- a/test/standalone/dep_recursive/build.zig +++ b/test/standalone/dep_recursive/build.zig @@ -1,7 +1,10 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; const foo = b.createModule(.{ .source_file = .{ .path = "foo.zig" }, @@ -17,6 +20,5 @@ pub fn build(b: *std.Build) void { const run = exe.run(); - const test_step = b.step("test", "Test it"); test_step.dependOn(&run.step); } diff --git a/test/standalone/dep_shared_builtin/build.zig b/test/standalone/dep_shared_builtin/build.zig index 6c029b654bf1..776794f95e31 100644 --- a/test/standalone/dep_shared_builtin/build.zig +++ b/test/standalone/dep_shared_builtin/build.zig @@ -1,7 +1,10 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; const exe = b.addExecutable(.{ .name = "test", @@ -14,6 +17,5 @@ pub fn build(b: *std.Build) void { const run = exe.run(); - const test_step = b.step("test", "Test it"); test_step.dependOn(&run.step); } diff --git a/test/standalone/dep_triangle/build.zig b/test/standalone/dep_triangle/build.zig index f3b73aaf35f7..14163df84c9a 100644 --- a/test/standalone/dep_triangle/build.zig +++ b/test/standalone/dep_triangle/build.zig @@ -1,7 +1,10 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", 
"Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; const shared = b.createModule(.{ .source_file = .{ .path = "shared.zig" }, @@ -20,6 +23,5 @@ pub fn build(b: *std.Build) void { const run = exe.run(); - const test_step = b.step("test", "Test it"); test_step.dependOn(&run.step); } diff --git a/test/standalone/embed_generated_file/build.zig b/test/standalone/embed_generated_file/build.zig index 3b17ff0b8f8d..af1ae0a00f70 100644 --- a/test/standalone/embed_generated_file/build.zig +++ b/test/standalone/embed_generated_file/build.zig @@ -1,8 +1,8 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const target = b.standardTargetOptions(.{}); - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; const bootloader = b.addExecutable(.{ .name = "bootloader", @@ -16,13 +16,11 @@ pub fn build(b: *std.Build) void { const exe = b.addTest(.{ .root_source_file = .{ .path = "main.zig" }, - .target = target, - .optimize = optimize, + .optimize = .Debug, }); exe.addAnonymousModule("bootloader.elf", .{ .source_file = bootloader.getOutputSource(), }); - const test_step = b.step("test", "Test the program"); test_step.dependOn(&exe.step); } diff --git a/test/standalone/emit_asm_and_bin/build.zig b/test/standalone/emit_asm_and_bin/build.zig index 5345f0f5387f..594bf6552e0c 100644 --- a/test/standalone/emit_asm_and_bin/build.zig +++ b/test/standalone/emit_asm_and_bin/build.zig @@ -1,6 +1,9 @@ const std = @import("std"); pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + const main = b.addTest(.{ .root_source_file = .{ .path = "main.zig" }, .optimize = b.standardOptimizeOption(.{}), @@ -8,6 +11,5 @@ pub fn build(b: *std.Build) void { main.emit_asm = .{ .emit_to = b.pathFromRoot("main.s") }; main.emit_bin = .{ .emit_to = b.pathFromRoot("main") }; - const test_step = b.step("test", "Run test"); - test_step.dependOn(&main.step); + test_step.dependOn(&main.run().step); } diff --git a/test/standalone/empty_env/build.zig b/test/standalone/empty_env/build.zig index c4b48461410f..27ec75be22df 100644 --- a/test/standalone/empty_env/build.zig +++ b/test/standalone/empty_env/build.zig @@ -1,15 +1,25 @@ const std = @import("std"); +const builtin = @import("builtin"); pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + + if (builtin.os.tag == .windows and builtin.cpu.arch == .aarch64) { + // https://github.com/ziglang/zig/issues/13685 + return; + } + const main = b.addExecutable(.{ .name = "main", .root_source_file = .{ .path = "main.zig" }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = optimize, }); - const run = main.run(); + const run = b.addRunArtifact(main); run.clearEnvironment(); - const test_step = b.step("test", "Test it"); test_step.dependOn(&run.step); } diff --git a/test/standalone/extern/build.zig b/test/standalone/extern/build.zig index 8a44a6ca8f9e..153380e91ded 100644 --- a/test/standalone/extern/build.zig +++ b/test/standalone/extern/build.zig @@ -1,7 +1,7 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const optimize: std.builtin.OptimizeMode = .Debug; const obj = b.addObject(.{ .name = "exports", diff --git a/test/standalone/global_linkage/build.zig b/test/standalone/global_linkage/build.zig index 
9f79c80fcf60..ddcddd612a13 100644 --- a/test/standalone/global_linkage/build.zig +++ b/test/standalone/global_linkage/build.zig @@ -1,20 +1,24 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test the program"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{}; const obj1 = b.addStaticLibrary(.{ .name = "obj1", .root_source_file = .{ .path = "obj1.zig" }, .optimize = optimize, - .target = .{}, + .target = target, }); const obj2 = b.addStaticLibrary(.{ .name = "obj2", .root_source_file = .{ .path = "obj2.zig" }, .optimize = optimize, - .target = .{}, + .target = target, }); const main = b.addTest(.{ @@ -24,6 +28,5 @@ pub fn build(b: *std.Build) void { main.linkLibrary(obj1); main.linkLibrary(obj2); - const test_step = b.step("test", "Test it"); - test_step.dependOn(&main.step); + test_step.dependOn(&main.run().step); } diff --git a/test/standalone/install_raw_hex/build.zig b/test/standalone/install_raw_hex/build.zig index 6ed515e3816d..b34bb013788e 100644 --- a/test/standalone/install_raw_hex/build.zig +++ b/test/standalone/install_raw_hex/build.zig @@ -3,8 +3,8 @@ const std = @import("std"); const CheckFileStep = std.Build.CheckFileStep; pub fn build(b: *std.Build) void { - const test_step = b.step("test", "Test the program"); - b.default_step.dependOn(test_step); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; const target = .{ .cpu_arch = .thumb, @@ -13,7 +13,7 @@ pub fn build(b: *std.Build) void { .abi = .gnueabihf, }; - const optimize = b.standardOptimizeOption(.{}); + const optimize: std.builtin.OptimizeMode = .Debug; const elf = b.addExecutable(.{ .name = "zig-nrf52-blink.elf", diff --git a/test/standalone/issue_11595/build.zig b/test/standalone/issue_11595/build.zig index c335fb73dafe..7d9530c6904d 100644 --- a/test/standalone/issue_11595/build.zig +++ b/test/standalone/issue_11595/build.zig @@ -1,18 +1,17 @@ const std = @import("std"); const builtin = @import("builtin"); -const CrossTarget = std.zig.CrossTarget; -// TODO integrate this with the std.Build executor API -fn isRunnableTarget(t: CrossTarget) bool { - if (t.isNative()) return true; +pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - return (t.getOsTag() == builtin.os.tag and - t.getCpuArch() == builtin.cpu.arch); -} + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{}; -pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = b.standardTargetOptions(.{}); + if (builtin.os.tag == .windows) { + // https://github.com/ziglang/zig/issues/12419 + return; + } const exe = b.addExecutable(.{ .name = "zigtest", @@ -44,11 +43,9 @@ pub fn build(b: *std.Build) void { b.default_step.dependOn(&exe.step); - const test_step = b.step("test", "Test the program"); - if (isRunnableTarget(target)) { - const run_cmd = exe.run(); - test_step.dependOn(&run_cmd.step); - } else { - test_step.dependOn(&exe.step); - } + const run_cmd = b.addRunArtifact(exe); + run_cmd.skip_foreign_checks = true; + run_cmd.expectExitCode(0); + + test_step.dependOn(&run_cmd.step); } diff --git a/test/standalone/issue_12588/build.zig b/test/standalone/issue_12588/build.zig index 9f14c53e3868..fa22252fcc83 100644 --- a/test/standalone/issue_12588/build.zig +++ b/test/standalone/issue_12588/build.zig @@ -1,8 +1,11 @@ 
const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = b.standardTargetOptions(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{}; const obj = b.addObject(.{ .name = "main", @@ -15,6 +18,5 @@ pub fn build(b: *std.Build) void { obj.emit_bin = .no_emit; b.default_step.dependOn(&obj.step); - const test_step = b.step("test", "Test the program"); test_step.dependOn(&obj.step); } diff --git a/test/standalone/issue_12706/build.zig b/test/standalone/issue_12706/build.zig index 9d616477a2c7..04eb826b44d8 100644 --- a/test/standalone/issue_12706/build.zig +++ b/test/standalone/issue_12706/build.zig @@ -2,17 +2,12 @@ const std = @import("std"); const builtin = @import("builtin"); const CrossTarget = std.zig.CrossTarget; -// TODO integrate this with the std.Build executor API -fn isRunnableTarget(t: CrossTarget) bool { - if (t.isNative()) return true; - - return (t.getOsTag() == builtin.os.tag and - t.getCpuArch() == builtin.cpu.arch); -} - pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = b.standardTargetOptions(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{}; const exe = b.addExecutable(.{ .name = "main", @@ -20,22 +15,15 @@ pub fn build(b: *std.Build) void { .optimize = optimize, .target = target, }); - exe.install(); const c_sources = [_][]const u8{ "test.c", }; - exe.addCSourceFiles(&c_sources, &.{}); exe.linkLibC(); - b.default_step.dependOn(&exe.step); - - const test_step = b.step("test", "Test the program"); - if (isRunnableTarget(target)) { - const run_cmd = exe.run(); - test_step.dependOn(&run_cmd.step); - } else { - test_step.dependOn(&exe.step); - } + const run_cmd = b.addRunArtifact(exe); + run_cmd.expectExitCode(0); + run_cmd.skip_foreign_checks = true; + test_step.dependOn(&run_cmd.step); } diff --git a/test/standalone/issue_13030/build.zig b/test/standalone/issue_13030/build.zig index 258d9b7db8a3..e31863fee2fb 100644 --- a/test/standalone/issue_13030/build.zig +++ b/test/standalone/issue_13030/build.zig @@ -3,17 +3,22 @@ const builtin = @import("builtin"); const CrossTarget = std.zig.CrossTarget; pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = b.standardTargetOptions(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); +} + +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const obj = b.addObject(.{ .name = "main", .root_source_file = .{ .path = "main.zig" }, .optimize = optimize, - .target = target, + .target = .{}, }); - b.default_step.dependOn(&obj.step); - const test_step = b.step("test", "Test the program"); test_step.dependOn(&obj.step); } diff --git a/test/standalone/issue_13970/build.zig b/test/standalone/issue_13970/build.zig index f5e07d890326..1eb8a5a1213f 100644 --- a/test/standalone/issue_13970/build.zig +++ b/test/standalone/issue_13970/build.zig @@ -1,6 +1,9 @@ const std = @import("std"); pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + const test1 = b.addTest(.{ 
.root_source_file = .{ .path = "test_root/empty.zig" }, }); @@ -14,8 +17,7 @@ pub fn build(b: *std.Build) void { test2.setTestRunner("src/main.zig"); test3.setTestRunner("src/main.zig"); - const test_step = b.step("test", "Test package path resolution of custom test runner"); - test_step.dependOn(&test1.step); - test_step.dependOn(&test2.step); - test_step.dependOn(&test3.step); + test_step.dependOn(&test1.run().step); + test_step.dependOn(&test2.run().step); + test_step.dependOn(&test3.run().step); } diff --git a/test/standalone/issue_339/build.zig b/test/standalone/issue_339/build.zig index 62ac128aabd7..f4215dbb8bc2 100644 --- a/test/standalone/issue_339/build.zig +++ b/test/standalone/issue_339/build.zig @@ -1,13 +1,18 @@ const std = @import("std"); pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{}; + const obj = b.addObject(.{ .name = "test", .root_source_file = .{ .path = "test.zig" }, - .target = b.standardTargetOptions(.{}), - .optimize = b.standardOptimizeOption(.{}), + .target = target, + .optimize = optimize, }); - const test_step = b.step("test", "Test the program"); test_step.dependOn(&obj.step); } diff --git a/test/standalone/issue_5825/build.zig b/test/standalone/issue_5825/build.zig index 89272280d4e4..e8e8d4877230 100644 --- a/test/standalone/issue_5825/build.zig +++ b/test/standalone/issue_5825/build.zig @@ -1,12 +1,15 @@ const std = @import("std"); pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + const target = .{ .cpu_arch = .x86_64, .os_tag = .windows, .abi = .msvc, }; - const optimize = b.standardOptimizeOption(.{}); + const optimize: std.builtin.OptimizeMode = .Debug; const obj = b.addObject(.{ .name = "issue_5825", .root_source_file = .{ .path = "main.zig" }, @@ -24,6 +27,5 @@ pub fn build(b: *std.Build) void { exe.linkSystemLibrary("ntdll"); exe.addObject(obj); - const test_step = b.step("test", "Test the program"); test_step.dependOn(&exe.step); } diff --git a/test/standalone/issue_7030/main.zig b/test/standalone/issue_7030.zig similarity index 100% rename from test/standalone/issue_7030/main.zig rename to test/standalone/issue_7030.zig diff --git a/test/standalone/issue_7030/build.zig b/test/standalone/issue_7030/build.zig deleted file mode 100644 index dc535318ccb5..000000000000 --- a/test/standalone/issue_7030/build.zig +++ /dev/null @@ -1,17 +0,0 @@ -const std = @import("std"); - -pub fn build(b: *std.Build) void { - const exe = b.addExecutable(.{ - .name = "issue_7030", - .root_source_file = .{ .path = "main.zig" }, - .target = .{ - .cpu_arch = .wasm32, - .os_tag = .freestanding, - }, - }); - exe.install(); - b.default_step.dependOn(&exe.step); - - const test_step = b.step("test", "Test the program"); - test_step.dependOn(&exe.step); -} diff --git a/test/standalone/issue_794/build.zig b/test/standalone/issue_794/build.zig index 3089a28fd012..8527f4af2c77 100644 --- a/test/standalone/issue_794/build.zig +++ b/test/standalone/issue_794/build.zig @@ -1,13 +1,13 @@ const std = @import("std"); pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + const test_artifact = b.addTest(.{ .root_source_file = .{ .path = "main.zig" }, }); test_artifact.addIncludePath("a_directory"); - b.default_step.dependOn(&test_artifact.step); - - const test_step = b.step("test", "Test the program"); 
test_step.dependOn(&test_artifact.step); } diff --git a/test/standalone/issue_8550/build.zig b/test/standalone/issue_8550/build.zig index c3303d55db5e..8f7631e68f77 100644 --- a/test/standalone/issue_8550/build.zig +++ b/test/standalone/issue_8550/build.zig @@ -1,6 +1,10 @@ const std = @import("std"); pub fn build(b: *std.Build) !void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; const target = std.zig.CrossTarget{ .os_tag = .freestanding, .cpu_arch = .arm, @@ -8,7 +12,7 @@ pub fn build(b: *std.Build) !void { .explicit = &std.Target.arm.cpu.arm1176jz_s, }, }; - const optimize = b.standardOptimizeOption(.{}); + const kernel = b.addExecutable(.{ .name = "kernel", .root_source_file = .{ .path = "./main.zig" }, @@ -19,6 +23,5 @@ pub fn build(b: *std.Build) !void { kernel.setLinkerScriptPath(.{ .path = "./linker.ld" }); kernel.install(); - const test_step = b.step("test", "Test it"); test_step.dependOn(&kernel.step); } diff --git a/test/standalone/issue_9812/build.zig b/test/standalone/issue_9812/build.zig index 4ca55ce99919..71104b903caf 100644 --- a/test/standalone/issue_9812/build.zig +++ b/test/standalone/issue_9812/build.zig @@ -1,7 +1,11 @@ const std = @import("std"); pub fn build(b: *std.Build) !void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const zip_add = b.addTest(.{ .root_source_file = .{ .path = "main.zig" }, .optimize = optimize, @@ -13,6 +17,5 @@ pub fn build(b: *std.Build) !void { zip_add.addIncludePath("vendor/kuba-zip"); zip_add.linkLibC(); - const test_step = b.step("test", "Test it"); test_step.dependOn(&zip_add.step); } diff --git a/test/standalone/load_dynamic_library/build.zig b/test/standalone/load_dynamic_library/build.zig index 44fc37893cea..06a5424a8deb 100644 --- a/test/standalone/load_dynamic_library/build.zig +++ b/test/standalone/load_dynamic_library/build.zig @@ -1,8 +1,19 @@ const std = @import("std"); +const builtin = @import("builtin"); pub fn build(b: *std.Build) void { - const target = b.standardTargetOptions(.{}); - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{}; + + const ok = (builtin.os.tag != .wasi and + // https://github.com/ziglang/zig/issues/13550 + (builtin.os.tag != .macos or builtin.cpu.arch != .aarch64) and + // https://github.com/ziglang/zig/issues/13686 + (builtin.os.tag != .windows or builtin.cpu.arch != .aarch64)); + if (!ok) return; const lib = b.addSharedLibrary(.{ .name = "add", @@ -19,9 +30,10 @@ pub fn build(b: *std.Build) void { .target = target, }); - const run = main.run(); + const run = b.addRunArtifact(main); run.addArtifactArg(lib); + run.skip_foreign_checks = true; + run.expectExitCode(0); - const test_step = b.step("test", "Test the program"); test_step.dependOn(&run.step); } diff --git a/test/standalone/load_dynamic_library/main.zig b/test/standalone/load_dynamic_library/main.zig index baf47c23adf6..b47ea8a81f1c 100644 --- a/test/standalone/load_dynamic_library/main.zig +++ b/test/standalone/load_dynamic_library/main.zig @@ -11,11 +11,7 @@ pub fn main() !void { var lib = try std.DynLib.open(dynlib_name); defer lib.close(); - const Add = switch (@import("builtin").zig_backend) { - .stage1 => fn (i32, i32) callconv(.C) i32, - else 
=> *const fn (i32, i32) callconv(.C) i32, - }; - + const Add = *const fn (i32, i32) callconv(.C) i32; const addFn = lib.lookup(Add, "add") orelse return error.SymbolNotFound; const result = addFn(12, 34); diff --git a/test/standalone/main_pkg_path/build.zig b/test/standalone/main_pkg_path/build.zig index f9919d5ab57a..a4dd301c43df 100644 --- a/test/standalone/main_pkg_path/build.zig +++ b/test/standalone/main_pkg_path/build.zig @@ -1,11 +1,13 @@ const std = @import("std"); pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + const test_exe = b.addTest(.{ .root_source_file = .{ .path = "a/test.zig" }, }); test_exe.setMainPkgPath("."); - const test_step = b.step("test", "Test the program"); - test_step.dependOn(&test_exe.step); + test_step.dependOn(&test_exe.run().step); } diff --git a/test/standalone/mix_c_files/build.zig b/test/standalone/mix_c_files/build.zig index f2dfb2093f10..0ea585e4e0cb 100644 --- a/test/standalone/mix_c_files/build.zig +++ b/test/standalone/mix_c_files/build.zig @@ -1,34 +1,28 @@ const std = @import("std"); -const builtin = @import("builtin"); -const CrossTarget = std.zig.CrossTarget; -// TODO integrate this with the std.Build executor API -fn isRunnableTarget(t: CrossTarget) bool { - if (t.isNative()) return true; +pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; - return (t.getOsTag() == builtin.os.tag and - t.getCpuArch() == builtin.cpu.arch); + add(b, test_step, .Debug); + add(b, test_step, .ReleaseFast); + add(b, test_step, .ReleaseSmall); + add(b, test_step, .ReleaseSafe); } -pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = b.standardTargetOptions(.{}); - +fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.OptimizeMode) void { const exe = b.addExecutable(.{ .name = "test", .root_source_file = .{ .path = "main.zig" }, .optimize = optimize, - .target = target, }); exe.addCSourceFile("test.c", &[_][]const u8{"-std=c11"}); exe.linkLibC(); b.default_step.dependOn(&exe.step); - const test_step = b.step("test", "Test the program"); - if (isRunnableTarget(target)) { - const run_cmd = exe.run(); - test_step.dependOn(&run_cmd.step); - } else { - test_step.dependOn(&exe.step); - } + const run_cmd = b.addRunArtifact(exe); + run_cmd.skip_foreign_checks = true; + run_cmd.expectExitCode(0); + + test_step.dependOn(&run_cmd.step); } diff --git a/test/standalone/mix_o_files/build.zig b/test/standalone/mix_o_files/build.zig index 2708343aa5a9..17ce55a8aaa1 100644 --- a/test/standalone/mix_o_files/build.zig +++ b/test/standalone/mix_o_files/build.zig @@ -1,18 +1,23 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{}; const obj = b.addObject(.{ .name = "base64", .root_source_file = .{ .path = "base64.zig" }, .optimize = optimize, - .target = .{}, + .target = target, }); const exe = b.addExecutable(.{ .name = "test", .optimize = optimize, + .target = target, }); exe.addCSourceFile("test.c", &[_][]const u8{"-std=c99"}); exe.addObject(obj); @@ -22,6 +27,5 @@ pub fn build(b: *std.Build) void { const run_cmd = exe.run(); - const test_step = b.step("test", "Test the program"); test_step.dependOn(&run_cmd.step); } diff --git a/test/standalone/options/build.zig 
b/test/standalone/options/build.zig index 3f1e823359b5..5e894102a716 100644 --- a/test/standalone/options/build.zig +++ b/test/standalone/options/build.zig @@ -20,5 +20,5 @@ pub fn build(b: *std.Build) void { options.addOption([]const u8, "string", b.option([]const u8, "string", "s").?); const test_step = b.step("test", "Run unit tests"); - test_step.dependOn(&main.step); + test_step.dependOn(&main.run().step); } diff --git a/test/standalone/pie/build.zig b/test/standalone/pie/build.zig index d51ea27328c9..e7ef5f97cc38 100644 --- a/test/standalone/pie/build.zig +++ b/test/standalone/pie/build.zig @@ -1,14 +1,24 @@ const std = @import("std"); pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{ + .os_tag = .linux, + .cpu_arch = .x86_64, + }; + const main = b.addTest(.{ .root_source_file = .{ .path = "main.zig" }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = optimize, + .target = target, }); main.pie = true; - const test_step = b.step("test", "Test the program"); - test_step.dependOn(&main.step); + const run = main.run(); + run.skip_foreign_checks = true; - b.default_step.dependOn(test_step); + test_step.dependOn(&run.step); } diff --git a/test/standalone/pkg_import/build.zig b/test/standalone/pkg_import/build.zig index 5ea6c90af776..42799ab8963b 100644 --- a/test/standalone/pkg_import/build.zig +++ b/test/standalone/pkg_import/build.zig @@ -1,7 +1,10 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; const exe = b.addExecutable(.{ .name = "test", @@ -12,6 +15,5 @@ pub fn build(b: *std.Build) void { const run = exe.run(); - const test_step = b.step("test", "Test it"); test_step.dependOn(&run.step); } diff --git a/test/standalone/shared_library/build.zig b/test/standalone/shared_library/build.zig index 91f7c8a06a67..63370af0cc04 100644 --- a/test/standalone/shared_library/build.zig +++ b/test/standalone/shared_library/build.zig @@ -1,8 +1,11 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); - const target = b.standardTargetOptions(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{}; const lib = b.addSharedLibrary(.{ .name = "mathtest", .root_source_file = .{ .path = "mathtest.zig" }, @@ -20,10 +23,7 @@ pub fn build(b: *std.Build) void { exe.linkLibrary(lib); exe.linkSystemLibrary("c"); - b.default_step.dependOn(&exe.step); - const run_cmd = exe.run(); - const test_step = b.step("test", "Test the program"); test_step.dependOn(&run_cmd.step); } diff --git a/test/standalone/sigpipe/build.zig b/test/standalone/sigpipe/build.zig index 763df5fe462f..6f50a86d68ce 100644 --- a/test/standalone/sigpipe/build.zig +++ b/test/standalone/sigpipe/build.zig @@ -2,7 +2,16 @@ const std = @import("std"); const os = std.os; pub fn build(b: *std.build.Builder) !void { - const test_step = b.step("test", "Run the tests"); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + // TODO signal handling code has no business being in a build script. + // this logic needs to move to a file called parent.zig which is + // added as an executable. 
+ + //if (!std.os.have_sigpipe_support) { + // return; + //} // This test runs "breakpipe" as a child process and that process // depends on inheriting a SIGPIPE disposition of "default". @@ -23,12 +32,12 @@ pub fn build(b: *std.build.Builder) !void { .root_source_file = .{ .path = "breakpipe.zig" }, }); exe.addOptions("build_options", options); - const run = exe.run(); + const run = b.addRunArtifact(exe); if (keep_sigpipe) { - run.expected_term = .{ .Signal = std.os.SIG.PIPE }; + run.addCheck(.{ .expect_term = .{ .Signal = std.os.SIG.PIPE } }); } else { - run.stdout_action = .{ .expect_exact = "BrokenPipe\n" }; - run.expected_term = .{ .Exited = 123 }; + run.addCheck(.{ .expect_stdout_exact = "BrokenPipe\n" }); + run.addCheck(.{ .expect_term = .{ .Exited = 123 } }); } test_step.dependOn(&run.step); } diff --git a/test/standalone/static_c_lib/build.zig b/test/standalone/static_c_lib/build.zig index 99378888433e..794b813b75f3 100644 --- a/test/standalone/static_c_lib/build.zig +++ b/test/standalone/static_c_lib/build.zig @@ -1,7 +1,10 @@ const std = @import("std"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; const foo = b.addStaticLibrary(.{ .name = "foo", @@ -18,6 +21,5 @@ pub fn build(b: *std.Build) void { test_exe.linkLibrary(foo); test_exe.addIncludePath("."); - const test_step = b.step("test", "Test it"); - test_step.dependOn(&test_exe.step); + test_step.dependOn(&test_exe.run().step); } diff --git a/test/standalone/test_runner_module_imports/build.zig b/test/standalone/test_runner_module_imports/build.zig index 973365e49594..73c5536dc6e1 100644 --- a/test/standalone/test_runner_module_imports/build.zig +++ b/test/standalone/test_runner_module_imports/build.zig @@ -15,5 +15,5 @@ pub fn build(b: *std.Build) void { t.addModule("module2", module2); const test_step = b.step("test", "Run unit tests"); - test_step.dependOn(&t.step); + test_step.dependOn(&t.run().step); } diff --git a/test/standalone/test_runner_path/build.zig b/test/standalone/test_runner_path/build.zig index f073c55d4a32..ce5b66805480 100644 --- a/test/standalone/test_runner_path/build.zig +++ b/test/standalone/test_runner_path/build.zig @@ -1,14 +1,17 @@ const std = @import("std"); +pub const requires_stage2 = true; + pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test the program"); + b.default_step = test_step; + const test_exe = b.addTest(.{ .root_source_file = .{ .path = "test.zig" }, - .kind = .test_exe, }); test_exe.test_runner = "test_runner.zig"; const test_run = test_exe.run(); - const test_step = b.step("test", "Test the program"); test_step.dependOn(&test_run.step); } diff --git a/test/standalone/test_runner_path/test_runner.zig b/test/standalone/test_runner_path/test_runner.zig index f49ff55caee7..2139ea8f68f1 100644 --- a/test/standalone/test_runner_path/test_runner.zig +++ b/test/standalone/test_runner_path/test_runner.zig @@ -1,51 +1,19 @@ const std = @import("std"); -const io = std.io; const builtin = @import("builtin"); -pub const io_mode: io.Mode = builtin.test_io_mode; - pub fn main() void { - const test_fn_list = builtin.test_functions; var ok_count: usize = 0; var skip_count: usize = 0; var fail_count: usize = 0; - var async_frame_buffer: []align(std.Target.stack_align) u8 = undefined; - // TODO this is on the next line (using `undefined` above) because otherwise zig incorrectly - // ignores the alignment of 
the slice. - async_frame_buffer = &[_]u8{}; - - for (test_fn_list) |test_fn| { - const result = if (test_fn.async_frame_size) |size| switch (io_mode) { - .evented => blk: { - if (async_frame_buffer.len < size) { - std.heap.page_allocator.free(async_frame_buffer); - async_frame_buffer = std.heap.page_allocator.alignedAlloc(u8, std.Target.stack_align, size) catch @panic("out of memory"); - } - const casted_fn = @ptrCast(fn () callconv(.Async) anyerror!void, test_fn.func); - break :blk await @asyncCall(async_frame_buffer, {}, casted_fn, .{}); - }, - .blocking => { - skip_count += 1; - continue; - }, - } else test_fn.func(); - if (result) |_| { + for (builtin.test_functions) |test_fn| { + if (test_fn.func()) |_| { ok_count += 1; } else |err| switch (err) { - error.SkipZigTest => { - skip_count += 1; - }, - else => { - fail_count += 1; - }, + error.SkipZigTest => skip_count += 1, + else => fail_count += 1, } } - if (ok_count == test_fn_list.len) { - std.debug.print("All {d} tests passed.\n", .{ok_count}); - } else { - std.debug.print("{d} passed; {d} skipped; {d} failed.\n", .{ ok_count, skip_count, fail_count }); - } if (ok_count != 1 or skip_count != 1 or fail_count != 1) { std.process.exit(1); } diff --git a/test/standalone/use_alias/build.zig b/test/standalone/use_alias/build.zig index 89e07efb22cc..db47fe669205 100644 --- a/test/standalone/use_alias/build.zig +++ b/test/standalone/use_alias/build.zig @@ -1,12 +1,16 @@ const std = @import("std"); pub fn build(b: *std.Build) void { + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const main = b.addTest(.{ .root_source_file = .{ .path = "main.zig" }, - .optimize = b.standardOptimizeOption(.{}), + .optimize = optimize, }); main.addIncludePath("."); - const test_step = b.step("test", "Test it"); - test_step.dependOn(&main.step); + test_step.dependOn(&main.run().step); } diff --git a/test/standalone/windows_spawn/build.zig b/test/standalone/windows_spawn/build.zig index 3ebde5a50c82..6c865f0a9fc1 100644 --- a/test/standalone/windows_spawn/build.zig +++ b/test/standalone/windows_spawn/build.zig @@ -1,23 +1,33 @@ const std = @import("std"); +const builtin = @import("builtin"); pub fn build(b: *std.Build) void { - const optimize = b.standardOptimizeOption(.{}); + const test_step = b.step("test", "Test it"); + b.default_step = test_step; + + const optimize: std.builtin.OptimizeMode = .Debug; + const target: std.zig.CrossTarget = .{}; + + if (builtin.os.tag != .windows) return; const hello = b.addExecutable(.{ .name = "hello", .root_source_file = .{ .path = "hello.zig" }, .optimize = optimize, + .target = target, }); const main = b.addExecutable(.{ .name = "main", .root_source_file = .{ .path = "main.zig" }, .optimize = optimize, + .target = target, }); - const run = main.run(); + const run = b.addRunArtifact(main); run.addArtifactArg(hello); + run.expectExitCode(0); + run.skip_foreign_checks = true; - const test_step = b.step("test", "Test it"); test_step.dependOn(&run.step); } diff --git a/test/tests.zig b/test/tests.zig index 035311372f61..3166fbc14ab4 100644 --- a/test/tests.zig +++ b/test/tests.zig @@ -1,16 +1,9 @@ const std = @import("std"); const builtin = @import("builtin"); -const debug = std.debug; +const assert = std.debug.assert; const CrossTarget = std.zig.CrossTarget; -const io = std.io; -const fs = std.fs; const mem = std.mem; -const fmt = std.fmt; -const ArrayList = std.ArrayList; const OptimizeMode = std.builtin.OptimizeMode; -const CompileStep = 
std.Build.CompileStep; -const Allocator = mem.Allocator; -const ExecError = std.Build.ExecError; const Step = std.Build.Step; // Cases @@ -20,13 +13,13 @@ const stack_traces = @import("stack_traces.zig"); const assemble_and_link = @import("assemble_and_link.zig"); const translate_c = @import("translate_c.zig"); const run_translated_c = @import("run_translated_c.zig"); -const gen_h = @import("gen_h.zig"); const link = @import("link.zig"); // Implementations pub const TranslateCContext = @import("src/translate_c.zig").TranslateCContext; pub const RunTranslatedCContext = @import("src/run_translated_c.zig").RunTranslatedCContext; -pub const CompareOutputContext = @import("src/compare_output.zig").CompareOutputContext; +pub const CompareOutputContext = @import("src/CompareOutput.zig"); +pub const StackTracesContext = @import("src/StackTrace.zig"); const TestTarget = struct { target: CrossTarget = @as(CrossTarget, .{}), @@ -460,10 +453,71 @@ const test_targets = blk: { }; }; -const max_stdout_size = 1 * 1024 * 1024; // 1 MB +const c_abi_targets = [_]CrossTarget{ + .{}, + .{ + .cpu_arch = .x86_64, + .os_tag = .linux, + .abi = .musl, + }, + .{ + .cpu_arch = .x86, + .os_tag = .linux, + .abi = .musl, + }, + .{ + .cpu_arch = .aarch64, + .os_tag = .linux, + .abi = .musl, + }, + .{ + .cpu_arch = .arm, + .os_tag = .linux, + .abi = .musleabihf, + }, + .{ + .cpu_arch = .mips, + .os_tag = .linux, + .abi = .musl, + }, + .{ + .cpu_arch = .riscv64, + .os_tag = .linux, + .abi = .musl, + }, + .{ + .cpu_arch = .wasm32, + .os_tag = .wasi, + .abi = .musl, + }, + .{ + .cpu_arch = .powerpc, + .os_tag = .linux, + .abi = .musl, + }, + .{ + .cpu_arch = .powerpc64le, + .os_tag = .linux, + .abi = .musl, + }, + .{ + .cpu_arch = .x86, + .os_tag = .windows, + .abi = .gnu, + }, + .{ + .cpu_arch = .x86_64, + .os_tag = .windows, + .abi = .gnu, + }, +}; -pub fn addCompareOutputTests(b: *std.Build, test_filter: ?[]const u8, optimize_modes: []const OptimizeMode) *Step { - const cases = b.allocator.create(CompareOutputContext) catch unreachable; +pub fn addCompareOutputTests( + b: *std.Build, + test_filter: ?[]const u8, + optimize_modes: []const OptimizeMode, +) *Step { + const cases = b.allocator.create(CompareOutputContext) catch @panic("OOM"); cases.* = CompareOutputContext{ .b = b, .step = b.step("test-compare-output", "Run the compare output tests"), @@ -477,14 +531,26 @@ pub fn addCompareOutputTests(b: *std.Build, test_filter: ?[]const u8, optimize_m return cases.step; } -pub fn addStackTraceTests(b: *std.Build, test_filter: ?[]const u8, optimize_modes: []const OptimizeMode) *Step { - const cases = b.allocator.create(StackTracesContext) catch unreachable; - cases.* = StackTracesContext{ +pub fn addStackTraceTests( + b: *std.Build, + test_filter: ?[]const u8, + optimize_modes: []const OptimizeMode, +) *Step { + const check_exe = b.addExecutable(.{ + .name = "check-stack-trace", + .root_source_file = .{ .path = "test/src/check-stack-trace.zig" }, + .target = .{}, + .optimize = .Debug, + }); + + const cases = b.allocator.create(StackTracesContext) catch @panic("OOM"); + cases.* = .{ .b = b, .step = b.step("test-stack-traces", "Run the stack trace tests"), .test_index = 0, .test_filter = test_filter, .optimize_modes = optimize_modes, + .check_exe = check_exe, }; stack_traces.addCases(cases); @@ -494,91 +560,302 @@ pub fn addStackTraceTests(b: *std.Build, test_filter: ?[]const u8, optimize_mode pub fn addStandaloneTests( b: *std.Build, - test_filter: ?[]const u8, optimize_modes: []const OptimizeMode, - skip_non_native: bool, 
enable_macos_sdk: bool, - target: std.zig.CrossTarget, omit_stage2: bool, - enable_darling: bool, - enable_qemu: bool, - enable_rosetta: bool, - enable_wasmtime: bool, - enable_wine: bool, enable_symlinks_windows: bool, ) *Step { - const cases = b.allocator.create(StandaloneContext) catch unreachable; - cases.* = StandaloneContext{ - .b = b, - .step = b.step("test-standalone", "Run the standalone tests"), - .test_index = 0, - .test_filter = test_filter, - .optimize_modes = optimize_modes, - .skip_non_native = skip_non_native, - .enable_macos_sdk = enable_macos_sdk, - .target = target, - .omit_stage2 = omit_stage2, - .enable_darling = enable_darling, - .enable_qemu = enable_qemu, - .enable_rosetta = enable_rosetta, - .enable_wasmtime = enable_wasmtime, - .enable_wine = enable_wine, - .enable_symlinks_windows = enable_symlinks_windows, - }; + const step = b.step("test-standalone", "Run the standalone tests"); + const omit_symlinks = builtin.os.tag == .windows and !enable_symlinks_windows; + + for (standalone.simple_cases) |case| { + for (optimize_modes) |optimize| { + if (!case.all_modes and optimize != .Debug) continue; + if (case.os_filter) |os_tag| { + if (os_tag != builtin.os.tag) continue; + } - standalone.addCases(cases); + if (case.is_exe) { + const exe = b.addExecutable(.{ + .name = std.fs.path.stem(case.src_path), + .root_source_file = .{ .path = case.src_path }, + .optimize = optimize, + .target = case.target, + }); + if (case.link_libc) exe.linkLibC(); - return cases.step; + step.dependOn(&exe.step); + } + + if (case.is_test) { + const exe = b.addTest(.{ + .name = std.fs.path.stem(case.src_path), + .root_source_file = .{ .path = case.src_path }, + .optimize = optimize, + .target = case.target, + }); + if (case.link_libc) exe.linkLibC(); + + step.dependOn(&exe.run().step); + } + } + } + + inline for (standalone.build_cases) |case| { + const requires_stage2 = @hasDecl(case.import, "requires_stage2") and + case.import.requires_stage2; + const requires_symlinks = @hasDecl(case.import, "requires_symlinks") and + case.import.requires_symlinks; + const requires_macos_sdk = @hasDecl(case.import, "requires_macos_sdk") and + case.import.requires_macos_sdk; + const bad = + (requires_stage2 and omit_stage2) or + (requires_symlinks and omit_symlinks) or + (requires_macos_sdk and !enable_macos_sdk); + if (!bad) { + const dep = b.anonymousDependency(case.build_root, case.import, .{}); + const dep_step = dep.builder.default_step; + assert(mem.startsWith(u8, dep.builder.dep_prefix, "test.")); + const dep_prefix_adjusted = dep.builder.dep_prefix["test.".len..]; + dep_step.name = b.fmt("{s}{s}", .{ dep_prefix_adjusted, dep_step.name }); + step.dependOn(dep_step); + } + } + + return step; } pub fn addLinkTests( b: *std.Build, - test_filter: ?[]const u8, - optimize_modes: []const OptimizeMode, enable_macos_sdk: bool, omit_stage2: bool, enable_symlinks_windows: bool, ) *Step { - const cases = b.allocator.create(StandaloneContext) catch unreachable; - cases.* = StandaloneContext{ - .b = b, - .step = b.step("test-link", "Run the linker tests"), - .test_index = 0, - .test_filter = test_filter, - .optimize_modes = optimize_modes, - .skip_non_native = true, - .enable_macos_sdk = enable_macos_sdk, - .target = .{}, - .omit_stage2 = omit_stage2, - .enable_symlinks_windows = enable_symlinks_windows, - }; - link.addCases(cases); - return cases.step; + const step = b.step("test-link", "Run the linker tests"); + const omit_symlinks = builtin.os.tag == .windows and !enable_symlinks_windows; + + inline for 
(link.cases) |case| { + const requires_stage2 = @hasDecl(case.import, "requires_stage2") and + case.import.requires_stage2; + const requires_symlinks = @hasDecl(case.import, "requires_symlinks") and + case.import.requires_symlinks; + const requires_macos_sdk = @hasDecl(case.import, "requires_macos_sdk") and + case.import.requires_macos_sdk; + const bad = + (requires_stage2 and omit_stage2) or + (requires_symlinks and omit_symlinks) or + (requires_macos_sdk and !enable_macos_sdk); + if (!bad) { + const dep = b.anonymousDependency(case.build_root, case.import, .{}); + const dep_step = dep.builder.default_step; + assert(mem.startsWith(u8, dep.builder.dep_prefix, "test.")); + const dep_prefix_adjusted = dep.builder.dep_prefix["test.".len..]; + dep_step.name = b.fmt("{s}{s}", .{ dep_prefix_adjusted, dep_step.name }); + step.dependOn(dep_step); + } + } + + return step; } -pub fn addCliTests(b: *std.Build, test_filter: ?[]const u8, optimize_modes: []const OptimizeMode) *Step { - _ = test_filter; - _ = optimize_modes; +pub fn addCliTests(b: *std.Build) *Step { const step = b.step("test-cli", "Test the command line interface"); + const s = std.fs.path.sep_str; + + { + + // Test `zig init-lib`. + const tmp_path = b.makeTempPath(); + const init_lib = b.addSystemCommand(&.{ b.zig_exe, "init-lib" }); + init_lib.cwd = tmp_path; + init_lib.setName("zig init-lib"); + init_lib.expectStdOutEqual(""); + init_lib.expectStdErrEqual("info: Created build.zig\n" ++ + "info: Created src" ++ s ++ "main.zig\n" ++ + "info: Next, try `zig build --help` or `zig build test`\n"); + + const run_test = b.addSystemCommand(&.{ b.zig_exe, "build", "test" }); + run_test.cwd = tmp_path; + run_test.setName("zig build test"); + run_test.expectStdOutEqual(""); + run_test.step.dependOn(&init_lib.step); + + const cleanup = b.addRemoveDirTree(tmp_path); + cleanup.step.dependOn(&run_test.step); + + step.dependOn(&cleanup.step); + } - const exe = b.addExecutable(.{ - .name = "test-cli", - .root_source_file = .{ .path = "test/cli.zig" }, - .target = .{}, - .optimize = .Debug, - }); - const run_cmd = exe.run(); - run_cmd.addArgs(&[_][]const u8{ - fs.realpathAlloc(b.allocator, b.zig_exe) catch unreachable, - b.pathFromRoot(b.cache_root.path orelse "."), - }); + { + // Test `zig init-exe`. + const tmp_path = b.makeTempPath(); + const init_exe = b.addSystemCommand(&.{ b.zig_exe, "init-exe" }); + init_exe.cwd = tmp_path; + init_exe.setName("zig init-exe"); + init_exe.expectStdOutEqual(""); + init_exe.expectStdErrEqual("info: Created build.zig\n" ++ + "info: Created src" ++ s ++ "main.zig\n" ++ + "info: Next, try `zig build --help` or `zig build run`\n"); + + // Test missing output path. 
+        const bad_out_arg = "-femit-bin=does" ++ s ++ "not" ++ s ++ "exist" ++ s ++ "foo.exe";
+        const ok_src_arg = "src" ++ s ++ "main.zig";
+        const expected = "error: unable to open output directory 'does" ++ s ++ "not" ++ s ++ "exist': FileNotFound\n";
+        const run_bad = b.addSystemCommand(&.{ b.zig_exe, "build-exe", ok_src_arg, bad_out_arg });
+        run_bad.setName("zig build-exe error message for bad -femit-bin arg");
+        run_bad.expectExitCode(1);
+        run_bad.expectStdErrEqual(expected);
+        run_bad.expectStdOutEqual("");
+        run_bad.step.dependOn(&init_exe.step);
+
+        const run_test = b.addSystemCommand(&.{ b.zig_exe, "build", "test" });
+        run_test.cwd = tmp_path;
+        run_test.setName("zig build test");
+        run_test.expectStdOutEqual("");
+        run_test.step.dependOn(&init_exe.step);
+
+        const run_run = b.addSystemCommand(&.{ b.zig_exe, "build", "run" });
+        run_run.cwd = tmp_path;
+        run_run.setName("zig build run");
+        run_run.expectStdOutEqual("Run `zig build test` to run the tests.\n");
+        run_run.expectStdErrEqual("All your codebase are belong to us.\n");
+        run_run.step.dependOn(&init_exe.step);
+
+        const cleanup = b.addRemoveDirTree(tmp_path);
+        cleanup.step.dependOn(&run_test.step);
+        cleanup.step.dependOn(&run_run.step);
+        cleanup.step.dependOn(&run_bad.step);
+
+        step.dependOn(&cleanup.step);
+    }
+
+    // Test Godbolt API
+    if (builtin.os.tag == .linux and builtin.cpu.arch == .x86_64) {
+        const tmp_path = b.makeTempPath();
+
+        const writefile = b.addWriteFile("example.zig",
+            \\// Type your code here, or load an example.
+            \\export fn square(num: i32) i32 {
+            \\    return num * num;
+            \\}
+            \\extern fn zig_panic() noreturn;
+            \\pub fn panic(msg: []const u8, error_return_trace: ?*@import("std").builtin.StackTrace, _: ?usize) noreturn {
+            \\    _ = msg;
+            \\    _ = error_return_trace;
+            \\    zig_panic();
+            \\}
+        );
+
+        // This is intended to be the exact CLI usage used by godbolt.org.
+        const run = b.addSystemCommand(&.{
+            b.zig_exe,       "build-obj",
+            "--cache-dir",   tmp_path,
+            "--name",        "example",
+            "-fno-emit-bin", "-fno-emit-h",
+            "-fstrip",       "-OReleaseFast",
+        });
+        run.addFileSourceArg(writefile.getFileSource("example.zig").?);
+        const example_s = run.addPrefixedOutputFileArg("-femit-asm=", "example.s");
+
+        const checkfile = b.addCheckFile(example_s, .{
+            .expected_matches = &.{
+                "square:",
+                "mov\teax, edi",
+                "imul\teax, edi",
+            },
+        });
+        checkfile.setName("check godbolt.org CLI usage generating valid asm");
+
+        const cleanup = b.addRemoveDirTree(tmp_path);
+        cleanup.step.dependOn(&checkfile.step);
+
+        step.dependOn(&cleanup.step);
+    }
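The Godbolt block above doubles as the general pattern for validating an emitted artifact without installing it: `addPrefixedOutputFileArg` hands the compiler a cache-managed output path and returns it as a file source, and `addCheckFile` turns "does the artifact contain these strings?" into a build step. A stripped-down sketch under those assumptions (file names and the `check-asm` step name are illustrative, not part of this PR):

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        // Compile a file to assembly without emitting a binary, then grep the asm.
        const run = b.addSystemCommand(&.{ b.zig_exe, "build-obj", "-fno-emit-bin", "-OReleaseFast" });
        run.addFileSourceArg(.{ .path = "example.zig" });
        const asm_file = run.addPrefixedOutputFileArg("-femit-asm=", "example.s");

        const check = b.addCheckFile(asm_file, .{ .expected_matches = &.{"square:"} });
        b.step("check-asm", "Verify the generated assembly").dependOn(&check.step);
    }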
+
+    {
+        // Test `zig fmt`.
+        // This test must use a temporary directory rather than a cache
+        // directory because this test will be mutating the files. The cache
+        // system relies on cache directories being mutated only by their
+        // owners.
+        const tmp_path = b.makeTempPath();
+        const unformatted_code = "    // no reason for indent";
+
+        var dir = std.fs.cwd().openDir(tmp_path, .{}) catch @panic("unhandled");
+        defer dir.close();
+        dir.writeFile("fmt1.zig", unformatted_code) catch @panic("unhandled");
+        dir.writeFile("fmt2.zig", unformatted_code) catch @panic("unhandled");
+
+        // Test zig fmt affecting only the appropriate files.
+        const run1 = b.addSystemCommand(&.{ b.zig_exe, "fmt", "fmt1.zig" });
+        run1.setName("run zig fmt one file");
+        run1.cwd = tmp_path;
+        run1.has_side_effects = true;
+        // stdout should be file path + \n
+        run1.expectStdOutEqual("fmt1.zig\n");
+
+        // running it on the dir, only the new file should be changed
+        const run2 = b.addSystemCommand(&.{ b.zig_exe, "fmt", "." });
+        run2.setName("run zig fmt the directory");
+        run2.cwd = tmp_path;
+        run2.has_side_effects = true;
+        run2.expectStdOutEqual("." ++ s ++ "fmt2.zig\n");
+        run2.step.dependOn(&run1.step);
+
+        // both files have been formatted, nothing should change now
+        const run3 = b.addSystemCommand(&.{ b.zig_exe, "fmt", "." });
+        run3.setName("run zig fmt with nothing to do");
+        run3.cwd = tmp_path;
+        run3.has_side_effects = true;
+        run3.expectStdOutEqual("");
+        run3.step.dependOn(&run2.step);
+
+        const unformatted_code_utf16 = "\xff\xfe \x00 \x00 \x00 \x00/\x00/\x00 \x00n\x00o\x00 \x00r\x00e\x00a\x00s\x00o\x00n\x00";
+        const fmt4_path = std.fs.path.join(b.allocator, &.{ tmp_path, "fmt4.zig" }) catch @panic("OOM");
+        const write4 = b.addWriteFiles();
+        write4.addBytesToSource(unformatted_code_utf16, fmt4_path);
+        write4.step.dependOn(&run3.step);
+
+        // Test `zig fmt` handling UTF-16 decoding.
+        const run4 = b.addSystemCommand(&.{ b.zig_exe, "fmt", "." });
+        run4.setName("run zig fmt convert UTF-16 to UTF-8");
+        run4.cwd = tmp_path;
+        run4.has_side_effects = true;
+        run4.expectStdOutEqual("." ++ s ++ "fmt4.zig\n");
+        run4.step.dependOn(&write4.step);
+
+        // TODO change this to an exact match
+        const check4 = b.addCheckFile(.{ .path = fmt4_path }, .{
+            .expected_matches = &.{
+                "// no reason",
+            },
+        });
+        check4.step.dependOn(&run4.step);
+
+        const cleanup = b.addRemoveDirTree(tmp_path);
+        cleanup.step.dependOn(&check4.step);
+
+        step.dependOn(&cleanup.step);
+    }
+
+    {
+        // TODO this should move to become a CLI test rather than standalone
+        // cases.addBuildFile("test/standalone/options/build.zig", .{
+        //     .extra_argv = &.{
+        //         "-Dbool_true",
+        //         "-Dbool_false=false",
+        //         "-Dint=1234",
+        //         "-De=two",
+        //         "-Dstring=hello",
+        //     },
+        // });
+    }
-    step.dependOn(&run_cmd.step);
     return step;
 }
 
 pub fn addAssembleAndLinkTests(b: *std.Build, test_filter: ?[]const u8, optimize_modes: []const OptimizeMode) *Step {
-    const cases = b.allocator.create(CompareOutputContext) catch unreachable;
+    const cases = b.allocator.create(CompareOutputContext) catch @panic("OOM");
     cases.* = CompareOutputContext{
         .b = b,
         .step = b.step("test-asm-link", "Run the assemble and link tests"),
@@ -593,7 +870,7 @@ pub fn addAssembleAndLinkTests(b: *std.Build, test_filter: ?[]const u8, optimize
 }
 
 pub fn addTranslateCTests(b: *std.Build, test_filter: ?[]const u8) *Step {
-    const cases = b.allocator.create(TranslateCContext) catch unreachable;
+    const cases = b.allocator.create(TranslateCContext) catch @panic("OOM");
     cases.* = TranslateCContext{
         .b = b,
         .step = b.step("test-translate-c", "Run the C translation tests"),
@@ -611,7 +888,7 @@ pub fn addRunTranslatedCTests(
     test_filter: ?[]const u8,
     target: std.zig.CrossTarget,
 ) *Step {
-    const cases = b.allocator.create(RunTranslatedCContext) catch unreachable;
+    const cases = b.allocator.create(RunTranslatedCContext) catch @panic("OOM");
     cases.* = .{
         .b = b,
         .step = b.step("test-run-translated-c", "Run the Run-Translated-C tests"),
@@ -625,22 +902,7 @@ pub fn addRunTranslatedCTests(
     return cases.step;
 }
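The recurring `catch unreachable` → `catch @panic("OOM")` change in these hunks is a behavioral hardening, not just style: `unreachable` is only checked in safe optimize modes, while `@panic` reports the failure in every mode. A minimal standalone illustration (not part of the diff itself):

    const std = @import("std");

    fn demo(gpa: std.mem.Allocator) *u32 {
        // Before: if allocation fails in ReleaseFast/ReleaseSmall this is
        // undefined behavior rather than a diagnosable crash.
        //     const p = gpa.create(u32) catch unreachable;
        // After: an explicit, always-on failure message.
        const p = gpa.create(u32) catch @panic("OOM");
        p.* = 42;
        return p;
    }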
-pub fn addGenHTests(b: *std.Build, test_filter: ?[]const u8) *Step {
-    const cases = b.allocator.create(GenHContext) catch unreachable;
-    cases.* = GenHContext{
-        .b = b,
-        .step = b.step("test-gen-h", "Run the C header file generation tests"),
-        .test_index = 0,
-        .test_filter = test_filter,
-    };
-
-    gen_h.addCases(cases);
-
-    return cases.step;
-}
-
-pub fn addPkgTests(
-    b: *std.Build,
+const ModuleTestOptions = struct {
     test_filter: ?[]const u8,
     root_src: []const u8,
     name: []const u8,
@@ -651,14 +913,17 @@ pub fn addPkgTests(
     skip_libc: bool,
     skip_stage1: bool,
     skip_stage2: bool,
-) *Step {
-    const step = b.step(b.fmt("test-{s}", .{name}), desc);
+    max_rss: usize = 0,
+};
+
+pub fn addModuleTests(b: *std.Build, options: ModuleTestOptions) *Step {
+    const step = b.step(b.fmt("test-{s}", .{options.name}), options.desc);
 
     for (test_targets) |test_target| {
-        if (skip_non_native and !test_target.target.isNative())
+        if (options.skip_non_native and !test_target.target.isNative())
             continue;
 
-        if (skip_libc and test_target.link_libc)
+        if (options.skip_libc and test_target.link_libc)
             continue;
 
         if (test_target.link_libc and test_target.target.getOs().requiresLibC()) {
@@ -666,7 +931,7 @@ pub fn addPkgTests(
             continue;
         }
 
-        if (skip_single_threaded and test_target.single_threaded)
+        if (options.skip_single_threaded and test_target.single_threaded)
             continue;
 
         if (test_target.disable_native and
@@ -677,12 +942,12 @@ pub fn addPkgTests(
         }
 
        if (test_target.backend) |backend| switch (backend) {
-            .stage1 => if (skip_stage1) continue,
+            .stage1 => if (options.skip_stage1) continue,
             .stage2_llvm => {},
-            else => if (skip_stage2) continue,
+            else => if (options.skip_stage2) continue,
         };
 
-        const want_this_mode = for (optimize_modes) |m| {
+        const want_this_mode = for (options.optimize_modes) |m| {
             if (m == test_target.optimize_mode) break true;
         } else false;
         if (!want_this_mode) continue;
@@ -694,25 +959,24 @@ pub fn addPkgTests(
         else
             "bare";
 
-        const triple_prefix = test_target.target.zigTriple(b.allocator) catch unreachable;
+        const triple_prefix = test_target.target.zigTriple(b.allocator) catch @panic("OOM");
+
+        // wasm32-wasi builds need more RAM, idk why
+        const max_rss = if (test_target.target.getOs().tag == .wasi)
+            options.max_rss * 2
+        else
+            options.max_rss;
 
         const these_tests = b.addTest(.{
-            .root_source_file = .{ .path = root_src },
+            .root_source_file = .{ .path = options.root_src },
             .optimize = test_target.optimize_mode,
             .target = test_target.target,
+            .max_rss = max_rss,
         });
         const single_threaded_txt = if (test_target.single_threaded) "single" else "multi";
         const backend_txt = if (test_target.backend) |backend| @tagName(backend) else "default";
-        these_tests.setNamePrefix(b.fmt("{s}-{s}-{s}-{s}-{s}-{s} ", .{
-            name,
-            triple_prefix,
-            @tagName(test_target.optimize_mode),
-            libc_prefix,
-            single_threaded_txt,
-            backend_txt,
-        }));
         these_tests.single_threaded = test_target.single_threaded;
-        these_tests.setFilter(test_filter);
+        these_tests.setFilter(options.test_filter);
         if (test_target.link_libc) {
             these_tests.linkSystemLibrary("c");
         }
@@ -736,654 +1000,94 @@ pub fn addPkgTests(
             },
         };
 
-        step.dependOn(&these_tests.step);
+        const run = these_tests.run();
+        run.skip_foreign_checks = true;
+        run.setName(b.fmt("run test {s}-{s}-{s}-{s}-{s}-{s}", .{
+            options.name,
+            triple_prefix,
+            @tagName(test_target.optimize_mode),
+            libc_prefix,
+            single_threaded_txt,
+            backend_txt,
+        }));
+
+        step.dependOn(&run.step);
     }
 
     return step;
 }
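With the rename to `addModuleTests`, the long positional parameter list becomes a single options struct, and the returned step now depends on running the test binary (`skip_foreign_checks` lets cross-compiled binaries that cannot execute on the host still pass). A hypothetical call site in build.zig, with illustrative values rather than this PR's actual wiring:

    // Assumes `tests` is the @import of test/tests.zig and the skip_* values
    // come from b.option(...) as elsewhere in build.zig.
    test_step.dependOn(tests.addModuleTests(b, .{
        .test_filter = test_filter,
        .root_src = "lib/std/std.zig",
        .name = "std",
        .desc = "Run the standard library tests",
        .optimize_modes = optimize_modes,
        .skip_single_threaded = skip_single_threaded,
        .skip_non_native = skip_non_native,
        .skip_libc = skip_libc,
        .skip_stage1 = skip_stage1,
        .skip_stage2 = skip_stage2,
        .max_rss = 2 * 1024 * 1024 * 1024, // illustrative budget; doubled for wasi above
    }));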
-pub const StackTracesContext = struct {
-    b: *std.Build,
-    step: *Step,
-    test_index: usize,
-    test_filter: ?[]const u8,
-    optimize_modes: []const OptimizeMode,
-
-    const Expect = [@typeInfo(OptimizeMode).Enum.fields.len][]const u8;
-
-    pub fn addCase(self: *StackTracesContext, config: anytype) void {
-        if (@hasField(@TypeOf(config), "exclude")) {
-            if (config.exclude.exclude()) return;
-        }
-        if (@hasField(@TypeOf(config), "exclude_arch")) {
-            const exclude_arch: []const std.Target.Cpu.Arch = &config.exclude_arch;
-            for (exclude_arch) |arch| if (arch == builtin.cpu.arch) return;
-        }
-        if (@hasField(@TypeOf(config), "exclude_os")) {
-            const exclude_os: []const std.Target.Os.Tag = &config.exclude_os;
-            for (exclude_os) |os| if (os == builtin.os.tag) return;
-        }
-        for (self.optimize_modes) |optimize_mode| {
-            switch (optimize_mode) {
-                .Debug => {
-                    if (@hasField(@TypeOf(config), "Debug")) {
-                        self.addExpect(config.name, config.source, optimize_mode, config.Debug);
-                    }
-                },
-                .ReleaseSafe => {
-                    if (@hasField(@TypeOf(config), "ReleaseSafe")) {
-                        self.addExpect(config.name, config.source, optimize_mode, config.ReleaseSafe);
-                    }
-                },
-                .ReleaseFast => {
-                    if (@hasField(@TypeOf(config), "ReleaseFast")) {
-                        self.addExpect(config.name, config.source, optimize_mode, config.ReleaseFast);
-                    }
-                },
-                .ReleaseSmall => {
-                    if (@hasField(@TypeOf(config), "ReleaseSmall")) {
-                        self.addExpect(config.name, config.source, optimize_mode, config.ReleaseSmall);
-                    }
-                },
-            }
-        }
-    }
-
-    fn addExpect(
-        self: *StackTracesContext,
-        name: []const u8,
-        source: []const u8,
-        optimize_mode: OptimizeMode,
-        mode_config: anytype,
-    ) void {
-        if (@hasField(@TypeOf(mode_config), "exclude")) {
-            if (mode_config.exclude.exclude()) return;
-        }
-        if (@hasField(@TypeOf(mode_config), "exclude_arch")) {
-            const exclude_arch: []const std.Target.Cpu.Arch = &mode_config.exclude_arch;
-            for (exclude_arch) |arch| if (arch == builtin.cpu.arch) return;
-        }
-        if (@hasField(@TypeOf(mode_config), "exclude_os")) {
-            const exclude_os: []const std.Target.Os.Tag = &mode_config.exclude_os;
-            for (exclude_os) |os| if (os == builtin.os.tag) return;
-        }
-
-        const annotated_case_name = fmt.allocPrint(self.b.allocator, "{s} {s} ({s})", .{
-            "stack-trace",
-            name,
-            @tagName(optimize_mode),
-        }) catch unreachable;
-        if (self.test_filter) |filter| {
-            if (mem.indexOf(u8, annotated_case_name, filter) == null) return;
-        }
-
-        const b = self.b;
-        const src_basename = "source.zig";
-        const write_src = b.addWriteFile(src_basename, source);
-        const exe = b.addExecutable(.{
-            .name = "test",
-            .root_source_file = write_src.getFileSource(src_basename).?,
-            .optimize = optimize_mode,
-            .target = .{},
-        });
-
-        const run_and_compare = RunAndCompareStep.create(
-            self,
-            exe,
-            annotated_case_name,
-            optimize_mode,
-            mode_config.expect,
-        );
-
-        self.step.dependOn(&run_and_compare.step);
-    }
-
-    const RunAndCompareStep = struct {
-        pub const base_id = .custom;
-
-        step: Step,
-        context: *StackTracesContext,
-        exe: *CompileStep,
-        name: []const u8,
-        optimize_mode: OptimizeMode,
-        expect_output: []const u8,
-        test_index: usize,
-
-        pub fn create(
-            context: *StackTracesContext,
-            exe: *CompileStep,
-            name: []const u8,
-            optimize_mode: OptimizeMode,
-            expect_output: []const u8,
-        ) *RunAndCompareStep {
-            const allocator = context.b.allocator;
-            const ptr = allocator.create(RunAndCompareStep) catch unreachable;
-            ptr.* = RunAndCompareStep{
-                .step = Step.init(.custom, "StackTraceCompareOutputStep", allocator, make),
-                .context = context,
-                .exe = exe,
-                .name = name,
-                .optimize_mode = optimize_mode,
-                .expect_output = expect_output,
-                .test_index = context.test_index,
-            };
-            ptr.step.dependOn(&exe.step);
-            context.test_index += 1;
-            return ptr;
-        }
-
-        fn make(step: *Step) !void {
-            const self = @fieldParentPtr(RunAndCompareStep, "step", step);
-            const b = self.context.b;
-
-            const full_exe_path = self.exe.getOutputSource().getPath(b);
-            var args = ArrayList([]const u8).init(b.allocator);
-            defer args.deinit();
-            args.append(full_exe_path) catch unreachable;
-
-            std.debug.print("Test {d}/{d} {s}...", .{ self.test_index + 1, self.context.test_index, self.name });
-
-            if (!std.process.can_spawn) {
-                const cmd = try std.mem.join(b.allocator, " ", args.items);
-                std.debug.print("the following command cannot be executed ({s} does not support spawning a child process):\n{s}", .{ @tagName(builtin.os.tag), cmd });
-                b.allocator.free(cmd);
-                return ExecError.ExecNotSupported;
-            }
-
-            var child = std.ChildProcess.init(args.items, b.allocator);
-            child.stdin_behavior = .Ignore;
-            child.stdout_behavior = .Pipe;
-            child.stderr_behavior = .Pipe;
-            child.env_map = b.env_map;
-
-            if (b.verbose) {
-                printInvocation(args.items);
-            }
-            child.spawn() catch |err| debug.panic("Unable to spawn {s}: {s}\n", .{ full_exe_path, @errorName(err) });
-
-            const stdout = child.stdout.?.reader().readAllAlloc(b.allocator, max_stdout_size) catch unreachable;
-            defer b.allocator.free(stdout);
-            const stderrFull = child.stderr.?.reader().readAllAlloc(b.allocator, max_stdout_size) catch unreachable;
-            defer b.allocator.free(stderrFull);
-            var stderr = stderrFull;
-
-            const term = child.wait() catch |err| {
-                debug.panic("Unable to spawn {s}: {s}\n", .{ full_exe_path, @errorName(err) });
-            };
-
-            switch (term) {
-                .Exited => |code| {
-                    const expect_code: u32 = 1;
-                    if (code != expect_code) {
-                        std.debug.print("Process {s} exited with error code {d} but expected code {d}\n", .{
-                            full_exe_path,
-                            code,
-                            expect_code,
-                        });
-                        printInvocation(args.items);
-                        return error.TestFailed;
-                    }
-                },
-                .Signal => |signum| {
-                    std.debug.print("Process {s} terminated on signal {d}\n", .{ full_exe_path, signum });
-                    printInvocation(args.items);
-                    return error.TestFailed;
-                },
-                .Stopped => |signum| {
-                    std.debug.print("Process {s} stopped on signal {d}\n", .{ full_exe_path, signum });
-                    printInvocation(args.items);
-                    return error.TestFailed;
-                },
-                .Unknown => |code| {
-                    std.debug.print("Process {s} terminated unexpectedly with error code {d}\n", .{ full_exe_path, code });
-                    printInvocation(args.items);
-                    return error.TestFailed;
-                },
-            }
-
-            // process result
-            // - keep only basename of source file path
-            // - replace address with symbolic string
-            // - replace function name with symbolic string when optimize_mode != .Debug
-            // - skip empty lines
-            const got: []const u8 = got_result: {
-                var buf = ArrayList(u8).init(b.allocator);
-                defer buf.deinit();
-                if (stderr.len != 0 and stderr[stderr.len - 1] == '\n') stderr = stderr[0 .. stderr.len - 1];
-                var it = mem.split(u8, stderr, "\n");
-                process_lines: while (it.next()) |line| {
-                    if (line.len == 0) continue;
-
-                    // offset search past `[drive]:` on windows
-                    var pos: usize = if (builtin.os.tag == .windows) 2 else 0;
-                    // locate delims/anchor
-                    const delims = [_][]const u8{ ":", ":", ":", " in ", "(", ")" };
-                    var marks = [_]usize{0} ** delims.len;
-                    for (delims, 0..) |delim, i| {
-                        marks[i] = mem.indexOfPos(u8, line, pos, delim) orelse {
-                            // unexpected pattern: emit raw line and cont
-                            try buf.appendSlice(line);
-                            try buf.appendSlice("\n");
-                            continue :process_lines;
-                        };
-                        pos = marks[i] + delim.len;
-                    }
-                    // locate source basename
-                    pos = mem.lastIndexOfScalar(u8, line[0..marks[0]], fs.path.sep) orelse {
-                        // unexpected pattern: emit raw line and cont
-                        try buf.appendSlice(line);
-                        try buf.appendSlice("\n");
-                        continue :process_lines;
-                    };
-                    // end processing if source basename changes
-                    if (!mem.eql(u8, "source.zig", line[pos + 1 .. marks[0]])) break;
-                    // emit substituted line
-                    try buf.appendSlice(line[pos + 1 .. marks[2] + delims[2].len]);
-                    try buf.appendSlice(" [address]");
-                    if (self.optimize_mode == .Debug) {
-                        // On certain platforms (windows) or possibly depending on how we choose to link main
-                        // the object file extension may be present so we simply strip any extension.
-                        if (mem.indexOfScalar(u8, line[marks[4]..marks[5]], '.')) |idot| {
-                            try buf.appendSlice(line[marks[3] .. marks[4] + idot]);
-                            try buf.appendSlice(line[marks[5]..]);
-                        } else {
-                            try buf.appendSlice(line[marks[3]..]);
-                        }
-                    } else {
-                        try buf.appendSlice(line[marks[3] .. marks[3] + delims[3].len]);
-                        try buf.appendSlice("[function]");
-                    }
-                    try buf.appendSlice("\n");
-                }
-                break :got_result try buf.toOwnedSlice();
-            };
-
-            if (!mem.eql(u8, self.expect_output, got)) {
-                std.debug.print(
-                    \\
-                    \\========= Expected this output: =========
-                    \\{s}
-                    \\================================================
-                    \\{s}
-                    \\
-                , .{ self.expect_output, got });
-                return error.TestFailed;
-            }
-            std.debug.print("OK\n", .{});
-        }
-    };
-};
-
-pub const StandaloneContext = struct {
-    b: *std.Build,
-    step: *Step,
-    test_index: usize,
-    test_filter: ?[]const u8,
-    optimize_modes: []const OptimizeMode,
-    skip_non_native: bool,
-    enable_macos_sdk: bool,
-    target: std.zig.CrossTarget,
-    omit_stage2: bool,
-    enable_darling: bool = false,
-    enable_qemu: bool = false,
-    enable_rosetta: bool = false,
-    enable_wasmtime: bool = false,
-    enable_wine: bool = false,
-    enable_symlinks_windows: bool,
-
-    pub fn addC(self: *StandaloneContext, root_src: []const u8) void {
-        self.addAllArgs(root_src, true);
-    }
-
-    pub fn add(self: *StandaloneContext, root_src: []const u8) void {
-        self.addAllArgs(root_src, false);
-    }
-
-    pub fn addBuildFile(self: *StandaloneContext, build_file: []const u8, features: struct {
-        build_modes: bool = false,
-        cross_targets: bool = false,
-        requires_macos_sdk: bool = false,
-        requires_stage2: bool = false,
-        use_emulation: bool = false,
-        requires_symlinks: bool = false,
-        extra_argv: []const []const u8 = &.{},
-    }) void {
-        const b = self.b;
-
-        if (features.requires_macos_sdk and !self.enable_macos_sdk) return;
-        if (features.requires_stage2 and self.omit_stage2) return;
-        if (features.requires_symlinks and !self.enable_symlinks_windows and builtin.os.tag == .windows) return;
-
-        const annotated_case_name = b.fmt("build {s}", .{build_file});
-        if (self.test_filter) |filter| {
-            if (mem.indexOf(u8, annotated_case_name, filter) == null) return;
-        }
-
-        var zig_args = ArrayList([]const u8).init(b.allocator);
-        const rel_zig_exe = fs.path.relative(b.allocator, b.build_root.path orelse ".", b.zig_exe) catch unreachable;
-        zig_args.append(rel_zig_exe) catch unreachable;
-        zig_args.append("build") catch unreachable;
-
-        zig_args.append("--build-file") catch unreachable;
-        zig_args.append(b.pathFromRoot(build_file)) catch unreachable;
-
-        zig_args.appendSlice(features.extra_argv) catch unreachable;
-
-        zig_args.append("test") catch unreachable;
-
-        if (b.verbose) {
-            zig_args.append("--verbose") catch unreachable;
-        }
-
-        if (features.cross_targets and !self.target.isNative()) {
-            const target_triple = self.target.zigTriple(b.allocator) catch unreachable;
-            const target_arg = fmt.allocPrint(b.allocator, "-Dtarget={s}", .{target_triple}) catch unreachable;
-            zig_args.append(target_arg) catch unreachable;
-        }
+pub fn addCAbiTests(b: *std.Build, skip_non_native: bool, skip_release: bool) *Step {
+    const step = b.step("test-c-abi", "Run the C ABI tests");
-        if (features.use_emulation) {
-            if (self.enable_darling) {
-                zig_args.append("-fdarling") catch unreachable;
-            }
-            if (self.enable_qemu) {
-                zig_args.append("-fqemu") catch unreachable;
-            }
-            if (self.enable_rosetta) {
-                zig_args.append("-frosetta") catch unreachable;
-            }
-            if (self.enable_wasmtime) {
-                zig_args.append("-fwasmtime") catch unreachable;
-            }
-            if (self.enable_wine) {
-                zig_args.append("-fwine") catch unreachable;
-            }
-        }
+    const optimize_modes: [2]OptimizeMode = .{ .Debug, .ReleaseFast };
-        const optimize_modes = if (features.build_modes) self.optimize_modes else &[1]OptimizeMode{.Debug};
-        for (optimize_modes) |optimize_mode| {
-            const arg = switch (optimize_mode) {
-                .Debug => "",
-                .ReleaseFast => "-Doptimize=ReleaseFast",
-                .ReleaseSafe => "-Doptimize=ReleaseSafe",
-                .ReleaseSmall => "-Doptimize=ReleaseSmall",
-            };
-            const zig_args_base_len = zig_args.items.len;
-            if (arg.len > 0)
-                zig_args.append(arg) catch unreachable;
-            defer zig_args.resize(zig_args_base_len) catch unreachable;
-
-            const run_cmd = b.addSystemCommand(zig_args.items);
-            const log_step = b.addLog("PASS {s} ({s})", .{ annotated_case_name, @tagName(optimize_mode) });
-            log_step.step.dependOn(&run_cmd.step);
-
-            self.step.dependOn(&log_step.step);
-        }
-    }
+    for (optimize_modes) |optimize_mode| {
+        if (optimize_mode != .Debug and skip_release) continue;
-    pub fn addAllArgs(self: *StandaloneContext, root_src: []const u8, link_libc: bool) void {
-        const b = self.b;
+        for (c_abi_targets) |c_abi_target| {
+            if (skip_non_native and !c_abi_target.isNative()) continue;
-        for (self.optimize_modes) |optimize| {
-            const annotated_case_name = fmt.allocPrint(self.b.allocator, "build {s} ({s})", .{
-                root_src,
-                @tagName(optimize),
-            }) catch unreachable;
-            if (self.test_filter) |filter| {
-                if (mem.indexOf(u8, annotated_case_name, filter) == null) continue;
+            if (c_abi_target.isWindows() and c_abi_target.getCpuArch() == .aarch64) {
+                // https://github.com/ziglang/zig/issues/14908
+                continue;
             }
-            const exe = b.addExecutable(.{
-                .name = "test",
-                .root_source_file = .{ .path = root_src },
-                .optimize = optimize,
-                .target = .{},
+            const test_step = b.addTest(.{
+                .root_source_file = .{ .path = "test/c_abi/main.zig" },
+                .optimize = optimize_mode,
+                .target = c_abi_target,
             });
-            if (link_libc) {
-                exe.linkSystemLibrary("c");
+            if (c_abi_target.abi != null and c_abi_target.abi.?.isMusl()) {
+                // TODO NativeTargetInfo insists on dynamically linking musl
+                // for some reason?
+                test_step.target_info.dynamic_linker.max_byte = null;
+            }
-
-            const log_step = b.addLog("PASS {s}", .{annotated_case_name});
-            log_step.step.dependOn(&exe.step);
-
-            self.step.dependOn(&log_step.step);
-        }
-    }
-};
-
-pub const GenHContext = struct {
-    b: *std.Build,
-    step: *Step,
-    test_index: usize,
-    test_filter: ?[]const u8,
-
-    const TestCase = struct {
-        name: []const u8,
-        sources: ArrayList(SourceFile),
-        expected_lines: ArrayList([]const u8),
-
-        const SourceFile = struct {
-            filename: []const u8,
-            source: []const u8,
-        };
-
-        pub fn addSourceFile(self: *TestCase, filename: []const u8, source: []const u8) void {
-            self.sources.append(SourceFile{
-                .filename = filename,
-                .source = source,
-            }) catch unreachable;
-        }
-
-        pub fn addExpectedLine(self: *TestCase, text: []const u8) void {
-            self.expected_lines.append(text) catch unreachable;
-        }
-    };
-
-    const GenHCmpOutputStep = struct {
-        step: Step,
-        context: *GenHContext,
-        obj: *CompileStep,
-        name: []const u8,
-        test_index: usize,
-        case: *const TestCase,
-
-        pub fn create(
-            context: *GenHContext,
-            obj: *CompileStep,
-            name: []const u8,
-            case: *const TestCase,
-        ) *GenHCmpOutputStep {
-            const allocator = context.b.allocator;
-            const ptr = allocator.create(GenHCmpOutputStep) catch unreachable;
-            ptr.* = GenHCmpOutputStep{
-                .step = Step.init(.Custom, "ParseCCmpOutput", allocator, make),
-                .context = context,
-                .obj = obj,
-                .name = name,
-                .test_index = context.test_index,
-                .case = case,
-            };
-            ptr.step.dependOn(&obj.step);
-            context.test_index += 1;
-            return ptr;
-        }
-
-        fn make(step: *Step) !void {
-            const self = @fieldParentPtr(GenHCmpOutputStep, "step", step);
-            const b = self.context.b;
-
-            std.debug.print("Test {d}/{d} {s}...", .{ self.test_index + 1, self.context.test_index, self.name });
-
-            const full_h_path = self.obj.getOutputHPath();
-            const actual_h = try io.readFileAlloc(b.allocator, full_h_path);
-
-            for (self.case.expected_lines.items) |expected_line| {
-                if (mem.indexOf(u8, actual_h, expected_line) == null) {
-                    std.debug.print(
-                        \\
-                        \\========= Expected this output: ================
-                        \\{s}
-                        \\========= But found: ===========================
-                        \\{s}
-                        \\
-                    , .{ expected_line, actual_h });
-                    return error.TestFailed;
-                }
+            test_step.linkLibC();
+            test_step.addCSourceFile("test/c_abi/cfuncs.c", &.{"-std=c99"});
+
+            // test-c-abi should test both with LTO on and with LTO off. Only
+            // some combinations are passing currently:
+            // https://github.com/ziglang/zig/issues/14908
+            if (c_abi_target.isWindows()) {
+                test_step.want_lto = false;
             }
-            std.debug.print("OK\n", .{});
-        }
-    };
-
-    pub fn create(
-        self: *GenHContext,
-        filename: []const u8,
-        name: []const u8,
-        source: []const u8,
-        expected_lines: []const []const u8,
-    ) *TestCase {
-        const tc = self.b.allocator.create(TestCase) catch unreachable;
-        tc.* = TestCase{
-            .name = name,
-            .sources = ArrayList(TestCase.SourceFile).init(self.b.allocator),
-            .expected_lines = ArrayList([]const u8).init(self.b.allocator),
-        };
-
-        tc.addSourceFile(filename, source);
-        var arg_i: usize = 0;
-        while (arg_i < expected_lines.len) : (arg_i += 1) {
-            tc.addExpectedLine(expected_lines[arg_i]);
-        }
-        return tc;
-    }
-
-    pub fn add(self: *GenHContext, name: []const u8, source: []const u8, expected_lines: []const []const u8) void {
-        const tc = self.create("test.zig", name, source, expected_lines);
-        self.addCase(tc);
-    }
-    pub fn addCase(self: *GenHContext, case: *const TestCase) void {
-        const b = self.b;
+            const triple_prefix = c_abi_target.zigTriple(b.allocator) catch @panic("OOM");
+            test_step.setName(b.fmt("test-c-abi-{s}-{s} ", .{
+                triple_prefix, @tagName(optimize_mode),
+            }));
-        const optimize_mode = std.builtin.OptimizeMode.Debug;
-        const annotated_case_name = fmt.allocPrint(self.b.allocator, "gen-h {s} ({s})", .{ case.name, @tagName(optimize_mode) }) catch unreachable;
-        if (self.test_filter) |filter| {
-            if (mem.indexOf(u8, annotated_case_name, filter) == null) return;
+            const run = test_step.run();
+            run.skip_foreign_checks = true;
+            step.dependOn(&run.step);
        }
-
-        const write_src = b.addWriteFiles();
-        for (case.sources.items) |src_file| {
-            write_src.add(src_file.filename, src_file.source);
-        }
-
-        const obj = b.addObjectFromWriteFileStep("test", write_src, case.sources.items[0].filename);
-        obj.setBuildMode(optimize_mode);
-
-        const cmp_h = GenHCmpOutputStep.create(self, obj, annotated_case_name, case);
-
-        self.step.dependOn(&cmp_h.step);
    }
-};
-
-fn printInvocation(args: []const []const u8) void {
-    for (args) |arg| {
-        std.debug.print("{s} ", .{arg});
-    }
-    std.debug.print("\n", .{});
+    return step;
 }
-const c_abi_targets = [_]CrossTarget{
-    .{},
-    .{
-        .cpu_arch = .x86_64,
-        .os_tag = .linux,
-        .abi = .musl,
-    },
-    .{
-        .cpu_arch = .x86,
-        .os_tag = .linux,
-        .abi = .musl,
-    },
-    .{
-        .cpu_arch = .aarch64,
-        .os_tag = .linux,
-        .abi = .musl,
-    },
-    .{
-        .cpu_arch = .arm,
-        .os_tag = .linux,
-        .abi = .musleabihf,
-    },
-    .{
-        .cpu_arch = .mips,
-        .os_tag = .linux,
-        .abi = .musl,
-    },
-    .{
-        .cpu_arch = .riscv64,
-        .os_tag = .linux,
-        .abi = .musl,
-    },
-    .{
-        .cpu_arch = .wasm32,
-        .os_tag = .wasi,
-        .abi = .musl,
-    },
-    .{
-        .cpu_arch = .powerpc,
-        .os_tag = .linux,
-        .abi = .musl,
-    },
-    .{
-        .cpu_arch = .powerpc64le,
-        .os_tag = .linux,
-        .abi = .musl,
-    },
-    .{
-        .cpu_arch = .x86,
-        .os_tag = .windows,
-        .abi = .gnu,
-    },
-    .{
-        .cpu_arch = .x86_64,
-        .os_tag = .windows,
-        .abi = .gnu,
-    },
-};
-
-pub fn addCAbiTests(b: *std.Build, skip_non_native: bool, skip_release: bool) *Step {
-    const step = b.step("test-c-abi", "Run the C ABI tests");
-
-    const optimize_modes: [2]OptimizeMode = .{ .Debug, .ReleaseFast };
-
-    for (optimize_modes[0 .. @as(u8, 1) + @boolToInt(!skip_release)]) |optimize_mode| for (c_abi_targets) |c_abi_target| {
-        if (skip_non_native and !c_abi_target.isNative())
-            continue;
-
-        const test_step = b.addTest(.{
-            .root_source_file = .{ .path = "test/c_abi/main.zig" },
-            .optimize = optimize_mode,
-            .target = c_abi_target,
-        });
-        if (c_abi_target.abi != null and c_abi_target.abi.?.isMusl()) {
-            // TODO NativeTargetInfo insists on dynamically linking musl
-            // for some reason?
-            test_step.target_info.dynamic_linker.max_byte = null;
-        }
-        test_step.linkLibC();
-        test_step.addCSourceFile("test/c_abi/cfuncs.c", &.{"-std=c99"});
-
-        if (c_abi_target.isWindows() and (c_abi_target.getCpuArch() == .x86 or builtin.target.os.tag == .linux)) {
-            // LTO currently incorrectly strips stdcall name-mangled functions
-            // LLD crashes in LTO here when cross compiling for windows on linux
-            test_step.want_lto = false;
-        }
-
-        const triple_prefix = c_abi_target.zigTriple(b.allocator) catch unreachable;
-        test_step.setNamePrefix(b.fmt("{s}-{s}-{s} ", .{
-            "test-c-abi",
-            triple_prefix,
-            @tagName(optimize_mode),
-        }));
-
-        step.dependOn(&test_step.step);
-    };
-    return step;
+pub fn addCases(
+    b: *std.Build,
+    parent_step: *Step,
+    opt_test_filter: ?[]const u8,
+    check_case_exe: *std.Build.CompileStep,
+) !void {
+    const arena = b.allocator;
+    const gpa = b.allocator;
+
+    var cases = @import("src/Cases.zig").init(gpa, arena);
+
+    var dir = try b.build_root.handle.openIterableDir("test/cases", .{});
+    defer dir.close();
+
+    cases.addFromDir(dir);
+    try @import("cases.zig").addCases(&cases);
+
+    const cases_dir_path = try b.build_root.join(b.allocator, &.{ "test", "cases" });
+    cases.lowerToBuildSteps(
+        b,
+        parent_step,
+        opt_test_filter,
+        cases_dir_path,
+        check_case_exe,
+    );
 }
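For context, the new `addCases` lowers the `test/cases` tree directly into build steps driven by the `check-case` executable instead of a separate test runner. The matching call site in build.zig is not shown in this excerpt; under that caveat, it would look roughly like the following sketch (step name and variable names are assumptions):

    // Hypothetical wiring in build.zig; names mirror this function's parameters.
    const cases_step = b.step("test-cases", "Run the compiler test cases");
    try tests.addCases(b, cases_step, test_filter, check_case_exe);
    test_step.dependOn(cases_step);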