From 1b432072b5ae6a70a00464b48b7ebf8610293fc3 Mon Sep 17 00:00:00 2001
From: dweiller <4678790+dweiller@users.noreplay.github.com>
Date: Fri, 10 Mar 2023 13:03:40 +1100
Subject: std.Build: detect and disallow top-level step name clashes

---
 lib/std/Build.zig | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

(limited to 'lib/std/Build.zig')

diff --git a/lib/std/Build.zig b/lib/std/Build.zig
index f23b3ba5aa..a2c8a22e32 100644
--- a/lib/std/Build.zig
+++ b/lib/std/Build.zig
@@ -926,7 +926,12 @@ pub fn step(self: *Build, name: []const u8, description: []const u8) *Step {
         }),
         .description = self.dupe(description),
     };
-    self.top_level_steps.put(self.allocator, step_info.step.name, step_info) catch @panic("OOM");
+    const gop = self.top_level_steps.getOrPut(self.allocator, name) catch @panic("OOM");
+    if (gop.found_existing) std.debug.panic("A top-level step with name \"{s}\" already exists", .{name});
+
+    gop.key_ptr.* = step_info.step.name;
+    gop.value_ptr.* = step_info;
+
     return &step_info.step;
 }
--
cgit v1.2.3


From 13eb7251d37759bd47403db304c6120c706fe353 Mon Sep 17 00:00:00 2001
From: Nicolas Sterchele
Date: Mon, 20 Mar 2023 09:23:10 +0100
Subject: build: rename std.Build.*Step to std.Build.Step.*

Follow-up actions from #14647
Fixes #14947
---
 lib/std/Build.zig                      |   28 +-
 lib/std/Build/CheckFileStep.zig        |   88 --
 lib/std/Build/CheckObjectStep.zig      | 1055 ---------------
 lib/std/Build/CompileStep.zig          | 2183 --------------------------------
 lib/std/Build/ConfigHeaderStep.zig     |  437 -------
 lib/std/Build/FmtStep.zig              |   73 --
 lib/std/Build/InstallArtifactStep.zig  |  130 --
 lib/std/Build/InstallDirStep.zig       |  110 --
 lib/std/Build/InstallFileStep.zig      |   57 -
 lib/std/Build/ObjCopyStep.zig          |  122 --
 lib/std/Build/OptionsStep.zig          |  421 ------
 lib/std/Build/RemoveDirStep.zig        |   42 -
 lib/std/Build/RunStep.zig              | 1254 ------------------
 lib/std/Build/Step/CheckFile.zig       |   87 ++
 lib/std/Build/Step/CheckObject.zig     | 1055 +++++++++++++++
 lib/std/Build/Step/Compile.zig         | 2183 ++++++++++++++++++++++++++++++++
 lib/std/Build/Step/ConfigHeader.zig    |  437 +++++++
 lib/std/Build/Step/Fmt.zig             |   72 ++
 lib/std/Build/Step/InstallArtifact.zig |  130 ++
 lib/std/Build/Step/InstallDir.zig      |  110 ++
 lib/std/Build/Step/InstallFile.zig     |   57 +
 lib/std/Build/Step/ObjCopy.zig         |  122 ++
 lib/std/Build/Step/Options.zig         |  421 ++++++
 lib/std/Build/Step/RemoveDir.zig       |   42 +
 lib/std/Build/Step/Run.zig             | 1254 ++++++++++++++++++
 lib/std/Build/Step/TranslateC.zig      |  136 ++
 lib/std/Build/Step/WriteFile.zig       |  291 +++++
 lib/std/Build/TranslateCStep.zig       |  136 --
 lib/std/Build/WriteFileStep.zig        |  293 -----
 29 files changed, 6411 insertions(+), 6415 deletions(-)
 delete mode 100644 lib/std/Build/CheckFileStep.zig
 delete mode 100644 lib/std/Build/CheckObjectStep.zig
 delete mode 100644 lib/std/Build/CompileStep.zig
 delete mode 100644 lib/std/Build/ConfigHeaderStep.zig
 delete mode 100644 lib/std/Build/FmtStep.zig
 delete mode 100644 lib/std/Build/InstallArtifactStep.zig
 delete mode 100644 lib/std/Build/InstallDirStep.zig
 delete mode 100644 lib/std/Build/InstallFileStep.zig
 delete mode 100644 lib/std/Build/ObjCopyStep.zig
 delete mode 100644 lib/std/Build/OptionsStep.zig
 delete mode 100644 lib/std/Build/RemoveDirStep.zig
 delete mode 100644 lib/std/Build/RunStep.zig
 create mode 100644 lib/std/Build/Step/CheckFile.zig
 create mode 100644 lib/std/Build/Step/CheckObject.zig
 create mode 100644 lib/std/Build/Step/Compile.zig
 create mode 100644 lib/std/Build/Step/ConfigHeader.zig
 create mode 100644 lib/std/Build/Step/Fmt.zig
 create mode 100644 lib/std/Build/Step/InstallArtifact.zig
 create mode 100644 lib/std/Build/Step/InstallDir.zig
 create mode 100644 lib/std/Build/Step/InstallFile.zig
 create mode 100644 lib/std/Build/Step/ObjCopy.zig
 create mode 100644 lib/std/Build/Step/Options.zig
 create mode 100644 lib/std/Build/Step/RemoveDir.zig
 create mode 100644 lib/std/Build/Step/Run.zig
 create mode 100644 lib/std/Build/Step/TranslateC.zig
 create mode 100644 lib/std/Build/Step/WriteFile.zig
 delete mode 100644 lib/std/Build/TranslateCStep.zig
 delete mode 100644 lib/std/Build/WriteFileStep.zig

(limited to 'lib/std/Build.zig')

diff --git a/lib/std/Build.zig b/lib/std/Build.zig
index a2c8a22e32..bda50112b6 100644
--- a/lib/std/Build.zig
+++ b/lib/std/Build.zig
@@ -29,20 +29,20 @@ pub const Builder = Build;
 pub const InstallDirectoryOptions = InstallDirStep.Options;

 pub const Step = @import("Build/Step.zig");
-pub const CheckFileStep = @import("Build/CheckFileStep.zig");
-pub const CheckObjectStep = @import("Build/CheckObjectStep.zig");
-pub const ConfigHeaderStep = @import("Build/ConfigHeaderStep.zig");
-pub const FmtStep = @import("Build/FmtStep.zig");
-pub const InstallArtifactStep = @import("Build/InstallArtifactStep.zig");
-pub const InstallDirStep = @import("Build/InstallDirStep.zig");
-pub const InstallFileStep = @import("Build/InstallFileStep.zig");
-pub const ObjCopyStep = @import("Build/ObjCopyStep.zig");
-pub const CompileStep = @import("Build/CompileStep.zig");
-pub const OptionsStep = @import("Build/OptionsStep.zig");
-pub const RemoveDirStep = @import("Build/RemoveDirStep.zig");
-pub const RunStep = @import("Build/RunStep.zig");
-pub const TranslateCStep = @import("Build/TranslateCStep.zig");
-pub const WriteFileStep = @import("Build/WriteFileStep.zig");
+pub const CheckFileStep = @import("Build/Step/CheckFile.zig");
+pub const CheckObjectStep = @import("Build/Step/CheckObject.zig");
+pub const ConfigHeaderStep = @import("Build/Step/ConfigHeader.zig");
+pub const FmtStep = @import("Build/Step/Fmt.zig");
+pub const InstallArtifactStep = @import("Build/Step/InstallArtifact.zig");
+pub const InstallDirStep = @import("Build/Step/InstallDir.zig");
+pub const InstallFileStep = @import("Build/Step/InstallFile.zig");
+pub const ObjCopyStep = @import("Build/Step/ObjCopy.zig");
+pub const CompileStep = @import("Build/Step/Compile.zig");
+pub const OptionsStep = @import("Build/Step/Options.zig");
+pub const RemoveDirStep = @import("Build/Step/RemoveDir.zig");
+pub const RunStep = @import("Build/Step/Run.zig");
+pub const TranslateCStep = @import("Build/Step/TranslateC.zig");
+pub const WriteFileStep = @import("Build/Step/WriteFile.zig");

 install_tls: TopLevelStep,
 uninstall_tls: TopLevelStep,
diff --git a/lib/std/Build/CheckFileStep.zig b/lib/std/Build/CheckFileStep.zig
deleted file mode 100644
index 1c2b6b7786..0000000000
--- a/lib/std/Build/CheckFileStep.zig
+++ /dev/null
@@ -1,88 +0,0 @@
-//! Fail the build step if a file does not match certain checks.
-//! TODO: make this more flexible, supporting more kinds of checks.
-//! TODO: generalize the code in std.testing.expectEqualStrings and make this
-//! CheckFileStep produce those helpful diagnostics when there is not a match.
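An illustrative sketch, not part of the patches above: with the name-clash detection from the first commit, a build.zig that registers two top-level steps under the same name is expected to panic while the build graph is constructed, instead of silently replacing the earlier entry. The "docs" step name and the descriptions below are hypothetical; only `Build.step` and the quoted panic message come from the patch.

pub fn build(b: *std.Build) void {
    // Hypothetical example step; the name "docs" is made up for illustration.
    _ = b.step("docs", "Build the documentation");

    // A second registration with the same name now takes the `found_existing`
    // branch in `Build.step` and panics with:
    //   A top-level step with name "docs" already exists
    _ = b.step("docs", "Generate the documentation");
}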
- -step: Step, -expected_matches: []const []const u8, -expected_exact: ?[]const u8, -source: std.Build.FileSource, -max_bytes: usize = 20 * 1024 * 1024, - -pub const base_id = .check_file; - -pub const Options = struct { - expected_matches: []const []const u8 = &.{}, - expected_exact: ?[]const u8 = null, -}; - -pub fn create( - owner: *std.Build, - source: std.Build.FileSource, - options: Options, -) *CheckFileStep { - const self = owner.allocator.create(CheckFileStep) catch @panic("OOM"); - self.* = .{ - .step = Step.init(.{ - .id = .check_file, - .name = "CheckFile", - .owner = owner, - .makeFn = make, - }), - .source = source.dupe(owner), - .expected_matches = owner.dupeStrings(options.expected_matches), - .expected_exact = options.expected_exact, - }; - self.source.addStepDependencies(&self.step); - return self; -} - -pub fn setName(self: *CheckFileStep, name: []const u8) void { - self.step.name = name; -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - _ = prog_node; - const b = step.owner; - const self = @fieldParentPtr(CheckFileStep, "step", step); - - const src_path = self.source.getPath(b); - const contents = fs.cwd().readFileAlloc(b.allocator, src_path, self.max_bytes) catch |err| { - return step.fail("unable to read '{s}': {s}", .{ - src_path, @errorName(err), - }); - }; - - for (self.expected_matches) |expected_match| { - if (mem.indexOf(u8, contents, expected_match) == null) { - return step.fail( - \\ - \\========= expected to find: =================== - \\{s} - \\========= but file does not contain it: ======= - \\{s} - \\=============================================== - , .{ expected_match, contents }); - } - } - - if (self.expected_exact) |expected_exact| { - if (!mem.eql(u8, expected_exact, contents)) { - return step.fail( - \\ - \\========= expected: ===================== - \\{s} - \\========= but found: ==================== - \\{s} - \\========= from the following file: ====== - \\{s} - , .{ expected_exact, contents, src_path }); - } - } -} - -const CheckFileStep = @This(); -const std = @import("../std.zig"); -const Step = std.Build.Step; -const fs = std.fs; -const mem = std.mem; diff --git a/lib/std/Build/CheckObjectStep.zig b/lib/std/Build/CheckObjectStep.zig deleted file mode 100644 index e79ce9d3df..0000000000 --- a/lib/std/Build/CheckObjectStep.zig +++ /dev/null @@ -1,1055 +0,0 @@ -const std = @import("../std.zig"); -const assert = std.debug.assert; -const fs = std.fs; -const macho = std.macho; -const math = std.math; -const mem = std.mem; -const testing = std.testing; - -const CheckObjectStep = @This(); - -const Allocator = mem.Allocator; -const Step = std.Build.Step; - -pub const base_id = .check_object; - -step: Step, -source: std.Build.FileSource, -max_bytes: usize = 20 * 1024 * 1024, -checks: std.ArrayList(Check), -dump_symtab: bool = false, -obj_format: std.Target.ObjectFormat, - -pub fn create( - owner: *std.Build, - source: std.Build.FileSource, - obj_format: std.Target.ObjectFormat, -) *CheckObjectStep { - const gpa = owner.allocator; - const self = gpa.create(CheckObjectStep) catch @panic("OOM"); - self.* = .{ - .step = Step.init(.{ - .id = .check_file, - .name = "CheckObject", - .owner = owner, - .makeFn = make, - }), - .source = source.dupe(owner), - .checks = std.ArrayList(Check).init(gpa), - .obj_format = obj_format, - }; - self.source.addStepDependencies(&self.step); - return self; -} - -/// Runs and (optionally) compares the output of a binary. -/// Asserts `self` was generated from an executable step. 
-/// TODO this doesn't actually compare, and there's no apparent reason for it -/// to depend on the check object step. I don't see why this function should exist, -/// the caller could just add the run step directly. -pub fn runAndCompare(self: *CheckObjectStep) *std.Build.RunStep { - const dependencies_len = self.step.dependencies.items.len; - assert(dependencies_len > 0); - const exe_step = self.step.dependencies.items[dependencies_len - 1]; - const exe = exe_step.cast(std.Build.CompileStep).?; - const run = self.step.owner.addRunArtifact(exe); - run.skip_foreign_checks = true; - run.step.dependOn(&self.step); - return run; -} - -const SearchPhrase = struct { - string: []const u8, - file_source: ?std.Build.FileSource = null, - - fn resolve(phrase: SearchPhrase, b: *std.Build, step: *Step) []const u8 { - const file_source = phrase.file_source orelse return phrase.string; - return b.fmt("{s} {s}", .{ phrase.string, file_source.getPath2(b, step) }); - } -}; - -/// There two types of actions currently supported: -/// * `.match` - is the main building block of standard matchers with optional eat-all token `{*}` -/// and extractors by name such as `{n_value}`. Please note this action is very simplistic in nature -/// i.e., it won't really handle edge cases/nontrivial examples. But given that we do want to use -/// it mainly to test the output of our object format parser-dumpers when testing the linkers, etc. -/// it should be plenty useful in its current form. -/// * `.compute_cmp` - can be used to perform an operation on the extracted global variables -/// using the MatchAction. It currently only supports an addition. The operation is required -/// to be specified in Reverse Polish Notation to ease in operator-precedence parsing (well, -/// to avoid any parsing really). -/// For example, if the two extracted values were saved as `vmaddr` and `entryoff` respectively -/// they could then be added with this simple program `vmaddr entryoff +`. -const Action = struct { - tag: enum { match, not_present, compute_cmp }, - phrase: SearchPhrase, - expected: ?ComputeCompareExpected = null, - - /// Will return true if the `phrase` was found in the `haystack`. - /// Some examples include: - /// - /// LC 0 => will match in its entirety - /// vmaddr {vmaddr} => will match `vmaddr` and then extract the following value as u64 - /// and save under `vmaddr` global name (see `global_vars` param) - /// name {*}libobjc{*}.dylib => will match `name` followed by a token which contains `libobjc` and `.dylib` - /// in that order with other letters in between - fn match( - act: Action, - b: *std.Build, - step: *Step, - haystack: []const u8, - global_vars: anytype, - ) !bool { - assert(act.tag == .match or act.tag == .not_present); - const phrase = act.phrase.resolve(b, step); - var candidate_var: ?struct { name: []const u8, value: u64 } = null; - var hay_it = mem.tokenize(u8, mem.trim(u8, haystack, " "), " "); - var needle_it = mem.tokenize(u8, mem.trim(u8, phrase, " "), " "); - - while (needle_it.next()) |needle_tok| { - const hay_tok = hay_it.next() orelse return false; - - if (mem.indexOf(u8, needle_tok, "{*}")) |index| { - // We have fuzzy matchers within the search pattern, so we match substrings. 
- var start = index; - var n_tok = needle_tok; - var h_tok = hay_tok; - while (true) { - n_tok = n_tok[start + 3 ..]; - const inner = if (mem.indexOf(u8, n_tok, "{*}")) |sub_end| - n_tok[0..sub_end] - else - n_tok; - if (mem.indexOf(u8, h_tok, inner) == null) return false; - start = mem.indexOf(u8, n_tok, "{*}") orelse break; - } - } else if (mem.startsWith(u8, needle_tok, "{")) { - const closing_brace = mem.indexOf(u8, needle_tok, "}") orelse return error.MissingClosingBrace; - if (closing_brace != needle_tok.len - 1) return error.ClosingBraceNotLast; - - const name = needle_tok[1..closing_brace]; - if (name.len == 0) return error.MissingBraceValue; - const value = try std.fmt.parseInt(u64, hay_tok, 16); - candidate_var = .{ - .name = name, - .value = value, - }; - } else { - if (!mem.eql(u8, hay_tok, needle_tok)) return false; - } - } - - if (candidate_var) |v| { - try global_vars.putNoClobber(v.name, v.value); - } - - return true; - } - - /// Will return true if the `phrase` is correctly parsed into an RPN program and - /// its reduced, computed value compares using `op` with the expected value, either - /// a literal or another extracted variable. - fn computeCmp(act: Action, b: *std.Build, step: *Step, global_vars: anytype) !bool { - const gpa = step.owner.allocator; - const phrase = act.phrase.resolve(b, step); - var op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa); - var values = std.ArrayList(u64).init(gpa); - - var it = mem.tokenize(u8, phrase, " "); - while (it.next()) |next| { - if (mem.eql(u8, next, "+")) { - try op_stack.append(.add); - } else if (mem.eql(u8, next, "-")) { - try op_stack.append(.sub); - } else if (mem.eql(u8, next, "%")) { - try op_stack.append(.mod); - } else if (mem.eql(u8, next, "*")) { - try op_stack.append(.mul); - } else { - const val = std.fmt.parseInt(u64, next, 0) catch blk: { - break :blk global_vars.get(next) orelse { - try step.addError( - \\ - \\========= variable was not extracted: =========== - \\{s} - \\================================================= - , .{next}); - return error.UnknownVariable; - }; - }; - try values.append(val); - } - } - - var op_i: usize = 1; - var reduced: u64 = values.items[0]; - for (op_stack.items) |op| { - const other = values.items[op_i]; - switch (op) { - .add => { - reduced += other; - }, - .sub => { - reduced -= other; - }, - .mod => { - reduced %= other; - }, - .mul => { - reduced *= other; - }, - } - op_i += 1; - } - - const exp_value = switch (act.expected.?.value) { - .variable => |name| global_vars.get(name) orelse { - try step.addError( - \\ - \\========= variable was not extracted: =========== - \\{s} - \\================================================= - , .{name}); - return error.UnknownVariable; - }, - .literal => |x| x, - }; - return math.compare(reduced, act.expected.?.op, exp_value); - } -}; - -const ComputeCompareExpected = struct { - op: math.CompareOperator, - value: union(enum) { - variable: []const u8, - literal: u64, - }, - - pub fn format( - value: @This(), - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - if (fmt.len != 0) std.fmt.invalidFmtError(fmt, value); - _ = options; - try writer.print("{s} ", .{@tagName(value.op)}); - switch (value.value) { - .variable => |name| try writer.writeAll(name), - .literal => |x| try writer.print("{x}", .{x}), - } - } -}; - -const Check = struct { - actions: std.ArrayList(Action), - - fn create(allocator: Allocator) Check { - return .{ - .actions = std.ArrayList(Action).init(allocator), - }; - } - 
- fn match(self: *Check, phrase: SearchPhrase) void { - self.actions.append(.{ - .tag = .match, - .phrase = phrase, - }) catch @panic("OOM"); - } - - fn notPresent(self: *Check, phrase: SearchPhrase) void { - self.actions.append(.{ - .tag = .not_present, - .phrase = phrase, - }) catch @panic("OOM"); - } - - fn computeCmp(self: *Check, phrase: SearchPhrase, expected: ComputeCompareExpected) void { - self.actions.append(.{ - .tag = .compute_cmp, - .phrase = phrase, - .expected = expected, - }) catch @panic("OOM"); - } -}; - -/// Creates a new sequence of actions with `phrase` as the first anchor searched phrase. -pub fn checkStart(self: *CheckObjectStep, phrase: []const u8) void { - var new_check = Check.create(self.step.owner.allocator); - new_check.match(.{ .string = self.step.owner.dupe(phrase) }); - self.checks.append(new_check) catch @panic("OOM"); -} - -/// Adds another searched phrase to the latest created Check with `CheckObjectStep.checkStart(...)`. -/// Asserts at least one check already exists. -pub fn checkNext(self: *CheckObjectStep, phrase: []const u8) void { - assert(self.checks.items.len > 0); - const last = &self.checks.items[self.checks.items.len - 1]; - last.match(.{ .string = self.step.owner.dupe(phrase) }); -} - -/// Like `checkNext()` but takes an additional argument `FileSource` which will be -/// resolved to a full search query in `make()`. -pub fn checkNextFileSource( - self: *CheckObjectStep, - phrase: []const u8, - file_source: std.Build.FileSource, -) void { - assert(self.checks.items.len > 0); - const last = &self.checks.items[self.checks.items.len - 1]; - last.match(.{ .string = self.step.owner.dupe(phrase), .file_source = file_source }); -} - -/// Adds another searched phrase to the latest created Check with `CheckObjectStep.checkStart(...)` -/// however ensures there is no matching phrase in the output. -/// Asserts at least one check already exists. -pub fn checkNotPresent(self: *CheckObjectStep, phrase: []const u8) void { - assert(self.checks.items.len > 0); - const last = &self.checks.items[self.checks.items.len - 1]; - last.notPresent(.{ .string = self.step.owner.dupe(phrase) }); -} - -/// Creates a new check checking specifically symbol table parsed and dumped from the object -/// file. -/// Issuing this check will force parsing and dumping of the symbol table. -pub fn checkInSymtab(self: *CheckObjectStep) void { - self.dump_symtab = true; - const symtab_label = switch (self.obj_format) { - .macho => MachODumper.symtab_label, - else => @panic("TODO other parsers"), - }; - self.checkStart(symtab_label); -} - -/// Creates a new standalone, singular check which allows running simple binary operations -/// on the extracted variables. It will then compare the reduced program with the value of -/// the expected variable. 
-pub fn checkComputeCompare( - self: *CheckObjectStep, - program: []const u8, - expected: ComputeCompareExpected, -) void { - var new_check = Check.create(self.step.owner.allocator); - new_check.computeCmp(.{ .string = self.step.owner.dupe(program) }, expected); - self.checks.append(new_check) catch @panic("OOM"); -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - _ = prog_node; - const b = step.owner; - const gpa = b.allocator; - const self = @fieldParentPtr(CheckObjectStep, "step", step); - - const src_path = self.source.getPath(b); - const contents = fs.cwd().readFileAllocOptions( - gpa, - src_path, - self.max_bytes, - null, - @alignOf(u64), - null, - ) catch |err| return step.fail("unable to read '{s}': {s}", .{ src_path, @errorName(err) }); - - const output = switch (self.obj_format) { - .macho => try MachODumper.parseAndDump(step, contents, .{ - .dump_symtab = self.dump_symtab, - }), - .elf => @panic("TODO elf parser"), - .coff => @panic("TODO coff parser"), - .wasm => try WasmDumper.parseAndDump(step, contents, .{ - .dump_symtab = self.dump_symtab, - }), - else => unreachable, - }; - - var vars = std.StringHashMap(u64).init(gpa); - - for (self.checks.items) |chk| { - var it = mem.tokenize(u8, output, "\r\n"); - for (chk.actions.items) |act| { - switch (act.tag) { - .match => { - while (it.next()) |line| { - if (try act.match(b, step, line, &vars)) break; - } else { - return step.fail( - \\ - \\========= expected to find: ========================== - \\{s} - \\========= but parsed file does not contain it: ======= - \\{s} - \\====================================================== - , .{ act.phrase.resolve(b, step), output }); - } - }, - .not_present => { - while (it.next()) |line| { - if (try act.match(b, step, line, &vars)) { - return step.fail( - \\ - \\========= expected not to find: =================== - \\{s} - \\========= but parsed file does contain it: ======== - \\{s} - \\=================================================== - , .{ act.phrase.resolve(b, step), output }); - } - } - }, - .compute_cmp => { - const res = act.computeCmp(b, step, vars) catch |err| switch (err) { - error.UnknownVariable => { - return step.fail( - \\========= from parsed file: ===================== - \\{s} - \\================================================= - , .{output}); - }, - else => |e| return e, - }; - if (!res) { - return step.fail( - \\ - \\========= comparison failed for action: =========== - \\{s} {} - \\========= from parsed file: ======================= - \\{s} - \\=================================================== - , .{ act.phrase.resolve(b, step), act.expected.?, output }); - } - }, - } - } - } -} - -const Opts = struct { - dump_symtab: bool = false, -}; - -const MachODumper = struct { - const LoadCommandIterator = macho.LoadCommandIterator; - const symtab_label = "symtab"; - - fn parseAndDump(step: *Step, bytes: []align(@alignOf(u64)) const u8, opts: Opts) ![]const u8 { - const gpa = step.owner.allocator; - var stream = std.io.fixedBufferStream(bytes); - const reader = stream.reader(); - - const hdr = try reader.readStruct(macho.mach_header_64); - if (hdr.magic != macho.MH_MAGIC_64) { - return error.InvalidMagicNumber; - } - - var output = std.ArrayList(u8).init(gpa); - const writer = output.writer(); - - var symtab: []const macho.nlist_64 = undefined; - var strtab: []const u8 = undefined; - var sections = std.ArrayList(macho.section_64).init(gpa); - var imports = std.ArrayList([]const u8).init(gpa); - - var it = LoadCommandIterator{ - .ncmds = hdr.ncmds, - .buffer 
= bytes[@sizeOf(macho.mach_header_64)..][0..hdr.sizeofcmds], - }; - var i: usize = 0; - while (it.next()) |cmd| { - switch (cmd.cmd()) { - .SEGMENT_64 => { - const seg = cmd.cast(macho.segment_command_64).?; - try sections.ensureUnusedCapacity(seg.nsects); - for (cmd.getSections()) |sect| { - sections.appendAssumeCapacity(sect); - } - }, - .SYMTAB => if (opts.dump_symtab) { - const lc = cmd.cast(macho.symtab_command).?; - symtab = @ptrCast( - [*]const macho.nlist_64, - @alignCast(@alignOf(macho.nlist_64), &bytes[lc.symoff]), - )[0..lc.nsyms]; - strtab = bytes[lc.stroff..][0..lc.strsize]; - }, - .LOAD_DYLIB, - .LOAD_WEAK_DYLIB, - .REEXPORT_DYLIB, - => { - try imports.append(cmd.getDylibPathName()); - }, - else => {}, - } - - try dumpLoadCommand(cmd, i, writer); - try writer.writeByte('\n'); - - i += 1; - } - - if (opts.dump_symtab) { - try writer.print("{s}\n", .{symtab_label}); - for (symtab) |sym| { - if (sym.stab()) continue; - const sym_name = mem.sliceTo(@ptrCast([*:0]const u8, strtab.ptr + sym.n_strx), 0); - if (sym.sect()) { - const sect = sections.items[sym.n_sect - 1]; - try writer.print("{x} ({s},{s})", .{ - sym.n_value, - sect.segName(), - sect.sectName(), - }); - if (sym.ext()) { - try writer.writeAll(" external"); - } - try writer.print(" {s}\n", .{sym_name}); - } else if (sym.undf()) { - const ordinal = @divTrunc(@bitCast(i16, sym.n_desc), macho.N_SYMBOL_RESOLVER); - const import_name = blk: { - if (ordinal <= 0) { - if (ordinal == macho.BIND_SPECIAL_DYLIB_SELF) - break :blk "self import"; - if (ordinal == macho.BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE) - break :blk "main executable"; - if (ordinal == macho.BIND_SPECIAL_DYLIB_FLAT_LOOKUP) - break :blk "flat lookup"; - unreachable; - } - const full_path = imports.items[@bitCast(u16, ordinal) - 1]; - const basename = fs.path.basename(full_path); - assert(basename.len > 0); - const ext = mem.lastIndexOfScalar(u8, basename, '.') orelse basename.len; - break :blk basename[0..ext]; - }; - try writer.writeAll("(undefined)"); - if (sym.weakRef()) { - try writer.writeAll(" weak"); - } - if (sym.ext()) { - try writer.writeAll(" external"); - } - try writer.print(" {s} (from {s})\n", .{ - sym_name, - import_name, - }); - } else unreachable; - } - } - - return output.toOwnedSlice(); - } - - fn dumpLoadCommand(lc: macho.LoadCommandIterator.LoadCommand, index: usize, writer: anytype) !void { - // print header first - try writer.print( - \\LC {d} - \\cmd {s} - \\cmdsize {d} - , .{ index, @tagName(lc.cmd()), lc.cmdsize() }); - - switch (lc.cmd()) { - .SEGMENT_64 => { - const seg = lc.cast(macho.segment_command_64).?; - try writer.writeByte('\n'); - try writer.print( - \\segname {s} - \\vmaddr {x} - \\vmsize {x} - \\fileoff {x} - \\filesz {x} - , .{ - seg.segName(), - seg.vmaddr, - seg.vmsize, - seg.fileoff, - seg.filesize, - }); - - for (lc.getSections()) |sect| { - try writer.writeByte('\n'); - try writer.print( - \\sectname {s} - \\addr {x} - \\size {x} - \\offset {x} - \\align {x} - , .{ - sect.sectName(), - sect.addr, - sect.size, - sect.offset, - sect.@"align", - }); - } - }, - - .ID_DYLIB, - .LOAD_DYLIB, - .LOAD_WEAK_DYLIB, - .REEXPORT_DYLIB, - => { - const dylib = lc.cast(macho.dylib_command).?; - try writer.writeByte('\n'); - try writer.print( - \\name {s} - \\timestamp {d} - \\current version {x} - \\compatibility version {x} - , .{ - lc.getDylibPathName(), - dylib.dylib.timestamp, - dylib.dylib.current_version, - dylib.dylib.compatibility_version, - }); - }, - - .MAIN => { - const main = lc.cast(macho.entry_point_command).?; - try 
writer.writeByte('\n'); - try writer.print( - \\entryoff {x} - \\stacksize {x} - , .{ main.entryoff, main.stacksize }); - }, - - .RPATH => { - try writer.writeByte('\n'); - try writer.print( - \\path {s} - , .{ - lc.getRpathPathName(), - }); - }, - - .UUID => { - const uuid = lc.cast(macho.uuid_command).?; - try writer.writeByte('\n'); - try writer.print("uuid {x}", .{std.fmt.fmtSliceHexLower(&uuid.uuid)}); - }, - - .DATA_IN_CODE, - .FUNCTION_STARTS, - .CODE_SIGNATURE, - => { - const llc = lc.cast(macho.linkedit_data_command).?; - try writer.writeByte('\n'); - try writer.print( - \\dataoff {x} - \\datasize {x} - , .{ llc.dataoff, llc.datasize }); - }, - - .DYLD_INFO_ONLY => { - const dlc = lc.cast(macho.dyld_info_command).?; - try writer.writeByte('\n'); - try writer.print( - \\rebaseoff {x} - \\rebasesize {x} - \\bindoff {x} - \\bindsize {x} - \\weakbindoff {x} - \\weakbindsize {x} - \\lazybindoff {x} - \\lazybindsize {x} - \\exportoff {x} - \\exportsize {x} - , .{ - dlc.rebase_off, - dlc.rebase_size, - dlc.bind_off, - dlc.bind_size, - dlc.weak_bind_off, - dlc.weak_bind_size, - dlc.lazy_bind_off, - dlc.lazy_bind_size, - dlc.export_off, - dlc.export_size, - }); - }, - - .SYMTAB => { - const slc = lc.cast(macho.symtab_command).?; - try writer.writeByte('\n'); - try writer.print( - \\symoff {x} - \\nsyms {x} - \\stroff {x} - \\strsize {x} - , .{ - slc.symoff, - slc.nsyms, - slc.stroff, - slc.strsize, - }); - }, - - .DYSYMTAB => { - const dlc = lc.cast(macho.dysymtab_command).?; - try writer.writeByte('\n'); - try writer.print( - \\ilocalsym {x} - \\nlocalsym {x} - \\iextdefsym {x} - \\nextdefsym {x} - \\iundefsym {x} - \\nundefsym {x} - \\indirectsymoff {x} - \\nindirectsyms {x} - , .{ - dlc.ilocalsym, - dlc.nlocalsym, - dlc.iextdefsym, - dlc.nextdefsym, - dlc.iundefsym, - dlc.nundefsym, - dlc.indirectsymoff, - dlc.nindirectsyms, - }); - }, - - else => {}, - } - } -}; - -const WasmDumper = struct { - const symtab_label = "symbols"; - - fn parseAndDump(step: *Step, bytes: []const u8, opts: Opts) ![]const u8 { - const gpa = step.owner.allocator; - if (opts.dump_symtab) { - @panic("TODO: Implement symbol table parsing and dumping"); - } - - var fbs = std.io.fixedBufferStream(bytes); - const reader = fbs.reader(); - - const buf = try reader.readBytesNoEof(8); - if (!mem.eql(u8, buf[0..4], &std.wasm.magic)) { - return error.InvalidMagicByte; - } - if (!mem.eql(u8, buf[4..], &std.wasm.version)) { - return error.UnsupportedWasmVersion; - } - - var output = std.ArrayList(u8).init(gpa); - errdefer output.deinit(); - const writer = output.writer(); - - while (reader.readByte()) |current_byte| { - const section = std.meta.intToEnum(std.wasm.Section, current_byte) catch { - return step.fail("Found invalid section id '{d}'", .{current_byte}); - }; - - const section_length = try std.leb.readULEB128(u32, reader); - try parseAndDumpSection(step, section, bytes[fbs.pos..][0..section_length], writer); - fbs.pos += section_length; - } else |_| {} // reached end of stream - - return output.toOwnedSlice(); - } - - fn parseAndDumpSection( - step: *Step, - section: std.wasm.Section, - data: []const u8, - writer: anytype, - ) !void { - var fbs = std.io.fixedBufferStream(data); - const reader = fbs.reader(); - - try writer.print( - \\Section {s} - \\size {d} - , .{ @tagName(section), data.len }); - - switch (section) { - .type, - .import, - .function, - .table, - .memory, - .global, - .@"export", - .element, - .code, - .data, - => { - const entries = try std.leb.readULEB128(u32, reader); - try 
writer.print("\nentries {d}\n", .{entries}); - try dumpSection(step, section, data[fbs.pos..], entries, writer); - }, - .custom => { - const name_length = try std.leb.readULEB128(u32, reader); - const name = data[fbs.pos..][0..name_length]; - fbs.pos += name_length; - try writer.print("\nname {s}\n", .{name}); - - if (mem.eql(u8, name, "name")) { - try parseDumpNames(step, reader, writer, data); - } else if (mem.eql(u8, name, "producers")) { - try parseDumpProducers(reader, writer, data); - } else if (mem.eql(u8, name, "target_features")) { - try parseDumpFeatures(reader, writer, data); - } - // TODO: Implement parsing and dumping other custom sections (such as relocations) - }, - .start => { - const start = try std.leb.readULEB128(u32, reader); - try writer.print("\nstart {d}\n", .{start}); - }, - else => {}, // skip unknown sections - } - } - - fn dumpSection(step: *Step, section: std.wasm.Section, data: []const u8, entries: u32, writer: anytype) !void { - var fbs = std.io.fixedBufferStream(data); - const reader = fbs.reader(); - - switch (section) { - .type => { - var i: u32 = 0; - while (i < entries) : (i += 1) { - const func_type = try reader.readByte(); - if (func_type != std.wasm.function_type) { - return step.fail("expected function type, found byte '{d}'", .{func_type}); - } - const params = try std.leb.readULEB128(u32, reader); - try writer.print("params {d}\n", .{params}); - var index: u32 = 0; - while (index < params) : (index += 1) { - try parseDumpType(step, std.wasm.Valtype, reader, writer); - } else index = 0; - const returns = try std.leb.readULEB128(u32, reader); - try writer.print("returns {d}\n", .{returns}); - while (index < returns) : (index += 1) { - try parseDumpType(step, std.wasm.Valtype, reader, writer); - } - } - }, - .import => { - var i: u32 = 0; - while (i < entries) : (i += 1) { - const module_name_len = try std.leb.readULEB128(u32, reader); - const module_name = data[fbs.pos..][0..module_name_len]; - fbs.pos += module_name_len; - const name_len = try std.leb.readULEB128(u32, reader); - const name = data[fbs.pos..][0..name_len]; - fbs.pos += name_len; - - const kind = std.meta.intToEnum(std.wasm.ExternalKind, try reader.readByte()) catch { - return step.fail("invalid import kind", .{}); - }; - - try writer.print( - \\module {s} - \\name {s} - \\kind {s} - , .{ module_name, name, @tagName(kind) }); - try writer.writeByte('\n'); - switch (kind) { - .function => { - try writer.print("index {d}\n", .{try std.leb.readULEB128(u32, reader)}); - }, - .memory => { - try parseDumpLimits(reader, writer); - }, - .global => { - try parseDumpType(step, std.wasm.Valtype, reader, writer); - try writer.print("mutable {}\n", .{0x01 == try std.leb.readULEB128(u32, reader)}); - }, - .table => { - try parseDumpType(step, std.wasm.RefType, reader, writer); - try parseDumpLimits(reader, writer); - }, - } - } - }, - .function => { - var i: u32 = 0; - while (i < entries) : (i += 1) { - try writer.print("index {d}\n", .{try std.leb.readULEB128(u32, reader)}); - } - }, - .table => { - var i: u32 = 0; - while (i < entries) : (i += 1) { - try parseDumpType(step, std.wasm.RefType, reader, writer); - try parseDumpLimits(reader, writer); - } - }, - .memory => { - var i: u32 = 0; - while (i < entries) : (i += 1) { - try parseDumpLimits(reader, writer); - } - }, - .global => { - var i: u32 = 0; - while (i < entries) : (i += 1) { - try parseDumpType(step, std.wasm.Valtype, reader, writer); - try writer.print("mutable {}\n", .{0x01 == try std.leb.readULEB128(u1, reader)}); - try 
parseDumpInit(step, reader, writer); - } - }, - .@"export" => { - var i: u32 = 0; - while (i < entries) : (i += 1) { - const name_len = try std.leb.readULEB128(u32, reader); - const name = data[fbs.pos..][0..name_len]; - fbs.pos += name_len; - const kind_byte = try std.leb.readULEB128(u8, reader); - const kind = std.meta.intToEnum(std.wasm.ExternalKind, kind_byte) catch { - return step.fail("invalid export kind value '{d}'", .{kind_byte}); - }; - const index = try std.leb.readULEB128(u32, reader); - try writer.print( - \\name {s} - \\kind {s} - \\index {d} - , .{ name, @tagName(kind), index }); - try writer.writeByte('\n'); - } - }, - .element => { - var i: u32 = 0; - while (i < entries) : (i += 1) { - try writer.print("table index {d}\n", .{try std.leb.readULEB128(u32, reader)}); - try parseDumpInit(step, reader, writer); - - const function_indexes = try std.leb.readULEB128(u32, reader); - var function_index: u32 = 0; - try writer.print("indexes {d}\n", .{function_indexes}); - while (function_index < function_indexes) : (function_index += 1) { - try writer.print("index {d}\n", .{try std.leb.readULEB128(u32, reader)}); - } - } - }, - .code => {}, // code section is considered opaque to linker - .data => { - var i: u32 = 0; - while (i < entries) : (i += 1) { - const index = try std.leb.readULEB128(u32, reader); - try writer.print("memory index 0x{x}\n", .{index}); - try parseDumpInit(step, reader, writer); - const size = try std.leb.readULEB128(u32, reader); - try writer.print("size {d}\n", .{size}); - try reader.skipBytes(size, .{}); // we do not care about the content of the segments - } - }, - else => unreachable, - } - } - - fn parseDumpType(step: *Step, comptime WasmType: type, reader: anytype, writer: anytype) !void { - const type_byte = try reader.readByte(); - const valtype = std.meta.intToEnum(WasmType, type_byte) catch { - return step.fail("Invalid wasm type value '{d}'", .{type_byte}); - }; - try writer.print("type {s}\n", .{@tagName(valtype)}); - } - - fn parseDumpLimits(reader: anytype, writer: anytype) !void { - const flags = try std.leb.readULEB128(u8, reader); - const min = try std.leb.readULEB128(u32, reader); - - try writer.print("min {x}\n", .{min}); - if (flags != 0) { - try writer.print("max {x}\n", .{try std.leb.readULEB128(u32, reader)}); - } - } - - fn parseDumpInit(step: *Step, reader: anytype, writer: anytype) !void { - const byte = try std.leb.readULEB128(u8, reader); - const opcode = std.meta.intToEnum(std.wasm.Opcode, byte) catch { - return step.fail("invalid wasm opcode '{d}'", .{byte}); - }; - switch (opcode) { - .i32_const => try writer.print("i32.const {x}\n", .{try std.leb.readILEB128(i32, reader)}), - .i64_const => try writer.print("i64.const {x}\n", .{try std.leb.readILEB128(i64, reader)}), - .f32_const => try writer.print("f32.const {x}\n", .{@bitCast(f32, try reader.readIntLittle(u32))}), - .f64_const => try writer.print("f64.const {x}\n", .{@bitCast(f64, try reader.readIntLittle(u64))}), - .global_get => try writer.print("global.get {x}\n", .{try std.leb.readULEB128(u32, reader)}), - else => unreachable, - } - const end_opcode = try std.leb.readULEB128(u8, reader); - if (end_opcode != std.wasm.opcode(.end)) { - return step.fail("expected 'end' opcode in init expression", .{}); - } - } - - fn parseDumpNames(step: *Step, reader: anytype, writer: anytype, data: []const u8) !void { - while (reader.context.pos < data.len) { - try parseDumpType(step, std.wasm.NameSubsection, reader, writer); - const size = try std.leb.readULEB128(u32, reader); - const 
entries = try std.leb.readULEB128(u32, reader); - try writer.print( - \\size {d} - \\names {d} - , .{ size, entries }); - try writer.writeByte('\n'); - var i: u32 = 0; - while (i < entries) : (i += 1) { - const index = try std.leb.readULEB128(u32, reader); - const name_len = try std.leb.readULEB128(u32, reader); - const pos = reader.context.pos; - const name = data[pos..][0..name_len]; - reader.context.pos += name_len; - - try writer.print( - \\index {d} - \\name {s} - , .{ index, name }); - try writer.writeByte('\n'); - } - } - } - - fn parseDumpProducers(reader: anytype, writer: anytype, data: []const u8) !void { - const field_count = try std.leb.readULEB128(u32, reader); - try writer.print("fields {d}\n", .{field_count}); - var current_field: u32 = 0; - while (current_field < field_count) : (current_field += 1) { - const field_name_length = try std.leb.readULEB128(u32, reader); - const field_name = data[reader.context.pos..][0..field_name_length]; - reader.context.pos += field_name_length; - - const value_count = try std.leb.readULEB128(u32, reader); - try writer.print( - \\field_name {s} - \\values {d} - , .{ field_name, value_count }); - try writer.writeByte('\n'); - var current_value: u32 = 0; - while (current_value < value_count) : (current_value += 1) { - const value_length = try std.leb.readULEB128(u32, reader); - const value = data[reader.context.pos..][0..value_length]; - reader.context.pos += value_length; - - const version_length = try std.leb.readULEB128(u32, reader); - const version = data[reader.context.pos..][0..version_length]; - reader.context.pos += version_length; - - try writer.print( - \\value_name {s} - \\version {s} - , .{ value, version }); - try writer.writeByte('\n'); - } - } - } - - fn parseDumpFeatures(reader: anytype, writer: anytype, data: []const u8) !void { - const feature_count = try std.leb.readULEB128(u32, reader); - try writer.print("features {d}\n", .{feature_count}); - - var index: u32 = 0; - while (index < feature_count) : (index += 1) { - const prefix_byte = try std.leb.readULEB128(u8, reader); - const name_length = try std.leb.readULEB128(u32, reader); - const feature_name = data[reader.context.pos..][0..name_length]; - reader.context.pos += name_length; - - try writer.print("{c} {s}\n", .{ prefix_byte, feature_name }); - } - } -}; diff --git a/lib/std/Build/CompileStep.zig b/lib/std/Build/CompileStep.zig deleted file mode 100644 index d5a135e24b..0000000000 --- a/lib/std/Build/CompileStep.zig +++ /dev/null @@ -1,2183 +0,0 @@ -const builtin = @import("builtin"); -const std = @import("../std.zig"); -const mem = std.mem; -const fs = std.fs; -const assert = std.debug.assert; -const panic = std.debug.panic; -const ArrayList = std.ArrayList; -const StringHashMap = std.StringHashMap; -const Sha256 = std.crypto.hash.sha2.Sha256; -const Allocator = mem.Allocator; -const Step = std.Build.Step; -const CrossTarget = std.zig.CrossTarget; -const NativeTargetInfo = std.zig.system.NativeTargetInfo; -const FileSource = std.Build.FileSource; -const PkgConfigPkg = std.Build.PkgConfigPkg; -const PkgConfigError = std.Build.PkgConfigError; -const ExecError = std.Build.ExecError; -const Module = std.Build.Module; -const VcpkgRoot = std.Build.VcpkgRoot; -const InstallDir = std.Build.InstallDir; -const InstallArtifactStep = std.Build.InstallArtifactStep; -const GeneratedFile = std.Build.GeneratedFile; -const ObjCopyStep = std.Build.ObjCopyStep; -const CheckObjectStep = std.Build.CheckObjectStep; -const RunStep = std.Build.RunStep; -const OptionsStep = 
std.Build.OptionsStep; -const ConfigHeaderStep = std.Build.ConfigHeaderStep; -const CompileStep = @This(); - -pub const base_id: Step.Id = .compile; - -step: Step, -name: []const u8, -target: CrossTarget, -target_info: NativeTargetInfo, -optimize: std.builtin.Mode, -linker_script: ?FileSource = null, -version_script: ?[]const u8 = null, -out_filename: []const u8, -linkage: ?Linkage = null, -version: ?std.builtin.Version, -kind: Kind, -major_only_filename: ?[]const u8, -name_only_filename: ?[]const u8, -strip: ?bool, -unwind_tables: ?bool, -// keep in sync with src/link.zig:CompressDebugSections -compress_debug_sections: enum { none, zlib } = .none, -lib_paths: ArrayList(FileSource), -rpaths: ArrayList(FileSource), -framework_dirs: ArrayList(FileSource), -frameworks: StringHashMap(FrameworkLinkInfo), -verbose_link: bool, -verbose_cc: bool, -emit_analysis: EmitOption = .default, -emit_asm: EmitOption = .default, -emit_bin: EmitOption = .default, -emit_docs: EmitOption = .default, -emit_implib: EmitOption = .default, -emit_llvm_bc: EmitOption = .default, -emit_llvm_ir: EmitOption = .default, -// Lots of things depend on emit_h having a consistent path, -// so it is not an EmitOption for now. -emit_h: bool = false, -bundle_compiler_rt: ?bool = null, -single_threaded: ?bool, -stack_protector: ?bool = null, -disable_stack_probing: bool, -disable_sanitize_c: bool, -sanitize_thread: bool, -rdynamic: bool, -dwarf_format: ?std.dwarf.Format = null, -import_memory: bool = false, -/// For WebAssembly targets, this will allow for undefined symbols to -/// be imported from the host environment. -import_symbols: bool = false, -import_table: bool = false, -export_table: bool = false, -initial_memory: ?u64 = null, -max_memory: ?u64 = null, -shared_memory: bool = false, -global_base: ?u64 = null, -c_std: std.Build.CStd, -zig_lib_dir: ?[]const u8, -main_pkg_path: ?[]const u8, -exec_cmd_args: ?[]const ?[]const u8, -filter: ?[]const u8, -test_evented_io: bool = false, -test_runner: ?[]const u8, -code_model: std.builtin.CodeModel = .default, -wasi_exec_model: ?std.builtin.WasiExecModel = null, -/// Symbols to be exported when compiling to wasm -export_symbol_names: []const []const u8 = &.{}, - -root_src: ?FileSource, -out_h_filename: []const u8, -out_lib_filename: []const u8, -out_pdb_filename: []const u8, -modules: std.StringArrayHashMap(*Module), - -link_objects: ArrayList(LinkObject), -include_dirs: ArrayList(IncludeDir), -c_macros: ArrayList([]const u8), -installed_headers: ArrayList(*Step), -is_linking_libc: bool, -is_linking_libcpp: bool, -vcpkg_bin_path: ?[]const u8 = null, - -/// This may be set in order to override the default install directory -override_dest_dir: ?InstallDir, -installed_path: ?[]const u8, - -/// Base address for an executable image. -image_base: ?u64 = null, - -libc_file: ?FileSource = null, - -valgrind_support: ?bool = null, -each_lib_rpath: ?bool = null, -/// On ELF targets, this will emit a link section called ".note.gnu.build-id" -/// which can be used to coordinate a stripped binary with its debug symbols. -/// As an example, the bloaty project refuses to work unless its inputs have -/// build ids, in order to prevent accidental mismatches. -/// The default is to not include this section because it slows down linking. -build_id: ?bool = null, - -/// Create a .eh_frame_hdr section and a PT_GNU_EH_FRAME segment in the ELF -/// file. 
-link_eh_frame_hdr: bool = false, -link_emit_relocs: bool = false, - -/// Place every function in its own section so that unused ones may be -/// safely garbage-collected during the linking phase. -link_function_sections: bool = false, - -/// Remove functions and data that are unreachable by the entry point or -/// exported symbols. -link_gc_sections: ?bool = null, - -/// (Windows) Whether or not to enable ASLR. Maps to the /DYNAMICBASE[:NO] linker argument. -linker_dynamicbase: bool = true, - -linker_allow_shlib_undefined: ?bool = null, - -/// Permit read-only relocations in read-only segments. Disallowed by default. -link_z_notext: bool = false, - -/// Force all relocations to be read-only after processing. -link_z_relro: bool = true, - -/// Allow relocations to be lazily processed after load. -link_z_lazy: bool = false, - -/// Common page size -link_z_common_page_size: ?u64 = null, - -/// Maximum page size -link_z_max_page_size: ?u64 = null, - -/// (Darwin) Install name for the dylib -install_name: ?[]const u8 = null, - -/// (Darwin) Path to entitlements file -entitlements: ?[]const u8 = null, - -/// (Darwin) Size of the pagezero segment. -pagezero_size: ?u64 = null, - -/// (Darwin) Search strategy for searching system libraries. Either `paths_first` or `dylibs_first`. -/// The former lowers to `-search_paths_first` linker option, while the latter to `-search_dylibs_first` -/// option. -/// By default, if no option is specified, the linker assumes `paths_first` as the default -/// search strategy. -search_strategy: ?enum { paths_first, dylibs_first } = null, - -/// (Darwin) Set size of the padding between the end of load commands -/// and start of `__TEXT,__text` section. -headerpad_size: ?u32 = null, - -/// (Darwin) Automatically Set size of the padding between the end of load commands -/// and start of `__TEXT,__text` section to a value fitting all paths expanded to MAXPATHLEN. -headerpad_max_install_names: bool = false, - -/// (Darwin) Remove dylibs that are unreachable by the entry point or exported symbols. -dead_strip_dylibs: bool = false, - -/// Position Independent Code -force_pic: ?bool = null, - -/// Position Independent Executable -pie: ?bool = null, - -red_zone: ?bool = null, - -omit_frame_pointer: ?bool = null, -dll_export_fns: ?bool = null, - -subsystem: ?std.Target.SubSystem = null, - -entry_symbol_name: ?[]const u8 = null, - -/// List of symbols forced as undefined in the symbol table -/// thus forcing their resolution by the linker. -/// Corresponds to `-u ` for ELF/MachO and `/include:` for COFF/PE. -force_undefined_symbols: std.StringHashMap(void), - -/// Overrides the default stack size -stack_size: ?u64 = null, - -want_lto: ?bool = null, -use_llvm: ?bool, -use_lld: ?bool, - -/// This is an advanced setting that can change the intent of this CompileStep. -/// If this slice has nonzero length, it means that this CompileStep exists to -/// check for compile errors and return *success* if they match, and failure -/// otherwise. 
-expect_errors: []const []const u8 = &.{}, - -output_path_source: GeneratedFile, -output_lib_path_source: GeneratedFile, -output_h_path_source: GeneratedFile, -output_pdb_path_source: GeneratedFile, -output_dirname_source: GeneratedFile, - -pub const CSourceFiles = struct { - files: []const []const u8, - flags: []const []const u8, -}; - -pub const CSourceFile = struct { - source: FileSource, - args: []const []const u8, - - pub fn dupe(self: CSourceFile, b: *std.Build) CSourceFile { - return .{ - .source = self.source.dupe(b), - .args = b.dupeStrings(self.args), - }; - } -}; - -pub const LinkObject = union(enum) { - static_path: FileSource, - other_step: *CompileStep, - system_lib: SystemLib, - assembly_file: FileSource, - c_source_file: *CSourceFile, - c_source_files: *CSourceFiles, -}; - -pub const SystemLib = struct { - name: []const u8, - needed: bool, - weak: bool, - use_pkg_config: enum { - /// Don't use pkg-config, just pass -lfoo where foo is name. - no, - /// Try to get information on how to link the library from pkg-config. - /// If that fails, fall back to passing -lfoo where foo is name. - yes, - /// Try to get information on how to link the library from pkg-config. - /// If that fails, error out. - force, - }, -}; - -const FrameworkLinkInfo = struct { - needed: bool = false, - weak: bool = false, -}; - -pub const IncludeDir = union(enum) { - raw_path: []const u8, - raw_path_system: []const u8, - other_step: *CompileStep, - config_header_step: *ConfigHeaderStep, -}; - -pub const Options = struct { - name: []const u8, - root_source_file: ?FileSource = null, - target: CrossTarget, - optimize: std.builtin.Mode, - kind: Kind, - linkage: ?Linkage = null, - version: ?std.builtin.Version = null, - max_rss: usize = 0, - filter: ?[]const u8 = null, - test_runner: ?[]const u8 = null, - link_libc: ?bool = null, - single_threaded: ?bool = null, - use_llvm: ?bool = null, - use_lld: ?bool = null, -}; - -pub const Kind = enum { - exe, - lib, - obj, - @"test", -}; - -pub const Linkage = enum { dynamic, static }; - -pub const EmitOption = union(enum) { - default: void, - no_emit: void, - emit: void, - emit_to: []const u8, - - fn getArg(self: @This(), b: *std.Build, arg_name: []const u8) ?[]const u8 { - return switch (self) { - .no_emit => b.fmt("-fno-{s}", .{arg_name}), - .default => null, - .emit => b.fmt("-f{s}", .{arg_name}), - .emit_to => |path| b.fmt("-f{s}={s}", .{ arg_name, path }), - }; - } -}; - -pub fn create(owner: *std.Build, options: Options) *CompileStep { - const name = owner.dupe(options.name); - const root_src: ?FileSource = if (options.root_source_file) |rsrc| rsrc.dupe(owner) else null; - if (mem.indexOf(u8, name, "/") != null or mem.indexOf(u8, name, "\\") != null) { - panic("invalid name: '{s}'. It looks like a file path, but it is supposed to be the library or application name.", .{name}); - } - - // Avoid the common case of the step name looking like "zig test test". 
- const name_adjusted = if (options.kind == .@"test" and mem.eql(u8, name, "test")) - "" - else - owner.fmt("{s} ", .{name}); - - const step_name = owner.fmt("{s} {s}{s} {s}", .{ - switch (options.kind) { - .exe => "zig build-exe", - .lib => "zig build-lib", - .obj => "zig build-obj", - .@"test" => "zig test", - }, - name_adjusted, - @tagName(options.optimize), - options.target.zigTriple(owner.allocator) catch @panic("OOM"), - }); - - const target_info = NativeTargetInfo.detect(options.target) catch @panic("unhandled error"); - - const out_filename = std.zig.binNameAlloc(owner.allocator, .{ - .root_name = name, - .target = target_info.target, - .output_mode = switch (options.kind) { - .lib => .Lib, - .obj => .Obj, - .exe, .@"test" => .Exe, - }, - .link_mode = if (options.linkage) |some| @as(std.builtin.LinkMode, switch (some) { - .dynamic => .Dynamic, - .static => .Static, - }) else null, - .version = options.version, - }) catch @panic("OOM"); - - const self = owner.allocator.create(CompileStep) catch @panic("OOM"); - self.* = CompileStep{ - .strip = null, - .unwind_tables = null, - .verbose_link = false, - .verbose_cc = false, - .optimize = options.optimize, - .target = options.target, - .linkage = options.linkage, - .kind = options.kind, - .root_src = root_src, - .name = name, - .frameworks = StringHashMap(FrameworkLinkInfo).init(owner.allocator), - .step = Step.init(.{ - .id = base_id, - .name = step_name, - .owner = owner, - .makeFn = make, - .max_rss = options.max_rss, - }), - .version = options.version, - .out_filename = out_filename, - .out_h_filename = owner.fmt("{s}.h", .{name}), - .out_lib_filename = undefined, - .out_pdb_filename = owner.fmt("{s}.pdb", .{name}), - .major_only_filename = null, - .name_only_filename = null, - .modules = std.StringArrayHashMap(*Module).init(owner.allocator), - .include_dirs = ArrayList(IncludeDir).init(owner.allocator), - .link_objects = ArrayList(LinkObject).init(owner.allocator), - .c_macros = ArrayList([]const u8).init(owner.allocator), - .lib_paths = ArrayList(FileSource).init(owner.allocator), - .rpaths = ArrayList(FileSource).init(owner.allocator), - .framework_dirs = ArrayList(FileSource).init(owner.allocator), - .installed_headers = ArrayList(*Step).init(owner.allocator), - .c_std = std.Build.CStd.C99, - .zig_lib_dir = null, - .main_pkg_path = null, - .exec_cmd_args = null, - .filter = options.filter, - .test_runner = options.test_runner, - .disable_stack_probing = false, - .disable_sanitize_c = false, - .sanitize_thread = false, - .rdynamic = false, - .override_dest_dir = null, - .installed_path = null, - .force_undefined_symbols = StringHashMap(void).init(owner.allocator), - - .output_path_source = GeneratedFile{ .step = &self.step }, - .output_lib_path_source = GeneratedFile{ .step = &self.step }, - .output_h_path_source = GeneratedFile{ .step = &self.step }, - .output_pdb_path_source = GeneratedFile{ .step = &self.step }, - .output_dirname_source = GeneratedFile{ .step = &self.step }, - - .target_info = target_info, - - .is_linking_libc = options.link_libc orelse false, - .is_linking_libcpp = false, - .single_threaded = options.single_threaded, - .use_llvm = options.use_llvm, - .use_lld = options.use_lld, - }; - - if (self.kind == .lib) { - if (self.linkage != null and self.linkage.? 
== .static) { - self.out_lib_filename = self.out_filename; - } else if (self.version) |version| { - if (target_info.target.isDarwin()) { - self.major_only_filename = owner.fmt("lib{s}.{d}.dylib", .{ - self.name, - version.major, - }); - self.name_only_filename = owner.fmt("lib{s}.dylib", .{self.name}); - self.out_lib_filename = self.out_filename; - } else if (target_info.target.os.tag == .windows) { - self.out_lib_filename = owner.fmt("{s}.lib", .{self.name}); - } else { - self.major_only_filename = owner.fmt("lib{s}.so.{d}", .{ self.name, version.major }); - self.name_only_filename = owner.fmt("lib{s}.so", .{self.name}); - self.out_lib_filename = self.out_filename; - } - } else { - if (target_info.target.isDarwin()) { - self.out_lib_filename = self.out_filename; - } else if (target_info.target.os.tag == .windows) { - self.out_lib_filename = owner.fmt("{s}.lib", .{self.name}); - } else { - self.out_lib_filename = self.out_filename; - } - } - } - - if (root_src) |rs| rs.addStepDependencies(&self.step); - - return self; -} - -pub fn installHeader(cs: *CompileStep, src_path: []const u8, dest_rel_path: []const u8) void { - const b = cs.step.owner; - const install_file = b.addInstallHeaderFile(src_path, dest_rel_path); - b.getInstallStep().dependOn(&install_file.step); - cs.installed_headers.append(&install_file.step) catch @panic("OOM"); -} - -pub const InstallConfigHeaderOptions = struct { - install_dir: InstallDir = .header, - dest_rel_path: ?[]const u8 = null, -}; - -pub fn installConfigHeader( - cs: *CompileStep, - config_header: *ConfigHeaderStep, - options: InstallConfigHeaderOptions, -) void { - const dest_rel_path = options.dest_rel_path orelse config_header.include_path; - const b = cs.step.owner; - const install_file = b.addInstallFileWithDir( - .{ .generated = &config_header.output_file }, - options.install_dir, - dest_rel_path, - ); - install_file.step.dependOn(&config_header.step); - b.getInstallStep().dependOn(&install_file.step); - cs.installed_headers.append(&install_file.step) catch @panic("OOM"); -} - -pub fn installHeadersDirectory( - a: *CompileStep, - src_dir_path: []const u8, - dest_rel_path: []const u8, -) void { - return installHeadersDirectoryOptions(a, .{ - .source_dir = src_dir_path, - .install_dir = .header, - .install_subdir = dest_rel_path, - }); -} - -pub fn installHeadersDirectoryOptions( - cs: *CompileStep, - options: std.Build.InstallDirStep.Options, -) void { - const b = cs.step.owner; - const install_dir = b.addInstallDirectory(options); - b.getInstallStep().dependOn(&install_dir.step); - cs.installed_headers.append(&install_dir.step) catch @panic("OOM"); -} - -pub fn installLibraryHeaders(cs: *CompileStep, l: *CompileStep) void { - assert(l.kind == .lib); - const b = cs.step.owner; - const install_step = b.getInstallStep(); - // Copy each element from installed_headers, modifying the builder - // to be the new parent's builder. 
- for (l.installed_headers.items) |step| { - const step_copy = switch (step.id) { - inline .install_file, .install_dir => |id| blk: { - const T = id.Type(); - const ptr = b.allocator.create(T) catch @panic("OOM"); - ptr.* = step.cast(T).?.*; - ptr.dest_builder = b; - break :blk &ptr.step; - }, - else => unreachable, - }; - cs.installed_headers.append(step_copy) catch @panic("OOM"); - install_step.dependOn(step_copy); - } - cs.installed_headers.appendSlice(l.installed_headers.items) catch @panic("OOM"); -} - -pub fn addObjCopy(cs: *CompileStep, options: ObjCopyStep.Options) *ObjCopyStep { - const b = cs.step.owner; - var copy = options; - if (copy.basename == null) { - if (options.format) |f| { - copy.basename = b.fmt("{s}.{s}", .{ cs.name, @tagName(f) }); - } else { - copy.basename = cs.name; - } - } - return b.addObjCopy(cs.getOutputSource(), copy); -} - -/// This function would run in the context of the package that created the executable, -/// which is undesirable when running an executable provided by a dependency package. -pub const run = @compileError("deprecated; use std.Build.addRunArtifact"); - -/// This function would install in the context of the package that created the artifact, -/// which is undesirable when installing an artifact provided by a dependency package. -pub const install = @compileError("deprecated; use std.Build.installArtifact"); - -pub fn checkObject(self: *CompileStep) *CheckObjectStep { - return CheckObjectStep.create(self.step.owner, self.getOutputSource(), self.target_info.target.ofmt); -} - -pub fn setLinkerScriptPath(self: *CompileStep, source: FileSource) void { - const b = self.step.owner; - self.linker_script = source.dupe(b); - source.addStepDependencies(&self.step); -} - -pub fn forceUndefinedSymbol(self: *CompileStep, symbol_name: []const u8) void { - const b = self.step.owner; - self.force_undefined_symbols.put(b.dupe(symbol_name), {}) catch @panic("OOM"); -} - -pub fn linkFramework(self: *CompileStep, framework_name: []const u8) void { - const b = self.step.owner; - self.frameworks.put(b.dupe(framework_name), .{}) catch @panic("OOM"); -} - -pub fn linkFrameworkNeeded(self: *CompileStep, framework_name: []const u8) void { - const b = self.step.owner; - self.frameworks.put(b.dupe(framework_name), .{ - .needed = true, - }) catch @panic("OOM"); -} - -pub fn linkFrameworkWeak(self: *CompileStep, framework_name: []const u8) void { - const b = self.step.owner; - self.frameworks.put(b.dupe(framework_name), .{ - .weak = true, - }) catch @panic("OOM"); -} - -/// Returns whether the library, executable, or object depends on a particular system library. 
-pub fn dependsOnSystemLibrary(self: CompileStep, name: []const u8) bool { - if (isLibCLibrary(name)) { - return self.is_linking_libc; - } - if (isLibCppLibrary(name)) { - return self.is_linking_libcpp; - } - for (self.link_objects.items) |link_object| { - switch (link_object) { - .system_lib => |lib| if (mem.eql(u8, lib.name, name)) return true, - else => continue, - } - } - return false; -} - -pub fn linkLibrary(self: *CompileStep, lib: *CompileStep) void { - assert(lib.kind == .lib); - self.linkLibraryOrObject(lib); -} - -pub fn isDynamicLibrary(self: *CompileStep) bool { - return self.kind == .lib and self.linkage == Linkage.dynamic; -} - -pub fn isStaticLibrary(self: *CompileStep) bool { - return self.kind == .lib and self.linkage != Linkage.dynamic; -} - -pub fn producesPdbFile(self: *CompileStep) bool { - if (!self.target.isWindows() and !self.target.isUefi()) return false; - if (self.target.getObjectFormat() == .c) return false; - if (self.strip == true) return false; - return self.isDynamicLibrary() or self.kind == .exe or self.kind == .@"test"; -} - -pub fn linkLibC(self: *CompileStep) void { - self.is_linking_libc = true; -} - -pub fn linkLibCpp(self: *CompileStep) void { - self.is_linking_libcpp = true; -} - -/// If the value is omitted, it is set to 1. -/// `name` and `value` need not live longer than the function call. -pub fn defineCMacro(self: *CompileStep, name: []const u8, value: ?[]const u8) void { - const b = self.step.owner; - const macro = std.Build.constructCMacro(b.allocator, name, value); - self.c_macros.append(macro) catch @panic("OOM"); -} - -/// name_and_value looks like [name]=[value]. If the value is omitted, it is set to 1. -pub fn defineCMacroRaw(self: *CompileStep, name_and_value: []const u8) void { - const b = self.step.owner; - self.c_macros.append(b.dupe(name_and_value)) catch @panic("OOM"); -} - -/// This one has no integration with anything, it just puts -lname on the command line. -/// Prefer to use `linkSystemLibrary` instead. -pub fn linkSystemLibraryName(self: *CompileStep, name: []const u8) void { - const b = self.step.owner; - self.link_objects.append(.{ - .system_lib = .{ - .name = b.dupe(name), - .needed = false, - .weak = false, - .use_pkg_config = .no, - }, - }) catch @panic("OOM"); -} - -/// This one has no integration with anything, it just puts -needed-lname on the command line. -/// Prefer to use `linkSystemLibraryNeeded` instead. -pub fn linkSystemLibraryNeededName(self: *CompileStep, name: []const u8) void { - const b = self.step.owner; - self.link_objects.append(.{ - .system_lib = .{ - .name = b.dupe(name), - .needed = true, - .weak = false, - .use_pkg_config = .no, - }, - }) catch @panic("OOM"); -} - -/// Darwin-only. This one has no integration with anything, it just puts -weak-lname on the -/// command line. Prefer to use `linkSystemLibraryWeak` instead. -pub fn linkSystemLibraryWeakName(self: *CompileStep, name: []const u8) void { - const b = self.step.owner; - self.link_objects.append(.{ - .system_lib = .{ - .name = b.dupe(name), - .needed = false, - .weak = true, - .use_pkg_config = .no, - }, - }) catch @panic("OOM"); -} - -/// This links against a system library, exclusively using pkg-config to find the library. -/// Prefer to use `linkSystemLibrary` instead. 
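// A sketch of typical system-library linking, assuming `exe` is a *CompileStep; library
// and macro names are illustrative. `linkSystemLibrary` consults pkg-config when it can
// and falls back to plain `-l<name>`, while the *Name variants skip pkg-config entirely:
//
//     exe.linkLibC();
//     exe.linkSystemLibrary("z");        // resolved via pkg-config when available
//     exe.linkSystemLibraryName("m");    // plain -lm, no pkg-config
//     exe.defineCMacro("MYLIB_ENABLE_FOO", "1");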
-pub fn linkSystemLibraryPkgConfigOnly(self: *CompileStep, lib_name: []const u8) void { - const b = self.step.owner; - self.link_objects.append(.{ - .system_lib = .{ - .name = b.dupe(lib_name), - .needed = false, - .weak = false, - .use_pkg_config = .force, - }, - }) catch @panic("OOM"); -} - -/// This links against a system library, exclusively using pkg-config to find the library. -/// Prefer to use `linkSystemLibraryNeeded` instead. -pub fn linkSystemLibraryNeededPkgConfigOnly(self: *CompileStep, lib_name: []const u8) void { - const b = self.step.owner; - self.link_objects.append(.{ - .system_lib = .{ - .name = b.dupe(lib_name), - .needed = true, - .weak = false, - .use_pkg_config = .force, - }, - }) catch @panic("OOM"); -} - -/// Run pkg-config for the given library name and parse the output, returning the arguments -/// that should be passed to zig to link the given library. -fn runPkgConfig(self: *CompileStep, lib_name: []const u8) ![]const []const u8 { - const b = self.step.owner; - const pkg_name = match: { - // First we have to map the library name to pkg config name. Unfortunately, - // there are several examples where this is not straightforward: - // -lSDL2 -> pkg-config sdl2 - // -lgdk-3 -> pkg-config gdk-3.0 - // -latk-1.0 -> pkg-config atk - const pkgs = try getPkgConfigList(b); - - // Exact match means instant winner. - for (pkgs) |pkg| { - if (mem.eql(u8, pkg.name, lib_name)) { - break :match pkg.name; - } - } - - // Next we'll try ignoring case. - for (pkgs) |pkg| { - if (std.ascii.eqlIgnoreCase(pkg.name, lib_name)) { - break :match pkg.name; - } - } - - // Now try appending ".0". - for (pkgs) |pkg| { - if (std.ascii.indexOfIgnoreCase(pkg.name, lib_name)) |pos| { - if (pos != 0) continue; - if (mem.eql(u8, pkg.name[lib_name.len..], ".0")) { - break :match pkg.name; - } - } - } - - // Trimming "-1.0". - if (mem.endsWith(u8, lib_name, "-1.0")) { - const trimmed_lib_name = lib_name[0 .. 
lib_name.len - "-1.0".len]; - for (pkgs) |pkg| { - if (std.ascii.eqlIgnoreCase(pkg.name, trimmed_lib_name)) { - break :match pkg.name; - } - } - } - - return error.PackageNotFound; - }; - - var code: u8 = undefined; - const stdout = if (b.execAllowFail(&[_][]const u8{ - "pkg-config", - pkg_name, - "--cflags", - "--libs", - }, &code, .Ignore)) |stdout| stdout else |err| switch (err) { - error.ProcessTerminated => return error.PkgConfigCrashed, - error.ExecNotSupported => return error.PkgConfigFailed, - error.ExitCodeFailure => return error.PkgConfigFailed, - error.FileNotFound => return error.PkgConfigNotInstalled, - else => return err, - }; - - var zig_args = ArrayList([]const u8).init(b.allocator); - defer zig_args.deinit(); - - var it = mem.tokenize(u8, stdout, " \r\n\t"); - while (it.next()) |tok| { - if (mem.eql(u8, tok, "-I")) { - const dir = it.next() orelse return error.PkgConfigInvalidOutput; - try zig_args.appendSlice(&[_][]const u8{ "-I", dir }); - } else if (mem.startsWith(u8, tok, "-I")) { - try zig_args.append(tok); - } else if (mem.eql(u8, tok, "-L")) { - const dir = it.next() orelse return error.PkgConfigInvalidOutput; - try zig_args.appendSlice(&[_][]const u8{ "-L", dir }); - } else if (mem.startsWith(u8, tok, "-L")) { - try zig_args.append(tok); - } else if (mem.eql(u8, tok, "-l")) { - const lib = it.next() orelse return error.PkgConfigInvalidOutput; - try zig_args.appendSlice(&[_][]const u8{ "-l", lib }); - } else if (mem.startsWith(u8, tok, "-l")) { - try zig_args.append(tok); - } else if (mem.eql(u8, tok, "-D")) { - const macro = it.next() orelse return error.PkgConfigInvalidOutput; - try zig_args.appendSlice(&[_][]const u8{ "-D", macro }); - } else if (mem.startsWith(u8, tok, "-D")) { - try zig_args.append(tok); - } else if (b.debug_pkg_config) { - return self.step.fail("unknown pkg-config flag '{s}'", .{tok}); - } - } - - return zig_args.toOwnedSlice(); -} - -pub fn linkSystemLibrary(self: *CompileStep, name: []const u8) void { - self.linkSystemLibraryInner(name, .{}); -} - -pub fn linkSystemLibraryNeeded(self: *CompileStep, name: []const u8) void { - self.linkSystemLibraryInner(name, .{ .needed = true }); -} - -pub fn linkSystemLibraryWeak(self: *CompileStep, name: []const u8) void { - self.linkSystemLibraryInner(name, .{ .weak = true }); -} - -fn linkSystemLibraryInner(self: *CompileStep, name: []const u8, opts: struct { - needed: bool = false, - weak: bool = false, -}) void { - const b = self.step.owner; - if (isLibCLibrary(name)) { - self.linkLibC(); - return; - } - if (isLibCppLibrary(name)) { - self.linkLibCpp(); - return; - } - - self.link_objects.append(.{ - .system_lib = .{ - .name = b.dupe(name), - .needed = opts.needed, - .weak = opts.weak, - .use_pkg_config = .yes, - }, - }) catch @panic("OOM"); -} - -/// Handy when you have many C/C++ source files and want them all to have the same flags. 
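// A sketch of compiling a group of C sources with shared flags, assuming `exe` is a
// *CompileStep; the file names and flags are illustrative:
//
//     exe.addCSourceFiles(&.{
//         "src/a.c",
//         "src/b.c",
//     }, &.{ "-Wall", "-Wextra", "-std=c99" });
//     exe.addCSourceFile("src/special.c", &.{"-fno-strict-aliasing"});
//     exe.addIncludePath("include");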
-pub fn addCSourceFiles(self: *CompileStep, files: []const []const u8, flags: []const []const u8) void { - const b = self.step.owner; - const c_source_files = b.allocator.create(CSourceFiles) catch @panic("OOM"); - - const files_copy = b.dupeStrings(files); - const flags_copy = b.dupeStrings(flags); - - c_source_files.* = .{ - .files = files_copy, - .flags = flags_copy, - }; - self.link_objects.append(.{ .c_source_files = c_source_files }) catch @panic("OOM"); -} - -pub fn addCSourceFile(self: *CompileStep, file: []const u8, flags: []const []const u8) void { - self.addCSourceFileSource(.{ - .args = flags, - .source = .{ .path = file }, - }); -} - -pub fn addCSourceFileSource(self: *CompileStep, source: CSourceFile) void { - const b = self.step.owner; - const c_source_file = b.allocator.create(CSourceFile) catch @panic("OOM"); - c_source_file.* = source.dupe(b); - self.link_objects.append(.{ .c_source_file = c_source_file }) catch @panic("OOM"); - source.source.addStepDependencies(&self.step); -} - -pub fn setVerboseLink(self: *CompileStep, value: bool) void { - self.verbose_link = value; -} - -pub fn setVerboseCC(self: *CompileStep, value: bool) void { - self.verbose_cc = value; -} - -pub fn overrideZigLibDir(self: *CompileStep, dir_path: []const u8) void { - const b = self.step.owner; - self.zig_lib_dir = b.dupePath(dir_path); -} - -pub fn setMainPkgPath(self: *CompileStep, dir_path: []const u8) void { - const b = self.step.owner; - self.main_pkg_path = b.dupePath(dir_path); -} - -pub fn setLibCFile(self: *CompileStep, libc_file: ?FileSource) void { - const b = self.step.owner; - self.libc_file = if (libc_file) |f| f.dupe(b) else null; -} - -/// Returns the generated executable, library or object file. -/// To run an executable built with zig build, use `run`, or create an install step and invoke it. -pub fn getOutputSource(self: *CompileStep) FileSource { - return .{ .generated = &self.output_path_source }; -} - -pub fn getOutputDirectorySource(self: *CompileStep) FileSource { - return .{ .generated = &self.output_dirname_source }; -} - -/// Returns the generated import library. This function can only be called for libraries. -pub fn getOutputLibSource(self: *CompileStep) FileSource { - assert(self.kind == .lib); - return .{ .generated = &self.output_lib_path_source }; -} - -/// Returns the generated header file. -/// This function can only be called for libraries or object files which have `emit_h` set. -pub fn getOutputHSource(self: *CompileStep) FileSource { - assert(self.kind != .exe and self.kind != .@"test"); - assert(self.emit_h); - return .{ .generated = &self.output_h_path_source }; -} - -/// Returns the generated PDB file. This function can only be called for Windows and UEFI. -pub fn getOutputPdbSource(self: *CompileStep) FileSource { - // TODO: Is this right? Isn't PDB for *any* PE/COFF file? 
- assert(self.target.isWindows() or self.target.isUefi()); - return .{ .generated = &self.output_pdb_path_source }; -} - -pub fn addAssemblyFile(self: *CompileStep, path: []const u8) void { - const b = self.step.owner; - self.link_objects.append(.{ - .assembly_file = .{ .path = b.dupe(path) }, - }) catch @panic("OOM"); -} - -pub fn addAssemblyFileSource(self: *CompileStep, source: FileSource) void { - const b = self.step.owner; - const source_duped = source.dupe(b); - self.link_objects.append(.{ .assembly_file = source_duped }) catch @panic("OOM"); - source_duped.addStepDependencies(&self.step); -} - -pub fn addObjectFile(self: *CompileStep, source_file: []const u8) void { - self.addObjectFileSource(.{ .path = source_file }); -} - -pub fn addObjectFileSource(self: *CompileStep, source: FileSource) void { - const b = self.step.owner; - self.link_objects.append(.{ .static_path = source.dupe(b) }) catch @panic("OOM"); - source.addStepDependencies(&self.step); -} - -pub fn addObject(self: *CompileStep, obj: *CompileStep) void { - assert(obj.kind == .obj); - self.linkLibraryOrObject(obj); -} - -pub const addSystemIncludeDir = @compileError("deprecated; use addSystemIncludePath"); -pub const addIncludeDir = @compileError("deprecated; use addIncludePath"); -pub const addLibPath = @compileError("deprecated, use addLibraryPath"); -pub const addFrameworkDir = @compileError("deprecated, use addFrameworkPath"); - -pub fn addSystemIncludePath(self: *CompileStep, path: []const u8) void { - const b = self.step.owner; - self.include_dirs.append(IncludeDir{ .raw_path_system = b.dupe(path) }) catch @panic("OOM"); -} - -pub fn addIncludePath(self: *CompileStep, path: []const u8) void { - const b = self.step.owner; - self.include_dirs.append(IncludeDir{ .raw_path = b.dupe(path) }) catch @panic("OOM"); -} - -pub fn addConfigHeader(self: *CompileStep, config_header: *ConfigHeaderStep) void { - self.step.dependOn(&config_header.step); - self.include_dirs.append(.{ .config_header_step = config_header }) catch @panic("OOM"); -} - -pub fn addLibraryPath(self: *CompileStep, path: []const u8) void { - const b = self.step.owner; - self.lib_paths.append(.{ .path = b.dupe(path) }) catch @panic("OOM"); -} - -pub fn addLibraryPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { - self.lib_paths.append(directory_source) catch @panic("OOM"); - directory_source.addStepDependencies(&self.step); -} - -pub fn addRPath(self: *CompileStep, path: []const u8) void { - const b = self.step.owner; - self.rpaths.append(.{ .path = b.dupe(path) }) catch @panic("OOM"); -} - -pub fn addRPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { - self.rpaths.append(directory_source) catch @panic("OOM"); - directory_source.addStepDependencies(&self.step); -} - -pub fn addFrameworkPath(self: *CompileStep, dir_path: []const u8) void { - const b = self.step.owner; - self.framework_dirs.append(.{ .path = b.dupe(dir_path) }) catch @panic("OOM"); -} - -pub fn addFrameworkPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { - self.framework_dirs.append(directory_source) catch @panic("OOM"); - directory_source.addStepDependencies(&self.step); -} - -/// Adds a module to be used with `@import` and exposing it in the current -/// package's module table using `name`. 
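// A sketch of exposing modules for `@import`, assuming `b` and `exe` are in scope;
// module names and source paths are illustrative:
//
//     const utils = b.createModule(.{
//         .source_file = .{ .path = "src/utils.zig" },
//     });
//     exe.addModule("utils", utils); // usable as @import("utils")
//     exe.addAnonymousModule("fixtures", .{
//         .source_file = .{ .path = "test/fixtures.zig" },
//     });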
-pub fn addModule(cs: *CompileStep, name: []const u8, module: *Module) void { - const b = cs.step.owner; - cs.modules.put(b.dupe(name), module) catch @panic("OOM"); - - var done = std.AutoHashMap(*Module, void).init(b.allocator); - defer done.deinit(); - cs.addRecursiveBuildDeps(module, &done) catch @panic("OOM"); -} - -/// Adds a module to be used with `@import` without exposing it in the current -/// package's module table. -pub fn addAnonymousModule(cs: *CompileStep, name: []const u8, options: std.Build.CreateModuleOptions) void { - const b = cs.step.owner; - const module = b.createModule(options); - return addModule(cs, name, module); -} - -pub fn addOptions(cs: *CompileStep, module_name: []const u8, options: *OptionsStep) void { - addModule(cs, module_name, options.createModule()); -} - -fn addRecursiveBuildDeps(cs: *CompileStep, module: *Module, done: *std.AutoHashMap(*Module, void)) !void { - if (done.contains(module)) return; - try done.put(module, {}); - module.source_file.addStepDependencies(&cs.step); - for (module.dependencies.values()) |dep| { - try cs.addRecursiveBuildDeps(dep, done); - } -} - -/// If Vcpkg was found on the system, it will be added to include and lib -/// paths for the specified target. -pub fn addVcpkgPaths(self: *CompileStep, linkage: CompileStep.Linkage) !void { - const b = self.step.owner; - // Ideally in the Unattempted case we would call the function recursively - // after findVcpkgRoot and have only one switch statement, but the compiler - // cannot resolve the error set. - switch (b.vcpkg_root) { - .unattempted => { - b.vcpkg_root = if (try findVcpkgRoot(b.allocator)) |root| - VcpkgRoot{ .found = root } - else - .not_found; - }, - .not_found => return error.VcpkgNotFound, - .found => {}, - } - - switch (b.vcpkg_root) { - .unattempted => unreachable, - .not_found => return error.VcpkgNotFound, - .found => |root| { - const allocator = b.allocator; - const triplet = try self.target.vcpkgTriplet(allocator, if (linkage == .static) .Static else .Dynamic); - defer b.allocator.free(triplet); - - const include_path = b.pathJoin(&.{ root, "installed", triplet, "include" }); - errdefer allocator.free(include_path); - try self.include_dirs.append(IncludeDir{ .raw_path = include_path }); - - const lib_path = b.pathJoin(&.{ root, "installed", triplet, "lib" }); - try self.lib_paths.append(.{ .path = lib_path }); - - self.vcpkg_bin_path = b.pathJoin(&.{ root, "installed", triplet, "bin" }); - }, - } -} - -pub fn setExecCmd(self: *CompileStep, args: []const ?[]const u8) void { - const b = self.step.owner; - assert(self.kind == .@"test"); - const duped_args = b.allocator.alloc(?[]u8, args.len) catch @panic("OOM"); - for (args, 0..) |arg, i| { - duped_args[i] = if (arg) |a| b.dupe(a) else null; - } - self.exec_cmd_args = duped_args; -} - -fn linkLibraryOrObject(self: *CompileStep, other: *CompileStep) void { - self.step.dependOn(&other.step); - self.link_objects.append(.{ .other_step = other }) catch @panic("OOM"); - self.include_dirs.append(.{ .other_step = other }) catch @panic("OOM"); - - for (other.installed_headers.items) |install_step| { - self.step.dependOn(install_step); - } -} - -fn appendModuleArgs( - cs: *CompileStep, - zig_args: *ArrayList([]const u8), -) error{OutOfMemory}!void { - const b = cs.step.owner; - // First, traverse the whole dependency graph and give every module a unique name, ideally one - // named after what it's called somewhere in the graph. 
It will help here to have both a mapping - // from module to name and a set of all the currently-used names. - var mod_names = std.AutoHashMap(*Module, []const u8).init(b.allocator); - var names = std.StringHashMap(void).init(b.allocator); - - var to_name = std.ArrayList(struct { - name: []const u8, - mod: *Module, - }).init(b.allocator); - { - var it = cs.modules.iterator(); - while (it.next()) |kv| { - // While we're traversing the root dependencies, let's make sure that no module names - // have colons in them, since the CLI forbids it. We handle this for transitive - // dependencies further down. - if (std.mem.indexOfScalar(u8, kv.key_ptr.*, ':') != null) { - @panic("Module names cannot contain colons"); - } - try to_name.append(.{ - .name = kv.key_ptr.*, - .mod = kv.value_ptr.*, - }); - } - } - - while (to_name.popOrNull()) |dep| { - if (mod_names.contains(dep.mod)) continue; - - // We'll use this buffer to store the name we decide on - var buf = try b.allocator.alloc(u8, dep.name.len + 32); - // First, try just the exposed dependency name - @memcpy(buf[0..dep.name.len], dep.name); - var name = buf[0..dep.name.len]; - var n: usize = 0; - while (names.contains(name)) { - // If that failed, append an incrementing number to the end - name = std.fmt.bufPrint(buf, "{s}{}", .{ dep.name, n }) catch unreachable; - n += 1; - } - - try mod_names.put(dep.mod, name); - try names.put(name, {}); - - var it = dep.mod.dependencies.iterator(); - while (it.next()) |kv| { - // Same colon-in-name check as above, but for transitive dependencies. - if (std.mem.indexOfScalar(u8, kv.key_ptr.*, ':') != null) { - @panic("Module names cannot contain colons"); - } - try to_name.append(.{ - .name = kv.key_ptr.*, - .mod = kv.value_ptr.*, - }); - } - } - - // Since the module names given to the CLI are based off of the exposed names, we already know - // that none of the CLI names have colons in them, so there's no need to check that explicitly. - - // Every module in the graph is now named; output their definitions - { - var it = mod_names.iterator(); - while (it.next()) |kv| { - const mod = kv.key_ptr.*; - const name = kv.value_ptr.*; - - const deps_str = try constructDepString(b.allocator, mod_names, mod.dependencies); - const src = mod.builder.pathFromRoot(mod.source_file.getPath(mod.builder)); - try zig_args.append("--mod"); - try zig_args.append(try std.fmt.allocPrint(b.allocator, "{s}:{s}:{s}", .{ name, deps_str, src })); - } - } - - // Lastly, output the root dependencies - const deps_str = try constructDepString(b.allocator, mod_names, cs.modules); - if (deps_str.len > 0) { - try zig_args.append("--deps"); - try zig_args.append(deps_str); - } -} - -fn constructDepString( - allocator: std.mem.Allocator, - mod_names: std.AutoHashMap(*Module, []const u8), - deps: std.StringArrayHashMap(*Module), -) ![]const u8 { - var deps_str = std.ArrayList(u8).init(allocator); - var it = deps.iterator(); - while (it.next()) |kv| { - const expose = kv.key_ptr.*; - const name = mod_names.get(kv.value_ptr.*).?; - if (std.mem.eql(u8, expose, name)) { - try deps_str.writer().print("{s},", .{name}); - } else { - try deps_str.writer().print("{s}={s},", .{ expose, name }); - } - } - if (deps_str.items.len > 0) { - return deps_str.items[0 .. 
deps_str.items.len - 1]; // omit trailing comma - } else { - return ""; - } -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - const b = step.owner; - const self = @fieldParentPtr(CompileStep, "step", step); - - if (self.root_src == null and self.link_objects.items.len == 0) { - return step.fail("the linker needs one or more objects to link", .{}); - } - - var zig_args = ArrayList([]const u8).init(b.allocator); - defer zig_args.deinit(); - - try zig_args.append(b.zig_exe); - - const cmd = switch (self.kind) { - .lib => "build-lib", - .exe => "build-exe", - .obj => "build-obj", - .@"test" => "test", - }; - try zig_args.append(cmd); - - if (b.reference_trace) |some| { - try zig_args.append(try std.fmt.allocPrint(b.allocator, "-freference-trace={d}", .{some})); - } - - try addFlag(&zig_args, "LLVM", self.use_llvm); - try addFlag(&zig_args, "LLD", self.use_lld); - - if (self.target.ofmt) |ofmt| { - try zig_args.append(try std.fmt.allocPrint(b.allocator, "-ofmt={s}", .{@tagName(ofmt)})); - } - - if (self.entry_symbol_name) |entry| { - try zig_args.append("--entry"); - try zig_args.append(entry); - } - - { - var it = self.force_undefined_symbols.keyIterator(); - while (it.next()) |symbol_name| { - try zig_args.append("--force_undefined"); - try zig_args.append(symbol_name.*); - } - } - - if (self.stack_size) |stack_size| { - try zig_args.append("--stack"); - try zig_args.append(try std.fmt.allocPrint(b.allocator, "{}", .{stack_size})); - } - - if (self.root_src) |root_src| try zig_args.append(root_src.getPath(b)); - - // We will add link objects from transitive dependencies, but we want to keep - // all link objects in the same order provided. - // This array is used to keep self.link_objects immutable. - var transitive_deps: TransitiveDeps = .{ - .link_objects = ArrayList(LinkObject).init(b.allocator), - .seen_system_libs = StringHashMap(void).init(b.allocator), - .seen_steps = std.AutoHashMap(*const Step, void).init(b.allocator), - .is_linking_libcpp = self.is_linking_libcpp, - .is_linking_libc = self.is_linking_libc, - .frameworks = &self.frameworks, - }; - - try transitive_deps.seen_steps.put(&self.step, {}); - try transitive_deps.add(self.link_objects.items); - - var prev_has_extra_flags = false; - - for (transitive_deps.link_objects.items) |link_object| { - switch (link_object) { - .static_path => |static_path| try zig_args.append(static_path.getPath(b)), - - .other_step => |other| switch (other.kind) { - .exe => @panic("Cannot link with an executable build artifact"), - .@"test" => @panic("Cannot link with a test"), - .obj => { - try zig_args.append(other.getOutputSource().getPath(b)); - }, - .lib => l: { - if (self.isStaticLibrary() and other.isStaticLibrary()) { - // Avoid putting a static library inside a static library. 
- break :l; - } - - const full_path_lib = other.getOutputLibSource().getPath(b); - try zig_args.append(full_path_lib); - - if (other.linkage == Linkage.dynamic and !self.target.isWindows()) { - if (fs.path.dirname(full_path_lib)) |dirname| { - try zig_args.append("-rpath"); - try zig_args.append(dirname); - } - } - }, - }, - - .system_lib => |system_lib| { - const prefix: []const u8 = prefix: { - if (system_lib.needed) break :prefix "-needed-l"; - if (system_lib.weak) break :prefix "-weak-l"; - break :prefix "-l"; - }; - switch (system_lib.use_pkg_config) { - .no => try zig_args.append(b.fmt("{s}{s}", .{ prefix, system_lib.name })), - .yes, .force => { - if (self.runPkgConfig(system_lib.name)) |args| { - try zig_args.appendSlice(args); - } else |err| switch (err) { - error.PkgConfigInvalidOutput, - error.PkgConfigCrashed, - error.PkgConfigFailed, - error.PkgConfigNotInstalled, - error.PackageNotFound, - => switch (system_lib.use_pkg_config) { - .yes => { - // pkg-config failed, so fall back to linking the library - // by name directly. - try zig_args.append(b.fmt("{s}{s}", .{ - prefix, - system_lib.name, - })); - }, - .force => { - panic("pkg-config failed for library {s}", .{system_lib.name}); - }, - .no => unreachable, - }, - - else => |e| return e, - } - }, - } - }, - - .assembly_file => |asm_file| { - if (prev_has_extra_flags) { - try zig_args.append("-extra-cflags"); - try zig_args.append("--"); - prev_has_extra_flags = false; - } - try zig_args.append(asm_file.getPath(b)); - }, - - .c_source_file => |c_source_file| { - if (c_source_file.args.len == 0) { - if (prev_has_extra_flags) { - try zig_args.append("-cflags"); - try zig_args.append("--"); - prev_has_extra_flags = false; - } - } else { - try zig_args.append("-cflags"); - for (c_source_file.args) |arg| { - try zig_args.append(arg); - } - try zig_args.append("--"); - } - try zig_args.append(c_source_file.source.getPath(b)); - }, - - .c_source_files => |c_source_files| { - if (c_source_files.flags.len == 0) { - if (prev_has_extra_flags) { - try zig_args.append("-cflags"); - try zig_args.append("--"); - prev_has_extra_flags = false; - } - } else { - try zig_args.append("-cflags"); - for (c_source_files.flags) |flag| { - try zig_args.append(flag); - } - try zig_args.append("--"); - } - for (c_source_files.files) |file| { - try zig_args.append(b.pathFromRoot(file)); - } - }, - } - } - - if (transitive_deps.is_linking_libcpp) { - try zig_args.append("-lc++"); - } - - if (transitive_deps.is_linking_libc) { - try zig_args.append("-lc"); - } - - if (self.image_base) |image_base| { - try zig_args.append("--image-base"); - try zig_args.append(b.fmt("0x{x}", .{image_base})); - } - - if (self.filter) |filter| { - try zig_args.append("--test-filter"); - try zig_args.append(filter); - } - - if (self.test_evented_io) { - try zig_args.append("--test-evented-io"); - } - - if (self.test_runner) |test_runner| { - try zig_args.append("--test-runner"); - try zig_args.append(b.pathFromRoot(test_runner)); - } - - for (b.debug_log_scopes) |log_scope| { - try zig_args.append("--debug-log"); - try zig_args.append(log_scope); - } - - if (b.debug_compile_errors) { - try zig_args.append("--debug-compile-errors"); - } - - if (b.verbose_cimport) try zig_args.append("--verbose-cimport"); - if (b.verbose_air) try zig_args.append("--verbose-air"); - if (b.verbose_llvm_ir) |path| try zig_args.append(b.fmt("--verbose-llvm-ir={s}", .{path})); - if (b.verbose_llvm_bc) |path| try zig_args.append(b.fmt("--verbose-llvm-bc={s}", .{path})); - if (b.verbose_link or 
self.verbose_link) try zig_args.append("--verbose-link"); - if (b.verbose_cc or self.verbose_cc) try zig_args.append("--verbose-cc"); - if (b.verbose_llvm_cpu_features) try zig_args.append("--verbose-llvm-cpu-features"); - - if (self.emit_analysis.getArg(b, "emit-analysis")) |arg| try zig_args.append(arg); - if (self.emit_asm.getArg(b, "emit-asm")) |arg| try zig_args.append(arg); - if (self.emit_bin.getArg(b, "emit-bin")) |arg| try zig_args.append(arg); - if (self.emit_docs.getArg(b, "emit-docs")) |arg| try zig_args.append(arg); - if (self.emit_implib.getArg(b, "emit-implib")) |arg| try zig_args.append(arg); - if (self.emit_llvm_bc.getArg(b, "emit-llvm-bc")) |arg| try zig_args.append(arg); - if (self.emit_llvm_ir.getArg(b, "emit-llvm-ir")) |arg| try zig_args.append(arg); - - if (self.emit_h) try zig_args.append("-femit-h"); - - try addFlag(&zig_args, "strip", self.strip); - try addFlag(&zig_args, "unwind-tables", self.unwind_tables); - - if (self.dwarf_format) |dwarf_format| { - try zig_args.append(switch (dwarf_format) { - .@"32" => "-gdwarf32", - .@"64" => "-gdwarf64", - }); - } - - switch (self.compress_debug_sections) { - .none => {}, - .zlib => try zig_args.append("--compress-debug-sections=zlib"), - } - - if (self.link_eh_frame_hdr) { - try zig_args.append("--eh-frame-hdr"); - } - if (self.link_emit_relocs) { - try zig_args.append("--emit-relocs"); - } - if (self.link_function_sections) { - try zig_args.append("-ffunction-sections"); - } - if (self.link_gc_sections) |x| { - try zig_args.append(if (x) "--gc-sections" else "--no-gc-sections"); - } - if (!self.linker_dynamicbase) { - try zig_args.append("--no-dynamicbase"); - } - if (self.linker_allow_shlib_undefined) |x| { - try zig_args.append(if (x) "-fallow-shlib-undefined" else "-fno-allow-shlib-undefined"); - } - if (self.link_z_notext) { - try zig_args.append("-z"); - try zig_args.append("notext"); - } - if (!self.link_z_relro) { - try zig_args.append("-z"); - try zig_args.append("norelro"); - } - if (self.link_z_lazy) { - try zig_args.append("-z"); - try zig_args.append("lazy"); - } - if (self.link_z_common_page_size) |size| { - try zig_args.append("-z"); - try zig_args.append(b.fmt("common-page-size={d}", .{size})); - } - if (self.link_z_max_page_size) |size| { - try zig_args.append("-z"); - try zig_args.append(b.fmt("max-page-size={d}", .{size})); - } - - if (self.libc_file) |libc_file| { - try zig_args.append("--libc"); - try zig_args.append(libc_file.getPath(b)); - } else if (b.libc_file) |libc_file| { - try zig_args.append("--libc"); - try zig_args.append(libc_file); - } - - switch (self.optimize) { - .Debug => {}, // Skip since it's the default. - else => try zig_args.append(b.fmt("-O{s}", .{@tagName(self.optimize)})), - } - - try zig_args.append("--cache-dir"); - try zig_args.append(b.cache_root.path orelse "."); - - try zig_args.append("--global-cache-dir"); - try zig_args.append(b.global_cache_root.path orelse "."); - - try zig_args.append("--name"); - try zig_args.append(self.name); - - if (self.linkage) |some| switch (some) { - .dynamic => try zig_args.append("-dynamic"), - .static => try zig_args.append("-static"), - }; - if (self.kind == .lib and self.linkage != null and self.linkage.? 
== .dynamic) { - if (self.version) |version| { - try zig_args.append("--version"); - try zig_args.append(b.fmt("{}", .{version})); - } - - if (self.target.isDarwin()) { - const install_name = self.install_name orelse b.fmt("@rpath/{s}{s}{s}", .{ - self.target.libPrefix(), - self.name, - self.target.dynamicLibSuffix(), - }); - try zig_args.append("-install_name"); - try zig_args.append(install_name); - } - } - - if (self.entitlements) |entitlements| { - try zig_args.appendSlice(&[_][]const u8{ "--entitlements", entitlements }); - } - if (self.pagezero_size) |pagezero_size| { - const size = try std.fmt.allocPrint(b.allocator, "{x}", .{pagezero_size}); - try zig_args.appendSlice(&[_][]const u8{ "-pagezero_size", size }); - } - if (self.search_strategy) |strat| switch (strat) { - .paths_first => try zig_args.append("-search_paths_first"), - .dylibs_first => try zig_args.append("-search_dylibs_first"), - }; - if (self.headerpad_size) |headerpad_size| { - const size = try std.fmt.allocPrint(b.allocator, "{x}", .{headerpad_size}); - try zig_args.appendSlice(&[_][]const u8{ "-headerpad", size }); - } - if (self.headerpad_max_install_names) { - try zig_args.append("-headerpad_max_install_names"); - } - if (self.dead_strip_dylibs) { - try zig_args.append("-dead_strip_dylibs"); - } - - try addFlag(&zig_args, "compiler-rt", self.bundle_compiler_rt); - try addFlag(&zig_args, "single-threaded", self.single_threaded); - if (self.disable_stack_probing) { - try zig_args.append("-fno-stack-check"); - } - try addFlag(&zig_args, "stack-protector", self.stack_protector); - if (self.red_zone) |red_zone| { - if (red_zone) { - try zig_args.append("-mred-zone"); - } else { - try zig_args.append("-mno-red-zone"); - } - } - try addFlag(&zig_args, "omit-frame-pointer", self.omit_frame_pointer); - try addFlag(&zig_args, "dll-export-fns", self.dll_export_fns); - - if (self.disable_sanitize_c) { - try zig_args.append("-fno-sanitize-c"); - } - if (self.sanitize_thread) { - try zig_args.append("-fsanitize-thread"); - } - if (self.rdynamic) { - try zig_args.append("-rdynamic"); - } - if (self.import_memory) { - try zig_args.append("--import-memory"); - } - if (self.import_symbols) { - try zig_args.append("--import-symbols"); - } - if (self.import_table) { - try zig_args.append("--import-table"); - } - if (self.export_table) { - try zig_args.append("--export-table"); - } - if (self.initial_memory) |initial_memory| { - try zig_args.append(b.fmt("--initial-memory={d}", .{initial_memory})); - } - if (self.max_memory) |max_memory| { - try zig_args.append(b.fmt("--max-memory={d}", .{max_memory})); - } - if (self.shared_memory) { - try zig_args.append("--shared-memory"); - } - if (self.global_base) |global_base| { - try zig_args.append(b.fmt("--global-base={d}", .{global_base})); - } - - if (self.code_model != .default) { - try zig_args.append("-mcmodel"); - try zig_args.append(@tagName(self.code_model)); - } - if (self.wasi_exec_model) |model| { - try zig_args.append(b.fmt("-mexec-model={s}", .{@tagName(model)})); - } - for (self.export_symbol_names) |symbol_name| { - try zig_args.append(b.fmt("--export={s}", .{symbol_name})); - } - - if (!self.target.isNative()) { - try zig_args.appendSlice(&.{ - "-target", try self.target.zigTriple(b.allocator), - "-mcpu", try std.Build.serializeCpu(b.allocator, self.target.getCpu()), - }); - - if (self.target.dynamic_linker.get()) |dynamic_linker| { - try zig_args.append("--dynamic-linker"); - try zig_args.append(dynamic_linker); - } - } - - if (self.linker_script) |linker_script| { - try 
zig_args.append("--script"); - try zig_args.append(linker_script.getPath(b)); - } - - if (self.version_script) |version_script| { - try zig_args.append("--version-script"); - try zig_args.append(b.pathFromRoot(version_script)); - } - - if (self.kind == .@"test") { - if (self.exec_cmd_args) |exec_cmd_args| { - for (exec_cmd_args) |cmd_arg| { - if (cmd_arg) |arg| { - try zig_args.append("--test-cmd"); - try zig_args.append(arg); - } else { - try zig_args.append("--test-cmd-bin"); - } - } - } - } - - try self.appendModuleArgs(&zig_args); - - for (self.include_dirs.items) |include_dir| { - switch (include_dir) { - .raw_path => |include_path| { - try zig_args.append("-I"); - try zig_args.append(b.pathFromRoot(include_path)); - }, - .raw_path_system => |include_path| { - if (b.sysroot != null) { - try zig_args.append("-iwithsysroot"); - } else { - try zig_args.append("-isystem"); - } - - const resolved_include_path = b.pathFromRoot(include_path); - - const common_include_path = if (builtin.os.tag == .windows and b.sysroot != null and fs.path.isAbsolute(resolved_include_path)) blk: { - // We need to check for disk designator and strip it out from dir path so - // that zig/clang can concat resolved_include_path with sysroot. - const disk_designator = fs.path.diskDesignatorWindows(resolved_include_path); - - if (mem.indexOf(u8, resolved_include_path, disk_designator)) |where| { - break :blk resolved_include_path[where + disk_designator.len ..]; - } - - break :blk resolved_include_path; - } else resolved_include_path; - - try zig_args.append(common_include_path); - }, - .other_step => |other| { - if (other.emit_h) { - const h_path = other.getOutputHSource().getPath(b); - try zig_args.append("-isystem"); - try zig_args.append(fs.path.dirname(h_path).?); - } - if (other.installed_headers.items.len > 0) { - try zig_args.append("-I"); - try zig_args.append(b.pathJoin(&.{ - other.step.owner.install_prefix, "include", - })); - } - }, - .config_header_step => |config_header| { - const full_file_path = config_header.output_file.path.?; - const header_dir_path = full_file_path[0 .. 
full_file_path.len - config_header.include_path.len]; - try zig_args.appendSlice(&.{ "-I", header_dir_path }); - }, - } - } - - for (self.c_macros.items) |c_macro| { - try zig_args.append("-D"); - try zig_args.append(c_macro); - } - - try zig_args.ensureUnusedCapacity(2 * self.lib_paths.items.len); - for (self.lib_paths.items) |lib_path| { - zig_args.appendAssumeCapacity("-L"); - zig_args.appendAssumeCapacity(lib_path.getPath2(b, step)); - } - - try zig_args.ensureUnusedCapacity(2 * self.rpaths.items.len); - for (self.rpaths.items) |rpath| { - zig_args.appendAssumeCapacity("-rpath"); - - if (self.target_info.target.isDarwin()) switch (rpath) { - .path => |path| { - // On Darwin, we should not try to expand special runtime paths such as - // * @executable_path - // * @loader_path - if (mem.startsWith(u8, path, "@executable_path") or - mem.startsWith(u8, path, "@loader_path")) - { - zig_args.appendAssumeCapacity(path); - continue; - } - }, - .generated => {}, - }; - - zig_args.appendAssumeCapacity(rpath.getPath2(b, step)); - } - - for (self.framework_dirs.items) |directory_source| { - if (b.sysroot != null) { - try zig_args.append("-iframeworkwithsysroot"); - } else { - try zig_args.append("-iframework"); - } - try zig_args.append(directory_source.getPath2(b, step)); - try zig_args.append("-F"); - try zig_args.append(directory_source.getPath2(b, step)); - } - - { - var it = self.frameworks.iterator(); - while (it.next()) |entry| { - const name = entry.key_ptr.*; - const info = entry.value_ptr.*; - if (info.needed) { - try zig_args.append("-needed_framework"); - } else if (info.weak) { - try zig_args.append("-weak_framework"); - } else { - try zig_args.append("-framework"); - } - try zig_args.append(name); - } - } - - if (b.sysroot) |sysroot| { - try zig_args.appendSlice(&[_][]const u8{ "--sysroot", sysroot }); - } - - for (b.search_prefixes.items) |search_prefix| { - var prefix_dir = fs.cwd().openDir(search_prefix, .{}) catch |err| { - return step.fail("unable to open prefix directory '{s}': {s}", .{ - search_prefix, @errorName(err), - }); - }; - defer prefix_dir.close(); - - // Avoid passing -L and -I flags for nonexistent directories. - // This prevents a warning, that should probably be upgraded to an error in Zig's - // CLI parsing code, when the linker sees an -L directory that does not exist. 
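// For reference, a search prefix is typically registered from build.zig (or via the
// `--search-prefix` CLI flag); the path below is illustrative:
//
//     b.addSearchPrefix("/opt/mylibs"); // exposes /opt/mylibs/lib and /opt/mylibs/include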
- - if (prefix_dir.accessZ("lib", .{})) |_| { - try zig_args.appendSlice(&.{ - "-L", try fs.path.join(b.allocator, &.{ search_prefix, "lib" }), - }); - } else |err| switch (err) { - error.FileNotFound => {}, - else => |e| return step.fail("unable to access '{s}/lib' directory: {s}", .{ - search_prefix, @errorName(e), - }), - } - - if (prefix_dir.accessZ("include", .{})) |_| { - try zig_args.appendSlice(&.{ - "-I", try fs.path.join(b.allocator, &.{ search_prefix, "include" }), - }); - } else |err| switch (err) { - error.FileNotFound => {}, - else => |e| return step.fail("unable to access '{s}/include' directory: {s}", .{ - search_prefix, @errorName(e), - }), - } - } - - try addFlag(&zig_args, "valgrind", self.valgrind_support); - try addFlag(&zig_args, "each-lib-rpath", self.each_lib_rpath); - try addFlag(&zig_args, "build-id", self.build_id); - - if (self.zig_lib_dir) |dir| { - try zig_args.append("--zig-lib-dir"); - try zig_args.append(b.pathFromRoot(dir)); - } else if (b.zig_lib_dir) |dir| { - try zig_args.append("--zig-lib-dir"); - try zig_args.append(dir); - } - - if (self.main_pkg_path) |dir| { - try zig_args.append("--main-pkg-path"); - try zig_args.append(b.pathFromRoot(dir)); - } - - try addFlag(&zig_args, "PIC", self.force_pic); - try addFlag(&zig_args, "PIE", self.pie); - try addFlag(&zig_args, "lto", self.want_lto); - - if (self.subsystem) |subsystem| { - try zig_args.append("--subsystem"); - try zig_args.append(switch (subsystem) { - .Console => "console", - .Windows => "windows", - .Posix => "posix", - .Native => "native", - .EfiApplication => "efi_application", - .EfiBootServiceDriver => "efi_boot_service_driver", - .EfiRom => "efi_rom", - .EfiRuntimeDriver => "efi_runtime_driver", - }); - } - - try zig_args.append("--listen=-"); - - // Windows has an argument length limit of 32,766 characters, macOS 262,144 and Linux - // 2,097,152. If our args exceed 30 KiB, we instead write them to a "response file" and - // pass that to zig, e.g. via 'zig build-lib @args.rsp' - // See @file syntax here: https://gcc.gnu.org/onlinedocs/gcc/Overall-Options.html - var args_length: usize = 0; - for (zig_args.items) |arg| { - args_length += arg.len + 1; // +1 to account for null terminator - } - if (args_length >= 30 * 1024) { - try b.cache_root.handle.makePath("args"); - - const args_to_escape = zig_args.items[2..]; - var escaped_args = try ArrayList([]const u8).initCapacity(b.allocator, args_to_escape.len); - arg_blk: for (args_to_escape) |arg| { - for (arg, 0..) |c, arg_idx| { - if (c == '\\' or c == '"') { - // Slow path for arguments that need to be escaped. We'll need to allocate and copy - var escaped = try ArrayList(u8).initCapacity(b.allocator, arg.len + 1); - const writer = escaped.writer(); - try writer.writeAll(arg[0..arg_idx]); - for (arg[arg_idx..]) |to_escape| { - if (to_escape == '\\' or to_escape == '"') try writer.writeByte('\\'); - try writer.writeByte(to_escape); - } - escaped_args.appendAssumeCapacity(escaped.items); - continue :arg_blk; - } - } - escaped_args.appendAssumeCapacity(arg); // no escaping needed so just use original argument - } - - // Write the args to zig-cache/args/ to avoid conflicts with - // other zig build commands running in parallel. 
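// Illustration of the resulting invocation (the hash value is hypothetical): once the
// argument list exceeds roughly 30 KiB, everything after `zig build-exe` collapses to a
// single response-file reference, e.g.
//
//     zig build-exe @zig-cache/args/9f86d081884c7d65...
//
// where the referenced file holds the original arguments, each wrapped in double quotes
// with embedded quotes and backslashes escaped.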
- const partially_quoted = try std.mem.join(b.allocator, "\" \"", escaped_args.items); - const args = try std.mem.concat(b.allocator, u8, &[_][]const u8{ "\"", partially_quoted, "\"" }); - - var args_hash: [Sha256.digest_length]u8 = undefined; - Sha256.hash(args, &args_hash, .{}); - var args_hex_hash: [Sha256.digest_length * 2]u8 = undefined; - _ = try std.fmt.bufPrint( - &args_hex_hash, - "{s}", - .{std.fmt.fmtSliceHexLower(&args_hash)}, - ); - - const args_file = "args" ++ fs.path.sep_str ++ args_hex_hash; - try b.cache_root.handle.writeFile(args_file, args); - - const resolved_args_file = try mem.concat(b.allocator, u8, &.{ - "@", - try b.cache_root.join(b.allocator, &.{args_file}), - }); - - zig_args.shrinkRetainingCapacity(2); - try zig_args.append(resolved_args_file); - } - - const output_bin_path = step.evalZigProcess(zig_args.items, prog_node) catch |err| switch (err) { - error.NeedCompileErrorCheck => { - assert(self.expect_errors.len != 0); - try checkCompileErrors(self); - return; - }, - else => |e| return e, - }; - const output_dir = fs.path.dirname(output_bin_path).?; - - // Update generated files - { - self.output_dirname_source.path = output_dir; - - self.output_path_source.path = b.pathJoin( - &.{ output_dir, self.out_filename }, - ); - - if (self.kind == .lib) { - self.output_lib_path_source.path = b.pathJoin( - &.{ output_dir, self.out_lib_filename }, - ); - } - - if (self.emit_h) { - self.output_h_path_source.path = b.pathJoin( - &.{ output_dir, self.out_h_filename }, - ); - } - - if (self.target.isWindows() or self.target.isUefi()) { - self.output_pdb_path_source.path = b.pathJoin( - &.{ output_dir, self.out_pdb_filename }, - ); - } - } - - if (self.kind == .lib and self.linkage != null and self.linkage.? == .dynamic and - self.version != null and self.target.wantSharedLibSymLinks()) - { - try doAtomicSymLinks( - step, - self.getOutputSource().getPath(b), - self.major_only_filename.?, - self.name_only_filename.?, - ); - } -} - -fn isLibCLibrary(name: []const u8) bool { - const libc_libraries = [_][]const u8{ "c", "m", "dl", "rt", "pthread" }; - for (libc_libraries) |libc_lib_name| { - if (mem.eql(u8, name, libc_lib_name)) - return true; - } - return false; -} - -fn isLibCppLibrary(name: []const u8) bool { - const libcpp_libraries = [_][]const u8{ "c++", "stdc++" }; - for (libcpp_libraries) |libcpp_lib_name| { - if (mem.eql(u8, name, libcpp_lib_name)) - return true; - } - return false; -} - -/// Returned slice must be freed by the caller. 
-fn findVcpkgRoot(allocator: Allocator) !?[]const u8 { - const appdata_path = try fs.getAppDataDir(allocator, "vcpkg"); - defer allocator.free(appdata_path); - - const path_file = try fs.path.join(allocator, &[_][]const u8{ appdata_path, "vcpkg.path.txt" }); - defer allocator.free(path_file); - - const file = fs.cwd().openFile(path_file, .{}) catch return null; - defer file.close(); - - const size = @intCast(usize, try file.getEndPos()); - const vcpkg_path = try allocator.alloc(u8, size); - const size_read = try file.read(vcpkg_path); - std.debug.assert(size == size_read); - - return vcpkg_path; -} - -pub fn doAtomicSymLinks( - step: *Step, - output_path: []const u8, - filename_major_only: []const u8, - filename_name_only: []const u8, -) !void { - const arena = step.owner.allocator; - const out_dir = fs.path.dirname(output_path) orelse "."; - const out_basename = fs.path.basename(output_path); - // sym link for libfoo.so.1 to libfoo.so.1.2.3 - const major_only_path = try fs.path.join(arena, &.{ out_dir, filename_major_only }); - fs.atomicSymLink(arena, out_basename, major_only_path) catch |err| { - return step.fail("unable to symlink {s} -> {s}: {s}", .{ - major_only_path, out_basename, @errorName(err), - }); - }; - // sym link for libfoo.so to libfoo.so.1 - const name_only_path = try fs.path.join(arena, &.{ out_dir, filename_name_only }); - fs.atomicSymLink(arena, filename_major_only, name_only_path) catch |err| { - return step.fail("Unable to symlink {s} -> {s}: {s}", .{ - name_only_path, filename_major_only, @errorName(err), - }); - }; -} - -fn execPkgConfigList(self: *std.Build, out_code: *u8) (PkgConfigError || ExecError)![]const PkgConfigPkg { - const stdout = try self.execAllowFail(&[_][]const u8{ "pkg-config", "--list-all" }, out_code, .Ignore); - var list = ArrayList(PkgConfigPkg).init(self.allocator); - errdefer list.deinit(); - var line_it = mem.tokenize(u8, stdout, "\r\n"); - while (line_it.next()) |line| { - if (mem.trim(u8, line, " \t").len == 0) continue; - var tok_it = mem.tokenize(u8, line, " \t"); - try list.append(PkgConfigPkg{ - .name = tok_it.next() orelse return error.PkgConfigInvalidOutput, - .desc = tok_it.rest(), - }); - } - return list.toOwnedSlice(); -} - -fn getPkgConfigList(self: *std.Build) ![]const PkgConfigPkg { - if (self.pkg_config_pkg_list) |res| { - return res; - } - var code: u8 = undefined; - if (execPkgConfigList(self, &code)) |list| { - self.pkg_config_pkg_list = list; - return list; - } else |err| { - const result = switch (err) { - error.ProcessTerminated => error.PkgConfigCrashed, - error.ExecNotSupported => error.PkgConfigFailed, - error.ExitCodeFailure => error.PkgConfigFailed, - error.FileNotFound => error.PkgConfigNotInstalled, - error.InvalidName => error.PkgConfigNotInstalled, - error.PkgConfigInvalidOutput => error.PkgConfigInvalidOutput, - else => return err, - }; - self.pkg_config_pkg_list = result; - return result; - } -} - -fn addFlag(args: *ArrayList([]const u8), comptime name: []const u8, opt: ?bool) !void { - const cond = opt orelse return; - try args.ensureUnusedCapacity(1); - if (cond) { - args.appendAssumeCapacity("-f" ++ name); - } else { - args.appendAssumeCapacity("-fno-" ++ name); - } -} - -const TransitiveDeps = struct { - link_objects: ArrayList(LinkObject), - seen_system_libs: StringHashMap(void), - seen_steps: std.AutoHashMap(*const Step, void), - is_linking_libcpp: bool, - is_linking_libc: bool, - frameworks: *StringHashMap(FrameworkLinkInfo), - - fn add(td: *TransitiveDeps, link_objects: []const LinkObject) !void { - try 
td.link_objects.ensureUnusedCapacity(link_objects.len); - - for (link_objects) |link_object| { - try td.link_objects.append(link_object); - switch (link_object) { - .other_step => |other| try addInner(td, other, other.isDynamicLibrary()), - else => {}, - } - } - } - - fn addInner(td: *TransitiveDeps, other: *CompileStep, dyn: bool) !void { - // Inherit dependency on libc and libc++ - td.is_linking_libcpp = td.is_linking_libcpp or other.is_linking_libcpp; - td.is_linking_libc = td.is_linking_libc or other.is_linking_libc; - - // Inherit dependencies on darwin frameworks - if (!dyn) { - var it = other.frameworks.iterator(); - while (it.next()) |framework| { - try td.frameworks.put(framework.key_ptr.*, framework.value_ptr.*); - } - } - - // Inherit dependencies on system libraries and static libraries. - for (other.link_objects.items) |other_link_object| { - switch (other_link_object) { - .system_lib => |system_lib| { - if ((try td.seen_system_libs.fetchPut(system_lib.name, {})) != null) - continue; - - if (dyn) - continue; - - try td.link_objects.append(other_link_object); - }, - .other_step => |inner_other| { - if ((try td.seen_steps.fetchPut(&inner_other.step, {})) != null) - continue; - - if (!dyn) - try td.link_objects.append(other_link_object); - - try addInner(td, inner_other, dyn or inner_other.isDynamicLibrary()); - }, - else => continue, - } - } - } -}; - -fn checkCompileErrors(self: *CompileStep) !void { - // Clear this field so that it does not get printed by the build runner. - const actual_eb = self.step.result_error_bundle; - self.step.result_error_bundle = std.zig.ErrorBundle.empty; - - const arena = self.step.owner.allocator; - - var actual_stderr_list = std.ArrayList(u8).init(arena); - try actual_eb.renderToWriter(.{ - .ttyconf = .no_color, - .include_reference_trace = false, - .include_source_line = false, - }, actual_stderr_list.writer()); - const actual_stderr = try actual_stderr_list.toOwnedSlice(); - - // Render the expected lines into a string that we can compare verbatim. - var expected_generated = std.ArrayList(u8).init(arena); - - var actual_line_it = mem.split(u8, actual_stderr, "\n"); - for (self.expect_errors) |expect_line| { - const actual_line = actual_line_it.next() orelse { - try expected_generated.appendSlice(expect_line); - try expected_generated.append('\n'); - continue; - }; - if (mem.endsWith(u8, actual_line, expect_line)) { - try expected_generated.appendSlice(actual_line); - try expected_generated.append('\n'); - continue; - } - if (mem.startsWith(u8, expect_line, ":?:?: ")) { - if (mem.endsWith(u8, actual_line, expect_line[":?:?: ".len..])) { - try expected_generated.appendSlice(actual_line); - try expected_generated.append('\n'); - continue; - } - } - try expected_generated.appendSlice(expect_line); - try expected_generated.append('\n'); - } - - if (mem.eql(u8, expected_generated.items, actual_stderr)) return; - - // TODO merge this with the testing.expectEqualStrings logic, and also CheckFile - return self.step.fail( - \\ - \\========= expected: ===================== - \\{s} - \\========= but found: ==================== - \\{s} - \\========================================= - , .{ expected_generated.items, actual_stderr }); -} diff --git a/lib/std/Build/ConfigHeaderStep.zig b/lib/std/Build/ConfigHeaderStep.zig deleted file mode 100644 index c1849b410e..0000000000 --- a/lib/std/Build/ConfigHeaderStep.zig +++ /dev/null @@ -1,437 +0,0 @@ -pub const Style = union(enum) { - /// The configure format supported by autotools. 
It uses `#undef foo` to - /// mark lines that can be substituted with different values. - autoconf: std.Build.FileSource, - /// The configure format supported by CMake. It uses `@@FOO@@` and - /// `#cmakedefine` for template substitution. - cmake: std.Build.FileSource, - /// Instead of starting with an input file, start with nothing. - blank, - /// Start with nothing, like blank, and output a nasm .asm file. - nasm, - - pub fn getFileSource(style: Style) ?std.Build.FileSource { - switch (style) { - .autoconf, .cmake => |s| return s, - .blank, .nasm => return null, - } - } -}; - -pub const Value = union(enum) { - undef, - defined, - boolean: bool, - int: i64, - ident: []const u8, - string: []const u8, -}; - -step: Step, -values: std.StringArrayHashMap(Value), -output_file: std.Build.GeneratedFile, - -style: Style, -max_bytes: usize, -include_path: []const u8, - -pub const base_id: Step.Id = .config_header; - -pub const Options = struct { - style: Style = .blank, - max_bytes: usize = 2 * 1024 * 1024, - include_path: ?[]const u8 = null, - first_ret_addr: ?usize = null, -}; - -pub fn create(owner: *std.Build, options: Options) *ConfigHeaderStep { - const self = owner.allocator.create(ConfigHeaderStep) catch @panic("OOM"); - - var include_path: []const u8 = "config.h"; - - if (options.style.getFileSource()) |s| switch (s) { - .path => |p| { - const basename = std.fs.path.basename(p); - if (std.mem.endsWith(u8, basename, ".h.in")) { - include_path = basename[0 .. basename.len - 3]; - } - }, - else => {}, - }; - - if (options.include_path) |p| { - include_path = p; - } - - const name = if (options.style.getFileSource()) |s| - owner.fmt("configure {s} header {s} to {s}", .{ - @tagName(options.style), s.getDisplayName(), include_path, - }) - else - owner.fmt("configure {s} header to {s}", .{ @tagName(options.style), include_path }); - - self.* = .{ - .step = Step.init(.{ - .id = base_id, - .name = name, - .owner = owner, - .makeFn = make, - .first_ret_addr = options.first_ret_addr orelse @returnAddress(), - }), - .style = options.style, - .values = std.StringArrayHashMap(Value).init(owner.allocator), - - .max_bytes = options.max_bytes, - .include_path = include_path, - .output_file = .{ .step = &self.step }, - }; - - return self; -} - -pub fn addValues(self: *ConfigHeaderStep, values: anytype) void { - return addValuesInner(self, values) catch @panic("OOM"); -} - -pub fn getFileSource(self: *ConfigHeaderStep) std.Build.FileSource { - return .{ .generated = &self.output_file }; -} - -fn addValuesInner(self: *ConfigHeaderStep, values: anytype) !void { - inline for (@typeInfo(@TypeOf(values)).Struct.fields) |field| { - try putValue(self, field.name, field.type, @field(values, field.name)); - } -} - -fn putValue(self: *ConfigHeaderStep, field_name: []const u8, comptime T: type, v: T) !void { - switch (@typeInfo(T)) { - .Null => { - try self.values.put(field_name, .undef); - }, - .Void => { - try self.values.put(field_name, .defined); - }, - .Bool => { - try self.values.put(field_name, .{ .boolean = v }); - }, - .Int => { - try self.values.put(field_name, .{ .int = v }); - }, - .ComptimeInt => { - try self.values.put(field_name, .{ .int = v }); - }, - .EnumLiteral => { - try self.values.put(field_name, .{ .ident = @tagName(v) }); - }, - .Optional => { - if (v) |x| { - return putValue(self, field_name, @TypeOf(x), x); - } else { - try self.values.put(field_name, .undef); - } - }, - .Pointer => |ptr| { - switch (@typeInfo(ptr.child)) { - .Array => |array| { - if (ptr.size == .One and array.child == u8) { 
- try self.values.put(field_name, .{ .string = v }); - return; - } - }, - .Int => { - if (ptr.size == .Slice and ptr.child == u8) { - try self.values.put(field_name, .{ .string = v }); - return; - } - }, - else => {}, - } - - @compileError("unsupported ConfigHeaderStep value type: " ++ @typeName(T)); - }, - else => @compileError("unsupported ConfigHeaderStep value type: " ++ @typeName(T)), - } -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - _ = prog_node; - const b = step.owner; - const self = @fieldParentPtr(ConfigHeaderStep, "step", step); - const gpa = b.allocator; - const arena = b.allocator; - - var man = b.cache.obtain(); - defer man.deinit(); - - // Random bytes to make ConfigHeaderStep unique. Refresh this with new - // random bytes when ConfigHeaderStep implementation is modified in a - // non-backwards-compatible way. - man.hash.add(@as(u32, 0xdef08d23)); - - var output = std.ArrayList(u8).init(gpa); - defer output.deinit(); - - const header_text = "This file was generated by ConfigHeaderStep using the Zig Build System."; - const c_generated_line = "/* " ++ header_text ++ " */\n"; - const asm_generated_line = "; " ++ header_text ++ "\n"; - - switch (self.style) { - .autoconf => |file_source| { - try output.appendSlice(c_generated_line); - const src_path = file_source.getPath(b); - const contents = try std.fs.cwd().readFileAlloc(arena, src_path, self.max_bytes); - try render_autoconf(step, contents, &output, self.values, src_path); - }, - .cmake => |file_source| { - try output.appendSlice(c_generated_line); - const src_path = file_source.getPath(b); - const contents = try std.fs.cwd().readFileAlloc(arena, src_path, self.max_bytes); - try render_cmake(step, contents, &output, self.values, src_path); - }, - .blank => { - try output.appendSlice(c_generated_line); - try render_blank(&output, self.values, self.include_path); - }, - .nasm => { - try output.appendSlice(asm_generated_line); - try render_nasm(&output, self.values); - }, - } - - man.hash.addBytes(output.items); - - if (try step.cacheHit(&man)) { - const digest = man.final(); - self.output_file.path = try b.cache_root.join(arena, &.{ - "o", &digest, self.include_path, - }); - return; - } - - const digest = man.final(); - - // If output_path has directory parts, deal with them. 
Example: - // output_dir is zig-cache/o/HASH - // output_path is libavutil/avconfig.h - // We want to open directory zig-cache/o/HASH/libavutil/ - // but keep output_dir as zig-cache/o/HASH for -I include - const sub_path = try std.fs.path.join(arena, &.{ "o", &digest, self.include_path }); - const sub_path_dirname = std.fs.path.dirname(sub_path).?; - - b.cache_root.handle.makePath(sub_path_dirname) catch |err| { - return step.fail("unable to make path '{}{s}': {s}", .{ - b.cache_root, sub_path_dirname, @errorName(err), - }); - }; - - b.cache_root.handle.writeFile(sub_path, output.items) catch |err| { - return step.fail("unable to write file '{}{s}': {s}", .{ - b.cache_root, sub_path, @errorName(err), - }); - }; - - self.output_file.path = try b.cache_root.join(arena, &.{sub_path}); - try man.writeManifest(); -} - -fn render_autoconf( - step: *Step, - contents: []const u8, - output: *std.ArrayList(u8), - values: std.StringArrayHashMap(Value), - src_path: []const u8, -) !void { - var values_copy = try values.clone(); - defer values_copy.deinit(); - - var any_errors = false; - var line_index: u32 = 0; - var line_it = std.mem.split(u8, contents, "\n"); - while (line_it.next()) |line| : (line_index += 1) { - if (!std.mem.startsWith(u8, line, "#")) { - try output.appendSlice(line); - try output.appendSlice("\n"); - continue; - } - var it = std.mem.tokenize(u8, line[1..], " \t\r"); - const undef = it.next().?; - if (!std.mem.eql(u8, undef, "undef")) { - try output.appendSlice(line); - try output.appendSlice("\n"); - continue; - } - const name = it.rest(); - const kv = values_copy.fetchSwapRemove(name) orelse { - try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{ - src_path, line_index + 1, name, - }); - any_errors = true; - continue; - }; - try renderValueC(output, name, kv.value); - } - - for (values_copy.keys()) |name| { - try step.addError("{s}: error: config header value unused: '{s}'", .{ src_path, name }); - any_errors = true; - } - - if (any_errors) { - return error.MakeFailed; - } -} - -fn render_cmake( - step: *Step, - contents: []const u8, - output: *std.ArrayList(u8), - values: std.StringArrayHashMap(Value), - src_path: []const u8, -) !void { - var values_copy = try values.clone(); - defer values_copy.deinit(); - - var any_errors = false; - var line_index: u32 = 0; - var line_it = std.mem.split(u8, contents, "\n"); - while (line_it.next()) |line| : (line_index += 1) { - if (!std.mem.startsWith(u8, line, "#")) { - try output.appendSlice(line); - try output.appendSlice("\n"); - continue; - } - var it = std.mem.tokenize(u8, line[1..], " \t\r"); - const cmakedefine = it.next().?; - if (!std.mem.eql(u8, cmakedefine, "cmakedefine")) { - try output.appendSlice(line); - try output.appendSlice("\n"); - continue; - } - const name = it.next() orelse { - try step.addError("{s}:{d}: error: missing define name", .{ - src_path, line_index + 1, - }); - any_errors = true; - continue; - }; - const kv = values_copy.fetchSwapRemove(name) orelse { - try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{ - src_path, line_index + 1, name, - }); - any_errors = true; - continue; - }; - try renderValueC(output, name, kv.value); - } - - for (values_copy.keys()) |name| { - try step.addError("{s}: error: config header value unused: '{s}'", .{ src_path, name }); - any_errors = true; - } - - if (any_errors) { - return error.HeaderConfigFailed; - } -} - -fn render_blank( - output: *std.ArrayList(u8), - defines: std.StringArrayHashMap(Value), - include_path: []const 
u8, -) !void { - const include_guard_name = try output.allocator.dupe(u8, include_path); - for (include_guard_name) |*byte| { - switch (byte.*) { - 'a'...'z' => byte.* = byte.* - 'a' + 'A', - 'A'...'Z', '0'...'9' => continue, - else => byte.* = '_', - } - } - - try output.appendSlice("#ifndef "); - try output.appendSlice(include_guard_name); - try output.appendSlice("\n#define "); - try output.appendSlice(include_guard_name); - try output.appendSlice("\n"); - - const values = defines.values(); - for (defines.keys(), 0..) |name, i| { - try renderValueC(output, name, values[i]); - } - - try output.appendSlice("#endif /* "); - try output.appendSlice(include_guard_name); - try output.appendSlice(" */\n"); -} - -fn render_nasm(output: *std.ArrayList(u8), defines: std.StringArrayHashMap(Value)) !void { - const values = defines.values(); - for (defines.keys(), 0..) |name, i| { - try renderValueNasm(output, name, values[i]); - } -} - -fn renderValueC(output: *std.ArrayList(u8), name: []const u8, value: Value) !void { - switch (value) { - .undef => { - try output.appendSlice("/* #undef "); - try output.appendSlice(name); - try output.appendSlice(" */\n"); - }, - .defined => { - try output.appendSlice("#define "); - try output.appendSlice(name); - try output.appendSlice("\n"); - }, - .boolean => |b| { - try output.appendSlice("#define "); - try output.appendSlice(name); - try output.appendSlice(" "); - try output.appendSlice(if (b) "true\n" else "false\n"); - }, - .int => |i| { - try output.writer().print("#define {s} {d}\n", .{ name, i }); - }, - .ident => |ident| { - try output.writer().print("#define {s} {s}\n", .{ name, ident }); - }, - .string => |string| { - // TODO: use C-specific escaping instead of zig string literals - try output.writer().print("#define {s} \"{}\"\n", .{ name, std.zig.fmtEscapes(string) }); - }, - } -} - -fn renderValueNasm(output: *std.ArrayList(u8), name: []const u8, value: Value) !void { - switch (value) { - .undef => { - try output.appendSlice("; %undef "); - try output.appendSlice(name); - try output.appendSlice("\n"); - }, - .defined => { - try output.appendSlice("%define "); - try output.appendSlice(name); - try output.appendSlice("\n"); - }, - .boolean => |b| { - try output.appendSlice("%define "); - try output.appendSlice(name); - try output.appendSlice(if (b) " 1\n" else " 0\n"); - }, - .int => |i| { - try output.writer().print("%define {s} {d}\n", .{ name, i }); - }, - .ident => |ident| { - try output.writer().print("%define {s} {s}\n", .{ name, ident }); - }, - .string => |string| { - // TODO: use nasm-specific escaping instead of zig string literals - try output.writer().print("%define {s} \"{}\"\n", .{ name, std.zig.fmtEscapes(string) }); - }, - } -} - -const std = @import("../std.zig"); -const ConfigHeaderStep = @This(); -const Step = std.Build.Step; diff --git a/lib/std/Build/FmtStep.zig b/lib/std/Build/FmtStep.zig deleted file mode 100644 index 2a82342336..0000000000 --- a/lib/std/Build/FmtStep.zig +++ /dev/null @@ -1,73 +0,0 @@ -//! This step has two modes: -//! * Modify mode: directly modify source files, formatting them in place. -//! * Check mode: fail the step if a non-conforming file is found. - -step: Step, -paths: []const []const u8, -exclude_paths: []const []const u8, -check: bool, - -pub const base_id = .fmt; - -pub const Options = struct { - paths: []const []const u8 = &.{}, - exclude_paths: []const []const u8 = &.{}, - /// If true, fails the build step when any non-conforming files are encountered. 
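
A minimal usage sketch of ConfigHeaderStep as defined above, assuming a typical `pub fn build(b: *std.Build) void` in a `build.zig`; the header path and value names are hypothetical:

const header = std.Build.ConfigHeaderStep.create(b, .{
    .style = .{ .cmake = .{ .path = "config.h.in" } },
});
header.addValues(.{
    .HAVE_THREADS = true, // rendered as `#define HAVE_THREADS true`
    .VERSION_MAJOR = 1, // rendered as `#define VERSION_MAJOR 1`
    .PACKAGE_NAME = "example", // rendered as a quoted C string
    .HAVE_LIBFOO = null, // rendered as `/* #undef HAVE_LIBFOO */`
});
// header.getFileSource() yields the generated "config.h" for use by other steps.
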
- check: bool = false, -}; - -pub fn create(owner: *std.Build, options: Options) *FmtStep { - const self = owner.allocator.create(FmtStep) catch @panic("OOM"); - const name = if (options.check) "zig fmt --check" else "zig fmt"; - self.* = .{ - .step = Step.init(.{ - .id = base_id, - .name = name, - .owner = owner, - .makeFn = make, - }), - .paths = options.paths, - .exclude_paths = options.exclude_paths, - .check = options.check, - }; - return self; -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - // zig fmt is fast enough that no progress is needed. - _ = prog_node; - - // TODO: if check=false, this means we are modifying source files in place, which - // is an operation that could race against other operations also modifying source files - // in place. In this case, this step should obtain a write lock while making those - // modifications. - - const b = step.owner; - const arena = b.allocator; - const self = @fieldParentPtr(FmtStep, "step", step); - - var argv: std.ArrayListUnmanaged([]const u8) = .{}; - try argv.ensureUnusedCapacity(arena, 2 + 1 + self.paths.len + 2 * self.exclude_paths.len); - - argv.appendAssumeCapacity(b.zig_exe); - argv.appendAssumeCapacity("fmt"); - - if (self.check) { - argv.appendAssumeCapacity("--check"); - } - - for (self.paths) |p| { - argv.appendAssumeCapacity(b.pathFromRoot(p)); - } - - for (self.exclude_paths) |p| { - argv.appendAssumeCapacity("--exclude"); - argv.appendAssumeCapacity(b.pathFromRoot(p)); - } - - return step.evalChildProcess(argv.items); -} - -const std = @import("../std.zig"); -const Step = std.Build.Step; -const FmtStep = @This(); diff --git a/lib/std/Build/InstallArtifactStep.zig b/lib/std/Build/InstallArtifactStep.zig deleted file mode 100644 index 50cf6ff323..0000000000 --- a/lib/std/Build/InstallArtifactStep.zig +++ /dev/null @@ -1,130 +0,0 @@ -const std = @import("../std.zig"); -const Step = std.Build.Step; -const CompileStep = std.Build.CompileStep; -const InstallDir = std.Build.InstallDir; -const InstallArtifactStep = @This(); -const fs = std.fs; - -pub const base_id = .install_artifact; - -step: Step, -artifact: *CompileStep, -dest_dir: InstallDir, -pdb_dir: ?InstallDir, -h_dir: ?InstallDir, -/// If non-null, adds additional path components relative to dest_dir, and -/// overrides the basename of the CompileStep. 
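
A sketch of wiring the formatting step above into a build script, assuming the usual `b: *std.Build`; the paths are placeholders:

const fmt = std.Build.FmtStep.create(b, .{
    .paths = &.{ "src", "build.zig" },
    .exclude_paths = &.{"src/generated.zig"},
    .check = true, // fail the build instead of rewriting files in place
});
b.step("fmt", "Check formatting of source files").dependOn(&fmt.step);
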
-dest_sub_path: ?[]const u8, - -pub fn create(owner: *std.Build, artifact: *CompileStep) *InstallArtifactStep { - const self = owner.allocator.create(InstallArtifactStep) catch @panic("OOM"); - self.* = InstallArtifactStep{ - .step = Step.init(.{ - .id = base_id, - .name = owner.fmt("install {s}", .{artifact.name}), - .owner = owner, - .makeFn = make, - }), - .artifact = artifact, - .dest_dir = artifact.override_dest_dir orelse switch (artifact.kind) { - .obj => @panic("Cannot install a .obj build artifact."), - .exe, .@"test" => InstallDir{ .bin = {} }, - .lib => InstallDir{ .lib = {} }, - }, - .pdb_dir = if (artifact.producesPdbFile()) blk: { - if (artifact.kind == .exe or artifact.kind == .@"test") { - break :blk InstallDir{ .bin = {} }; - } else { - break :blk InstallDir{ .lib = {} }; - } - } else null, - .h_dir = if (artifact.kind == .lib and artifact.emit_h) .header else null, - .dest_sub_path = null, - }; - self.step.dependOn(&artifact.step); - - owner.pushInstalledFile(self.dest_dir, artifact.out_filename); - if (self.artifact.isDynamicLibrary()) { - if (artifact.major_only_filename) |name| { - owner.pushInstalledFile(.lib, name); - } - if (artifact.name_only_filename) |name| { - owner.pushInstalledFile(.lib, name); - } - if (self.artifact.target.isWindows()) { - owner.pushInstalledFile(.lib, artifact.out_lib_filename); - } - } - if (self.pdb_dir) |pdb_dir| { - owner.pushInstalledFile(pdb_dir, artifact.out_pdb_filename); - } - if (self.h_dir) |h_dir| { - owner.pushInstalledFile(h_dir, artifact.out_h_filename); - } - return self; -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - _ = prog_node; - const self = @fieldParentPtr(InstallArtifactStep, "step", step); - const src_builder = self.artifact.step.owner; - const dest_builder = step.owner; - - const dest_sub_path = if (self.dest_sub_path) |sub_path| sub_path else self.artifact.out_filename; - const full_dest_path = dest_builder.getInstallPath(self.dest_dir, dest_sub_path); - const cwd = fs.cwd(); - - var all_cached = true; - - { - const full_src_path = self.artifact.getOutputSource().getPath(src_builder); - const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_dest_path, .{}) catch |err| { - return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ - full_src_path, full_dest_path, @errorName(err), - }); - }; - all_cached = all_cached and p == .fresh; - } - - if (self.artifact.isDynamicLibrary() and - self.artifact.version != null and - self.artifact.target.wantSharedLibSymLinks()) - { - try CompileStep.doAtomicSymLinks(step, full_dest_path, self.artifact.major_only_filename.?, self.artifact.name_only_filename.?); - } - if (self.artifact.isDynamicLibrary() and - self.artifact.target.isWindows() and - self.artifact.emit_implib != .no_emit) - { - const full_src_path = self.artifact.getOutputLibSource().getPath(src_builder); - const full_implib_path = dest_builder.getInstallPath(self.dest_dir, self.artifact.out_lib_filename); - const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_implib_path, .{}) catch |err| { - return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ - full_src_path, full_implib_path, @errorName(err), - }); - }; - all_cached = all_cached and p == .fresh; - } - if (self.pdb_dir) |pdb_dir| { - const full_src_path = self.artifact.getOutputPdbSource().getPath(src_builder); - const full_pdb_path = dest_builder.getInstallPath(pdb_dir, self.artifact.out_pdb_filename); - const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_pdb_path, .{}) catch |err| { - return 
step.fail("unable to update file from '{s}' to '{s}': {s}", .{ - full_src_path, full_pdb_path, @errorName(err), - }); - }; - all_cached = all_cached and p == .fresh; - } - if (self.h_dir) |h_dir| { - const full_src_path = self.artifact.getOutputHSource().getPath(src_builder); - const full_h_path = dest_builder.getInstallPath(h_dir, self.artifact.out_h_filename); - const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_h_path, .{}) catch |err| { - return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ - full_src_path, full_h_path, @errorName(err), - }); - }; - all_cached = all_cached and p == .fresh; - } - self.artifact.installed_path = full_dest_path; - step.result_cached = all_cached; -} diff --git a/lib/std/Build/InstallDirStep.zig b/lib/std/Build/InstallDirStep.zig deleted file mode 100644 index d9ea248913..0000000000 --- a/lib/std/Build/InstallDirStep.zig +++ /dev/null @@ -1,110 +0,0 @@ -const std = @import("../std.zig"); -const mem = std.mem; -const fs = std.fs; -const Step = std.Build.Step; -const InstallDir = std.Build.InstallDir; -const InstallDirStep = @This(); - -step: Step, -options: Options, -/// This is used by the build system when a file being installed comes from one -/// package but is being installed by another. -dest_builder: *std.Build, - -pub const base_id = .install_dir; - -pub const Options = struct { - source_dir: []const u8, - install_dir: InstallDir, - install_subdir: []const u8, - /// File paths which end in any of these suffixes will be excluded - /// from being installed. - exclude_extensions: []const []const u8 = &.{}, - /// File paths which end in any of these suffixes will result in - /// empty files being installed. This is mainly intended for large - /// test.zig files in order to prevent needless installation bloat. - /// However if the files were not present at all, then - /// `@import("test.zig")` would be a compile error. 
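
A sketch of installing an artifact manually with the step above, assuming `exe` is an existing `*CompileStep` created elsewhere in the same build script:

const install_exe = std.Build.InstallArtifactStep.create(b, exe);
// Optionally add path components under the destination directory and
// override the basename:
install_exe.dest_sub_path = "tools/my-exe";
b.getInstallStep().dependOn(&install_exe.step);
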
- blank_extensions: []const []const u8 = &.{}, - - fn dupe(self: Options, b: *std.Build) Options { - return .{ - .source_dir = b.dupe(self.source_dir), - .install_dir = self.install_dir.dupe(b), - .install_subdir = b.dupe(self.install_subdir), - .exclude_extensions = b.dupeStrings(self.exclude_extensions), - .blank_extensions = b.dupeStrings(self.blank_extensions), - }; - } -}; - -pub fn init(owner: *std.Build, options: Options) InstallDirStep { - owner.pushInstalledFile(options.install_dir, options.install_subdir); - return .{ - .step = Step.init(.{ - .id = .install_dir, - .name = owner.fmt("install {s}/", .{options.source_dir}), - .owner = owner, - .makeFn = make, - }), - .options = options.dupe(owner), - .dest_builder = owner, - }; -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - _ = prog_node; - const self = @fieldParentPtr(InstallDirStep, "step", step); - const dest_builder = self.dest_builder; - const arena = dest_builder.allocator; - const dest_prefix = dest_builder.getInstallPath(self.options.install_dir, self.options.install_subdir); - const src_builder = self.step.owner; - var src_dir = src_builder.build_root.handle.openIterableDir(self.options.source_dir, .{}) catch |err| { - return step.fail("unable to open source directory '{}{s}': {s}", .{ - src_builder.build_root, self.options.source_dir, @errorName(err), - }); - }; - defer src_dir.close(); - var it = try src_dir.walk(arena); - var all_cached = true; - next_entry: while (try it.next()) |entry| { - for (self.options.exclude_extensions) |ext| { - if (mem.endsWith(u8, entry.path, ext)) { - continue :next_entry; - } - } - - // relative to src build root - const src_sub_path = try fs.path.join(arena, &.{ self.options.source_dir, entry.path }); - const dest_path = try fs.path.join(arena, &.{ dest_prefix, entry.path }); - const cwd = fs.cwd(); - - switch (entry.kind) { - .Directory => try cwd.makePath(dest_path), - .File => { - for (self.options.blank_extensions) |ext| { - if (mem.endsWith(u8, entry.path, ext)) { - try dest_builder.truncateFile(dest_path); - continue :next_entry; - } - } - - const prev_status = fs.Dir.updateFile( - src_builder.build_root.handle, - src_sub_path, - cwd, - dest_path, - .{}, - ) catch |err| { - return step.fail("unable to update file from '{}{s}' to '{s}': {s}", .{ - src_builder.build_root, src_sub_path, dest_path, @errorName(err), - }); - }; - all_cached = all_cached and prev_status == .fresh; - }, - else => continue, - } - } - - step.result_cached = all_cached; -} diff --git a/lib/std/Build/InstallFileStep.zig b/lib/std/Build/InstallFileStep.zig deleted file mode 100644 index 011ad48208..0000000000 --- a/lib/std/Build/InstallFileStep.zig +++ /dev/null @@ -1,57 +0,0 @@ -const std = @import("../std.zig"); -const Step = std.Build.Step; -const FileSource = std.Build.FileSource; -const InstallDir = std.Build.InstallDir; -const InstallFileStep = @This(); -const assert = std.debug.assert; - -pub const base_id = .install_file; - -step: Step, -source: FileSource, -dir: InstallDir, -dest_rel_path: []const u8, -/// This is used by the build system when a file being installed comes from one -/// package but is being installed by another. 
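
A sketch of installing a directory tree with the options above, assuming the usual `b: *std.Build`; since `init` returns the step by value, it is given stable storage on the build allocator here, and the paths are placeholders:

const install_docs = b.allocator.create(std.Build.InstallDirStep) catch @panic("OOM");
install_docs.* = std.Build.InstallDirStep.init(b, .{
    .source_dir = "docs",
    .install_dir = .prefix,
    .install_subdir = "share/doc/example",
    .exclude_extensions = &.{".tmp"}, // skip editor/backup leftovers
});
b.getInstallStep().dependOn(&install_docs.step);
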
-dest_builder: *std.Build, - -pub fn create( - owner: *std.Build, - source: FileSource, - dir: InstallDir, - dest_rel_path: []const u8, -) *InstallFileStep { - assert(dest_rel_path.len != 0); - owner.pushInstalledFile(dir, dest_rel_path); - const self = owner.allocator.create(InstallFileStep) catch @panic("OOM"); - self.* = .{ - .step = Step.init(.{ - .id = base_id, - .name = owner.fmt("install {s} to {s}", .{ source.getDisplayName(), dest_rel_path }), - .owner = owner, - .makeFn = make, - }), - .source = source.dupe(owner), - .dir = dir.dupe(owner), - .dest_rel_path = owner.dupePath(dest_rel_path), - .dest_builder = owner, - }; - source.addStepDependencies(&self.step); - return self; -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - _ = prog_node; - const src_builder = step.owner; - const self = @fieldParentPtr(InstallFileStep, "step", step); - const dest_builder = self.dest_builder; - const full_src_path = self.source.getPath2(src_builder, step); - const full_dest_path = dest_builder.getInstallPath(self.dir, self.dest_rel_path); - const cwd = std.fs.cwd(); - const prev = std.fs.Dir.updateFile(cwd, full_src_path, cwd, full_dest_path, .{}) catch |err| { - return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ - full_src_path, full_dest_path, @errorName(err), - }); - }; - step.result_cached = prev == .fresh; -} diff --git a/lib/std/Build/ObjCopyStep.zig b/lib/std/Build/ObjCopyStep.zig deleted file mode 100644 index 608c56591f..0000000000 --- a/lib/std/Build/ObjCopyStep.zig +++ /dev/null @@ -1,122 +0,0 @@ -const std = @import("std"); -const ObjCopyStep = @This(); - -const Allocator = std.mem.Allocator; -const ArenaAllocator = std.heap.ArenaAllocator; -const ArrayListUnmanaged = std.ArrayListUnmanaged; -const File = std.fs.File; -const InstallDir = std.Build.InstallDir; -const CompileStep = std.Build.CompileStep; -const Step = std.Build.Step; -const elf = std.elf; -const fs = std.fs; -const io = std.io; -const sort = std.sort; - -pub const base_id: Step.Id = .objcopy; - -pub const RawFormat = enum { - bin, - hex, -}; - -step: Step, -file_source: std.Build.FileSource, -basename: []const u8, -output_file: std.Build.GeneratedFile, - -format: ?RawFormat, -only_section: ?[]const u8, -pad_to: ?u64, - -pub const Options = struct { - basename: ?[]const u8 = null, - format: ?RawFormat = null, - only_section: ?[]const u8 = null, - pad_to: ?u64 = null, -}; - -pub fn create( - owner: *std.Build, - file_source: std.Build.FileSource, - options: Options, -) *ObjCopyStep { - const self = owner.allocator.create(ObjCopyStep) catch @panic("OOM"); - self.* = ObjCopyStep{ - .step = Step.init(.{ - .id = base_id, - .name = owner.fmt("objcopy {s}", .{file_source.getDisplayName()}), - .owner = owner, - .makeFn = make, - }), - .file_source = file_source, - .basename = options.basename orelse file_source.getDisplayName(), - .output_file = std.Build.GeneratedFile{ .step = &self.step }, - - .format = options.format, - .only_section = options.only_section, - .pad_to = options.pad_to, - }; - file_source.addStepDependencies(&self.step); - return self; -} - -pub fn getOutputSource(self: *const ObjCopyStep) std.Build.FileSource { - return .{ .generated = &self.output_file }; -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - const b = step.owner; - const self = @fieldParentPtr(ObjCopyStep, "step", step); - - var man = b.cache.obtain(); - defer man.deinit(); - - // Random bytes to make ObjCopyStep unique. 
Refresh this with new random - // bytes when ObjCopyStep implementation is modified incompatibly. - man.hash.add(@as(u32, 0xe18b7baf)); - - const full_src_path = self.file_source.getPath(b); - _ = try man.addFile(full_src_path, null); - man.hash.addOptionalBytes(self.only_section); - man.hash.addOptional(self.pad_to); - man.hash.addOptional(self.format); - - if (try step.cacheHit(&man)) { - // Cache hit, skip subprocess execution. - const digest = man.final(); - self.output_file.path = try b.cache_root.join(b.allocator, &.{ - "o", &digest, self.basename, - }); - return; - } - - const digest = man.final(); - const full_dest_path = try b.cache_root.join(b.allocator, &.{ "o", &digest, self.basename }); - const cache_path = "o" ++ fs.path.sep_str ++ digest; - b.cache_root.handle.makePath(cache_path) catch |err| { - return step.fail("unable to make path {s}: {s}", .{ cache_path, @errorName(err) }); - }; - - var argv = std.ArrayList([]const u8).init(b.allocator); - try argv.appendSlice(&.{ b.zig_exe, "objcopy" }); - - if (self.only_section) |only_section| { - try argv.appendSlice(&.{ "-j", only_section }); - } - if (self.pad_to) |pad_to| { - try argv.appendSlice(&.{ "--pad-to", b.fmt("{d}", .{pad_to}) }); - } - if (self.format) |format| switch (format) { - .bin => try argv.appendSlice(&.{ "-O", "binary" }), - .hex => try argv.appendSlice(&.{ "-O", "hex" }), - }; - - try argv.appendSlice(&.{ full_src_path, full_dest_path }); - - try argv.append("--listen=-"); - _ = try step.evalZigProcess(argv.items, prog_node); - - self.output_file.path = full_dest_path; - try man.writeManifest(); -} diff --git a/lib/std/Build/OptionsStep.zig b/lib/std/Build/OptionsStep.zig deleted file mode 100644 index a0e72e3695..0000000000 --- a/lib/std/Build/OptionsStep.zig +++ /dev/null @@ -1,421 +0,0 @@ -const std = @import("../std.zig"); -const builtin = @import("builtin"); -const fs = std.fs; -const Step = std.Build.Step; -const GeneratedFile = std.Build.GeneratedFile; -const CompileStep = std.Build.CompileStep; -const FileSource = std.Build.FileSource; - -const OptionsStep = @This(); - -pub const base_id = .options; - -step: Step, -generated_file: GeneratedFile, - -contents: std.ArrayList(u8), -artifact_args: std.ArrayList(OptionArtifactArg), -file_source_args: std.ArrayList(OptionFileSourceArg), - -pub fn create(owner: *std.Build) *OptionsStep { - const self = owner.allocator.create(OptionsStep) catch @panic("OOM"); - self.* = .{ - .step = Step.init(.{ - .id = base_id, - .name = "options", - .owner = owner, - .makeFn = make, - }), - .generated_file = undefined, - .contents = std.ArrayList(u8).init(owner.allocator), - .artifact_args = std.ArrayList(OptionArtifactArg).init(owner.allocator), - .file_source_args = std.ArrayList(OptionFileSourceArg).init(owner.allocator), - }; - self.generated_file = .{ .step = &self.step }; - - return self; -} - -pub fn addOption(self: *OptionsStep, comptime T: type, name: []const u8, value: T) void { - return addOptionFallible(self, T, name, value) catch @panic("unhandled error"); -} - -fn addOptionFallible(self: *OptionsStep, comptime T: type, name: []const u8, value: T) !void { - const out = self.contents.writer(); - switch (T) { - []const []const u8 => { - try out.print("pub const {}: []const []const u8 = &[_][]const u8{{\n", .{std.zig.fmtId(name)}); - for (value) |slice| { - try out.print(" \"{}\",\n", .{std.zig.fmtEscapes(slice)}); - } - try out.writeAll("};\n"); - return; - }, - [:0]const u8 => { - try out.print("pub const {}: [:0]const u8 = \"{}\";\n", .{ std.zig.fmtId(name), 
std.zig.fmtEscapes(value) }); - return; - }, - []const u8 => { - try out.print("pub const {}: []const u8 = \"{}\";\n", .{ std.zig.fmtId(name), std.zig.fmtEscapes(value) }); - return; - }, - ?[:0]const u8 => { - try out.print("pub const {}: ?[:0]const u8 = ", .{std.zig.fmtId(name)}); - if (value) |payload| { - try out.print("\"{}\";\n", .{std.zig.fmtEscapes(payload)}); - } else { - try out.writeAll("null;\n"); - } - return; - }, - ?[]const u8 => { - try out.print("pub const {}: ?[]const u8 = ", .{std.zig.fmtId(name)}); - if (value) |payload| { - try out.print("\"{}\";\n", .{std.zig.fmtEscapes(payload)}); - } else { - try out.writeAll("null;\n"); - } - return; - }, - std.builtin.Version => { - try out.print( - \\pub const {}: @import("std").builtin.Version = .{{ - \\ .major = {d}, - \\ .minor = {d}, - \\ .patch = {d}, - \\}}; - \\ - , .{ - std.zig.fmtId(name), - - value.major, - value.minor, - value.patch, - }); - return; - }, - std.SemanticVersion => { - try out.print( - \\pub const {}: @import("std").SemanticVersion = .{{ - \\ .major = {d}, - \\ .minor = {d}, - \\ .patch = {d}, - \\ - , .{ - std.zig.fmtId(name), - - value.major, - value.minor, - value.patch, - }); - if (value.pre) |some| { - try out.print(" .pre = \"{}\",\n", .{std.zig.fmtEscapes(some)}); - } - if (value.build) |some| { - try out.print(" .build = \"{}\",\n", .{std.zig.fmtEscapes(some)}); - } - try out.writeAll("};\n"); - return; - }, - else => {}, - } - switch (@typeInfo(T)) { - .Enum => |enum_info| { - try out.print("pub const {} = enum {{\n", .{std.zig.fmtId(@typeName(T))}); - inline for (enum_info.fields) |field| { - try out.print(" {},\n", .{std.zig.fmtId(field.name)}); - } - try out.writeAll("};\n"); - try out.print("pub const {}: {s} = {s}.{s};\n", .{ - std.zig.fmtId(name), - std.zig.fmtId(@typeName(T)), - std.zig.fmtId(@typeName(T)), - std.zig.fmtId(@tagName(value)), - }); - return; - }, - else => {}, - } - try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(name), @typeName(T) }); - try printLiteral(out, value, 0); - try out.writeAll(";\n"); -} - -// TODO: non-recursive? -fn printLiteral(out: anytype, val: anytype, indent: u8) !void { - const T = @TypeOf(val); - switch (@typeInfo(T)) { - .Array => { - try out.print("{s} {{\n", .{@typeName(T)}); - for (val) |item| { - try out.writeByteNTimes(' ', indent + 4); - try printLiteral(out, item, indent + 4); - try out.writeAll(",\n"); - } - try out.writeByteNTimes(' ', indent); - try out.writeAll("}"); - }, - .Pointer => |p| { - if (p.size != .Slice) { - @compileError("Non-slice pointers are not yet supported in build options"); - } - try out.print("&[_]{s} {{\n", .{@typeName(p.child)}); - for (val) |item| { - try out.writeByteNTimes(' ', indent + 4); - try printLiteral(out, item, indent + 4); - try out.writeAll(",\n"); - } - try out.writeByteNTimes(' ', indent); - try out.writeAll("}"); - }, - .Optional => { - if (val) |inner| { - return printLiteral(out, inner, indent); - } else { - return out.writeAll("null"); - } - }, - .Void, - .Bool, - .Int, - .ComptimeInt, - .Float, - .Null, - => try out.print("{any}", .{val}), - else => @compileError(std.fmt.comptimePrint("`{s}` are not yet supported as build options", .{@tagName(@typeInfo(T))})), - } -} - -/// The value is the path in the cache dir. -/// Adds a dependency automatically. 
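
A sketch of the typical build-options flow, assuming the usual `b: *std.Build`; the option names and values are arbitrary, and the unit test near the end of this file exercises the same `addOption` API:

const options = b.addOptions();
options.addOption(bool, "enable_tracing", false);
options.addOption([]const u8, "release_name", "aurora");
options.addOption(?[]const u8, "extra_flags", null);
// Expose the generated options.zig as an importable module:
const options_module = options.createModule();
_ = options_module; // e.g. handed to a CompileStep so code can @import it
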
-pub fn addOptionFileSource( - self: *OptionsStep, - name: []const u8, - source: FileSource, -) void { - self.file_source_args.append(.{ - .name = name, - .source = source.dupe(self.step.owner), - }) catch @panic("OOM"); - source.addStepDependencies(&self.step); -} - -/// The value is the path in the cache dir. -/// Adds a dependency automatically. -pub fn addOptionArtifact(self: *OptionsStep, name: []const u8, artifact: *CompileStep) void { - self.artifact_args.append(.{ .name = self.step.owner.dupe(name), .artifact = artifact }) catch @panic("OOM"); - self.step.dependOn(&artifact.step); -} - -pub fn createModule(self: *OptionsStep) *std.Build.Module { - return self.step.owner.createModule(.{ - .source_file = self.getSource(), - .dependencies = &.{}, - }); -} - -pub fn getSource(self: *OptionsStep) FileSource { - return .{ .generated = &self.generated_file }; -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - // This step completes so quickly that no progress is necessary. - _ = prog_node; - - const b = step.owner; - const self = @fieldParentPtr(OptionsStep, "step", step); - - for (self.artifact_args.items) |item| { - self.addOption( - []const u8, - item.name, - b.pathFromRoot(item.artifact.getOutputSource().getPath(b)), - ); - } - - for (self.file_source_args.items) |item| { - self.addOption( - []const u8, - item.name, - item.source.getPath(b), - ); - } - - const basename = "options.zig"; - - // Hash contents to file name. - var hash = b.cache.hash; - // Random bytes to make unique. Refresh this with new random bytes when - // implementation is modified in a non-backwards-compatible way. - hash.add(@as(u32, 0x38845ef8)); - hash.addBytes(self.contents.items); - const sub_path = "c" ++ fs.path.sep_str ++ hash.final() ++ fs.path.sep_str ++ basename; - - self.generated_file.path = try b.cache_root.join(b.allocator, &.{sub_path}); - - // Optimize for the hot path. Stat the file, and if it already exists, - // cache hit. - if (b.cache_root.handle.access(sub_path, .{})) |_| { - // This is the hot path, success. - step.result_cached = true; - return; - } else |outer_err| switch (outer_err) { - error.FileNotFound => { - const sub_dirname = fs.path.dirname(sub_path).?; - b.cache_root.handle.makePath(sub_dirname) catch |e| { - return step.fail("unable to make path '{}{s}': {s}", .{ - b.cache_root, sub_dirname, @errorName(e), - }); - }; - - const rand_int = std.crypto.random.int(u64); - const tmp_sub_path = "tmp" ++ fs.path.sep_str ++ - std.Build.hex64(rand_int) ++ fs.path.sep_str ++ - basename; - const tmp_sub_path_dirname = fs.path.dirname(tmp_sub_path).?; - - b.cache_root.handle.makePath(tmp_sub_path_dirname) catch |err| { - return step.fail("unable to make temporary directory '{}{s}': {s}", .{ - b.cache_root, tmp_sub_path_dirname, @errorName(err), - }); - }; - - b.cache_root.handle.writeFile(tmp_sub_path, self.contents.items) catch |err| { - return step.fail("unable to write options to '{}{s}': {s}", .{ - b.cache_root, tmp_sub_path, @errorName(err), - }); - }; - - b.cache_root.handle.rename(tmp_sub_path, sub_path) catch |err| switch (err) { - error.PathAlreadyExists => { - // Other process beat us to it. Clean up the temp file. 
- b.cache_root.handle.deleteFile(tmp_sub_path) catch |e| { - try step.addError("warning: unable to delete temp file '{}{s}': {s}", .{ - b.cache_root, tmp_sub_path, @errorName(e), - }); - }; - step.result_cached = true; - return; - }, - else => { - return step.fail("unable to rename options from '{}{s}' to '{}{s}': {s}", .{ - b.cache_root, tmp_sub_path, - b.cache_root, sub_path, - @errorName(err), - }); - }, - }; - }, - else => |e| return step.fail("unable to access options file '{}{s}': {s}", .{ - b.cache_root, sub_path, @errorName(e), - }), - } -} - -const OptionArtifactArg = struct { - name: []const u8, - artifact: *CompileStep, -}; - -const OptionFileSourceArg = struct { - name: []const u8, - source: FileSource, -}; - -test "OptionsStep" { - if (builtin.os.tag == .wasi) return error.SkipZigTest; - - var arena = std.heap.ArenaAllocator.init(std.testing.allocator); - defer arena.deinit(); - - const host = try std.zig.system.NativeTargetInfo.detect(.{}); - - var cache: std.Build.Cache = .{ - .gpa = arena.allocator(), - .manifest_dir = std.fs.cwd(), - }; - - var builder = try std.Build.create( - arena.allocator(), - "test", - .{ .path = "test", .handle = std.fs.cwd() }, - .{ .path = "test", .handle = std.fs.cwd() }, - .{ .path = "test", .handle = std.fs.cwd() }, - host, - &cache, - ); - defer builder.destroy(); - - const options = builder.addOptions(); - - // TODO this regressed at some point - //const KeywordEnum = enum { - // @"0.8.1", - //}; - - const nested_array = [2][2]u16{ - [2]u16{ 300, 200 }, - [2]u16{ 300, 200 }, - }; - const nested_slice: []const []const u16 = &[_][]const u16{ &nested_array[0], &nested_array[1] }; - - options.addOption(usize, "option1", 1); - options.addOption(?usize, "option2", null); - options.addOption(?usize, "option3", 3); - options.addOption(comptime_int, "option4", 4); - options.addOption([]const u8, "string", "zigisthebest"); - options.addOption(?[]const u8, "optional_string", null); - options.addOption([2][2]u16, "nested_array", nested_array); - options.addOption([]const []const u16, "nested_slice", nested_slice); - //options.addOption(KeywordEnum, "keyword_enum", .@"0.8.1"); - options.addOption(std.builtin.Version, "version", try std.builtin.Version.parse("0.1.2")); - options.addOption(std.SemanticVersion, "semantic_version", try std.SemanticVersion.parse("0.1.2-foo+bar")); - - try std.testing.expectEqualStrings( - \\pub const option1: usize = 1; - \\pub const option2: ?usize = null; - \\pub const option3: ?usize = 3; - \\pub const option4: comptime_int = 4; - \\pub const string: []const u8 = "zigisthebest"; - \\pub const optional_string: ?[]const u8 = null; - \\pub const nested_array: [2][2]u16 = [2][2]u16 { - \\ [2]u16 { - \\ 300, - \\ 200, - \\ }, - \\ [2]u16 { - \\ 300, - \\ 200, - \\ }, - \\}; - \\pub const nested_slice: []const []const u16 = &[_][]const u16 { - \\ &[_]u16 { - \\ 300, - \\ 200, - \\ }, - \\ &[_]u16 { - \\ 300, - \\ 200, - \\ }, - \\}; - //\\pub const KeywordEnum = enum { - //\\ @"0.8.1", - //\\}; - //\\pub const keyword_enum: KeywordEnum = KeywordEnum.@"0.8.1"; - \\pub const version: @import("std").builtin.Version = .{ - \\ .major = 0, - \\ .minor = 1, - \\ .patch = 2, - \\}; - \\pub const semantic_version: @import("std").SemanticVersion = .{ - \\ .major = 0, - \\ .minor = 1, - \\ .patch = 2, - \\ .pre = "foo", - \\ .build = "bar", - \\}; - \\ - , options.contents.items); - - _ = try std.zig.Ast.parse(arena.allocator(), try options.contents.toOwnedSliceSentinel(0), .zig); -} diff --git a/lib/std/Build/RemoveDirStep.zig 
b/lib/std/Build/RemoveDirStep.zig deleted file mode 100644 index a5bf3c3256..0000000000 --- a/lib/std/Build/RemoveDirStep.zig +++ /dev/null @@ -1,42 +0,0 @@ -const std = @import("../std.zig"); -const fs = std.fs; -const Step = std.Build.Step; -const RemoveDirStep = @This(); - -pub const base_id = .remove_dir; - -step: Step, -dir_path: []const u8, - -pub fn init(owner: *std.Build, dir_path: []const u8) RemoveDirStep { - return RemoveDirStep{ - .step = Step.init(.{ - .id = .remove_dir, - .name = owner.fmt("RemoveDir {s}", .{dir_path}), - .owner = owner, - .makeFn = make, - }), - .dir_path = owner.dupePath(dir_path), - }; -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - // TODO update progress node while walking file system. - // Should the standard library support this use case?? - _ = prog_node; - - const b = step.owner; - const self = @fieldParentPtr(RemoveDirStep, "step", step); - - b.build_root.handle.deleteTree(self.dir_path) catch |err| { - if (b.build_root.path) |base| { - return step.fail("unable to recursively delete path '{s}/{s}': {s}", .{ - base, self.dir_path, @errorName(err), - }); - } else { - return step.fail("unable to recursively delete path '{s}': {s}", .{ - self.dir_path, @errorName(err), - }); - } - }; -} diff --git a/lib/std/Build/RunStep.zig b/lib/std/Build/RunStep.zig deleted file mode 100644 index 5d530c7a25..0000000000 --- a/lib/std/Build/RunStep.zig +++ /dev/null @@ -1,1254 +0,0 @@ -const std = @import("../std.zig"); -const builtin = @import("builtin"); -const Step = std.Build.Step; -const CompileStep = std.Build.CompileStep; -const WriteFileStep = std.Build.WriteFileStep; -const fs = std.fs; -const mem = std.mem; -const process = std.process; -const ArrayList = std.ArrayList; -const EnvMap = process.EnvMap; -const Allocator = mem.Allocator; -const ExecError = std.Build.ExecError; -const assert = std.debug.assert; - -const RunStep = @This(); - -pub const base_id: Step.Id = .run; - -step: Step, - -/// See also addArg and addArgs to modifying this directly -argv: ArrayList(Arg), - -/// Set this to modify the current working directory -/// TODO change this to a Build.Cache.Directory to better integrate with -/// future child process cwd API. -cwd: ?[]const u8, - -/// Override this field to modify the environment, or use setEnvironmentVariable -env_map: ?*EnvMap, - -/// Configures whether the RunStep is considered to have side-effects, and also -/// whether the RunStep will inherit stdio streams, forwarding them to the -/// parent process, in which case will require a global lock to prevent other -/// steps from interfering with stdio while the subprocess associated with this -/// RunStep is running. -/// If the RunStep is determined to not have side-effects, then execution will -/// be skipped if all output files are up-to-date and input files are -/// unchanged. -stdio: StdIo = .infer_from_args, -/// This field must be `null` if stdio is `inherit`. -stdin: ?[]const u8 = null, - -/// Additional file paths relative to build.zig that, when modified, indicate -/// that the RunStep should be re-executed. -/// If the RunStep is determined to have side-effects, this field is ignored -/// and the RunStep is always executed when it appears in the build graph. -extra_file_dependencies: []const []const u8 = &.{}, - -/// After adding an output argument, this step will by default rename itself -/// for a better display name in the build summary. -/// This can be disabled by setting this to false. 
-rename_step_with_output_arg: bool = true, - -/// If this is true, a RunStep which is configured to check the output of the -/// executed binary will not fail the build if the binary cannot be executed -/// due to being for a foreign binary to the host system which is running the -/// build graph. -/// Command-line arguments such as -fqemu and -fwasmtime may affect whether a -/// binary is detected as foreign, as well as system configuration such as -/// Rosetta (macOS) and binfmt_misc (Linux). -/// If this RunStep is considered to have side-effects, then this flag does -/// nothing. -skip_foreign_checks: bool = false, - -/// If stderr or stdout exceeds this amount, the child process is killed and -/// the step fails. -max_stdio_size: usize = 10 * 1024 * 1024, - -captured_stdout: ?*Output = null, -captured_stderr: ?*Output = null, - -has_side_effects: bool = false, - -pub const StdIo = union(enum) { - /// Whether the RunStep has side-effects will be determined by whether or not one - /// of the args is an output file (added with `addOutputFileArg`). - /// If the RunStep is determined to have side-effects, this is the same as `inherit`. - /// The step will fail if the subprocess crashes or returns a non-zero exit code. - infer_from_args, - /// Causes the RunStep to be considered to have side-effects, and therefore - /// always execute when it appears in the build graph. - /// It also means that this step will obtain a global lock to prevent other - /// steps from running in the meantime. - /// The step will fail if the subprocess crashes or returns a non-zero exit code. - inherit, - /// Causes the RunStep to be considered to *not* have side-effects. The - /// process will be re-executed if any of the input dependencies are - /// modified. The exit code and standard I/O streams will be checked for - /// certain conditions, and the step will succeed or fail based on these - /// conditions. - /// Note that an explicit check for exit code 0 needs to be added to this - /// list if such a check is desirable. - check: std.ArrayList(Check), - /// This RunStep is running a zig unit test binary and will communicate - /// extra metadata over the IPC protocol. 
- zig_test, - - pub const Check = union(enum) { - expect_stderr_exact: []const u8, - expect_stderr_match: []const u8, - expect_stdout_exact: []const u8, - expect_stdout_match: []const u8, - expect_term: std.process.Child.Term, - }; -}; - -pub const Arg = union(enum) { - artifact: *CompileStep, - file_source: std.Build.FileSource, - directory_source: std.Build.FileSource, - bytes: []u8, - output: *Output, -}; - -pub const Output = struct { - generated_file: std.Build.GeneratedFile, - prefix: []const u8, - basename: []const u8, -}; - -pub fn create(owner: *std.Build, name: []const u8) *RunStep { - const self = owner.allocator.create(RunStep) catch @panic("OOM"); - self.* = .{ - .step = Step.init(.{ - .id = base_id, - .name = name, - .owner = owner, - .makeFn = make, - }), - .argv = ArrayList(Arg).init(owner.allocator), - .cwd = null, - .env_map = null, - }; - return self; -} - -pub fn setName(self: *RunStep, name: []const u8) void { - self.step.name = name; - self.rename_step_with_output_arg = false; -} - -pub fn enableTestRunnerMode(rs: *RunStep) void { - rs.stdio = .zig_test; - rs.addArgs(&.{"--listen=-"}); -} - -pub fn addArtifactArg(self: *RunStep, artifact: *CompileStep) void { - self.argv.append(Arg{ .artifact = artifact }) catch @panic("OOM"); - self.step.dependOn(&artifact.step); -} - -/// This provides file path as a command line argument to the command being -/// run, and returns a FileSource which can be used as inputs to other APIs -/// throughout the build system. -pub fn addOutputFileArg(rs: *RunStep, basename: []const u8) std.Build.FileSource { - return addPrefixedOutputFileArg(rs, "", basename); -} - -pub fn addPrefixedOutputFileArg( - rs: *RunStep, - prefix: []const u8, - basename: []const u8, -) std.Build.FileSource { - const b = rs.step.owner; - - const output = b.allocator.create(Output) catch @panic("OOM"); - output.* = .{ - .prefix = prefix, - .basename = basename, - .generated_file = .{ .step = &rs.step }, - }; - rs.argv.append(.{ .output = output }) catch @panic("OOM"); - - if (rs.rename_step_with_output_arg) { - rs.setName(b.fmt("{s} ({s})", .{ rs.step.name, basename })); - } - - return .{ .generated = &output.generated_file }; -} - -pub fn addFileSourceArg(self: *RunStep, file_source: std.Build.FileSource) void { - self.argv.append(.{ - .file_source = file_source.dupe(self.step.owner), - }) catch @panic("OOM"); - file_source.addStepDependencies(&self.step); -} - -pub fn addDirectorySourceArg(self: *RunStep, directory_source: std.Build.FileSource) void { - self.argv.append(.{ - .directory_source = directory_source.dupe(self.step.owner), - }) catch @panic("OOM"); - directory_source.addStepDependencies(&self.step); -} - -pub fn addArg(self: *RunStep, arg: []const u8) void { - self.argv.append(.{ .bytes = self.step.owner.dupe(arg) }) catch @panic("OOM"); -} - -pub fn addArgs(self: *RunStep, args: []const []const u8) void { - for (args) |arg| { - self.addArg(arg); - } -} - -pub fn clearEnvironment(self: *RunStep) void { - const b = self.step.owner; - const new_env_map = b.allocator.create(EnvMap) catch @panic("OOM"); - new_env_map.* = EnvMap.init(b.allocator); - self.env_map = new_env_map; -} - -pub fn addPathDir(self: *RunStep, search_path: []const u8) void { - const b = self.step.owner; - const env_map = getEnvMapInternal(self); - - const key = "PATH"; - var prev_path = env_map.get(key); - - if (prev_path) |pp| { - const new_path = b.fmt("{s}" ++ [1]u8{fs.path.delimiter} ++ "{s}", .{ pp, search_path }); - env_map.put(key, new_path) catch @panic("OOM"); - } else { - 
env_map.put(key, b.dupePath(search_path)) catch @panic("OOM"); - } -} - -pub fn getEnvMap(self: *RunStep) *EnvMap { - return getEnvMapInternal(self); -} - -fn getEnvMapInternal(self: *RunStep) *EnvMap { - const arena = self.step.owner.allocator; - return self.env_map orelse { - const env_map = arena.create(EnvMap) catch @panic("OOM"); - env_map.* = process.getEnvMap(arena) catch @panic("unhandled error"); - self.env_map = env_map; - return env_map; - }; -} - -pub fn setEnvironmentVariable(self: *RunStep, key: []const u8, value: []const u8) void { - const b = self.step.owner; - const env_map = self.getEnvMap(); - env_map.put(b.dupe(key), b.dupe(value)) catch @panic("unhandled error"); -} - -pub fn removeEnvironmentVariable(self: *RunStep, key: []const u8) void { - self.getEnvMap().remove(key); -} - -/// Adds a check for exact stderr match. Does not add any other checks. -pub fn expectStdErrEqual(self: *RunStep, bytes: []const u8) void { - const new_check: StdIo.Check = .{ .expect_stderr_exact = self.step.owner.dupe(bytes) }; - self.addCheck(new_check); -} - -/// Adds a check for exact stdout match as well as a check for exit code 0, if -/// there is not already an expected termination check. -pub fn expectStdOutEqual(self: *RunStep, bytes: []const u8) void { - const new_check: StdIo.Check = .{ .expect_stdout_exact = self.step.owner.dupe(bytes) }; - self.addCheck(new_check); - if (!self.hasTermCheck()) { - self.expectExitCode(0); - } -} - -pub fn expectExitCode(self: *RunStep, code: u8) void { - const new_check: StdIo.Check = .{ .expect_term = .{ .Exited = code } }; - self.addCheck(new_check); -} - -pub fn hasTermCheck(self: RunStep) bool { - for (self.stdio.check.items) |check| switch (check) { - .expect_term => return true, - else => continue, - }; - return false; -} - -pub fn addCheck(self: *RunStep, new_check: StdIo.Check) void { - switch (self.stdio) { - .infer_from_args => { - self.stdio = .{ .check = std.ArrayList(StdIo.Check).init(self.step.owner.allocator) }; - self.stdio.check.append(new_check) catch @panic("OOM"); - }, - .check => |*checks| checks.append(new_check) catch @panic("OOM"), - else => @panic("illegal call to addCheck: conflicting helper method calls. Suggest to directly set stdio field of RunStep instead"), - } -} - -pub fn captureStdErr(self: *RunStep) std.Build.FileSource { - assert(self.stdio != .inherit); - - if (self.captured_stderr) |output| return .{ .generated = &output.generated_file }; - - const output = self.step.owner.allocator.create(Output) catch @panic("OOM"); - output.* = .{ - .prefix = "", - .basename = "stderr", - .generated_file = .{ .step = &self.step }, - }; - self.captured_stderr = output; - return .{ .generated = &output.generated_file }; -} - -pub fn captureStdOut(self: *RunStep) std.Build.FileSource { - assert(self.stdio != .inherit); - - if (self.captured_stdout) |output| return .{ .generated = &output.generated_file }; - - const output = self.step.owner.allocator.create(Output) catch @panic("OOM"); - output.* = .{ - .prefix = "", - .basename = "stdout", - .generated_file = .{ .step = &self.step }, - }; - self.captured_stdout = output; - return .{ .generated = &output.generated_file }; -} - -/// Returns whether the RunStep has side effects *other than* updating the output arguments. 
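
A sketch exercising the argument and expectation helpers above, assuming `tool` is a `*CompileStep` for a host-runnable executable created earlier in the build script; the name and arguments are placeholders:

const run = std.Build.RunStep.create(b, "run codegen tool");
run.addArtifactArg(tool);
run.addArgs(&.{ "--format", "zig" });
// The returned FileSource can feed other steps; because the step now has an
// output argument it is treated as having no side effects, so its result is
// cached and re-run only when inputs change.
const generated = run.addOutputFileArg("generated.zig");
run.expectExitCode(0);
run.skip_foreign_checks = true; // do not fail checks when the binary cannot run on the host
_ = generated;
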
-fn hasSideEffects(self: RunStep) bool { - if (self.has_side_effects) return true; - return switch (self.stdio) { - .infer_from_args => !self.hasAnyOutputArgs(), - .inherit => true, - .check => false, - .zig_test => false, - }; -} - -fn hasAnyOutputArgs(self: RunStep) bool { - if (self.captured_stdout != null) return true; - if (self.captured_stderr != null) return true; - for (self.argv.items) |arg| switch (arg) { - .output => return true, - else => continue, - }; - return false; -} - -fn checksContainStdout(checks: []const StdIo.Check) bool { - for (checks) |check| switch (check) { - .expect_stderr_exact, - .expect_stderr_match, - .expect_term, - => continue, - - .expect_stdout_exact, - .expect_stdout_match, - => return true, - }; - return false; -} - -fn checksContainStderr(checks: []const StdIo.Check) bool { - for (checks) |check| switch (check) { - .expect_stdout_exact, - .expect_stdout_match, - .expect_term, - => continue, - - .expect_stderr_exact, - .expect_stderr_match, - => return true, - }; - return false; -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - const b = step.owner; - const arena = b.allocator; - const self = @fieldParentPtr(RunStep, "step", step); - const has_side_effects = self.hasSideEffects(); - - var argv_list = ArrayList([]const u8).init(arena); - var output_placeholders = ArrayList(struct { - index: usize, - output: *Output, - }).init(arena); - - var man = b.cache.obtain(); - defer man.deinit(); - - for (self.argv.items) |arg| { - switch (arg) { - .bytes => |bytes| { - try argv_list.append(bytes); - man.hash.addBytes(bytes); - }, - .file_source => |file| { - const file_path = file.getPath(b); - try argv_list.append(file_path); - _ = try man.addFile(file_path, null); - }, - .directory_source => |file| { - const file_path = file.getPath(b); - try argv_list.append(file_path); - man.hash.addBytes(file_path); - }, - .artifact => |artifact| { - if (artifact.target.isWindows()) { - // On Windows we don't have rpaths so we have to add .dll search paths to PATH - self.addPathForDynLibs(artifact); - } - const file_path = artifact.installed_path orelse - artifact.getOutputSource().getPath(b); - - try argv_list.append(file_path); - - _ = try man.addFile(file_path, null); - }, - .output => |output| { - man.hash.addBytes(output.prefix); - man.hash.addBytes(output.basename); - // Add a placeholder into the argument list because we need the - // manifest hash to be updated with all arguments before the - // object directory is computed. 
- try argv_list.append(""); - try output_placeholders.append(.{ - .index = argv_list.items.len - 1, - .output = output, - }); - }, - } - } - - if (self.captured_stdout) |output| { - man.hash.addBytes(output.basename); - } - - if (self.captured_stderr) |output| { - man.hash.addBytes(output.basename); - } - - hashStdIo(&man.hash, self.stdio); - - if (has_side_effects) { - try runCommand(self, argv_list.items, has_side_effects, null, prog_node); - return; - } - - for (self.extra_file_dependencies) |file_path| { - _ = try man.addFile(b.pathFromRoot(file_path), null); - } - - if (try step.cacheHit(&man)) { - // cache hit, skip running command - const digest = man.final(); - for (output_placeholders.items) |placeholder| { - placeholder.output.generated_file.path = try b.cache_root.join(arena, &.{ - "o", &digest, placeholder.output.basename, - }); - } - - if (self.captured_stdout) |output| { - output.generated_file.path = try b.cache_root.join(arena, &.{ - "o", &digest, output.basename, - }); - } - - if (self.captured_stderr) |output| { - output.generated_file.path = try b.cache_root.join(arena, &.{ - "o", &digest, output.basename, - }); - } - - step.result_cached = true; - return; - } - - const digest = man.final(); - - for (output_placeholders.items) |placeholder| { - const output_components = .{ "o", &digest, placeholder.output.basename }; - const output_sub_path = try fs.path.join(arena, &output_components); - const output_sub_dir_path = fs.path.dirname(output_sub_path).?; - b.cache_root.handle.makePath(output_sub_dir_path) catch |err| { - return step.fail("unable to make path '{}{s}': {s}", .{ - b.cache_root, output_sub_dir_path, @errorName(err), - }); - }; - const output_path = try b.cache_root.join(arena, &output_components); - placeholder.output.generated_file.path = output_path; - const cli_arg = if (placeholder.output.prefix.len == 0) - output_path - else - b.fmt("{s}{s}", .{ placeholder.output.prefix, output_path }); - argv_list.items[placeholder.index] = cli_arg; - } - - try runCommand(self, argv_list.items, has_side_effects, &digest, prog_node); - - try step.writeManifest(&man); -} - -fn formatTerm( - term: ?std.process.Child.Term, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = fmt; - _ = options; - if (term) |t| switch (t) { - .Exited => |code| try writer.print("exited with code {}", .{code}), - .Signal => |sig| try writer.print("terminated with signal {}", .{sig}), - .Stopped => |sig| try writer.print("stopped with signal {}", .{sig}), - .Unknown => |code| try writer.print("terminated for unknown reason with code {}", .{code}), - } else { - try writer.writeAll("exited with any code"); - } -} -fn fmtTerm(term: ?std.process.Child.Term) std.fmt.Formatter(formatTerm) { - return .{ .data = term }; -} - -fn termMatches(expected: ?std.process.Child.Term, actual: std.process.Child.Term) bool { - return if (expected) |e| switch (e) { - .Exited => |expected_code| switch (actual) { - .Exited => |actual_code| expected_code == actual_code, - else => false, - }, - .Signal => |expected_sig| switch (actual) { - .Signal => |actual_sig| expected_sig == actual_sig, - else => false, - }, - .Stopped => |expected_sig| switch (actual) { - .Stopped => |actual_sig| expected_sig == actual_sig, - else => false, - }, - .Unknown => |expected_code| switch (actual) { - .Unknown => |actual_code| expected_code == actual_code, - else => false, - }, - } else switch (actual) { - .Exited => true, - else => false, - }; -} - -fn runCommand( - self: *RunStep, - argv: 
[]const []const u8, - has_side_effects: bool, - digest: ?*const [std.Build.Cache.hex_digest_len]u8, - prog_node: *std.Progress.Node, -) !void { - const step = &self.step; - const b = step.owner; - const arena = b.allocator; - - try step.handleChildProcUnsupported(self.cwd, argv); - try Step.handleVerbose2(step.owner, self.cwd, self.env_map, argv); - - const allow_skip = switch (self.stdio) { - .check, .zig_test => self.skip_foreign_checks, - else => false, - }; - - var interp_argv = std.ArrayList([]const u8).init(b.allocator); - defer interp_argv.deinit(); - - const result = spawnChildAndCollect(self, argv, has_side_effects, prog_node) catch |err| term: { - // InvalidExe: cpu arch mismatch - // FileNotFound: can happen with a wrong dynamic linker path - if (err == error.InvalidExe or err == error.FileNotFound) interpret: { - // TODO: learn the target from the binary directly rather than from - // relying on it being a CompileStep. This will make this logic - // work even for the edge case that the binary was produced by a - // third party. - const exe = switch (self.argv.items[0]) { - .artifact => |exe| exe, - else => break :interpret, - }; - switch (exe.kind) { - .exe, .@"test" => {}, - else => break :interpret, - } - - const need_cross_glibc = exe.target.isGnuLibC() and exe.is_linking_libc; - switch (b.host.getExternalExecutor(exe.target_info, .{ - .qemu_fixes_dl = need_cross_glibc and b.glibc_runtimes_dir != null, - .link_libc = exe.is_linking_libc, - })) { - .native, .rosetta => { - if (allow_skip) return error.MakeSkipped; - break :interpret; - }, - .wine => |bin_name| { - if (b.enable_wine) { - try interp_argv.append(bin_name); - try interp_argv.appendSlice(argv); - } else { - return failForeign(self, "-fwine", argv[0], exe); - } - }, - .qemu => |bin_name| { - if (b.enable_qemu) { - const glibc_dir_arg = if (need_cross_glibc) - b.glibc_runtimes_dir orelse - return failForeign(self, "--glibc-runtimes", argv[0], exe) - else - null; - - try interp_argv.append(bin_name); - - if (glibc_dir_arg) |dir| { - // TODO look into making this a call to `linuxTriple`. This - // needs the directory to be called "i686" rather than - // "x86" which is why we do it manually here. 
- const fmt_str = "{s}" ++ fs.path.sep_str ++ "{s}-{s}-{s}"; - const cpu_arch = exe.target.getCpuArch(); - const os_tag = exe.target.getOsTag(); - const abi = exe.target.getAbi(); - const cpu_arch_name: []const u8 = if (cpu_arch == .x86) - "i686" - else - @tagName(cpu_arch); - const full_dir = try std.fmt.allocPrint(b.allocator, fmt_str, .{ - dir, cpu_arch_name, @tagName(os_tag), @tagName(abi), - }); - - try interp_argv.append("-L"); - try interp_argv.append(full_dir); - } - - try interp_argv.appendSlice(argv); - } else { - return failForeign(self, "-fqemu", argv[0], exe); - } - }, - .darling => |bin_name| { - if (b.enable_darling) { - try interp_argv.append(bin_name); - try interp_argv.appendSlice(argv); - } else { - return failForeign(self, "-fdarling", argv[0], exe); - } - }, - .wasmtime => |bin_name| { - if (b.enable_wasmtime) { - try interp_argv.append(bin_name); - try interp_argv.append("--dir=."); - try interp_argv.append(argv[0]); - try interp_argv.append("--"); - try interp_argv.appendSlice(argv[1..]); - } else { - return failForeign(self, "-fwasmtime", argv[0], exe); - } - }, - .bad_dl => |foreign_dl| { - if (allow_skip) return error.MakeSkipped; - - const host_dl = b.host.dynamic_linker.get() orelse "(none)"; - - return step.fail( - \\the host system is unable to execute binaries from the target - \\ because the host dynamic linker is '{s}', - \\ while the target dynamic linker is '{s}'. - \\ consider setting the dynamic linker or enabling skip_foreign_checks in the Run step - , .{ host_dl, foreign_dl }); - }, - .bad_os_or_cpu => { - if (allow_skip) return error.MakeSkipped; - - const host_name = try b.host.target.zigTriple(b.allocator); - const foreign_name = try exe.target.zigTriple(b.allocator); - - return step.fail("the host system ({s}) is unable to execute binaries from the target ({s})", .{ - host_name, foreign_name, - }); - }, - } - - if (exe.target.isWindows()) { - // On Windows we don't have rpaths so we have to add .dll search paths to PATH - self.addPathForDynLibs(exe); - } - - try Step.handleVerbose2(step.owner, self.cwd, self.env_map, interp_argv.items); - - break :term spawnChildAndCollect(self, interp_argv.items, has_side_effects, prog_node) catch |e| { - return step.fail("unable to spawn interpreter {s}: {s}", .{ - interp_argv.items[0], @errorName(e), - }); - }; - } - - return step.fail("unable to spawn {s}: {s}", .{ argv[0], @errorName(err) }); - }; - - step.result_duration_ns = result.elapsed_ns; - step.result_peak_rss = result.peak_rss; - step.test_results = result.stdio.test_results; - - // Capture stdout and stderr to GeneratedFile objects. 
- const Stream = struct { - captured: ?*Output, - is_null: bool, - bytes: []const u8, - }; - for ([_]Stream{ - .{ - .captured = self.captured_stdout, - .is_null = result.stdio.stdout_null, - .bytes = result.stdio.stdout, - }, - .{ - .captured = self.captured_stderr, - .is_null = result.stdio.stderr_null, - .bytes = result.stdio.stderr, - }, - }) |stream| { - if (stream.captured) |output| { - assert(!stream.is_null); - - const output_components = .{ "o", digest.?, output.basename }; - const output_path = try b.cache_root.join(arena, &output_components); - output.generated_file.path = output_path; - - const sub_path = try fs.path.join(arena, &output_components); - const sub_path_dirname = fs.path.dirname(sub_path).?; - b.cache_root.handle.makePath(sub_path_dirname) catch |err| { - return step.fail("unable to make path '{}{s}': {s}", .{ - b.cache_root, sub_path_dirname, @errorName(err), - }); - }; - b.cache_root.handle.writeFile(sub_path, stream.bytes) catch |err| { - return step.fail("unable to write file '{}{s}': {s}", .{ - b.cache_root, sub_path, @errorName(err), - }); - }; - } - } - - const final_argv = if (interp_argv.items.len == 0) argv else interp_argv.items; - - switch (self.stdio) { - .check => |checks| for (checks.items) |check| switch (check) { - .expect_stderr_exact => |expected_bytes| { - assert(!result.stdio.stderr_null); - if (!mem.eql(u8, expected_bytes, result.stdio.stderr)) { - return step.fail( - \\ - \\========= expected this stderr: ========= - \\{s} - \\========= but found: ==================== - \\{s} - \\========= from the following command: === - \\{s} - , .{ - expected_bytes, - result.stdio.stderr, - try Step.allocPrintCmd(arena, self.cwd, final_argv), - }); - } - }, - .expect_stderr_match => |match| { - assert(!result.stdio.stderr_null); - if (mem.indexOf(u8, result.stdio.stderr, match) == null) { - return step.fail( - \\ - \\========= expected to find in stderr: ========= - \\{s} - \\========= but stderr does not contain it: ===== - \\{s} - \\========= from the following command: ========= - \\{s} - , .{ - match, - result.stdio.stderr, - try Step.allocPrintCmd(arena, self.cwd, final_argv), - }); - } - }, - .expect_stdout_exact => |expected_bytes| { - assert(!result.stdio.stdout_null); - if (!mem.eql(u8, expected_bytes, result.stdio.stdout)) { - return step.fail( - \\ - \\========= expected this stdout: ========= - \\{s} - \\========= but found: ==================== - \\{s} - \\========= from the following command: === - \\{s} - , .{ - expected_bytes, - result.stdio.stdout, - try Step.allocPrintCmd(arena, self.cwd, final_argv), - }); - } - }, - .expect_stdout_match => |match| { - assert(!result.stdio.stdout_null); - if (mem.indexOf(u8, result.stdio.stdout, match) == null) { - return step.fail( - \\ - \\========= expected to find in stdout: ========= - \\{s} - \\========= but stdout does not contain it: ===== - \\{s} - \\========= from the following command: ========= - \\{s} - , .{ - match, - result.stdio.stdout, - try Step.allocPrintCmd(arena, self.cwd, final_argv), - }); - } - }, - .expect_term => |expected_term| { - if (!termMatches(expected_term, result.term)) { - return step.fail("the following command {} (expected {}):\n{s}", .{ - fmtTerm(result.term), - fmtTerm(expected_term), - try Step.allocPrintCmd(arena, self.cwd, final_argv), - }); - } - }, - }, - .zig_test => { - const prefix: []const u8 = p: { - if (result.stdio.test_metadata) |tm| { - if (tm.next_index <= tm.names.len) { - const name = tm.testName(tm.next_index - 1); - break :p b.fmt("while executing 
test '{s}', ", .{name}); - } - } - break :p ""; - }; - const expected_term: std.process.Child.Term = .{ .Exited = 0 }; - if (!termMatches(expected_term, result.term)) { - return step.fail("{s}the following command {} (expected {}):\n{s}", .{ - prefix, - fmtTerm(result.term), - fmtTerm(expected_term), - try Step.allocPrintCmd(arena, self.cwd, final_argv), - }); - } - if (!result.stdio.test_results.isSuccess()) { - return step.fail( - "{s}the following test command failed:\n{s}", - .{ prefix, try Step.allocPrintCmd(arena, self.cwd, final_argv) }, - ); - } - }, - else => { - try step.handleChildProcessTerm(result.term, self.cwd, final_argv); - }, - } -} - -const ChildProcResult = struct { - term: std.process.Child.Term, - elapsed_ns: u64, - peak_rss: usize, - - stdio: StdIoResult, -}; - -fn spawnChildAndCollect( - self: *RunStep, - argv: []const []const u8, - has_side_effects: bool, - prog_node: *std.Progress.Node, -) !ChildProcResult { - const b = self.step.owner; - const arena = b.allocator; - - var child = std.process.Child.init(argv, arena); - if (self.cwd) |cwd| { - child.cwd = b.pathFromRoot(cwd); - } else { - child.cwd = b.build_root.path; - child.cwd_dir = b.build_root.handle; - } - child.env_map = self.env_map orelse b.env_map; - child.request_resource_usage_statistics = true; - - child.stdin_behavior = switch (self.stdio) { - .infer_from_args => if (has_side_effects) .Inherit else .Ignore, - .inherit => .Inherit, - .check => .Ignore, - .zig_test => .Pipe, - }; - child.stdout_behavior = switch (self.stdio) { - .infer_from_args => if (has_side_effects) .Inherit else .Ignore, - .inherit => .Inherit, - .check => |checks| if (checksContainStdout(checks.items)) .Pipe else .Ignore, - .zig_test => .Pipe, - }; - child.stderr_behavior = switch (self.stdio) { - .infer_from_args => if (has_side_effects) .Inherit else .Pipe, - .inherit => .Inherit, - .check => .Pipe, - .zig_test => .Pipe, - }; - if (self.captured_stdout != null) child.stdout_behavior = .Pipe; - if (self.captured_stderr != null) child.stderr_behavior = .Pipe; - if (self.stdin != null) { - assert(child.stdin_behavior != .Inherit); - child.stdin_behavior = .Pipe; - } - - try child.spawn(); - var timer = try std.time.Timer.start(); - - const result = if (self.stdio == .zig_test) - evalZigTest(self, &child, prog_node) - else - evalGeneric(self, &child); - - const term = try child.wait(); - const elapsed_ns = timer.read(); - - return .{ - .stdio = try result, - .term = term, - .elapsed_ns = elapsed_ns, - .peak_rss = child.resource_usage_statistics.getMaxRss() orelse 0, - }; -} - -const StdIoResult = struct { - // These use boolean flags instead of optionals as a workaround for - // https://github.com/ziglang/zig/issues/14783 - stdout: []const u8, - stderr: []const u8, - stdout_null: bool, - stderr_null: bool, - test_results: Step.TestResults, - test_metadata: ?TestMetadata, -}; - -fn evalZigTest( - self: *RunStep, - child: *std.process.Child, - prog_node: *std.Progress.Node, -) !StdIoResult { - const gpa = self.step.owner.allocator; - const arena = self.step.owner.allocator; - - var poller = std.io.poll(gpa, enum { stdout, stderr }, .{ - .stdout = child.stdout.?, - .stderr = child.stderr.?, - }); - defer poller.deinit(); - - try sendMessage(child.stdin.?, .query_test_metadata); - - const Header = std.zig.Server.Message.Header; - - const stdout = poller.fifo(.stdout); - const stderr = poller.fifo(.stderr); - - var fail_count: u32 = 0; - var skip_count: u32 = 0; - var leak_count: u32 = 0; - var test_count: u32 = 0; - - var metadata: 
?TestMetadata = null; - - var sub_prog_node: ?std.Progress.Node = null; - defer if (sub_prog_node) |*n| n.end(); - - poll: while (true) { - while (stdout.readableLength() < @sizeOf(Header)) { - if (!(try poller.poll())) break :poll; - } - const header = stdout.reader().readStruct(Header) catch unreachable; - while (stdout.readableLength() < header.bytes_len) { - if (!(try poller.poll())) break :poll; - } - const body = stdout.readableSliceOfLen(header.bytes_len); - - switch (header.tag) { - .zig_version => { - if (!std.mem.eql(u8, builtin.zig_version_string, body)) { - return self.step.fail( - "zig version mismatch build runner vs compiler: '{s}' vs '{s}'", - .{ builtin.zig_version_string, body }, - ); - } - }, - .test_metadata => { - const TmHdr = std.zig.Server.Message.TestMetadata; - const tm_hdr = @ptrCast(*align(1) const TmHdr, body); - test_count = tm_hdr.tests_len; - - const names_bytes = body[@sizeOf(TmHdr)..][0 .. test_count * @sizeOf(u32)]; - const async_frame_lens_bytes = body[@sizeOf(TmHdr) + names_bytes.len ..][0 .. test_count * @sizeOf(u32)]; - const expected_panic_msgs_bytes = body[@sizeOf(TmHdr) + names_bytes.len + async_frame_lens_bytes.len ..][0 .. test_count * @sizeOf(u32)]; - const string_bytes = body[@sizeOf(TmHdr) + names_bytes.len + async_frame_lens_bytes.len + expected_panic_msgs_bytes.len ..][0..tm_hdr.string_bytes_len]; - - const names = std.mem.bytesAsSlice(u32, names_bytes); - const async_frame_lens = std.mem.bytesAsSlice(u32, async_frame_lens_bytes); - const expected_panic_msgs = std.mem.bytesAsSlice(u32, expected_panic_msgs_bytes); - const names_aligned = try arena.alloc(u32, names.len); - for (names_aligned, names) |*dest, src| dest.* = src; - - const async_frame_lens_aligned = try arena.alloc(u32, async_frame_lens.len); - for (async_frame_lens_aligned, async_frame_lens) |*dest, src| dest.* = src; - - const expected_panic_msgs_aligned = try arena.alloc(u32, expected_panic_msgs.len); - for (expected_panic_msgs_aligned, expected_panic_msgs) |*dest, src| dest.* = src; - - prog_node.setEstimatedTotalItems(names.len); - metadata = .{ - .string_bytes = try arena.dupe(u8, string_bytes), - .names = names_aligned, - .async_frame_lens = async_frame_lens_aligned, - .expected_panic_msgs = expected_panic_msgs_aligned, - .next_index = 0, - .prog_node = prog_node, - }; - - try requestNextTest(child.stdin.?, &metadata.?, &sub_prog_node); - }, - .test_results => { - const md = metadata.?; - - const TrHdr = std.zig.Server.Message.TestResults; - const tr_hdr = @ptrCast(*align(1) const TrHdr, body); - fail_count += @boolToInt(tr_hdr.flags.fail); - skip_count += @boolToInt(tr_hdr.flags.skip); - leak_count += @boolToInt(tr_hdr.flags.leak); - - if (tr_hdr.flags.fail or tr_hdr.flags.leak) { - const name = std.mem.sliceTo(md.string_bytes[md.names[tr_hdr.index]..], 0); - const msg = std.mem.trim(u8, stderr.readableSlice(0), "\n"); - const label = if (tr_hdr.flags.fail) "failed" else "leaked"; - if (msg.len > 0) { - try self.step.addError("'{s}' {s}: {s}", .{ name, label, msg }); - } else { - try self.step.addError("'{s}' {s}", .{ name, label }); - } - stderr.discard(msg.len); - } - - try requestNextTest(child.stdin.?, &metadata.?, &sub_prog_node); - }, - else => {}, // ignore other messages - } - - stdout.discard(body.len); - } - - if (stderr.readableLength() > 0) { - const msg = std.mem.trim(u8, try stderr.toOwnedSlice(), "\n"); - if (msg.len > 0) try self.step.result_error_msgs.append(arena, msg); - } - - // Send EOF to stdin. 
- child.stdin.?.close(); - child.stdin = null; - - return .{ - .stdout = &.{}, - .stderr = &.{}, - .stdout_null = true, - .stderr_null = true, - .test_results = .{ - .test_count = test_count, - .fail_count = fail_count, - .skip_count = skip_count, - .leak_count = leak_count, - }, - .test_metadata = metadata, - }; -} - -const TestMetadata = struct { - names: []const u32, - async_frame_lens: []const u32, - expected_panic_msgs: []const u32, - string_bytes: []const u8, - next_index: u32, - prog_node: *std.Progress.Node, - - fn testName(tm: TestMetadata, index: u32) []const u8 { - return std.mem.sliceTo(tm.string_bytes[tm.names[index]..], 0); - } -}; - -fn requestNextTest(in: fs.File, metadata: *TestMetadata, sub_prog_node: *?std.Progress.Node) !void { - while (metadata.next_index < metadata.names.len) { - const i = metadata.next_index; - metadata.next_index += 1; - - if (metadata.async_frame_lens[i] != 0) continue; - if (metadata.expected_panic_msgs[i] != 0) continue; - - const name = metadata.testName(i); - if (sub_prog_node.*) |*n| n.end(); - sub_prog_node.* = metadata.prog_node.start(name, 0); - - try sendRunTestMessage(in, i); - return; - } else { - try sendMessage(in, .exit); - } -} - -fn sendMessage(file: std.fs.File, tag: std.zig.Client.Message.Tag) !void { - const header: std.zig.Client.Message.Header = .{ - .tag = tag, - .bytes_len = 0, - }; - try file.writeAll(std.mem.asBytes(&header)); -} - -fn sendRunTestMessage(file: std.fs.File, index: u32) !void { - const header: std.zig.Client.Message.Header = .{ - .tag = .run_test, - .bytes_len = 4, - }; - const full_msg = std.mem.asBytes(&header) ++ std.mem.asBytes(&index); - try file.writeAll(full_msg); -} - -fn evalGeneric(self: *RunStep, child: *std.process.Child) !StdIoResult { - const arena = self.step.owner.allocator; - - if (self.stdin) |stdin| { - child.stdin.?.writeAll(stdin) catch |err| { - return self.step.fail("unable to write stdin: {s}", .{@errorName(err)}); - }; - child.stdin.?.close(); - child.stdin = null; - } - - // These are not optionals, as a workaround for - // https://github.com/ziglang/zig/issues/14783 - var stdout_bytes: []const u8 = undefined; - var stderr_bytes: []const u8 = undefined; - var stdout_null = true; - var stderr_null = true; - - if (child.stdout) |stdout| { - if (child.stderr) |stderr| { - var poller = std.io.poll(arena, enum { stdout, stderr }, .{ - .stdout = stdout, - .stderr = stderr, - }); - defer poller.deinit(); - - while (try poller.poll()) { - if (poller.fifo(.stdout).count > self.max_stdio_size) - return error.StdoutStreamTooLong; - if (poller.fifo(.stderr).count > self.max_stdio_size) - return error.StderrStreamTooLong; - } - - stdout_bytes = try poller.fifo(.stdout).toOwnedSlice(); - stderr_bytes = try poller.fifo(.stderr).toOwnedSlice(); - stdout_null = false; - stderr_null = false; - } else { - stdout_bytes = try stdout.reader().readAllAlloc(arena, self.max_stdio_size); - stdout_null = false; - } - } else if (child.stderr) |stderr| { - stderr_bytes = try stderr.reader().readAllAlloc(arena, self.max_stdio_size); - stderr_null = false; - } - - if (!stderr_null and stderr_bytes.len > 0) { - // Treat stderr as an error message. 
- const stderr_is_diagnostic = self.captured_stderr == null and switch (self.stdio) { - .check => |checks| !checksContainStderr(checks.items), - else => true, - }; - if (stderr_is_diagnostic) { - try self.step.result_error_msgs.append(arena, stderr_bytes); - } - } - - return .{ - .stdout = stdout_bytes, - .stderr = stderr_bytes, - .stdout_null = stdout_null, - .stderr_null = stderr_null, - .test_results = .{}, - .test_metadata = null, - }; -} - -fn addPathForDynLibs(self: *RunStep, artifact: *CompileStep) void { - const b = self.step.owner; - for (artifact.link_objects.items) |link_object| { - switch (link_object) { - .other_step => |other| { - if (other.target.isWindows() and other.isDynamicLibrary()) { - addPathDir(self, fs.path.dirname(other.getOutputSource().getPath(b)).?); - addPathForDynLibs(self, other); - } - }, - else => {}, - } - } -} - -fn failForeign( - self: *RunStep, - suggested_flag: []const u8, - argv0: []const u8, - exe: *CompileStep, -) error{ MakeFailed, MakeSkipped, OutOfMemory } { - switch (self.stdio) { - .check, .zig_test => { - if (self.skip_foreign_checks) - return error.MakeSkipped; - - const b = self.step.owner; - const host_name = try b.host.target.zigTriple(b.allocator); - const foreign_name = try exe.target.zigTriple(b.allocator); - - return self.step.fail( - \\unable to spawn foreign binary '{s}' ({s}) on host system ({s}) - \\ consider using {s} or enabling skip_foreign_checks in the Run step - , .{ argv0, foreign_name, host_name, suggested_flag }); - }, - else => { - return self.step.fail("unable to spawn foreign binary '{s}'", .{argv0}); - }, - } -} - -fn hashStdIo(hh: *std.Build.Cache.HashHelper, stdio: StdIo) void { - switch (stdio) { - .infer_from_args, .inherit, .zig_test => {}, - .check => |checks| for (checks.items) |check| { - hh.add(@as(std.meta.Tag(StdIo.Check), check)); - switch (check) { - .expect_stderr_exact, - .expect_stderr_match, - .expect_stdout_exact, - .expect_stdout_match, - => |s| hh.addBytes(s), - - .expect_term => |term| { - hh.add(@as(std.meta.Tag(std.process.Child.Term), term)); - switch (term) { - .Exited => |x| hh.add(x), - .Signal, .Stopped, .Unknown => |x| hh.add(x), - } - }, - } - }, - } -} diff --git a/lib/std/Build/Step/CheckFile.zig b/lib/std/Build/Step/CheckFile.zig new file mode 100644 index 0000000000..ad8b1a25f0 --- /dev/null +++ b/lib/std/Build/Step/CheckFile.zig @@ -0,0 +1,87 @@ +//! Fail the build step if a file does not match certain checks. +//! TODO: make this more flexible, supporting more kinds of checks. +//! TODO: generalize the code in std.testing.expectEqualStrings and make this +//! CheckFileStep produce those helpful diagnostics when there is not a match. 
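For orientation, a minimal hypothetical build.zig sketch of wiring this step up; `b` (the *std.Build instance) and `generated_file` (a FileSource produced by an earlier step) are placeholder names for illustration and are not part of this patch:

    // Hypothetical usage sketch; `b` and `generated_file` are assumed to exist.
    const check = std.Build.CheckFileStep.create(b, generated_file, .{
        .expected_matches = &.{"PASS"},
    });
    // Make the default step depend on the check so a missing match fails the build.
    b.default_step.dependOn(&check.step);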
+const CheckFileStep = @This(); +const std = @import("std"); +const Step = std.Build.Step; +const fs = std.fs; +const mem = std.mem; + +step: Step, +expected_matches: []const []const u8, +expected_exact: ?[]const u8, +source: std.Build.FileSource, +max_bytes: usize = 20 * 1024 * 1024, + +pub const base_id = .check_file; + +pub const Options = struct { + expected_matches: []const []const u8 = &.{}, + expected_exact: ?[]const u8 = null, +}; + +pub fn create( + owner: *std.Build, + source: std.Build.FileSource, + options: Options, +) *CheckFileStep { + const self = owner.allocator.create(CheckFileStep) catch @panic("OOM"); + self.* = .{ + .step = Step.init(.{ + .id = .check_file, + .name = "CheckFile", + .owner = owner, + .makeFn = make, + }), + .source = source.dupe(owner), + .expected_matches = owner.dupeStrings(options.expected_matches), + .expected_exact = options.expected_exact, + }; + self.source.addStepDependencies(&self.step); + return self; +} + +pub fn setName(self: *CheckFileStep, name: []const u8) void { + self.step.name = name; +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const b = step.owner; + const self = @fieldParentPtr(CheckFileStep, "step", step); + + const src_path = self.source.getPath(b); + const contents = fs.cwd().readFileAlloc(b.allocator, src_path, self.max_bytes) catch |err| { + return step.fail("unable to read '{s}': {s}", .{ + src_path, @errorName(err), + }); + }; + + for (self.expected_matches) |expected_match| { + if (mem.indexOf(u8, contents, expected_match) == null) { + return step.fail( + \\ + \\========= expected to find: =================== + \\{s} + \\========= but file does not contain it: ======= + \\{s} + \\=============================================== + , .{ expected_match, contents }); + } + } + + if (self.expected_exact) |expected_exact| { + if (!mem.eql(u8, expected_exact, contents)) { + return step.fail( + \\ + \\========= expected: ===================== + \\{s} + \\========= but found: ==================== + \\{s} + \\========= from the following file: ====== + \\{s} + , .{ expected_exact, contents, src_path }); + } + } +} diff --git a/lib/std/Build/Step/CheckObject.zig b/lib/std/Build/Step/CheckObject.zig new file mode 100644 index 0000000000..431f74eccc --- /dev/null +++ b/lib/std/Build/Step/CheckObject.zig @@ -0,0 +1,1055 @@ +const std = @import("std"); +const assert = std.debug.assert; +const fs = std.fs; +const macho = std.macho; +const math = std.math; +const mem = std.mem; +const testing = std.testing; + +const CheckObjectStep = @This(); + +const Allocator = mem.Allocator; +const Step = std.Build.Step; + +pub const base_id = .check_object; + +step: Step, +source: std.Build.FileSource, +max_bytes: usize = 20 * 1024 * 1024, +checks: std.ArrayList(Check), +dump_symtab: bool = false, +obj_format: std.Target.ObjectFormat, + +pub fn create( + owner: *std.Build, + source: std.Build.FileSource, + obj_format: std.Target.ObjectFormat, +) *CheckObjectStep { + const gpa = owner.allocator; + const self = gpa.create(CheckObjectStep) catch @panic("OOM"); + self.* = .{ + .step = Step.init(.{ + .id = .check_file, + .name = "CheckObject", + .owner = owner, + .makeFn = make, + }), + .source = source.dupe(owner), + .checks = std.ArrayList(Check).init(gpa), + .obj_format = obj_format, + }; + self.source.addStepDependencies(&self.step); + return self; +} + +/// Runs and (optionally) compares the output of a binary. +/// Asserts `self` was generated from an executable step. 
+/// TODO this doesn't actually compare, and there's no apparent reason for it +/// to depend on the check object step. I don't see why this function should exist, +/// the caller could just add the run step directly. +pub fn runAndCompare(self: *CheckObjectStep) *std.Build.RunStep { + const dependencies_len = self.step.dependencies.items.len; + assert(dependencies_len > 0); + const exe_step = self.step.dependencies.items[dependencies_len - 1]; + const exe = exe_step.cast(std.Build.CompileStep).?; + const run = self.step.owner.addRunArtifact(exe); + run.skip_foreign_checks = true; + run.step.dependOn(&self.step); + return run; +} + +const SearchPhrase = struct { + string: []const u8, + file_source: ?std.Build.FileSource = null, + + fn resolve(phrase: SearchPhrase, b: *std.Build, step: *Step) []const u8 { + const file_source = phrase.file_source orelse return phrase.string; + return b.fmt("{s} {s}", .{ phrase.string, file_source.getPath2(b, step) }); + } +}; + +/// There two types of actions currently supported: +/// * `.match` - is the main building block of standard matchers with optional eat-all token `{*}` +/// and extractors by name such as `{n_value}`. Please note this action is very simplistic in nature +/// i.e., it won't really handle edge cases/nontrivial examples. But given that we do want to use +/// it mainly to test the output of our object format parser-dumpers when testing the linkers, etc. +/// it should be plenty useful in its current form. +/// * `.compute_cmp` - can be used to perform an operation on the extracted global variables +/// using the MatchAction. It currently only supports an addition. The operation is required +/// to be specified in Reverse Polish Notation to ease in operator-precedence parsing (well, +/// to avoid any parsing really). +/// For example, if the two extracted values were saved as `vmaddr` and `entryoff` respectively +/// they could then be added with this simple program `vmaddr entryoff +`. +const Action = struct { + tag: enum { match, not_present, compute_cmp }, + phrase: SearchPhrase, + expected: ?ComputeCompareExpected = null, + + /// Will return true if the `phrase` was found in the `haystack`. + /// Some examples include: + /// + /// LC 0 => will match in its entirety + /// vmaddr {vmaddr} => will match `vmaddr` and then extract the following value as u64 + /// and save under `vmaddr` global name (see `global_vars` param) + /// name {*}libobjc{*}.dylib => will match `name` followed by a token which contains `libobjc` and `.dylib` + /// in that order with other letters in between + fn match( + act: Action, + b: *std.Build, + step: *Step, + haystack: []const u8, + global_vars: anytype, + ) !bool { + assert(act.tag == .match or act.tag == .not_present); + const phrase = act.phrase.resolve(b, step); + var candidate_var: ?struct { name: []const u8, value: u64 } = null; + var hay_it = mem.tokenize(u8, mem.trim(u8, haystack, " "), " "); + var needle_it = mem.tokenize(u8, mem.trim(u8, phrase, " "), " "); + + while (needle_it.next()) |needle_tok| { + const hay_tok = hay_it.next() orelse return false; + + if (mem.indexOf(u8, needle_tok, "{*}")) |index| { + // We have fuzzy matchers within the search pattern, so we match substrings. 
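(Worked example, for illustration: with the pattern `name {*}libobjc{*}.dylib` and the haystack token `/usr/lib/libobjc.A.dylib`, the literal pieces `libobjc` and `.dylib` between the `{*}` markers must each be found as substrings of that token, so the match succeeds. A brace token such as `{vmaddr}` instead parses the corresponding haystack token as hexadecimal and records it under the name `vmaddr` for later `compute_cmp` actions.)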
+ var start = index; + var n_tok = needle_tok; + var h_tok = hay_tok; + while (true) { + n_tok = n_tok[start + 3 ..]; + const inner = if (mem.indexOf(u8, n_tok, "{*}")) |sub_end| + n_tok[0..sub_end] + else + n_tok; + if (mem.indexOf(u8, h_tok, inner) == null) return false; + start = mem.indexOf(u8, n_tok, "{*}") orelse break; + } + } else if (mem.startsWith(u8, needle_tok, "{")) { + const closing_brace = mem.indexOf(u8, needle_tok, "}") orelse return error.MissingClosingBrace; + if (closing_brace != needle_tok.len - 1) return error.ClosingBraceNotLast; + + const name = needle_tok[1..closing_brace]; + if (name.len == 0) return error.MissingBraceValue; + const value = try std.fmt.parseInt(u64, hay_tok, 16); + candidate_var = .{ + .name = name, + .value = value, + }; + } else { + if (!mem.eql(u8, hay_tok, needle_tok)) return false; + } + } + + if (candidate_var) |v| { + try global_vars.putNoClobber(v.name, v.value); + } + + return true; + } + + /// Will return true if the `phrase` is correctly parsed into an RPN program and + /// its reduced, computed value compares using `op` with the expected value, either + /// a literal or another extracted variable. + fn computeCmp(act: Action, b: *std.Build, step: *Step, global_vars: anytype) !bool { + const gpa = step.owner.allocator; + const phrase = act.phrase.resolve(b, step); + var op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa); + var values = std.ArrayList(u64).init(gpa); + + var it = mem.tokenize(u8, phrase, " "); + while (it.next()) |next| { + if (mem.eql(u8, next, "+")) { + try op_stack.append(.add); + } else if (mem.eql(u8, next, "-")) { + try op_stack.append(.sub); + } else if (mem.eql(u8, next, "%")) { + try op_stack.append(.mod); + } else if (mem.eql(u8, next, "*")) { + try op_stack.append(.mul); + } else { + const val = std.fmt.parseInt(u64, next, 0) catch blk: { + break :blk global_vars.get(next) orelse { + try step.addError( + \\ + \\========= variable was not extracted: =========== + \\{s} + \\================================================= + , .{next}); + return error.UnknownVariable; + }; + }; + try values.append(val); + } + } + + var op_i: usize = 1; + var reduced: u64 = values.items[0]; + for (op_stack.items) |op| { + const other = values.items[op_i]; + switch (op) { + .add => { + reduced += other; + }, + .sub => { + reduced -= other; + }, + .mod => { + reduced %= other; + }, + .mul => { + reduced *= other; + }, + } + op_i += 1; + } + + const exp_value = switch (act.expected.?.value) { + .variable => |name| global_vars.get(name) orelse { + try step.addError( + \\ + \\========= variable was not extracted: =========== + \\{s} + \\================================================= + , .{name}); + return error.UnknownVariable; + }, + .literal => |x| x, + }; + return math.compare(reduced, act.expected.?.op, exp_value); + } +}; + +const ComputeCompareExpected = struct { + op: math.CompareOperator, + value: union(enum) { + variable: []const u8, + literal: u64, + }, + + pub fn format( + value: @This(), + comptime fmt: []const u8, + options: std.fmt.FormatOptions, + writer: anytype, + ) !void { + if (fmt.len != 0) std.fmt.invalidFmtError(fmt, value); + _ = options; + try writer.print("{s} ", .{@tagName(value.op)}); + switch (value.value) { + .variable => |name| try writer.writeAll(name), + .literal => |x| try writer.print("{x}", .{x}), + } + } +}; + +const Check = struct { + actions: std.ArrayList(Action), + + fn create(allocator: Allocator) Check { + return .{ + .actions = std.ArrayList(Action).init(allocator), + }; + } + 
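For orientation, a hypothetical build.zig sketch using the public helpers declared further below (`checkStart`, `checkNext`, `checkComputeCompare`); `b` and `exe` (a macOS-targeting CompileStep) are placeholder names for illustration and are not part of this patch:

    // Hypothetical build.zig sketch; `b` and `exe` are assumptions for illustration.
    const check = std.Build.CheckObjectStep.create(b, exe.getOutputSource(), .macho);
    // First check: find the __TEXT segment and extract its vmaddr.
    check.checkStart("cmd SEGMENT_64");
    check.checkNext("segname __TEXT");
    check.checkNext("vmaddr {vmaddr}");
    // Second check: extract the entry point offset from LC_MAIN.
    check.checkStart("cmd MAIN");
    check.checkNext("entryoff {entryoff}");
    // Evaluate an RPN program over the extracted variables and compare the result;
    // here it trivially asserts vmaddr + entryoff >= vmaddr, just to show the mechanism.
    check.checkComputeCompare("vmaddr entryoff +", .{
        .op = .gte,
        .value = .{ .variable = "vmaddr" },
    });
    b.default_step.dependOn(&check.step);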
+ fn match(self: *Check, phrase: SearchPhrase) void { + self.actions.append(.{ + .tag = .match, + .phrase = phrase, + }) catch @panic("OOM"); + } + + fn notPresent(self: *Check, phrase: SearchPhrase) void { + self.actions.append(.{ + .tag = .not_present, + .phrase = phrase, + }) catch @panic("OOM"); + } + + fn computeCmp(self: *Check, phrase: SearchPhrase, expected: ComputeCompareExpected) void { + self.actions.append(.{ + .tag = .compute_cmp, + .phrase = phrase, + .expected = expected, + }) catch @panic("OOM"); + } +}; + +/// Creates a new sequence of actions with `phrase` as the first anchor searched phrase. +pub fn checkStart(self: *CheckObjectStep, phrase: []const u8) void { + var new_check = Check.create(self.step.owner.allocator); + new_check.match(.{ .string = self.step.owner.dupe(phrase) }); + self.checks.append(new_check) catch @panic("OOM"); +} + +/// Adds another searched phrase to the latest created Check with `CheckObjectStep.checkStart(...)`. +/// Asserts at least one check already exists. +pub fn checkNext(self: *CheckObjectStep, phrase: []const u8) void { + assert(self.checks.items.len > 0); + const last = &self.checks.items[self.checks.items.len - 1]; + last.match(.{ .string = self.step.owner.dupe(phrase) }); +} + +/// Like `checkNext()` but takes an additional argument `FileSource` which will be +/// resolved to a full search query in `make()`. +pub fn checkNextFileSource( + self: *CheckObjectStep, + phrase: []const u8, + file_source: std.Build.FileSource, +) void { + assert(self.checks.items.len > 0); + const last = &self.checks.items[self.checks.items.len - 1]; + last.match(.{ .string = self.step.owner.dupe(phrase), .file_source = file_source }); +} + +/// Adds another searched phrase to the latest created Check with `CheckObjectStep.checkStart(...)` +/// however ensures there is no matching phrase in the output. +/// Asserts at least one check already exists. +pub fn checkNotPresent(self: *CheckObjectStep, phrase: []const u8) void { + assert(self.checks.items.len > 0); + const last = &self.checks.items[self.checks.items.len - 1]; + last.notPresent(.{ .string = self.step.owner.dupe(phrase) }); +} + +/// Creates a new check checking specifically symbol table parsed and dumped from the object +/// file. +/// Issuing this check will force parsing and dumping of the symbol table. +pub fn checkInSymtab(self: *CheckObjectStep) void { + self.dump_symtab = true; + const symtab_label = switch (self.obj_format) { + .macho => MachODumper.symtab_label, + else => @panic("TODO other parsers"), + }; + self.checkStart(symtab_label); +} + +/// Creates a new standalone, singular check which allows running simple binary operations +/// on the extracted variables. It will then compare the reduced program with the value of +/// the expected variable. 
+pub fn checkComputeCompare( + self: *CheckObjectStep, + program: []const u8, + expected: ComputeCompareExpected, +) void { + var new_check = Check.create(self.step.owner.allocator); + new_check.computeCmp(.{ .string = self.step.owner.dupe(program) }, expected); + self.checks.append(new_check) catch @panic("OOM"); +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const b = step.owner; + const gpa = b.allocator; + const self = @fieldParentPtr(CheckObjectStep, "step", step); + + const src_path = self.source.getPath(b); + const contents = fs.cwd().readFileAllocOptions( + gpa, + src_path, + self.max_bytes, + null, + @alignOf(u64), + null, + ) catch |err| return step.fail("unable to read '{s}': {s}", .{ src_path, @errorName(err) }); + + const output = switch (self.obj_format) { + .macho => try MachODumper.parseAndDump(step, contents, .{ + .dump_symtab = self.dump_symtab, + }), + .elf => @panic("TODO elf parser"), + .coff => @panic("TODO coff parser"), + .wasm => try WasmDumper.parseAndDump(step, contents, .{ + .dump_symtab = self.dump_symtab, + }), + else => unreachable, + }; + + var vars = std.StringHashMap(u64).init(gpa); + + for (self.checks.items) |chk| { + var it = mem.tokenize(u8, output, "\r\n"); + for (chk.actions.items) |act| { + switch (act.tag) { + .match => { + while (it.next()) |line| { + if (try act.match(b, step, line, &vars)) break; + } else { + return step.fail( + \\ + \\========= expected to find: ========================== + \\{s} + \\========= but parsed file does not contain it: ======= + \\{s} + \\====================================================== + , .{ act.phrase.resolve(b, step), output }); + } + }, + .not_present => { + while (it.next()) |line| { + if (try act.match(b, step, line, &vars)) { + return step.fail( + \\ + \\========= expected not to find: =================== + \\{s} + \\========= but parsed file does contain it: ======== + \\{s} + \\=================================================== + , .{ act.phrase.resolve(b, step), output }); + } + } + }, + .compute_cmp => { + const res = act.computeCmp(b, step, vars) catch |err| switch (err) { + error.UnknownVariable => { + return step.fail( + \\========= from parsed file: ===================== + \\{s} + \\================================================= + , .{output}); + }, + else => |e| return e, + }; + if (!res) { + return step.fail( + \\ + \\========= comparison failed for action: =========== + \\{s} {} + \\========= from parsed file: ======================= + \\{s} + \\=================================================== + , .{ act.phrase.resolve(b, step), act.expected.?, output }); + } + }, + } + } + } +} + +const Opts = struct { + dump_symtab: bool = false, +}; + +const MachODumper = struct { + const LoadCommandIterator = macho.LoadCommandIterator; + const symtab_label = "symtab"; + + fn parseAndDump(step: *Step, bytes: []align(@alignOf(u64)) const u8, opts: Opts) ![]const u8 { + const gpa = step.owner.allocator; + var stream = std.io.fixedBufferStream(bytes); + const reader = stream.reader(); + + const hdr = try reader.readStruct(macho.mach_header_64); + if (hdr.magic != macho.MH_MAGIC_64) { + return error.InvalidMagicNumber; + } + + var output = std.ArrayList(u8).init(gpa); + const writer = output.writer(); + + var symtab: []const macho.nlist_64 = undefined; + var strtab: []const u8 = undefined; + var sections = std.ArrayList(macho.section_64).init(gpa); + var imports = std.ArrayList([]const u8).init(gpa); + + var it = LoadCommandIterator{ + .ncmds = hdr.ncmds, + .buffer 
= bytes[@sizeOf(macho.mach_header_64)..][0..hdr.sizeofcmds], + }; + var i: usize = 0; + while (it.next()) |cmd| { + switch (cmd.cmd()) { + .SEGMENT_64 => { + const seg = cmd.cast(macho.segment_command_64).?; + try sections.ensureUnusedCapacity(seg.nsects); + for (cmd.getSections()) |sect| { + sections.appendAssumeCapacity(sect); + } + }, + .SYMTAB => if (opts.dump_symtab) { + const lc = cmd.cast(macho.symtab_command).?; + symtab = @ptrCast( + [*]const macho.nlist_64, + @alignCast(@alignOf(macho.nlist_64), &bytes[lc.symoff]), + )[0..lc.nsyms]; + strtab = bytes[lc.stroff..][0..lc.strsize]; + }, + .LOAD_DYLIB, + .LOAD_WEAK_DYLIB, + .REEXPORT_DYLIB, + => { + try imports.append(cmd.getDylibPathName()); + }, + else => {}, + } + + try dumpLoadCommand(cmd, i, writer); + try writer.writeByte('\n'); + + i += 1; + } + + if (opts.dump_symtab) { + try writer.print("{s}\n", .{symtab_label}); + for (symtab) |sym| { + if (sym.stab()) continue; + const sym_name = mem.sliceTo(@ptrCast([*:0]const u8, strtab.ptr + sym.n_strx), 0); + if (sym.sect()) { + const sect = sections.items[sym.n_sect - 1]; + try writer.print("{x} ({s},{s})", .{ + sym.n_value, + sect.segName(), + sect.sectName(), + }); + if (sym.ext()) { + try writer.writeAll(" external"); + } + try writer.print(" {s}\n", .{sym_name}); + } else if (sym.undf()) { + const ordinal = @divTrunc(@bitCast(i16, sym.n_desc), macho.N_SYMBOL_RESOLVER); + const import_name = blk: { + if (ordinal <= 0) { + if (ordinal == macho.BIND_SPECIAL_DYLIB_SELF) + break :blk "self import"; + if (ordinal == macho.BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE) + break :blk "main executable"; + if (ordinal == macho.BIND_SPECIAL_DYLIB_FLAT_LOOKUP) + break :blk "flat lookup"; + unreachable; + } + const full_path = imports.items[@bitCast(u16, ordinal) - 1]; + const basename = fs.path.basename(full_path); + assert(basename.len > 0); + const ext = mem.lastIndexOfScalar(u8, basename, '.') orelse basename.len; + break :blk basename[0..ext]; + }; + try writer.writeAll("(undefined)"); + if (sym.weakRef()) { + try writer.writeAll(" weak"); + } + if (sym.ext()) { + try writer.writeAll(" external"); + } + try writer.print(" {s} (from {s})\n", .{ + sym_name, + import_name, + }); + } else unreachable; + } + } + + return output.toOwnedSlice(); + } + + fn dumpLoadCommand(lc: macho.LoadCommandIterator.LoadCommand, index: usize, writer: anytype) !void { + // print header first + try writer.print( + \\LC {d} + \\cmd {s} + \\cmdsize {d} + , .{ index, @tagName(lc.cmd()), lc.cmdsize() }); + + switch (lc.cmd()) { + .SEGMENT_64 => { + const seg = lc.cast(macho.segment_command_64).?; + try writer.writeByte('\n'); + try writer.print( + \\segname {s} + \\vmaddr {x} + \\vmsize {x} + \\fileoff {x} + \\filesz {x} + , .{ + seg.segName(), + seg.vmaddr, + seg.vmsize, + seg.fileoff, + seg.filesize, + }); + + for (lc.getSections()) |sect| { + try writer.writeByte('\n'); + try writer.print( + \\sectname {s} + \\addr {x} + \\size {x} + \\offset {x} + \\align {x} + , .{ + sect.sectName(), + sect.addr, + sect.size, + sect.offset, + sect.@"align", + }); + } + }, + + .ID_DYLIB, + .LOAD_DYLIB, + .LOAD_WEAK_DYLIB, + .REEXPORT_DYLIB, + => { + const dylib = lc.cast(macho.dylib_command).?; + try writer.writeByte('\n'); + try writer.print( + \\name {s} + \\timestamp {d} + \\current version {x} + \\compatibility version {x} + , .{ + lc.getDylibPathName(), + dylib.dylib.timestamp, + dylib.dylib.current_version, + dylib.dylib.compatibility_version, + }); + }, + + .MAIN => { + const main = lc.cast(macho.entry_point_command).?; + try 
writer.writeByte('\n'); + try writer.print( + \\entryoff {x} + \\stacksize {x} + , .{ main.entryoff, main.stacksize }); + }, + + .RPATH => { + try writer.writeByte('\n'); + try writer.print( + \\path {s} + , .{ + lc.getRpathPathName(), + }); + }, + + .UUID => { + const uuid = lc.cast(macho.uuid_command).?; + try writer.writeByte('\n'); + try writer.print("uuid {x}", .{std.fmt.fmtSliceHexLower(&uuid.uuid)}); + }, + + .DATA_IN_CODE, + .FUNCTION_STARTS, + .CODE_SIGNATURE, + => { + const llc = lc.cast(macho.linkedit_data_command).?; + try writer.writeByte('\n'); + try writer.print( + \\dataoff {x} + \\datasize {x} + , .{ llc.dataoff, llc.datasize }); + }, + + .DYLD_INFO_ONLY => { + const dlc = lc.cast(macho.dyld_info_command).?; + try writer.writeByte('\n'); + try writer.print( + \\rebaseoff {x} + \\rebasesize {x} + \\bindoff {x} + \\bindsize {x} + \\weakbindoff {x} + \\weakbindsize {x} + \\lazybindoff {x} + \\lazybindsize {x} + \\exportoff {x} + \\exportsize {x} + , .{ + dlc.rebase_off, + dlc.rebase_size, + dlc.bind_off, + dlc.bind_size, + dlc.weak_bind_off, + dlc.weak_bind_size, + dlc.lazy_bind_off, + dlc.lazy_bind_size, + dlc.export_off, + dlc.export_size, + }); + }, + + .SYMTAB => { + const slc = lc.cast(macho.symtab_command).?; + try writer.writeByte('\n'); + try writer.print( + \\symoff {x} + \\nsyms {x} + \\stroff {x} + \\strsize {x} + , .{ + slc.symoff, + slc.nsyms, + slc.stroff, + slc.strsize, + }); + }, + + .DYSYMTAB => { + const dlc = lc.cast(macho.dysymtab_command).?; + try writer.writeByte('\n'); + try writer.print( + \\ilocalsym {x} + \\nlocalsym {x} + \\iextdefsym {x} + \\nextdefsym {x} + \\iundefsym {x} + \\nundefsym {x} + \\indirectsymoff {x} + \\nindirectsyms {x} + , .{ + dlc.ilocalsym, + dlc.nlocalsym, + dlc.iextdefsym, + dlc.nextdefsym, + dlc.iundefsym, + dlc.nundefsym, + dlc.indirectsymoff, + dlc.nindirectsyms, + }); + }, + + else => {}, + } + } +}; + +const WasmDumper = struct { + const symtab_label = "symbols"; + + fn parseAndDump(step: *Step, bytes: []const u8, opts: Opts) ![]const u8 { + const gpa = step.owner.allocator; + if (opts.dump_symtab) { + @panic("TODO: Implement symbol table parsing and dumping"); + } + + var fbs = std.io.fixedBufferStream(bytes); + const reader = fbs.reader(); + + const buf = try reader.readBytesNoEof(8); + if (!mem.eql(u8, buf[0..4], &std.wasm.magic)) { + return error.InvalidMagicByte; + } + if (!mem.eql(u8, buf[4..], &std.wasm.version)) { + return error.UnsupportedWasmVersion; + } + + var output = std.ArrayList(u8).init(gpa); + errdefer output.deinit(); + const writer = output.writer(); + + while (reader.readByte()) |current_byte| { + const section = std.meta.intToEnum(std.wasm.Section, current_byte) catch { + return step.fail("Found invalid section id '{d}'", .{current_byte}); + }; + + const section_length = try std.leb.readULEB128(u32, reader); + try parseAndDumpSection(step, section, bytes[fbs.pos..][0..section_length], writer); + fbs.pos += section_length; + } else |_| {} // reached end of stream + + return output.toOwnedSlice(); + } + + fn parseAndDumpSection( + step: *Step, + section: std.wasm.Section, + data: []const u8, + writer: anytype, + ) !void { + var fbs = std.io.fixedBufferStream(data); + const reader = fbs.reader(); + + try writer.print( + \\Section {s} + \\size {d} + , .{ @tagName(section), data.len }); + + switch (section) { + .type, + .import, + .function, + .table, + .memory, + .global, + .@"export", + .element, + .code, + .data, + => { + const entries = try std.leb.readULEB128(u32, reader); + try 
writer.print("\nentries {d}\n", .{entries}); + try dumpSection(step, section, data[fbs.pos..], entries, writer); + }, + .custom => { + const name_length = try std.leb.readULEB128(u32, reader); + const name = data[fbs.pos..][0..name_length]; + fbs.pos += name_length; + try writer.print("\nname {s}\n", .{name}); + + if (mem.eql(u8, name, "name")) { + try parseDumpNames(step, reader, writer, data); + } else if (mem.eql(u8, name, "producers")) { + try parseDumpProducers(reader, writer, data); + } else if (mem.eql(u8, name, "target_features")) { + try parseDumpFeatures(reader, writer, data); + } + // TODO: Implement parsing and dumping other custom sections (such as relocations) + }, + .start => { + const start = try std.leb.readULEB128(u32, reader); + try writer.print("\nstart {d}\n", .{start}); + }, + else => {}, // skip unknown sections + } + } + + fn dumpSection(step: *Step, section: std.wasm.Section, data: []const u8, entries: u32, writer: anytype) !void { + var fbs = std.io.fixedBufferStream(data); + const reader = fbs.reader(); + + switch (section) { + .type => { + var i: u32 = 0; + while (i < entries) : (i += 1) { + const func_type = try reader.readByte(); + if (func_type != std.wasm.function_type) { + return step.fail("expected function type, found byte '{d}'", .{func_type}); + } + const params = try std.leb.readULEB128(u32, reader); + try writer.print("params {d}\n", .{params}); + var index: u32 = 0; + while (index < params) : (index += 1) { + try parseDumpType(step, std.wasm.Valtype, reader, writer); + } else index = 0; + const returns = try std.leb.readULEB128(u32, reader); + try writer.print("returns {d}\n", .{returns}); + while (index < returns) : (index += 1) { + try parseDumpType(step, std.wasm.Valtype, reader, writer); + } + } + }, + .import => { + var i: u32 = 0; + while (i < entries) : (i += 1) { + const module_name_len = try std.leb.readULEB128(u32, reader); + const module_name = data[fbs.pos..][0..module_name_len]; + fbs.pos += module_name_len; + const name_len = try std.leb.readULEB128(u32, reader); + const name = data[fbs.pos..][0..name_len]; + fbs.pos += name_len; + + const kind = std.meta.intToEnum(std.wasm.ExternalKind, try reader.readByte()) catch { + return step.fail("invalid import kind", .{}); + }; + + try writer.print( + \\module {s} + \\name {s} + \\kind {s} + , .{ module_name, name, @tagName(kind) }); + try writer.writeByte('\n'); + switch (kind) { + .function => { + try writer.print("index {d}\n", .{try std.leb.readULEB128(u32, reader)}); + }, + .memory => { + try parseDumpLimits(reader, writer); + }, + .global => { + try parseDumpType(step, std.wasm.Valtype, reader, writer); + try writer.print("mutable {}\n", .{0x01 == try std.leb.readULEB128(u32, reader)}); + }, + .table => { + try parseDumpType(step, std.wasm.RefType, reader, writer); + try parseDumpLimits(reader, writer); + }, + } + } + }, + .function => { + var i: u32 = 0; + while (i < entries) : (i += 1) { + try writer.print("index {d}\n", .{try std.leb.readULEB128(u32, reader)}); + } + }, + .table => { + var i: u32 = 0; + while (i < entries) : (i += 1) { + try parseDumpType(step, std.wasm.RefType, reader, writer); + try parseDumpLimits(reader, writer); + } + }, + .memory => { + var i: u32 = 0; + while (i < entries) : (i += 1) { + try parseDumpLimits(reader, writer); + } + }, + .global => { + var i: u32 = 0; + while (i < entries) : (i += 1) { + try parseDumpType(step, std.wasm.Valtype, reader, writer); + try writer.print("mutable {}\n", .{0x01 == try std.leb.readULEB128(u1, reader)}); + try 
parseDumpInit(step, reader, writer); + } + }, + .@"export" => { + var i: u32 = 0; + while (i < entries) : (i += 1) { + const name_len = try std.leb.readULEB128(u32, reader); + const name = data[fbs.pos..][0..name_len]; + fbs.pos += name_len; + const kind_byte = try std.leb.readULEB128(u8, reader); + const kind = std.meta.intToEnum(std.wasm.ExternalKind, kind_byte) catch { + return step.fail("invalid export kind value '{d}'", .{kind_byte}); + }; + const index = try std.leb.readULEB128(u32, reader); + try writer.print( + \\name {s} + \\kind {s} + \\index {d} + , .{ name, @tagName(kind), index }); + try writer.writeByte('\n'); + } + }, + .element => { + var i: u32 = 0; + while (i < entries) : (i += 1) { + try writer.print("table index {d}\n", .{try std.leb.readULEB128(u32, reader)}); + try parseDumpInit(step, reader, writer); + + const function_indexes = try std.leb.readULEB128(u32, reader); + var function_index: u32 = 0; + try writer.print("indexes {d}\n", .{function_indexes}); + while (function_index < function_indexes) : (function_index += 1) { + try writer.print("index {d}\n", .{try std.leb.readULEB128(u32, reader)}); + } + } + }, + .code => {}, // code section is considered opaque to linker + .data => { + var i: u32 = 0; + while (i < entries) : (i += 1) { + const index = try std.leb.readULEB128(u32, reader); + try writer.print("memory index 0x{x}\n", .{index}); + try parseDumpInit(step, reader, writer); + const size = try std.leb.readULEB128(u32, reader); + try writer.print("size {d}\n", .{size}); + try reader.skipBytes(size, .{}); // we do not care about the content of the segments + } + }, + else => unreachable, + } + } + + fn parseDumpType(step: *Step, comptime WasmType: type, reader: anytype, writer: anytype) !void { + const type_byte = try reader.readByte(); + const valtype = std.meta.intToEnum(WasmType, type_byte) catch { + return step.fail("Invalid wasm type value '{d}'", .{type_byte}); + }; + try writer.print("type {s}\n", .{@tagName(valtype)}); + } + + fn parseDumpLimits(reader: anytype, writer: anytype) !void { + const flags = try std.leb.readULEB128(u8, reader); + const min = try std.leb.readULEB128(u32, reader); + + try writer.print("min {x}\n", .{min}); + if (flags != 0) { + try writer.print("max {x}\n", .{try std.leb.readULEB128(u32, reader)}); + } + } + + fn parseDumpInit(step: *Step, reader: anytype, writer: anytype) !void { + const byte = try std.leb.readULEB128(u8, reader); + const opcode = std.meta.intToEnum(std.wasm.Opcode, byte) catch { + return step.fail("invalid wasm opcode '{d}'", .{byte}); + }; + switch (opcode) { + .i32_const => try writer.print("i32.const {x}\n", .{try std.leb.readILEB128(i32, reader)}), + .i64_const => try writer.print("i64.const {x}\n", .{try std.leb.readILEB128(i64, reader)}), + .f32_const => try writer.print("f32.const {x}\n", .{@bitCast(f32, try reader.readIntLittle(u32))}), + .f64_const => try writer.print("f64.const {x}\n", .{@bitCast(f64, try reader.readIntLittle(u64))}), + .global_get => try writer.print("global.get {x}\n", .{try std.leb.readULEB128(u32, reader)}), + else => unreachable, + } + const end_opcode = try std.leb.readULEB128(u8, reader); + if (end_opcode != std.wasm.opcode(.end)) { + return step.fail("expected 'end' opcode in init expression", .{}); + } + } + + fn parseDumpNames(step: *Step, reader: anytype, writer: anytype, data: []const u8) !void { + while (reader.context.pos < data.len) { + try parseDumpType(step, std.wasm.NameSubsection, reader, writer); + const size = try std.leb.readULEB128(u32, reader); + const 
entries = try std.leb.readULEB128(u32, reader); + try writer.print( + \\size {d} + \\names {d} + , .{ size, entries }); + try writer.writeByte('\n'); + var i: u32 = 0; + while (i < entries) : (i += 1) { + const index = try std.leb.readULEB128(u32, reader); + const name_len = try std.leb.readULEB128(u32, reader); + const pos = reader.context.pos; + const name = data[pos..][0..name_len]; + reader.context.pos += name_len; + + try writer.print( + \\index {d} + \\name {s} + , .{ index, name }); + try writer.writeByte('\n'); + } + } + } + + fn parseDumpProducers(reader: anytype, writer: anytype, data: []const u8) !void { + const field_count = try std.leb.readULEB128(u32, reader); + try writer.print("fields {d}\n", .{field_count}); + var current_field: u32 = 0; + while (current_field < field_count) : (current_field += 1) { + const field_name_length = try std.leb.readULEB128(u32, reader); + const field_name = data[reader.context.pos..][0..field_name_length]; + reader.context.pos += field_name_length; + + const value_count = try std.leb.readULEB128(u32, reader); + try writer.print( + \\field_name {s} + \\values {d} + , .{ field_name, value_count }); + try writer.writeByte('\n'); + var current_value: u32 = 0; + while (current_value < value_count) : (current_value += 1) { + const value_length = try std.leb.readULEB128(u32, reader); + const value = data[reader.context.pos..][0..value_length]; + reader.context.pos += value_length; + + const version_length = try std.leb.readULEB128(u32, reader); + const version = data[reader.context.pos..][0..version_length]; + reader.context.pos += version_length; + + try writer.print( + \\value_name {s} + \\version {s} + , .{ value, version }); + try writer.writeByte('\n'); + } + } + } + + fn parseDumpFeatures(reader: anytype, writer: anytype, data: []const u8) !void { + const feature_count = try std.leb.readULEB128(u32, reader); + try writer.print("features {d}\n", .{feature_count}); + + var index: u32 = 0; + while (index < feature_count) : (index += 1) { + const prefix_byte = try std.leb.readULEB128(u8, reader); + const name_length = try std.leb.readULEB128(u32, reader); + const feature_name = data[reader.context.pos..][0..name_length]; + reader.context.pos += name_length; + + try writer.print("{c} {s}\n", .{ prefix_byte, feature_name }); + } + } +}; diff --git a/lib/std/Build/Step/Compile.zig b/lib/std/Build/Step/Compile.zig new file mode 100644 index 0000000000..7627c4e6d0 --- /dev/null +++ b/lib/std/Build/Step/Compile.zig @@ -0,0 +1,2183 @@ +const builtin = @import("builtin"); +const std = @import("std"); +const mem = std.mem; +const fs = std.fs; +const assert = std.debug.assert; +const panic = std.debug.panic; +const ArrayList = std.ArrayList; +const StringHashMap = std.StringHashMap; +const Sha256 = std.crypto.hash.sha2.Sha256; +const Allocator = mem.Allocator; +const Step = std.Build.Step; +const CrossTarget = std.zig.CrossTarget; +const NativeTargetInfo = std.zig.system.NativeTargetInfo; +const FileSource = std.Build.FileSource; +const PkgConfigPkg = std.Build.PkgConfigPkg; +const PkgConfigError = std.Build.PkgConfigError; +const ExecError = std.Build.ExecError; +const Module = std.Build.Module; +const VcpkgRoot = std.Build.VcpkgRoot; +const InstallDir = std.Build.InstallDir; +const InstallArtifactStep = std.Build.InstallArtifactStep; +const GeneratedFile = std.Build.GeneratedFile; +const ObjCopyStep = std.Build.ObjCopyStep; +const CheckObjectStep = std.Build.CheckObjectStep; +const RunStep = std.Build.RunStep; +const OptionsStep = std.Build.OptionsStep; 
+const ConfigHeaderStep = std.Build.ConfigHeaderStep; +const CompileStep = @This(); + +pub const base_id: Step.Id = .compile; + +step: Step, +name: []const u8, +target: CrossTarget, +target_info: NativeTargetInfo, +optimize: std.builtin.Mode, +linker_script: ?FileSource = null, +version_script: ?[]const u8 = null, +out_filename: []const u8, +linkage: ?Linkage = null, +version: ?std.builtin.Version, +kind: Kind, +major_only_filename: ?[]const u8, +name_only_filename: ?[]const u8, +strip: ?bool, +unwind_tables: ?bool, +// keep in sync with src/link.zig:CompressDebugSections +compress_debug_sections: enum { none, zlib } = .none, +lib_paths: ArrayList(FileSource), +rpaths: ArrayList(FileSource), +framework_dirs: ArrayList(FileSource), +frameworks: StringHashMap(FrameworkLinkInfo), +verbose_link: bool, +verbose_cc: bool, +emit_analysis: EmitOption = .default, +emit_asm: EmitOption = .default, +emit_bin: EmitOption = .default, +emit_docs: EmitOption = .default, +emit_implib: EmitOption = .default, +emit_llvm_bc: EmitOption = .default, +emit_llvm_ir: EmitOption = .default, +// Lots of things depend on emit_h having a consistent path, +// so it is not an EmitOption for now. +emit_h: bool = false, +bundle_compiler_rt: ?bool = null, +single_threaded: ?bool, +stack_protector: ?bool = null, +disable_stack_probing: bool, +disable_sanitize_c: bool, +sanitize_thread: bool, +rdynamic: bool, +dwarf_format: ?std.dwarf.Format = null, +import_memory: bool = false, +/// For WebAssembly targets, this will allow for undefined symbols to +/// be imported from the host environment. +import_symbols: bool = false, +import_table: bool = false, +export_table: bool = false, +initial_memory: ?u64 = null, +max_memory: ?u64 = null, +shared_memory: bool = false, +global_base: ?u64 = null, +c_std: std.Build.CStd, +zig_lib_dir: ?[]const u8, +main_pkg_path: ?[]const u8, +exec_cmd_args: ?[]const ?[]const u8, +filter: ?[]const u8, +test_evented_io: bool = false, +test_runner: ?[]const u8, +code_model: std.builtin.CodeModel = .default, +wasi_exec_model: ?std.builtin.WasiExecModel = null, +/// Symbols to be exported when compiling to wasm +export_symbol_names: []const []const u8 = &.{}, + +root_src: ?FileSource, +out_h_filename: []const u8, +out_lib_filename: []const u8, +out_pdb_filename: []const u8, +modules: std.StringArrayHashMap(*Module), + +link_objects: ArrayList(LinkObject), +include_dirs: ArrayList(IncludeDir), +c_macros: ArrayList([]const u8), +installed_headers: ArrayList(*Step), +is_linking_libc: bool, +is_linking_libcpp: bool, +vcpkg_bin_path: ?[]const u8 = null, + +/// This may be set in order to override the default install directory +override_dest_dir: ?InstallDir, +installed_path: ?[]const u8, + +/// Base address for an executable image. +image_base: ?u64 = null, + +libc_file: ?FileSource = null, + +valgrind_support: ?bool = null, +each_lib_rpath: ?bool = null, +/// On ELF targets, this will emit a link section called ".note.gnu.build-id" +/// which can be used to coordinate a stripped binary with its debug symbols. +/// As an example, the bloaty project refuses to work unless its inputs have +/// build ids, in order to prevent accidental mismatches. +/// The default is to not include this section because it slows down linking. +build_id: ?bool = null, + +/// Create a .eh_frame_hdr section and a PT_GNU_EH_FRAME segment in the ELF +/// file. 
+link_eh_frame_hdr: bool = false, +link_emit_relocs: bool = false, + +/// Place every function in its own section so that unused ones may be +/// safely garbage-collected during the linking phase. +link_function_sections: bool = false, + +/// Remove functions and data that are unreachable by the entry point or +/// exported symbols. +link_gc_sections: ?bool = null, + +/// (Windows) Whether or not to enable ASLR. Maps to the /DYNAMICBASE[:NO] linker argument. +linker_dynamicbase: bool = true, + +linker_allow_shlib_undefined: ?bool = null, + +/// Permit read-only relocations in read-only segments. Disallowed by default. +link_z_notext: bool = false, + +/// Force all relocations to be read-only after processing. +link_z_relro: bool = true, + +/// Allow relocations to be lazily processed after load. +link_z_lazy: bool = false, + +/// Common page size +link_z_common_page_size: ?u64 = null, + +/// Maximum page size +link_z_max_page_size: ?u64 = null, + +/// (Darwin) Install name for the dylib +install_name: ?[]const u8 = null, + +/// (Darwin) Path to entitlements file +entitlements: ?[]const u8 = null, + +/// (Darwin) Size of the pagezero segment. +pagezero_size: ?u64 = null, + +/// (Darwin) Search strategy for searching system libraries. Either `paths_first` or `dylibs_first`. +/// The former lowers to `-search_paths_first` linker option, while the latter to `-search_dylibs_first` +/// option. +/// By default, if no option is specified, the linker assumes `paths_first` as the default +/// search strategy. +search_strategy: ?enum { paths_first, dylibs_first } = null, + +/// (Darwin) Set size of the padding between the end of load commands +/// and start of `__TEXT,__text` section. +headerpad_size: ?u32 = null, + +/// (Darwin) Automatically Set size of the padding between the end of load commands +/// and start of `__TEXT,__text` section to a value fitting all paths expanded to MAXPATHLEN. +headerpad_max_install_names: bool = false, + +/// (Darwin) Remove dylibs that are unreachable by the entry point or exported symbols. +dead_strip_dylibs: bool = false, + +/// Position Independent Code +force_pic: ?bool = null, + +/// Position Independent Executable +pie: ?bool = null, + +red_zone: ?bool = null, + +omit_frame_pointer: ?bool = null, +dll_export_fns: ?bool = null, + +subsystem: ?std.Target.SubSystem = null, + +entry_symbol_name: ?[]const u8 = null, + +/// List of symbols forced as undefined in the symbol table +/// thus forcing their resolution by the linker. +/// Corresponds to `-u ` for ELF/MachO and `/include:` for COFF/PE. +force_undefined_symbols: std.StringHashMap(void), + +/// Overrides the default stack size +stack_size: ?u64 = null, + +want_lto: ?bool = null, +use_llvm: ?bool, +use_lld: ?bool, + +/// This is an advanced setting that can change the intent of this CompileStep. +/// If this slice has nonzero length, it means that this CompileStep exists to +/// check for compile errors and return *success* if they match, and failure +/// otherwise. 
+expect_errors: []const []const u8 = &.{}, + +output_path_source: GeneratedFile, +output_lib_path_source: GeneratedFile, +output_h_path_source: GeneratedFile, +output_pdb_path_source: GeneratedFile, +output_dirname_source: GeneratedFile, + +pub const CSourceFiles = struct { + files: []const []const u8, + flags: []const []const u8, +}; + +pub const CSourceFile = struct { + source: FileSource, + args: []const []const u8, + + pub fn dupe(self: CSourceFile, b: *std.Build) CSourceFile { + return .{ + .source = self.source.dupe(b), + .args = b.dupeStrings(self.args), + }; + } +}; + +pub const LinkObject = union(enum) { + static_path: FileSource, + other_step: *CompileStep, + system_lib: SystemLib, + assembly_file: FileSource, + c_source_file: *CSourceFile, + c_source_files: *CSourceFiles, +}; + +pub const SystemLib = struct { + name: []const u8, + needed: bool, + weak: bool, + use_pkg_config: enum { + /// Don't use pkg-config, just pass -lfoo where foo is name. + no, + /// Try to get information on how to link the library from pkg-config. + /// If that fails, fall back to passing -lfoo where foo is name. + yes, + /// Try to get information on how to link the library from pkg-config. + /// If that fails, error out. + force, + }, +}; + +const FrameworkLinkInfo = struct { + needed: bool = false, + weak: bool = false, +}; + +pub const IncludeDir = union(enum) { + raw_path: []const u8, + raw_path_system: []const u8, + other_step: *CompileStep, + config_header_step: *ConfigHeaderStep, +}; + +pub const Options = struct { + name: []const u8, + root_source_file: ?FileSource = null, + target: CrossTarget, + optimize: std.builtin.Mode, + kind: Kind, + linkage: ?Linkage = null, + version: ?std.builtin.Version = null, + max_rss: usize = 0, + filter: ?[]const u8 = null, + test_runner: ?[]const u8 = null, + link_libc: ?bool = null, + single_threaded: ?bool = null, + use_llvm: ?bool = null, + use_lld: ?bool = null, +}; + +pub const Kind = enum { + exe, + lib, + obj, + @"test", +}; + +pub const Linkage = enum { dynamic, static }; + +pub const EmitOption = union(enum) { + default: void, + no_emit: void, + emit: void, + emit_to: []const u8, + + fn getArg(self: @This(), b: *std.Build, arg_name: []const u8) ?[]const u8 { + return switch (self) { + .no_emit => b.fmt("-fno-{s}", .{arg_name}), + .default => null, + .emit => b.fmt("-f{s}", .{arg_name}), + .emit_to => |path| b.fmt("-f{s}={s}", .{ arg_name, path }), + }; + } +}; + +pub fn create(owner: *std.Build, options: Options) *CompileStep { + const name = owner.dupe(options.name); + const root_src: ?FileSource = if (options.root_source_file) |rsrc| rsrc.dupe(owner) else null; + if (mem.indexOf(u8, name, "/") != null or mem.indexOf(u8, name, "\\") != null) { + panic("invalid name: '{s}'. It looks like a file path, but it is supposed to be the library or application name.", .{name}); + } + + // Avoid the common case of the step name looking like "zig test test". 
+ const name_adjusted = if (options.kind == .@"test" and mem.eql(u8, name, "test")) + "" + else + owner.fmt("{s} ", .{name}); + + const step_name = owner.fmt("{s} {s}{s} {s}", .{ + switch (options.kind) { + .exe => "zig build-exe", + .lib => "zig build-lib", + .obj => "zig build-obj", + .@"test" => "zig test", + }, + name_adjusted, + @tagName(options.optimize), + options.target.zigTriple(owner.allocator) catch @panic("OOM"), + }); + + const target_info = NativeTargetInfo.detect(options.target) catch @panic("unhandled error"); + + const out_filename = std.zig.binNameAlloc(owner.allocator, .{ + .root_name = name, + .target = target_info.target, + .output_mode = switch (options.kind) { + .lib => .Lib, + .obj => .Obj, + .exe, .@"test" => .Exe, + }, + .link_mode = if (options.linkage) |some| @as(std.builtin.LinkMode, switch (some) { + .dynamic => .Dynamic, + .static => .Static, + }) else null, + .version = options.version, + }) catch @panic("OOM"); + + const self = owner.allocator.create(CompileStep) catch @panic("OOM"); + self.* = CompileStep{ + .strip = null, + .unwind_tables = null, + .verbose_link = false, + .verbose_cc = false, + .optimize = options.optimize, + .target = options.target, + .linkage = options.linkage, + .kind = options.kind, + .root_src = root_src, + .name = name, + .frameworks = StringHashMap(FrameworkLinkInfo).init(owner.allocator), + .step = Step.init(.{ + .id = base_id, + .name = step_name, + .owner = owner, + .makeFn = make, + .max_rss = options.max_rss, + }), + .version = options.version, + .out_filename = out_filename, + .out_h_filename = owner.fmt("{s}.h", .{name}), + .out_lib_filename = undefined, + .out_pdb_filename = owner.fmt("{s}.pdb", .{name}), + .major_only_filename = null, + .name_only_filename = null, + .modules = std.StringArrayHashMap(*Module).init(owner.allocator), + .include_dirs = ArrayList(IncludeDir).init(owner.allocator), + .link_objects = ArrayList(LinkObject).init(owner.allocator), + .c_macros = ArrayList([]const u8).init(owner.allocator), + .lib_paths = ArrayList(FileSource).init(owner.allocator), + .rpaths = ArrayList(FileSource).init(owner.allocator), + .framework_dirs = ArrayList(FileSource).init(owner.allocator), + .installed_headers = ArrayList(*Step).init(owner.allocator), + .c_std = std.Build.CStd.C99, + .zig_lib_dir = null, + .main_pkg_path = null, + .exec_cmd_args = null, + .filter = options.filter, + .test_runner = options.test_runner, + .disable_stack_probing = false, + .disable_sanitize_c = false, + .sanitize_thread = false, + .rdynamic = false, + .override_dest_dir = null, + .installed_path = null, + .force_undefined_symbols = StringHashMap(void).init(owner.allocator), + + .output_path_source = GeneratedFile{ .step = &self.step }, + .output_lib_path_source = GeneratedFile{ .step = &self.step }, + .output_h_path_source = GeneratedFile{ .step = &self.step }, + .output_pdb_path_source = GeneratedFile{ .step = &self.step }, + .output_dirname_source = GeneratedFile{ .step = &self.step }, + + .target_info = target_info, + + .is_linking_libc = options.link_libc orelse false, + .is_linking_libcpp = false, + .single_threaded = options.single_threaded, + .use_llvm = options.use_llvm, + .use_lld = options.use_lld, + }; + + if (self.kind == .lib) { + if (self.linkage != null and self.linkage.? 
== .static) { + self.out_lib_filename = self.out_filename; + } else if (self.version) |version| { + if (target_info.target.isDarwin()) { + self.major_only_filename = owner.fmt("lib{s}.{d}.dylib", .{ + self.name, + version.major, + }); + self.name_only_filename = owner.fmt("lib{s}.dylib", .{self.name}); + self.out_lib_filename = self.out_filename; + } else if (target_info.target.os.tag == .windows) { + self.out_lib_filename = owner.fmt("{s}.lib", .{self.name}); + } else { + self.major_only_filename = owner.fmt("lib{s}.so.{d}", .{ self.name, version.major }); + self.name_only_filename = owner.fmt("lib{s}.so", .{self.name}); + self.out_lib_filename = self.out_filename; + } + } else { + if (target_info.target.isDarwin()) { + self.out_lib_filename = self.out_filename; + } else if (target_info.target.os.tag == .windows) { + self.out_lib_filename = owner.fmt("{s}.lib", .{self.name}); + } else { + self.out_lib_filename = self.out_filename; + } + } + } + + if (root_src) |rs| rs.addStepDependencies(&self.step); + + return self; +} + +pub fn installHeader(cs: *CompileStep, src_path: []const u8, dest_rel_path: []const u8) void { + const b = cs.step.owner; + const install_file = b.addInstallHeaderFile(src_path, dest_rel_path); + b.getInstallStep().dependOn(&install_file.step); + cs.installed_headers.append(&install_file.step) catch @panic("OOM"); +} + +pub const InstallConfigHeaderOptions = struct { + install_dir: InstallDir = .header, + dest_rel_path: ?[]const u8 = null, +}; + +pub fn installConfigHeader( + cs: *CompileStep, + config_header: *ConfigHeaderStep, + options: InstallConfigHeaderOptions, +) void { + const dest_rel_path = options.dest_rel_path orelse config_header.include_path; + const b = cs.step.owner; + const install_file = b.addInstallFileWithDir( + .{ .generated = &config_header.output_file }, + options.install_dir, + dest_rel_path, + ); + install_file.step.dependOn(&config_header.step); + b.getInstallStep().dependOn(&install_file.step); + cs.installed_headers.append(&install_file.step) catch @panic("OOM"); +} + +pub fn installHeadersDirectory( + a: *CompileStep, + src_dir_path: []const u8, + dest_rel_path: []const u8, +) void { + return installHeadersDirectoryOptions(a, .{ + .source_dir = src_dir_path, + .install_dir = .header, + .install_subdir = dest_rel_path, + }); +} + +pub fn installHeadersDirectoryOptions( + cs: *CompileStep, + options: std.Build.InstallDirStep.Options, +) void { + const b = cs.step.owner; + const install_dir = b.addInstallDirectory(options); + b.getInstallStep().dependOn(&install_dir.step); + cs.installed_headers.append(&install_dir.step) catch @panic("OOM"); +} + +pub fn installLibraryHeaders(cs: *CompileStep, l: *CompileStep) void { + assert(l.kind == .lib); + const b = cs.step.owner; + const install_step = b.getInstallStep(); + // Copy each element from installed_headers, modifying the builder + // to be the new parent's builder. 
+ for (l.installed_headers.items) |step| { + const step_copy = switch (step.id) { + inline .install_file, .install_dir => |id| blk: { + const T = id.Type(); + const ptr = b.allocator.create(T) catch @panic("OOM"); + ptr.* = step.cast(T).?.*; + ptr.dest_builder = b; + break :blk &ptr.step; + }, + else => unreachable, + }; + cs.installed_headers.append(step_copy) catch @panic("OOM"); + install_step.dependOn(step_copy); + } + cs.installed_headers.appendSlice(l.installed_headers.items) catch @panic("OOM"); +} + +pub fn addObjCopy(cs: *CompileStep, options: ObjCopyStep.Options) *ObjCopyStep { + const b = cs.step.owner; + var copy = options; + if (copy.basename == null) { + if (options.format) |f| { + copy.basename = b.fmt("{s}.{s}", .{ cs.name, @tagName(f) }); + } else { + copy.basename = cs.name; + } + } + return b.addObjCopy(cs.getOutputSource(), copy); +} + +/// This function would run in the context of the package that created the executable, +/// which is undesirable when running an executable provided by a dependency package. +pub const run = @compileError("deprecated; use std.Build.addRunArtifact"); + +/// This function would install in the context of the package that created the artifact, +/// which is undesirable when installing an artifact provided by a dependency package. +pub const install = @compileError("deprecated; use std.Build.installArtifact"); + +pub fn checkObject(self: *CompileStep) *CheckObjectStep { + return CheckObjectStep.create(self.step.owner, self.getOutputSource(), self.target_info.target.ofmt); +} + +pub fn setLinkerScriptPath(self: *CompileStep, source: FileSource) void { + const b = self.step.owner; + self.linker_script = source.dupe(b); + source.addStepDependencies(&self.step); +} + +pub fn forceUndefinedSymbol(self: *CompileStep, symbol_name: []const u8) void { + const b = self.step.owner; + self.force_undefined_symbols.put(b.dupe(symbol_name), {}) catch @panic("OOM"); +} + +pub fn linkFramework(self: *CompileStep, framework_name: []const u8) void { + const b = self.step.owner; + self.frameworks.put(b.dupe(framework_name), .{}) catch @panic("OOM"); +} + +pub fn linkFrameworkNeeded(self: *CompileStep, framework_name: []const u8) void { + const b = self.step.owner; + self.frameworks.put(b.dupe(framework_name), .{ + .needed = true, + }) catch @panic("OOM"); +} + +pub fn linkFrameworkWeak(self: *CompileStep, framework_name: []const u8) void { + const b = self.step.owner; + self.frameworks.put(b.dupe(framework_name), .{ + .weak = true, + }) catch @panic("OOM"); +} + +/// Returns whether the library, executable, or object depends on a particular system library. 
+pub fn dependsOnSystemLibrary(self: CompileStep, name: []const u8) bool { + if (isLibCLibrary(name)) { + return self.is_linking_libc; + } + if (isLibCppLibrary(name)) { + return self.is_linking_libcpp; + } + for (self.link_objects.items) |link_object| { + switch (link_object) { + .system_lib => |lib| if (mem.eql(u8, lib.name, name)) return true, + else => continue, + } + } + return false; +} + +pub fn linkLibrary(self: *CompileStep, lib: *CompileStep) void { + assert(lib.kind == .lib); + self.linkLibraryOrObject(lib); +} + +pub fn isDynamicLibrary(self: *CompileStep) bool { + return self.kind == .lib and self.linkage == Linkage.dynamic; +} + +pub fn isStaticLibrary(self: *CompileStep) bool { + return self.kind == .lib and self.linkage != Linkage.dynamic; +} + +pub fn producesPdbFile(self: *CompileStep) bool { + if (!self.target.isWindows() and !self.target.isUefi()) return false; + if (self.target.getObjectFormat() == .c) return false; + if (self.strip == true) return false; + return self.isDynamicLibrary() or self.kind == .exe or self.kind == .@"test"; +} + +pub fn linkLibC(self: *CompileStep) void { + self.is_linking_libc = true; +} + +pub fn linkLibCpp(self: *CompileStep) void { + self.is_linking_libcpp = true; +} + +/// If the value is omitted, it is set to 1. +/// `name` and `value` need not live longer than the function call. +pub fn defineCMacro(self: *CompileStep, name: []const u8, value: ?[]const u8) void { + const b = self.step.owner; + const macro = std.Build.constructCMacro(b.allocator, name, value); + self.c_macros.append(macro) catch @panic("OOM"); +} + +/// name_and_value looks like [name]=[value]. If the value is omitted, it is set to 1. +pub fn defineCMacroRaw(self: *CompileStep, name_and_value: []const u8) void { + const b = self.step.owner; + self.c_macros.append(b.dupe(name_and_value)) catch @panic("OOM"); +} + +/// This one has no integration with anything, it just puts -lname on the command line. +/// Prefer to use `linkSystemLibrary` instead. +pub fn linkSystemLibraryName(self: *CompileStep, name: []const u8) void { + const b = self.step.owner; + self.link_objects.append(.{ + .system_lib = .{ + .name = b.dupe(name), + .needed = false, + .weak = false, + .use_pkg_config = .no, + }, + }) catch @panic("OOM"); +} + +/// This one has no integration with anything, it just puts -needed-lname on the command line. +/// Prefer to use `linkSystemLibraryNeeded` instead. +pub fn linkSystemLibraryNeededName(self: *CompileStep, name: []const u8) void { + const b = self.step.owner; + self.link_objects.append(.{ + .system_lib = .{ + .name = b.dupe(name), + .needed = true, + .weak = false, + .use_pkg_config = .no, + }, + }) catch @panic("OOM"); +} + +/// Darwin-only. This one has no integration with anything, it just puts -weak-lname on the +/// command line. Prefer to use `linkSystemLibraryWeak` instead. +pub fn linkSystemLibraryWeakName(self: *CompileStep, name: []const u8) void { + const b = self.step.owner; + self.link_objects.append(.{ + .system_lib = .{ + .name = b.dupe(name), + .needed = false, + .weak = true, + .use_pkg_config = .no, + }, + }) catch @panic("OOM"); +} + +/// This links against a system library, exclusively using pkg-config to find the library. +/// Prefer to use `linkSystemLibrary` instead. 
+pub fn linkSystemLibraryPkgConfigOnly(self: *CompileStep, lib_name: []const u8) void { + const b = self.step.owner; + self.link_objects.append(.{ + .system_lib = .{ + .name = b.dupe(lib_name), + .needed = false, + .weak = false, + .use_pkg_config = .force, + }, + }) catch @panic("OOM"); +} + +/// This links against a system library, exclusively using pkg-config to find the library. +/// Prefer to use `linkSystemLibraryNeeded` instead. +pub fn linkSystemLibraryNeededPkgConfigOnly(self: *CompileStep, lib_name: []const u8) void { + const b = self.step.owner; + self.link_objects.append(.{ + .system_lib = .{ + .name = b.dupe(lib_name), + .needed = true, + .weak = false, + .use_pkg_config = .force, + }, + }) catch @panic("OOM"); +} + +/// Run pkg-config for the given library name and parse the output, returning the arguments +/// that should be passed to zig to link the given library. +fn runPkgConfig(self: *CompileStep, lib_name: []const u8) ![]const []const u8 { + const b = self.step.owner; + const pkg_name = match: { + // First we have to map the library name to pkg config name. Unfortunately, + // there are several examples where this is not straightforward: + // -lSDL2 -> pkg-config sdl2 + // -lgdk-3 -> pkg-config gdk-3.0 + // -latk-1.0 -> pkg-config atk + const pkgs = try getPkgConfigList(b); + + // Exact match means instant winner. + for (pkgs) |pkg| { + if (mem.eql(u8, pkg.name, lib_name)) { + break :match pkg.name; + } + } + + // Next we'll try ignoring case. + for (pkgs) |pkg| { + if (std.ascii.eqlIgnoreCase(pkg.name, lib_name)) { + break :match pkg.name; + } + } + + // Now try appending ".0". + for (pkgs) |pkg| { + if (std.ascii.indexOfIgnoreCase(pkg.name, lib_name)) |pos| { + if (pos != 0) continue; + if (mem.eql(u8, pkg.name[lib_name.len..], ".0")) { + break :match pkg.name; + } + } + } + + // Trimming "-1.0". + if (mem.endsWith(u8, lib_name, "-1.0")) { + const trimmed_lib_name = lib_name[0 .. 
lib_name.len - "-1.0".len]; + for (pkgs) |pkg| { + if (std.ascii.eqlIgnoreCase(pkg.name, trimmed_lib_name)) { + break :match pkg.name; + } + } + } + + return error.PackageNotFound; + }; + + var code: u8 = undefined; + const stdout = if (b.execAllowFail(&[_][]const u8{ + "pkg-config", + pkg_name, + "--cflags", + "--libs", + }, &code, .Ignore)) |stdout| stdout else |err| switch (err) { + error.ProcessTerminated => return error.PkgConfigCrashed, + error.ExecNotSupported => return error.PkgConfigFailed, + error.ExitCodeFailure => return error.PkgConfigFailed, + error.FileNotFound => return error.PkgConfigNotInstalled, + else => return err, + }; + + var zig_args = ArrayList([]const u8).init(b.allocator); + defer zig_args.deinit(); + + var it = mem.tokenize(u8, stdout, " \r\n\t"); + while (it.next()) |tok| { + if (mem.eql(u8, tok, "-I")) { + const dir = it.next() orelse return error.PkgConfigInvalidOutput; + try zig_args.appendSlice(&[_][]const u8{ "-I", dir }); + } else if (mem.startsWith(u8, tok, "-I")) { + try zig_args.append(tok); + } else if (mem.eql(u8, tok, "-L")) { + const dir = it.next() orelse return error.PkgConfigInvalidOutput; + try zig_args.appendSlice(&[_][]const u8{ "-L", dir }); + } else if (mem.startsWith(u8, tok, "-L")) { + try zig_args.append(tok); + } else if (mem.eql(u8, tok, "-l")) { + const lib = it.next() orelse return error.PkgConfigInvalidOutput; + try zig_args.appendSlice(&[_][]const u8{ "-l", lib }); + } else if (mem.startsWith(u8, tok, "-l")) { + try zig_args.append(tok); + } else if (mem.eql(u8, tok, "-D")) { + const macro = it.next() orelse return error.PkgConfigInvalidOutput; + try zig_args.appendSlice(&[_][]const u8{ "-D", macro }); + } else if (mem.startsWith(u8, tok, "-D")) { + try zig_args.append(tok); + } else if (b.debug_pkg_config) { + return self.step.fail("unknown pkg-config flag '{s}'", .{tok}); + } + } + + return zig_args.toOwnedSlice(); +} + +pub fn linkSystemLibrary(self: *CompileStep, name: []const u8) void { + self.linkSystemLibraryInner(name, .{}); +} + +pub fn linkSystemLibraryNeeded(self: *CompileStep, name: []const u8) void { + self.linkSystemLibraryInner(name, .{ .needed = true }); +} + +pub fn linkSystemLibraryWeak(self: *CompileStep, name: []const u8) void { + self.linkSystemLibraryInner(name, .{ .weak = true }); +} + +fn linkSystemLibraryInner(self: *CompileStep, name: []const u8, opts: struct { + needed: bool = false, + weak: bool = false, +}) void { + const b = self.step.owner; + if (isLibCLibrary(name)) { + self.linkLibC(); + return; + } + if (isLibCppLibrary(name)) { + self.linkLibCpp(); + return; + } + + self.link_objects.append(.{ + .system_lib = .{ + .name = b.dupe(name), + .needed = opts.needed, + .weak = opts.weak, + .use_pkg_config = .yes, + }, + }) catch @panic("OOM"); +} + +/// Handy when you have many C/C++ source files and want them all to have the same flags. 
+pub fn addCSourceFiles(self: *CompileStep, files: []const []const u8, flags: []const []const u8) void { + const b = self.step.owner; + const c_source_files = b.allocator.create(CSourceFiles) catch @panic("OOM"); + + const files_copy = b.dupeStrings(files); + const flags_copy = b.dupeStrings(flags); + + c_source_files.* = .{ + .files = files_copy, + .flags = flags_copy, + }; + self.link_objects.append(.{ .c_source_files = c_source_files }) catch @panic("OOM"); +} + +pub fn addCSourceFile(self: *CompileStep, file: []const u8, flags: []const []const u8) void { + self.addCSourceFileSource(.{ + .args = flags, + .source = .{ .path = file }, + }); +} + +pub fn addCSourceFileSource(self: *CompileStep, source: CSourceFile) void { + const b = self.step.owner; + const c_source_file = b.allocator.create(CSourceFile) catch @panic("OOM"); + c_source_file.* = source.dupe(b); + self.link_objects.append(.{ .c_source_file = c_source_file }) catch @panic("OOM"); + source.source.addStepDependencies(&self.step); +} + +pub fn setVerboseLink(self: *CompileStep, value: bool) void { + self.verbose_link = value; +} + +pub fn setVerboseCC(self: *CompileStep, value: bool) void { + self.verbose_cc = value; +} + +pub fn overrideZigLibDir(self: *CompileStep, dir_path: []const u8) void { + const b = self.step.owner; + self.zig_lib_dir = b.dupePath(dir_path); +} + +pub fn setMainPkgPath(self: *CompileStep, dir_path: []const u8) void { + const b = self.step.owner; + self.main_pkg_path = b.dupePath(dir_path); +} + +pub fn setLibCFile(self: *CompileStep, libc_file: ?FileSource) void { + const b = self.step.owner; + self.libc_file = if (libc_file) |f| f.dupe(b) else null; +} + +/// Returns the generated executable, library or object file. +/// To run an executable built with zig build, use `run`, or create an install step and invoke it. +pub fn getOutputSource(self: *CompileStep) FileSource { + return .{ .generated = &self.output_path_source }; +} + +pub fn getOutputDirectorySource(self: *CompileStep) FileSource { + return .{ .generated = &self.output_dirname_source }; +} + +/// Returns the generated import library. This function can only be called for libraries. +pub fn getOutputLibSource(self: *CompileStep) FileSource { + assert(self.kind == .lib); + return .{ .generated = &self.output_lib_path_source }; +} + +/// Returns the generated header file. +/// This function can only be called for libraries or object files which have `emit_h` set. +pub fn getOutputHSource(self: *CompileStep) FileSource { + assert(self.kind != .exe and self.kind != .@"test"); + assert(self.emit_h); + return .{ .generated = &self.output_h_path_source }; +} + +/// Returns the generated PDB file. This function can only be called for Windows and UEFI. +pub fn getOutputPdbSource(self: *CompileStep) FileSource { + // TODO: Is this right? Isn't PDB for *any* PE/COFF file? 
+ assert(self.target.isWindows() or self.target.isUefi()); + return .{ .generated = &self.output_pdb_path_source }; +} + +pub fn addAssemblyFile(self: *CompileStep, path: []const u8) void { + const b = self.step.owner; + self.link_objects.append(.{ + .assembly_file = .{ .path = b.dupe(path) }, + }) catch @panic("OOM"); +} + +pub fn addAssemblyFileSource(self: *CompileStep, source: FileSource) void { + const b = self.step.owner; + const source_duped = source.dupe(b); + self.link_objects.append(.{ .assembly_file = source_duped }) catch @panic("OOM"); + source_duped.addStepDependencies(&self.step); +} + +pub fn addObjectFile(self: *CompileStep, source_file: []const u8) void { + self.addObjectFileSource(.{ .path = source_file }); +} + +pub fn addObjectFileSource(self: *CompileStep, source: FileSource) void { + const b = self.step.owner; + self.link_objects.append(.{ .static_path = source.dupe(b) }) catch @panic("OOM"); + source.addStepDependencies(&self.step); +} + +pub fn addObject(self: *CompileStep, obj: *CompileStep) void { + assert(obj.kind == .obj); + self.linkLibraryOrObject(obj); +} + +pub const addSystemIncludeDir = @compileError("deprecated; use addSystemIncludePath"); +pub const addIncludeDir = @compileError("deprecated; use addIncludePath"); +pub const addLibPath = @compileError("deprecated, use addLibraryPath"); +pub const addFrameworkDir = @compileError("deprecated, use addFrameworkPath"); + +pub fn addSystemIncludePath(self: *CompileStep, path: []const u8) void { + const b = self.step.owner; + self.include_dirs.append(IncludeDir{ .raw_path_system = b.dupe(path) }) catch @panic("OOM"); +} + +pub fn addIncludePath(self: *CompileStep, path: []const u8) void { + const b = self.step.owner; + self.include_dirs.append(IncludeDir{ .raw_path = b.dupe(path) }) catch @panic("OOM"); +} + +pub fn addConfigHeader(self: *CompileStep, config_header: *ConfigHeaderStep) void { + self.step.dependOn(&config_header.step); + self.include_dirs.append(.{ .config_header_step = config_header }) catch @panic("OOM"); +} + +pub fn addLibraryPath(self: *CompileStep, path: []const u8) void { + const b = self.step.owner; + self.lib_paths.append(.{ .path = b.dupe(path) }) catch @panic("OOM"); +} + +pub fn addLibraryPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { + self.lib_paths.append(directory_source) catch @panic("OOM"); + directory_source.addStepDependencies(&self.step); +} + +pub fn addRPath(self: *CompileStep, path: []const u8) void { + const b = self.step.owner; + self.rpaths.append(.{ .path = b.dupe(path) }) catch @panic("OOM"); +} + +pub fn addRPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { + self.rpaths.append(directory_source) catch @panic("OOM"); + directory_source.addStepDependencies(&self.step); +} + +pub fn addFrameworkPath(self: *CompileStep, dir_path: []const u8) void { + const b = self.step.owner; + self.framework_dirs.append(.{ .path = b.dupe(dir_path) }) catch @panic("OOM"); +} + +pub fn addFrameworkPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { + self.framework_dirs.append(directory_source) catch @panic("OOM"); + directory_source.addStepDependencies(&self.step); +} + +/// Adds a module to be used with `@import` and exposing it in the current +/// package's module table using `name`. 
+pub fn addModule(cs: *CompileStep, name: []const u8, module: *Module) void { + const b = cs.step.owner; + cs.modules.put(b.dupe(name), module) catch @panic("OOM"); + + var done = std.AutoHashMap(*Module, void).init(b.allocator); + defer done.deinit(); + cs.addRecursiveBuildDeps(module, &done) catch @panic("OOM"); +} + +/// Adds a module to be used with `@import` without exposing it in the current +/// package's module table. +pub fn addAnonymousModule(cs: *CompileStep, name: []const u8, options: std.Build.CreateModuleOptions) void { + const b = cs.step.owner; + const module = b.createModule(options); + return addModule(cs, name, module); +} + +pub fn addOptions(cs: *CompileStep, module_name: []const u8, options: *OptionsStep) void { + addModule(cs, module_name, options.createModule()); +} + +fn addRecursiveBuildDeps(cs: *CompileStep, module: *Module, done: *std.AutoHashMap(*Module, void)) !void { + if (done.contains(module)) return; + try done.put(module, {}); + module.source_file.addStepDependencies(&cs.step); + for (module.dependencies.values()) |dep| { + try cs.addRecursiveBuildDeps(dep, done); + } +} + +/// If Vcpkg was found on the system, it will be added to include and lib +/// paths for the specified target. +pub fn addVcpkgPaths(self: *CompileStep, linkage: CompileStep.Linkage) !void { + const b = self.step.owner; + // Ideally in the Unattempted case we would call the function recursively + // after findVcpkgRoot and have only one switch statement, but the compiler + // cannot resolve the error set. + switch (b.vcpkg_root) { + .unattempted => { + b.vcpkg_root = if (try findVcpkgRoot(b.allocator)) |root| + VcpkgRoot{ .found = root } + else + .not_found; + }, + .not_found => return error.VcpkgNotFound, + .found => {}, + } + + switch (b.vcpkg_root) { + .unattempted => unreachable, + .not_found => return error.VcpkgNotFound, + .found => |root| { + const allocator = b.allocator; + const triplet = try self.target.vcpkgTriplet(allocator, if (linkage == .static) .Static else .Dynamic); + defer b.allocator.free(triplet); + + const include_path = b.pathJoin(&.{ root, "installed", triplet, "include" }); + errdefer allocator.free(include_path); + try self.include_dirs.append(IncludeDir{ .raw_path = include_path }); + + const lib_path = b.pathJoin(&.{ root, "installed", triplet, "lib" }); + try self.lib_paths.append(.{ .path = lib_path }); + + self.vcpkg_bin_path = b.pathJoin(&.{ root, "installed", triplet, "bin" }); + }, + } +} + +pub fn setExecCmd(self: *CompileStep, args: []const ?[]const u8) void { + const b = self.step.owner; + assert(self.kind == .@"test"); + const duped_args = b.allocator.alloc(?[]u8, args.len) catch @panic("OOM"); + for (args, 0..) |arg, i| { + duped_args[i] = if (arg) |a| b.dupe(a) else null; + } + self.exec_cmd_args = duped_args; +} + +fn linkLibraryOrObject(self: *CompileStep, other: *CompileStep) void { + self.step.dependOn(&other.step); + self.link_objects.append(.{ .other_step = other }) catch @panic("OOM"); + self.include_dirs.append(.{ .other_step = other }) catch @panic("OOM"); + + for (other.installed_headers.items) |install_step| { + self.step.dependOn(install_step); + } +} + +fn appendModuleArgs( + cs: *CompileStep, + zig_args: *ArrayList([]const u8), +) error{OutOfMemory}!void { + const b = cs.step.owner; + // First, traverse the whole dependency graph and give every module a unique name, ideally one + // named after what it's called somewhere in the graph. 
It will help here to have both a mapping + // from module to name and a set of all the currently-used names. + var mod_names = std.AutoHashMap(*Module, []const u8).init(b.allocator); + var names = std.StringHashMap(void).init(b.allocator); + + var to_name = std.ArrayList(struct { + name: []const u8, + mod: *Module, + }).init(b.allocator); + { + var it = cs.modules.iterator(); + while (it.next()) |kv| { + // While we're traversing the root dependencies, let's make sure that no module names + // have colons in them, since the CLI forbids it. We handle this for transitive + // dependencies further down. + if (std.mem.indexOfScalar(u8, kv.key_ptr.*, ':') != null) { + @panic("Module names cannot contain colons"); + } + try to_name.append(.{ + .name = kv.key_ptr.*, + .mod = kv.value_ptr.*, + }); + } + } + + while (to_name.popOrNull()) |dep| { + if (mod_names.contains(dep.mod)) continue; + + // We'll use this buffer to store the name we decide on + var buf = try b.allocator.alloc(u8, dep.name.len + 32); + // First, try just the exposed dependency name + @memcpy(buf[0..dep.name.len], dep.name); + var name = buf[0..dep.name.len]; + var n: usize = 0; + while (names.contains(name)) { + // If that failed, append an incrementing number to the end + name = std.fmt.bufPrint(buf, "{s}{}", .{ dep.name, n }) catch unreachable; + n += 1; + } + + try mod_names.put(dep.mod, name); + try names.put(name, {}); + + var it = dep.mod.dependencies.iterator(); + while (it.next()) |kv| { + // Same colon-in-name check as above, but for transitive dependencies. + if (std.mem.indexOfScalar(u8, kv.key_ptr.*, ':') != null) { + @panic("Module names cannot contain colons"); + } + try to_name.append(.{ + .name = kv.key_ptr.*, + .mod = kv.value_ptr.*, + }); + } + } + + // Since the module names given to the CLI are based off of the exposed names, we already know + // that none of the CLI names have colons in them, so there's no need to check that explicitly. + + // Every module in the graph is now named; output their definitions + { + var it = mod_names.iterator(); + while (it.next()) |kv| { + const mod = kv.key_ptr.*; + const name = kv.value_ptr.*; + + const deps_str = try constructDepString(b.allocator, mod_names, mod.dependencies); + const src = mod.builder.pathFromRoot(mod.source_file.getPath(mod.builder)); + try zig_args.append("--mod"); + try zig_args.append(try std.fmt.allocPrint(b.allocator, "{s}:{s}:{s}", .{ name, deps_str, src })); + } + } + + // Lastly, output the root dependencies + const deps_str = try constructDepString(b.allocator, mod_names, cs.modules); + if (deps_str.len > 0) { + try zig_args.append("--deps"); + try zig_args.append(deps_str); + } +} + +fn constructDepString( + allocator: std.mem.Allocator, + mod_names: std.AutoHashMap(*Module, []const u8), + deps: std.StringArrayHashMap(*Module), +) ![]const u8 { + var deps_str = std.ArrayList(u8).init(allocator); + var it = deps.iterator(); + while (it.next()) |kv| { + const expose = kv.key_ptr.*; + const name = mod_names.get(kv.value_ptr.*).?; + if (std.mem.eql(u8, expose, name)) { + try deps_str.writer().print("{s},", .{name}); + } else { + try deps_str.writer().print("{s}={s},", .{ expose, name }); + } + } + if (deps_str.items.len > 0) { + return deps_str.items[0 .. 
deps_str.items.len - 1]; // omit trailing comma + } else { + return ""; + } +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + const b = step.owner; + const self = @fieldParentPtr(CompileStep, "step", step); + + if (self.root_src == null and self.link_objects.items.len == 0) { + return step.fail("the linker needs one or more objects to link", .{}); + } + + var zig_args = ArrayList([]const u8).init(b.allocator); + defer zig_args.deinit(); + + try zig_args.append(b.zig_exe); + + const cmd = switch (self.kind) { + .lib => "build-lib", + .exe => "build-exe", + .obj => "build-obj", + .@"test" => "test", + }; + try zig_args.append(cmd); + + if (b.reference_trace) |some| { + try zig_args.append(try std.fmt.allocPrint(b.allocator, "-freference-trace={d}", .{some})); + } + + try addFlag(&zig_args, "LLVM", self.use_llvm); + try addFlag(&zig_args, "LLD", self.use_lld); + + if (self.target.ofmt) |ofmt| { + try zig_args.append(try std.fmt.allocPrint(b.allocator, "-ofmt={s}", .{@tagName(ofmt)})); + } + + if (self.entry_symbol_name) |entry| { + try zig_args.append("--entry"); + try zig_args.append(entry); + } + + { + var it = self.force_undefined_symbols.keyIterator(); + while (it.next()) |symbol_name| { + try zig_args.append("--force_undefined"); + try zig_args.append(symbol_name.*); + } + } + + if (self.stack_size) |stack_size| { + try zig_args.append("--stack"); + try zig_args.append(try std.fmt.allocPrint(b.allocator, "{}", .{stack_size})); + } + + if (self.root_src) |root_src| try zig_args.append(root_src.getPath(b)); + + // We will add link objects from transitive dependencies, but we want to keep + // all link objects in the same order provided. + // This array is used to keep self.link_objects immutable. + var transitive_deps: TransitiveDeps = .{ + .link_objects = ArrayList(LinkObject).init(b.allocator), + .seen_system_libs = StringHashMap(void).init(b.allocator), + .seen_steps = std.AutoHashMap(*const Step, void).init(b.allocator), + .is_linking_libcpp = self.is_linking_libcpp, + .is_linking_libc = self.is_linking_libc, + .frameworks = &self.frameworks, + }; + + try transitive_deps.seen_steps.put(&self.step, {}); + try transitive_deps.add(self.link_objects.items); + + var prev_has_extra_flags = false; + + for (transitive_deps.link_objects.items) |link_object| { + switch (link_object) { + .static_path => |static_path| try zig_args.append(static_path.getPath(b)), + + .other_step => |other| switch (other.kind) { + .exe => @panic("Cannot link with an executable build artifact"), + .@"test" => @panic("Cannot link with a test"), + .obj => { + try zig_args.append(other.getOutputSource().getPath(b)); + }, + .lib => l: { + if (self.isStaticLibrary() and other.isStaticLibrary()) { + // Avoid putting a static library inside a static library. 
+ break :l; + } + + const full_path_lib = other.getOutputLibSource().getPath(b); + try zig_args.append(full_path_lib); + + if (other.linkage == Linkage.dynamic and !self.target.isWindows()) { + if (fs.path.dirname(full_path_lib)) |dirname| { + try zig_args.append("-rpath"); + try zig_args.append(dirname); + } + } + }, + }, + + .system_lib => |system_lib| { + const prefix: []const u8 = prefix: { + if (system_lib.needed) break :prefix "-needed-l"; + if (system_lib.weak) break :prefix "-weak-l"; + break :prefix "-l"; + }; + switch (system_lib.use_pkg_config) { + .no => try zig_args.append(b.fmt("{s}{s}", .{ prefix, system_lib.name })), + .yes, .force => { + if (self.runPkgConfig(system_lib.name)) |args| { + try zig_args.appendSlice(args); + } else |err| switch (err) { + error.PkgConfigInvalidOutput, + error.PkgConfigCrashed, + error.PkgConfigFailed, + error.PkgConfigNotInstalled, + error.PackageNotFound, + => switch (system_lib.use_pkg_config) { + .yes => { + // pkg-config failed, so fall back to linking the library + // by name directly. + try zig_args.append(b.fmt("{s}{s}", .{ + prefix, + system_lib.name, + })); + }, + .force => { + panic("pkg-config failed for library {s}", .{system_lib.name}); + }, + .no => unreachable, + }, + + else => |e| return e, + } + }, + } + }, + + .assembly_file => |asm_file| { + if (prev_has_extra_flags) { + try zig_args.append("-extra-cflags"); + try zig_args.append("--"); + prev_has_extra_flags = false; + } + try zig_args.append(asm_file.getPath(b)); + }, + + .c_source_file => |c_source_file| { + if (c_source_file.args.len == 0) { + if (prev_has_extra_flags) { + try zig_args.append("-cflags"); + try zig_args.append("--"); + prev_has_extra_flags = false; + } + } else { + try zig_args.append("-cflags"); + for (c_source_file.args) |arg| { + try zig_args.append(arg); + } + try zig_args.append("--"); + } + try zig_args.append(c_source_file.source.getPath(b)); + }, + + .c_source_files => |c_source_files| { + if (c_source_files.flags.len == 0) { + if (prev_has_extra_flags) { + try zig_args.append("-cflags"); + try zig_args.append("--"); + prev_has_extra_flags = false; + } + } else { + try zig_args.append("-cflags"); + for (c_source_files.flags) |flag| { + try zig_args.append(flag); + } + try zig_args.append("--"); + } + for (c_source_files.files) |file| { + try zig_args.append(b.pathFromRoot(file)); + } + }, + } + } + + if (transitive_deps.is_linking_libcpp) { + try zig_args.append("-lc++"); + } + + if (transitive_deps.is_linking_libc) { + try zig_args.append("-lc"); + } + + if (self.image_base) |image_base| { + try zig_args.append("--image-base"); + try zig_args.append(b.fmt("0x{x}", .{image_base})); + } + + if (self.filter) |filter| { + try zig_args.append("--test-filter"); + try zig_args.append(filter); + } + + if (self.test_evented_io) { + try zig_args.append("--test-evented-io"); + } + + if (self.test_runner) |test_runner| { + try zig_args.append("--test-runner"); + try zig_args.append(b.pathFromRoot(test_runner)); + } + + for (b.debug_log_scopes) |log_scope| { + try zig_args.append("--debug-log"); + try zig_args.append(log_scope); + } + + if (b.debug_compile_errors) { + try zig_args.append("--debug-compile-errors"); + } + + if (b.verbose_cimport) try zig_args.append("--verbose-cimport"); + if (b.verbose_air) try zig_args.append("--verbose-air"); + if (b.verbose_llvm_ir) |path| try zig_args.append(b.fmt("--verbose-llvm-ir={s}", .{path})); + if (b.verbose_llvm_bc) |path| try zig_args.append(b.fmt("--verbose-llvm-bc={s}", .{path})); + if (b.verbose_link or 
self.verbose_link) try zig_args.append("--verbose-link"); + if (b.verbose_cc or self.verbose_cc) try zig_args.append("--verbose-cc"); + if (b.verbose_llvm_cpu_features) try zig_args.append("--verbose-llvm-cpu-features"); + + if (self.emit_analysis.getArg(b, "emit-analysis")) |arg| try zig_args.append(arg); + if (self.emit_asm.getArg(b, "emit-asm")) |arg| try zig_args.append(arg); + if (self.emit_bin.getArg(b, "emit-bin")) |arg| try zig_args.append(arg); + if (self.emit_docs.getArg(b, "emit-docs")) |arg| try zig_args.append(arg); + if (self.emit_implib.getArg(b, "emit-implib")) |arg| try zig_args.append(arg); + if (self.emit_llvm_bc.getArg(b, "emit-llvm-bc")) |arg| try zig_args.append(arg); + if (self.emit_llvm_ir.getArg(b, "emit-llvm-ir")) |arg| try zig_args.append(arg); + + if (self.emit_h) try zig_args.append("-femit-h"); + + try addFlag(&zig_args, "strip", self.strip); + try addFlag(&zig_args, "unwind-tables", self.unwind_tables); + + if (self.dwarf_format) |dwarf_format| { + try zig_args.append(switch (dwarf_format) { + .@"32" => "-gdwarf32", + .@"64" => "-gdwarf64", + }); + } + + switch (self.compress_debug_sections) { + .none => {}, + .zlib => try zig_args.append("--compress-debug-sections=zlib"), + } + + if (self.link_eh_frame_hdr) { + try zig_args.append("--eh-frame-hdr"); + } + if (self.link_emit_relocs) { + try zig_args.append("--emit-relocs"); + } + if (self.link_function_sections) { + try zig_args.append("-ffunction-sections"); + } + if (self.link_gc_sections) |x| { + try zig_args.append(if (x) "--gc-sections" else "--no-gc-sections"); + } + if (!self.linker_dynamicbase) { + try zig_args.append("--no-dynamicbase"); + } + if (self.linker_allow_shlib_undefined) |x| { + try zig_args.append(if (x) "-fallow-shlib-undefined" else "-fno-allow-shlib-undefined"); + } + if (self.link_z_notext) { + try zig_args.append("-z"); + try zig_args.append("notext"); + } + if (!self.link_z_relro) { + try zig_args.append("-z"); + try zig_args.append("norelro"); + } + if (self.link_z_lazy) { + try zig_args.append("-z"); + try zig_args.append("lazy"); + } + if (self.link_z_common_page_size) |size| { + try zig_args.append("-z"); + try zig_args.append(b.fmt("common-page-size={d}", .{size})); + } + if (self.link_z_max_page_size) |size| { + try zig_args.append("-z"); + try zig_args.append(b.fmt("max-page-size={d}", .{size})); + } + + if (self.libc_file) |libc_file| { + try zig_args.append("--libc"); + try zig_args.append(libc_file.getPath(b)); + } else if (b.libc_file) |libc_file| { + try zig_args.append("--libc"); + try zig_args.append(libc_file); + } + + switch (self.optimize) { + .Debug => {}, // Skip since it's the default. + else => try zig_args.append(b.fmt("-O{s}", .{@tagName(self.optimize)})), + } + + try zig_args.append("--cache-dir"); + try zig_args.append(b.cache_root.path orelse "."); + + try zig_args.append("--global-cache-dir"); + try zig_args.append(b.global_cache_root.path orelse "."); + + try zig_args.append("--name"); + try zig_args.append(self.name); + + if (self.linkage) |some| switch (some) { + .dynamic => try zig_args.append("-dynamic"), + .static => try zig_args.append("-static"), + }; + if (self.kind == .lib and self.linkage != null and self.linkage.? 
== .dynamic) { + if (self.version) |version| { + try zig_args.append("--version"); + try zig_args.append(b.fmt("{}", .{version})); + } + + if (self.target.isDarwin()) { + const install_name = self.install_name orelse b.fmt("@rpath/{s}{s}{s}", .{ + self.target.libPrefix(), + self.name, + self.target.dynamicLibSuffix(), + }); + try zig_args.append("-install_name"); + try zig_args.append(install_name); + } + } + + if (self.entitlements) |entitlements| { + try zig_args.appendSlice(&[_][]const u8{ "--entitlements", entitlements }); + } + if (self.pagezero_size) |pagezero_size| { + const size = try std.fmt.allocPrint(b.allocator, "{x}", .{pagezero_size}); + try zig_args.appendSlice(&[_][]const u8{ "-pagezero_size", size }); + } + if (self.search_strategy) |strat| switch (strat) { + .paths_first => try zig_args.append("-search_paths_first"), + .dylibs_first => try zig_args.append("-search_dylibs_first"), + }; + if (self.headerpad_size) |headerpad_size| { + const size = try std.fmt.allocPrint(b.allocator, "{x}", .{headerpad_size}); + try zig_args.appendSlice(&[_][]const u8{ "-headerpad", size }); + } + if (self.headerpad_max_install_names) { + try zig_args.append("-headerpad_max_install_names"); + } + if (self.dead_strip_dylibs) { + try zig_args.append("-dead_strip_dylibs"); + } + + try addFlag(&zig_args, "compiler-rt", self.bundle_compiler_rt); + try addFlag(&zig_args, "single-threaded", self.single_threaded); + if (self.disable_stack_probing) { + try zig_args.append("-fno-stack-check"); + } + try addFlag(&zig_args, "stack-protector", self.stack_protector); + if (self.red_zone) |red_zone| { + if (red_zone) { + try zig_args.append("-mred-zone"); + } else { + try zig_args.append("-mno-red-zone"); + } + } + try addFlag(&zig_args, "omit-frame-pointer", self.omit_frame_pointer); + try addFlag(&zig_args, "dll-export-fns", self.dll_export_fns); + + if (self.disable_sanitize_c) { + try zig_args.append("-fno-sanitize-c"); + } + if (self.sanitize_thread) { + try zig_args.append("-fsanitize-thread"); + } + if (self.rdynamic) { + try zig_args.append("-rdynamic"); + } + if (self.import_memory) { + try zig_args.append("--import-memory"); + } + if (self.import_symbols) { + try zig_args.append("--import-symbols"); + } + if (self.import_table) { + try zig_args.append("--import-table"); + } + if (self.export_table) { + try zig_args.append("--export-table"); + } + if (self.initial_memory) |initial_memory| { + try zig_args.append(b.fmt("--initial-memory={d}", .{initial_memory})); + } + if (self.max_memory) |max_memory| { + try zig_args.append(b.fmt("--max-memory={d}", .{max_memory})); + } + if (self.shared_memory) { + try zig_args.append("--shared-memory"); + } + if (self.global_base) |global_base| { + try zig_args.append(b.fmt("--global-base={d}", .{global_base})); + } + + if (self.code_model != .default) { + try zig_args.append("-mcmodel"); + try zig_args.append(@tagName(self.code_model)); + } + if (self.wasi_exec_model) |model| { + try zig_args.append(b.fmt("-mexec-model={s}", .{@tagName(model)})); + } + for (self.export_symbol_names) |symbol_name| { + try zig_args.append(b.fmt("--export={s}", .{symbol_name})); + } + + if (!self.target.isNative()) { + try zig_args.appendSlice(&.{ + "-target", try self.target.zigTriple(b.allocator), + "-mcpu", try std.Build.serializeCpu(b.allocator, self.target.getCpu()), + }); + + if (self.target.dynamic_linker.get()) |dynamic_linker| { + try zig_args.append("--dynamic-linker"); + try zig_args.append(dynamic_linker); + } + } + + if (self.linker_script) |linker_script| { + try 
zig_args.append("--script"); + try zig_args.append(linker_script.getPath(b)); + } + + if (self.version_script) |version_script| { + try zig_args.append("--version-script"); + try zig_args.append(b.pathFromRoot(version_script)); + } + + if (self.kind == .@"test") { + if (self.exec_cmd_args) |exec_cmd_args| { + for (exec_cmd_args) |cmd_arg| { + if (cmd_arg) |arg| { + try zig_args.append("--test-cmd"); + try zig_args.append(arg); + } else { + try zig_args.append("--test-cmd-bin"); + } + } + } + } + + try self.appendModuleArgs(&zig_args); + + for (self.include_dirs.items) |include_dir| { + switch (include_dir) { + .raw_path => |include_path| { + try zig_args.append("-I"); + try zig_args.append(b.pathFromRoot(include_path)); + }, + .raw_path_system => |include_path| { + if (b.sysroot != null) { + try zig_args.append("-iwithsysroot"); + } else { + try zig_args.append("-isystem"); + } + + const resolved_include_path = b.pathFromRoot(include_path); + + const common_include_path = if (builtin.os.tag == .windows and b.sysroot != null and fs.path.isAbsolute(resolved_include_path)) blk: { + // We need to check for disk designator and strip it out from dir path so + // that zig/clang can concat resolved_include_path with sysroot. + const disk_designator = fs.path.diskDesignatorWindows(resolved_include_path); + + if (mem.indexOf(u8, resolved_include_path, disk_designator)) |where| { + break :blk resolved_include_path[where + disk_designator.len ..]; + } + + break :blk resolved_include_path; + } else resolved_include_path; + + try zig_args.append(common_include_path); + }, + .other_step => |other| { + if (other.emit_h) { + const h_path = other.getOutputHSource().getPath(b); + try zig_args.append("-isystem"); + try zig_args.append(fs.path.dirname(h_path).?); + } + if (other.installed_headers.items.len > 0) { + try zig_args.append("-I"); + try zig_args.append(b.pathJoin(&.{ + other.step.owner.install_prefix, "include", + })); + } + }, + .config_header_step => |config_header| { + const full_file_path = config_header.output_file.path.?; + const header_dir_path = full_file_path[0 .. 
full_file_path.len - config_header.include_path.len]; + try zig_args.appendSlice(&.{ "-I", header_dir_path }); + }, + } + } + + for (self.c_macros.items) |c_macro| { + try zig_args.append("-D"); + try zig_args.append(c_macro); + } + + try zig_args.ensureUnusedCapacity(2 * self.lib_paths.items.len); + for (self.lib_paths.items) |lib_path| { + zig_args.appendAssumeCapacity("-L"); + zig_args.appendAssumeCapacity(lib_path.getPath2(b, step)); + } + + try zig_args.ensureUnusedCapacity(2 * self.rpaths.items.len); + for (self.rpaths.items) |rpath| { + zig_args.appendAssumeCapacity("-rpath"); + + if (self.target_info.target.isDarwin()) switch (rpath) { + .path => |path| { + // On Darwin, we should not try to expand special runtime paths such as + // * @executable_path + // * @loader_path + if (mem.startsWith(u8, path, "@executable_path") or + mem.startsWith(u8, path, "@loader_path")) + { + zig_args.appendAssumeCapacity(path); + continue; + } + }, + .generated => {}, + }; + + zig_args.appendAssumeCapacity(rpath.getPath2(b, step)); + } + + for (self.framework_dirs.items) |directory_source| { + if (b.sysroot != null) { + try zig_args.append("-iframeworkwithsysroot"); + } else { + try zig_args.append("-iframework"); + } + try zig_args.append(directory_source.getPath2(b, step)); + try zig_args.append("-F"); + try zig_args.append(directory_source.getPath2(b, step)); + } + + { + var it = self.frameworks.iterator(); + while (it.next()) |entry| { + const name = entry.key_ptr.*; + const info = entry.value_ptr.*; + if (info.needed) { + try zig_args.append("-needed_framework"); + } else if (info.weak) { + try zig_args.append("-weak_framework"); + } else { + try zig_args.append("-framework"); + } + try zig_args.append(name); + } + } + + if (b.sysroot) |sysroot| { + try zig_args.appendSlice(&[_][]const u8{ "--sysroot", sysroot }); + } + + for (b.search_prefixes.items) |search_prefix| { + var prefix_dir = fs.cwd().openDir(search_prefix, .{}) catch |err| { + return step.fail("unable to open prefix directory '{s}': {s}", .{ + search_prefix, @errorName(err), + }); + }; + defer prefix_dir.close(); + + // Avoid passing -L and -I flags for nonexistent directories. + // This prevents a warning, that should probably be upgraded to an error in Zig's + // CLI parsing code, when the linker sees an -L directory that does not exist. 
+ + if (prefix_dir.accessZ("lib", .{})) |_| { + try zig_args.appendSlice(&.{ + "-L", try fs.path.join(b.allocator, &.{ search_prefix, "lib" }), + }); + } else |err| switch (err) { + error.FileNotFound => {}, + else => |e| return step.fail("unable to access '{s}/lib' directory: {s}", .{ + search_prefix, @errorName(e), + }), + } + + if (prefix_dir.accessZ("include", .{})) |_| { + try zig_args.appendSlice(&.{ + "-I", try fs.path.join(b.allocator, &.{ search_prefix, "include" }), + }); + } else |err| switch (err) { + error.FileNotFound => {}, + else => |e| return step.fail("unable to access '{s}/include' directory: {s}", .{ + search_prefix, @errorName(e), + }), + } + } + + try addFlag(&zig_args, "valgrind", self.valgrind_support); + try addFlag(&zig_args, "each-lib-rpath", self.each_lib_rpath); + try addFlag(&zig_args, "build-id", self.build_id); + + if (self.zig_lib_dir) |dir| { + try zig_args.append("--zig-lib-dir"); + try zig_args.append(b.pathFromRoot(dir)); + } else if (b.zig_lib_dir) |dir| { + try zig_args.append("--zig-lib-dir"); + try zig_args.append(dir); + } + + if (self.main_pkg_path) |dir| { + try zig_args.append("--main-pkg-path"); + try zig_args.append(b.pathFromRoot(dir)); + } + + try addFlag(&zig_args, "PIC", self.force_pic); + try addFlag(&zig_args, "PIE", self.pie); + try addFlag(&zig_args, "lto", self.want_lto); + + if (self.subsystem) |subsystem| { + try zig_args.append("--subsystem"); + try zig_args.append(switch (subsystem) { + .Console => "console", + .Windows => "windows", + .Posix => "posix", + .Native => "native", + .EfiApplication => "efi_application", + .EfiBootServiceDriver => "efi_boot_service_driver", + .EfiRom => "efi_rom", + .EfiRuntimeDriver => "efi_runtime_driver", + }); + } + + try zig_args.append("--listen=-"); + + // Windows has an argument length limit of 32,766 characters, macOS 262,144 and Linux + // 2,097,152. If our args exceed 30 KiB, we instead write them to a "response file" and + // pass that to zig, e.g. via 'zig build-lib @args.rsp' + // See @file syntax here: https://gcc.gnu.org/onlinedocs/gcc/Overall-Options.html + var args_length: usize = 0; + for (zig_args.items) |arg| { + args_length += arg.len + 1; // +1 to account for null terminator + } + if (args_length >= 30 * 1024) { + try b.cache_root.handle.makePath("args"); + + const args_to_escape = zig_args.items[2..]; + var escaped_args = try ArrayList([]const u8).initCapacity(b.allocator, args_to_escape.len); + arg_blk: for (args_to_escape) |arg| { + for (arg, 0..) |c, arg_idx| { + if (c == '\\' or c == '"') { + // Slow path for arguments that need to be escaped. We'll need to allocate and copy + var escaped = try ArrayList(u8).initCapacity(b.allocator, arg.len + 1); + const writer = escaped.writer(); + try writer.writeAll(arg[0..arg_idx]); + for (arg[arg_idx..]) |to_escape| { + if (to_escape == '\\' or to_escape == '"') try writer.writeByte('\\'); + try writer.writeByte(to_escape); + } + escaped_args.appendAssumeCapacity(escaped.items); + continue :arg_blk; + } + } + escaped_args.appendAssumeCapacity(arg); // no escaping needed so just use original argument + } + + // Write the args to zig-cache/args/ to avoid conflicts with + // other zig build commands running in parallel. 
+ const partially_quoted = try std.mem.join(b.allocator, "\" \"", escaped_args.items); + const args = try std.mem.concat(b.allocator, u8, &[_][]const u8{ "\"", partially_quoted, "\"" }); + + var args_hash: [Sha256.digest_length]u8 = undefined; + Sha256.hash(args, &args_hash, .{}); + var args_hex_hash: [Sha256.digest_length * 2]u8 = undefined; + _ = try std.fmt.bufPrint( + &args_hex_hash, + "{s}", + .{std.fmt.fmtSliceHexLower(&args_hash)}, + ); + + const args_file = "args" ++ fs.path.sep_str ++ args_hex_hash; + try b.cache_root.handle.writeFile(args_file, args); + + const resolved_args_file = try mem.concat(b.allocator, u8, &.{ + "@", + try b.cache_root.join(b.allocator, &.{args_file}), + }); + + zig_args.shrinkRetainingCapacity(2); + try zig_args.append(resolved_args_file); + } + + const output_bin_path = step.evalZigProcess(zig_args.items, prog_node) catch |err| switch (err) { + error.NeedCompileErrorCheck => { + assert(self.expect_errors.len != 0); + try checkCompileErrors(self); + return; + }, + else => |e| return e, + }; + const output_dir = fs.path.dirname(output_bin_path).?; + + // Update generated files + { + self.output_dirname_source.path = output_dir; + + self.output_path_source.path = b.pathJoin( + &.{ output_dir, self.out_filename }, + ); + + if (self.kind == .lib) { + self.output_lib_path_source.path = b.pathJoin( + &.{ output_dir, self.out_lib_filename }, + ); + } + + if (self.emit_h) { + self.output_h_path_source.path = b.pathJoin( + &.{ output_dir, self.out_h_filename }, + ); + } + + if (self.target.isWindows() or self.target.isUefi()) { + self.output_pdb_path_source.path = b.pathJoin( + &.{ output_dir, self.out_pdb_filename }, + ); + } + } + + if (self.kind == .lib and self.linkage != null and self.linkage.? == .dynamic and + self.version != null and self.target.wantSharedLibSymLinks()) + { + try doAtomicSymLinks( + step, + self.getOutputSource().getPath(b), + self.major_only_filename.?, + self.name_only_filename.?, + ); + } +} + +fn isLibCLibrary(name: []const u8) bool { + const libc_libraries = [_][]const u8{ "c", "m", "dl", "rt", "pthread" }; + for (libc_libraries) |libc_lib_name| { + if (mem.eql(u8, name, libc_lib_name)) + return true; + } + return false; +} + +fn isLibCppLibrary(name: []const u8) bool { + const libcpp_libraries = [_][]const u8{ "c++", "stdc++" }; + for (libcpp_libraries) |libcpp_lib_name| { + if (mem.eql(u8, name, libcpp_lib_name)) + return true; + } + return false; +} + +/// Returned slice must be freed by the caller. 
+fn findVcpkgRoot(allocator: Allocator) !?[]const u8 { + const appdata_path = try fs.getAppDataDir(allocator, "vcpkg"); + defer allocator.free(appdata_path); + + const path_file = try fs.path.join(allocator, &[_][]const u8{ appdata_path, "vcpkg.path.txt" }); + defer allocator.free(path_file); + + const file = fs.cwd().openFile(path_file, .{}) catch return null; + defer file.close(); + + const size = @intCast(usize, try file.getEndPos()); + const vcpkg_path = try allocator.alloc(u8, size); + const size_read = try file.read(vcpkg_path); + std.debug.assert(size == size_read); + + return vcpkg_path; +} + +pub fn doAtomicSymLinks( + step: *Step, + output_path: []const u8, + filename_major_only: []const u8, + filename_name_only: []const u8, +) !void { + const arena = step.owner.allocator; + const out_dir = fs.path.dirname(output_path) orelse "."; + const out_basename = fs.path.basename(output_path); + // sym link for libfoo.so.1 to libfoo.so.1.2.3 + const major_only_path = try fs.path.join(arena, &.{ out_dir, filename_major_only }); + fs.atomicSymLink(arena, out_basename, major_only_path) catch |err| { + return step.fail("unable to symlink {s} -> {s}: {s}", .{ + major_only_path, out_basename, @errorName(err), + }); + }; + // sym link for libfoo.so to libfoo.so.1 + const name_only_path = try fs.path.join(arena, &.{ out_dir, filename_name_only }); + fs.atomicSymLink(arena, filename_major_only, name_only_path) catch |err| { + return step.fail("Unable to symlink {s} -> {s}: {s}", .{ + name_only_path, filename_major_only, @errorName(err), + }); + }; +} + +fn execPkgConfigList(self: *std.Build, out_code: *u8) (PkgConfigError || ExecError)![]const PkgConfigPkg { + const stdout = try self.execAllowFail(&[_][]const u8{ "pkg-config", "--list-all" }, out_code, .Ignore); + var list = ArrayList(PkgConfigPkg).init(self.allocator); + errdefer list.deinit(); + var line_it = mem.tokenize(u8, stdout, "\r\n"); + while (line_it.next()) |line| { + if (mem.trim(u8, line, " \t").len == 0) continue; + var tok_it = mem.tokenize(u8, line, " \t"); + try list.append(PkgConfigPkg{ + .name = tok_it.next() orelse return error.PkgConfigInvalidOutput, + .desc = tok_it.rest(), + }); + } + return list.toOwnedSlice(); +} + +fn getPkgConfigList(self: *std.Build) ![]const PkgConfigPkg { + if (self.pkg_config_pkg_list) |res| { + return res; + } + var code: u8 = undefined; + if (execPkgConfigList(self, &code)) |list| { + self.pkg_config_pkg_list = list; + return list; + } else |err| { + const result = switch (err) { + error.ProcessTerminated => error.PkgConfigCrashed, + error.ExecNotSupported => error.PkgConfigFailed, + error.ExitCodeFailure => error.PkgConfigFailed, + error.FileNotFound => error.PkgConfigNotInstalled, + error.InvalidName => error.PkgConfigNotInstalled, + error.PkgConfigInvalidOutput => error.PkgConfigInvalidOutput, + else => return err, + }; + self.pkg_config_pkg_list = result; + return result; + } +} + +fn addFlag(args: *ArrayList([]const u8), comptime name: []const u8, opt: ?bool) !void { + const cond = opt orelse return; + try args.ensureUnusedCapacity(1); + if (cond) { + args.appendAssumeCapacity("-f" ++ name); + } else { + args.appendAssumeCapacity("-fno-" ++ name); + } +} + +const TransitiveDeps = struct { + link_objects: ArrayList(LinkObject), + seen_system_libs: StringHashMap(void), + seen_steps: std.AutoHashMap(*const Step, void), + is_linking_libcpp: bool, + is_linking_libc: bool, + frameworks: *StringHashMap(FrameworkLinkInfo), + + fn add(td: *TransitiveDeps, link_objects: []const LinkObject) !void { + try 
td.link_objects.ensureUnusedCapacity(link_objects.len); + + for (link_objects) |link_object| { + try td.link_objects.append(link_object); + switch (link_object) { + .other_step => |other| try addInner(td, other, other.isDynamicLibrary()), + else => {}, + } + } + } + + fn addInner(td: *TransitiveDeps, other: *CompileStep, dyn: bool) !void { + // Inherit dependency on libc and libc++ + td.is_linking_libcpp = td.is_linking_libcpp or other.is_linking_libcpp; + td.is_linking_libc = td.is_linking_libc or other.is_linking_libc; + + // Inherit dependencies on darwin frameworks + if (!dyn) { + var it = other.frameworks.iterator(); + while (it.next()) |framework| { + try td.frameworks.put(framework.key_ptr.*, framework.value_ptr.*); + } + } + + // Inherit dependencies on system libraries and static libraries. + for (other.link_objects.items) |other_link_object| { + switch (other_link_object) { + .system_lib => |system_lib| { + if ((try td.seen_system_libs.fetchPut(system_lib.name, {})) != null) + continue; + + if (dyn) + continue; + + try td.link_objects.append(other_link_object); + }, + .other_step => |inner_other| { + if ((try td.seen_steps.fetchPut(&inner_other.step, {})) != null) + continue; + + if (!dyn) + try td.link_objects.append(other_link_object); + + try addInner(td, inner_other, dyn or inner_other.isDynamicLibrary()); + }, + else => continue, + } + } + } +}; + +fn checkCompileErrors(self: *CompileStep) !void { + // Clear this field so that it does not get printed by the build runner. + const actual_eb = self.step.result_error_bundle; + self.step.result_error_bundle = std.zig.ErrorBundle.empty; + + const arena = self.step.owner.allocator; + + var actual_stderr_list = std.ArrayList(u8).init(arena); + try actual_eb.renderToWriter(.{ + .ttyconf = .no_color, + .include_reference_trace = false, + .include_source_line = false, + }, actual_stderr_list.writer()); + const actual_stderr = try actual_stderr_list.toOwnedSlice(); + + // Render the expected lines into a string that we can compare verbatim. 
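+    // Summary of the matching rules applied below (descriptive note): each line
+    // in `expect_errors` only has to match the *end* of the corresponding line
+    // of rendered stderr, so expectations can omit absolute file paths. An
+    // expected line that begins with ":?:?: " matches as long as everything
+    // after that placeholder is a suffix of the actual line, e.g.
+    //   expected: ":?:?: error: unused local"
+    //   actual:   "src/main.zig:4:9: error: unused local"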
+ var expected_generated = std.ArrayList(u8).init(arena); + + var actual_line_it = mem.split(u8, actual_stderr, "\n"); + for (self.expect_errors) |expect_line| { + const actual_line = actual_line_it.next() orelse { + try expected_generated.appendSlice(expect_line); + try expected_generated.append('\n'); + continue; + }; + if (mem.endsWith(u8, actual_line, expect_line)) { + try expected_generated.appendSlice(actual_line); + try expected_generated.append('\n'); + continue; + } + if (mem.startsWith(u8, expect_line, ":?:?: ")) { + if (mem.endsWith(u8, actual_line, expect_line[":?:?: ".len..])) { + try expected_generated.appendSlice(actual_line); + try expected_generated.append('\n'); + continue; + } + } + try expected_generated.appendSlice(expect_line); + try expected_generated.append('\n'); + } + + if (mem.eql(u8, expected_generated.items, actual_stderr)) return; + + // TODO merge this with the testing.expectEqualStrings logic, and also CheckFile + return self.step.fail( + \\ + \\========= expected: ===================== + \\{s} + \\========= but found: ==================== + \\{s} + \\========================================= + , .{ expected_generated.items, actual_stderr }); +} diff --git a/lib/std/Build/Step/ConfigHeader.zig b/lib/std/Build/Step/ConfigHeader.zig new file mode 100644 index 0000000000..6bfe28ae62 --- /dev/null +++ b/lib/std/Build/Step/ConfigHeader.zig @@ -0,0 +1,437 @@ +const std = @import("std"); +const ConfigHeaderStep = @This(); +const Step = std.Build.Step; + +pub const Style = union(enum) { + /// The configure format supported by autotools. It uses `#undef foo` to + /// mark lines that can be substituted with different values. + autoconf: std.Build.FileSource, + /// The configure format supported by CMake. It uses `@@FOO@@` and + /// `#cmakedefine` for template substitution. + cmake: std.Build.FileSource, + /// Instead of starting with an input file, start with nothing. + blank, + /// Start with nothing, like blank, and output a nasm .asm file. + nasm, + + pub fn getFileSource(style: Style) ?std.Build.FileSource { + switch (style) { + .autoconf, .cmake => |s| return s, + .blank, .nasm => return null, + } + } +}; + +pub const Value = union(enum) { + undef, + defined, + boolean: bool, + int: i64, + ident: []const u8, + string: []const u8, +}; + +step: Step, +values: std.StringArrayHashMap(Value), +output_file: std.Build.GeneratedFile, + +style: Style, +max_bytes: usize, +include_path: []const u8, + +pub const base_id: Step.Id = .config_header; + +pub const Options = struct { + style: Style = .blank, + max_bytes: usize = 2 * 1024 * 1024, + include_path: ?[]const u8 = null, + first_ret_addr: ?usize = null, +}; + +pub fn create(owner: *std.Build, options: Options) *ConfigHeaderStep { + const self = owner.allocator.create(ConfigHeaderStep) catch @panic("OOM"); + + var include_path: []const u8 = "config.h"; + + if (options.style.getFileSource()) |s| switch (s) { + .path => |p| { + const basename = std.fs.path.basename(p); + if (std.mem.endsWith(u8, basename, ".h.in")) { + include_path = basename[0 .. 
basename.len - 3]; + } + }, + else => {}, + }; + + if (options.include_path) |p| { + include_path = p; + } + + const name = if (options.style.getFileSource()) |s| + owner.fmt("configure {s} header {s} to {s}", .{ + @tagName(options.style), s.getDisplayName(), include_path, + }) + else + owner.fmt("configure {s} header to {s}", .{ @tagName(options.style), include_path }); + + self.* = .{ + .step = Step.init(.{ + .id = base_id, + .name = name, + .owner = owner, + .makeFn = make, + .first_ret_addr = options.first_ret_addr orelse @returnAddress(), + }), + .style = options.style, + .values = std.StringArrayHashMap(Value).init(owner.allocator), + + .max_bytes = options.max_bytes, + .include_path = include_path, + .output_file = .{ .step = &self.step }, + }; + + return self; +} + +pub fn addValues(self: *ConfigHeaderStep, values: anytype) void { + return addValuesInner(self, values) catch @panic("OOM"); +} + +pub fn getFileSource(self: *ConfigHeaderStep) std.Build.FileSource { + return .{ .generated = &self.output_file }; +} + +fn addValuesInner(self: *ConfigHeaderStep, values: anytype) !void { + inline for (@typeInfo(@TypeOf(values)).Struct.fields) |field| { + try putValue(self, field.name, field.type, @field(values, field.name)); + } +} + +fn putValue(self: *ConfigHeaderStep, field_name: []const u8, comptime T: type, v: T) !void { + switch (@typeInfo(T)) { + .Null => { + try self.values.put(field_name, .undef); + }, + .Void => { + try self.values.put(field_name, .defined); + }, + .Bool => { + try self.values.put(field_name, .{ .boolean = v }); + }, + .Int => { + try self.values.put(field_name, .{ .int = v }); + }, + .ComptimeInt => { + try self.values.put(field_name, .{ .int = v }); + }, + .EnumLiteral => { + try self.values.put(field_name, .{ .ident = @tagName(v) }); + }, + .Optional => { + if (v) |x| { + return putValue(self, field_name, @TypeOf(x), x); + } else { + try self.values.put(field_name, .undef); + } + }, + .Pointer => |ptr| { + switch (@typeInfo(ptr.child)) { + .Array => |array| { + if (ptr.size == .One and array.child == u8) { + try self.values.put(field_name, .{ .string = v }); + return; + } + }, + .Int => { + if (ptr.size == .Slice and ptr.child == u8) { + try self.values.put(field_name, .{ .string = v }); + return; + } + }, + else => {}, + } + + @compileError("unsupported ConfigHeaderStep value type: " ++ @typeName(T)); + }, + else => @compileError("unsupported ConfigHeaderStep value type: " ++ @typeName(T)), + } +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const b = step.owner; + const self = @fieldParentPtr(ConfigHeaderStep, "step", step); + const gpa = b.allocator; + const arena = b.allocator; + + var man = b.cache.obtain(); + defer man.deinit(); + + // Random bytes to make ConfigHeaderStep unique. Refresh this with new + // random bytes when ConfigHeaderStep implementation is modified in a + // non-backwards-compatible way. 
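+    // Apart from this salt, the manifest hash is derived entirely from the
+    // fully rendered header text (added via `man.hash.addBytes` below), so two
+    // configurations that produce identical output share the same
+    // o/<digest>/<include_path> cache entry, and the file is only rewritten on
+    // a cache miss.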
+ man.hash.add(@as(u32, 0xdef08d23)); + + var output = std.ArrayList(u8).init(gpa); + defer output.deinit(); + + const header_text = "This file was generated by ConfigHeaderStep using the Zig Build System."; + const c_generated_line = "/* " ++ header_text ++ " */\n"; + const asm_generated_line = "; " ++ header_text ++ "\n"; + + switch (self.style) { + .autoconf => |file_source| { + try output.appendSlice(c_generated_line); + const src_path = file_source.getPath(b); + const contents = try std.fs.cwd().readFileAlloc(arena, src_path, self.max_bytes); + try render_autoconf(step, contents, &output, self.values, src_path); + }, + .cmake => |file_source| { + try output.appendSlice(c_generated_line); + const src_path = file_source.getPath(b); + const contents = try std.fs.cwd().readFileAlloc(arena, src_path, self.max_bytes); + try render_cmake(step, contents, &output, self.values, src_path); + }, + .blank => { + try output.appendSlice(c_generated_line); + try render_blank(&output, self.values, self.include_path); + }, + .nasm => { + try output.appendSlice(asm_generated_line); + try render_nasm(&output, self.values); + }, + } + + man.hash.addBytes(output.items); + + if (try step.cacheHit(&man)) { + const digest = man.final(); + self.output_file.path = try b.cache_root.join(arena, &.{ + "o", &digest, self.include_path, + }); + return; + } + + const digest = man.final(); + + // If output_path has directory parts, deal with them. Example: + // output_dir is zig-cache/o/HASH + // output_path is libavutil/avconfig.h + // We want to open directory zig-cache/o/HASH/libavutil/ + // but keep output_dir as zig-cache/o/HASH for -I include + const sub_path = try std.fs.path.join(arena, &.{ "o", &digest, self.include_path }); + const sub_path_dirname = std.fs.path.dirname(sub_path).?; + + b.cache_root.handle.makePath(sub_path_dirname) catch |err| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.cache_root, sub_path_dirname, @errorName(err), + }); + }; + + b.cache_root.handle.writeFile(sub_path, output.items) catch |err| { + return step.fail("unable to write file '{}{s}': {s}", .{ + b.cache_root, sub_path, @errorName(err), + }); + }; + + self.output_file.path = try b.cache_root.join(arena, &.{sub_path}); + try man.writeManifest(); +} + +fn render_autoconf( + step: *Step, + contents: []const u8, + output: *std.ArrayList(u8), + values: std.StringArrayHashMap(Value), + src_path: []const u8, +) !void { + var values_copy = try values.clone(); + defer values_copy.deinit(); + + var any_errors = false; + var line_index: u32 = 0; + var line_it = std.mem.split(u8, contents, "\n"); + while (line_it.next()) |line| : (line_index += 1) { + if (!std.mem.startsWith(u8, line, "#")) { + try output.appendSlice(line); + try output.appendSlice("\n"); + continue; + } + var it = std.mem.tokenize(u8, line[1..], " \t\r"); + const undef = it.next().?; + if (!std.mem.eql(u8, undef, "undef")) { + try output.appendSlice(line); + try output.appendSlice("\n"); + continue; + } + const name = it.rest(); + const kv = values_copy.fetchSwapRemove(name) orelse { + try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{ + src_path, line_index + 1, name, + }); + any_errors = true; + continue; + }; + try renderValueC(output, name, kv.value); + } + + for (values_copy.keys()) |name| { + try step.addError("{s}: error: config header value unused: '{s}'", .{ src_path, name }); + any_errors = true; + } + + if (any_errors) { + return error.MakeFailed; + } +} + +fn render_cmake( + step: *Step, + contents: []const u8, + 
output: *std.ArrayList(u8), + values: std.StringArrayHashMap(Value), + src_path: []const u8, +) !void { + var values_copy = try values.clone(); + defer values_copy.deinit(); + + var any_errors = false; + var line_index: u32 = 0; + var line_it = std.mem.split(u8, contents, "\n"); + while (line_it.next()) |line| : (line_index += 1) { + if (!std.mem.startsWith(u8, line, "#")) { + try output.appendSlice(line); + try output.appendSlice("\n"); + continue; + } + var it = std.mem.tokenize(u8, line[1..], " \t\r"); + const cmakedefine = it.next().?; + if (!std.mem.eql(u8, cmakedefine, "cmakedefine")) { + try output.appendSlice(line); + try output.appendSlice("\n"); + continue; + } + const name = it.next() orelse { + try step.addError("{s}:{d}: error: missing define name", .{ + src_path, line_index + 1, + }); + any_errors = true; + continue; + }; + const kv = values_copy.fetchSwapRemove(name) orelse { + try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{ + src_path, line_index + 1, name, + }); + any_errors = true; + continue; + }; + try renderValueC(output, name, kv.value); + } + + for (values_copy.keys()) |name| { + try step.addError("{s}: error: config header value unused: '{s}'", .{ src_path, name }); + any_errors = true; + } + + if (any_errors) { + return error.HeaderConfigFailed; + } +} + +fn render_blank( + output: *std.ArrayList(u8), + defines: std.StringArrayHashMap(Value), + include_path: []const u8, +) !void { + const include_guard_name = try output.allocator.dupe(u8, include_path); + for (include_guard_name) |*byte| { + switch (byte.*) { + 'a'...'z' => byte.* = byte.* - 'a' + 'A', + 'A'...'Z', '0'...'9' => continue, + else => byte.* = '_', + } + } + + try output.appendSlice("#ifndef "); + try output.appendSlice(include_guard_name); + try output.appendSlice("\n#define "); + try output.appendSlice(include_guard_name); + try output.appendSlice("\n"); + + const values = defines.values(); + for (defines.keys(), 0..) |name, i| { + try renderValueC(output, name, values[i]); + } + + try output.appendSlice("#endif /* "); + try output.appendSlice(include_guard_name); + try output.appendSlice(" */\n"); +} + +fn render_nasm(output: *std.ArrayList(u8), defines: std.StringArrayHashMap(Value)) !void { + const values = defines.values(); + for (defines.keys(), 0..) 
|name, i| { + try renderValueNasm(output, name, values[i]); + } +} + +fn renderValueC(output: *std.ArrayList(u8), name: []const u8, value: Value) !void { + switch (value) { + .undef => { + try output.appendSlice("/* #undef "); + try output.appendSlice(name); + try output.appendSlice(" */\n"); + }, + .defined => { + try output.appendSlice("#define "); + try output.appendSlice(name); + try output.appendSlice("\n"); + }, + .boolean => |b| { + try output.appendSlice("#define "); + try output.appendSlice(name); + try output.appendSlice(" "); + try output.appendSlice(if (b) "true\n" else "false\n"); + }, + .int => |i| { + try output.writer().print("#define {s} {d}\n", .{ name, i }); + }, + .ident => |ident| { + try output.writer().print("#define {s} {s}\n", .{ name, ident }); + }, + .string => |string| { + // TODO: use C-specific escaping instead of zig string literals + try output.writer().print("#define {s} \"{}\"\n", .{ name, std.zig.fmtEscapes(string) }); + }, + } +} + +fn renderValueNasm(output: *std.ArrayList(u8), name: []const u8, value: Value) !void { + switch (value) { + .undef => { + try output.appendSlice("; %undef "); + try output.appendSlice(name); + try output.appendSlice("\n"); + }, + .defined => { + try output.appendSlice("%define "); + try output.appendSlice(name); + try output.appendSlice("\n"); + }, + .boolean => |b| { + try output.appendSlice("%define "); + try output.appendSlice(name); + try output.appendSlice(if (b) " 1\n" else " 0\n"); + }, + .int => |i| { + try output.writer().print("%define {s} {d}\n", .{ name, i }); + }, + .ident => |ident| { + try output.writer().print("%define {s} {s}\n", .{ name, ident }); + }, + .string => |string| { + // TODO: use nasm-specific escaping instead of zig string literals + try output.writer().print("%define {s} \"{}\"\n", .{ name, std.zig.fmtEscapes(string) }); + }, + } +} diff --git a/lib/std/Build/Step/Fmt.zig b/lib/std/Build/Step/Fmt.zig new file mode 100644 index 0000000000..23d5d9e3ff --- /dev/null +++ b/lib/std/Build/Step/Fmt.zig @@ -0,0 +1,72 @@ +//! This step has two modes: +//! * Modify mode: directly modify source files, formatting them in place. +//! * Check mode: fail the step if a non-conforming file is found. +const std = @import("std"); +const Step = std.Build.Step; +const FmtStep = @This(); + +step: Step, +paths: []const []const u8, +exclude_paths: []const []const u8, +check: bool, + +pub const base_id = .fmt; + +pub const Options = struct { + paths: []const []const u8 = &.{}, + exclude_paths: []const []const u8 = &.{}, + /// If true, fails the build step when any non-conforming files are encountered. + check: bool = false, +}; + +pub fn create(owner: *std.Build, options: Options) *FmtStep { + const self = owner.allocator.create(FmtStep) catch @panic("OOM"); + const name = if (options.check) "zig fmt --check" else "zig fmt"; + self.* = .{ + .step = Step.init(.{ + .id = base_id, + .name = name, + .owner = owner, + .makeFn = make, + }), + .paths = options.paths, + .exclude_paths = options.exclude_paths, + .check = options.check, + }; + return self; +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + // zig fmt is fast enough that no progress is needed. + _ = prog_node; + + // TODO: if check=false, this means we are modifying source files in place, which + // is an operation that could race against other operations also modifying source files + // in place. In this case, this step should obtain a write lock while making those + // modifications. 
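+    //
+    // The rest of this function builds and runs the equivalent of:
+    //     zig fmt [--check] <paths...> [--exclude <path>]...
+    // with every path resolved relative to the build root.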
+ + const b = step.owner; + const arena = b.allocator; + const self = @fieldParentPtr(FmtStep, "step", step); + + var argv: std.ArrayListUnmanaged([]const u8) = .{}; + try argv.ensureUnusedCapacity(arena, 2 + 1 + self.paths.len + 2 * self.exclude_paths.len); + + argv.appendAssumeCapacity(b.zig_exe); + argv.appendAssumeCapacity("fmt"); + + if (self.check) { + argv.appendAssumeCapacity("--check"); + } + + for (self.paths) |p| { + argv.appendAssumeCapacity(b.pathFromRoot(p)); + } + + for (self.exclude_paths) |p| { + argv.appendAssumeCapacity("--exclude"); + argv.appendAssumeCapacity(b.pathFromRoot(p)); + } + + return step.evalChildProcess(argv.items); +} diff --git a/lib/std/Build/Step/InstallArtifact.zig b/lib/std/Build/Step/InstallArtifact.zig new file mode 100644 index 0000000000..fa357a9ae9 --- /dev/null +++ b/lib/std/Build/Step/InstallArtifact.zig @@ -0,0 +1,130 @@ +const std = @import("std"); +const Step = std.Build.Step; +const CompileStep = std.Build.CompileStep; +const InstallDir = std.Build.InstallDir; +const InstallArtifactStep = @This(); +const fs = std.fs; + +pub const base_id = .install_artifact; + +step: Step, +artifact: *CompileStep, +dest_dir: InstallDir, +pdb_dir: ?InstallDir, +h_dir: ?InstallDir, +/// If non-null, adds additional path components relative to dest_dir, and +/// overrides the basename of the CompileStep. +dest_sub_path: ?[]const u8, + +pub fn create(owner: *std.Build, artifact: *CompileStep) *InstallArtifactStep { + const self = owner.allocator.create(InstallArtifactStep) catch @panic("OOM"); + self.* = InstallArtifactStep{ + .step = Step.init(.{ + .id = base_id, + .name = owner.fmt("install {s}", .{artifact.name}), + .owner = owner, + .makeFn = make, + }), + .artifact = artifact, + .dest_dir = artifact.override_dest_dir orelse switch (artifact.kind) { + .obj => @panic("Cannot install a .obj build artifact."), + .exe, .@"test" => InstallDir{ .bin = {} }, + .lib => InstallDir{ .lib = {} }, + }, + .pdb_dir = if (artifact.producesPdbFile()) blk: { + if (artifact.kind == .exe or artifact.kind == .@"test") { + break :blk InstallDir{ .bin = {} }; + } else { + break :blk InstallDir{ .lib = {} }; + } + } else null, + .h_dir = if (artifact.kind == .lib and artifact.emit_h) .header else null, + .dest_sub_path = null, + }; + self.step.dependOn(&artifact.step); + + owner.pushInstalledFile(self.dest_dir, artifact.out_filename); + if (self.artifact.isDynamicLibrary()) { + if (artifact.major_only_filename) |name| { + owner.pushInstalledFile(.lib, name); + } + if (artifact.name_only_filename) |name| { + owner.pushInstalledFile(.lib, name); + } + if (self.artifact.target.isWindows()) { + owner.pushInstalledFile(.lib, artifact.out_lib_filename); + } + } + if (self.pdb_dir) |pdb_dir| { + owner.pushInstalledFile(pdb_dir, artifact.out_pdb_filename); + } + if (self.h_dir) |h_dir| { + owner.pushInstalledFile(h_dir, artifact.out_h_filename); + } + return self; +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const self = @fieldParentPtr(InstallArtifactStep, "step", step); + const src_builder = self.artifact.step.owner; + const dest_builder = step.owner; + + const dest_sub_path = if (self.dest_sub_path) |sub_path| sub_path else self.artifact.out_filename; + const full_dest_path = dest_builder.getInstallPath(self.dest_dir, dest_sub_path); + const cwd = fs.cwd(); + + var all_cached = true; + + { + const full_src_path = self.artifact.getOutputSource().getPath(src_builder); + const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_dest_path, .{}) 
catch |err| { + return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ + full_src_path, full_dest_path, @errorName(err), + }); + }; + all_cached = all_cached and p == .fresh; + } + + if (self.artifact.isDynamicLibrary() and + self.artifact.version != null and + self.artifact.target.wantSharedLibSymLinks()) + { + try CompileStep.doAtomicSymLinks(step, full_dest_path, self.artifact.major_only_filename.?, self.artifact.name_only_filename.?); + } + if (self.artifact.isDynamicLibrary() and + self.artifact.target.isWindows() and + self.artifact.emit_implib != .no_emit) + { + const full_src_path = self.artifact.getOutputLibSource().getPath(src_builder); + const full_implib_path = dest_builder.getInstallPath(self.dest_dir, self.artifact.out_lib_filename); + const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_implib_path, .{}) catch |err| { + return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ + full_src_path, full_implib_path, @errorName(err), + }); + }; + all_cached = all_cached and p == .fresh; + } + if (self.pdb_dir) |pdb_dir| { + const full_src_path = self.artifact.getOutputPdbSource().getPath(src_builder); + const full_pdb_path = dest_builder.getInstallPath(pdb_dir, self.artifact.out_pdb_filename); + const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_pdb_path, .{}) catch |err| { + return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ + full_src_path, full_pdb_path, @errorName(err), + }); + }; + all_cached = all_cached and p == .fresh; + } + if (self.h_dir) |h_dir| { + const full_src_path = self.artifact.getOutputHSource().getPath(src_builder); + const full_h_path = dest_builder.getInstallPath(h_dir, self.artifact.out_h_filename); + const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_h_path, .{}) catch |err| { + return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ + full_src_path, full_h_path, @errorName(err), + }); + }; + all_cached = all_cached and p == .fresh; + } + self.artifact.installed_path = full_dest_path; + step.result_cached = all_cached; +} diff --git a/lib/std/Build/Step/InstallDir.zig b/lib/std/Build/Step/InstallDir.zig new file mode 100644 index 0000000000..28280dcb7f --- /dev/null +++ b/lib/std/Build/Step/InstallDir.zig @@ -0,0 +1,110 @@ +const std = @import("std"); +const mem = std.mem; +const fs = std.fs; +const Step = std.Build.Step; +const InstallDir = std.Build.InstallDir; +const InstallDirStep = @This(); + +step: Step, +options: Options, +/// This is used by the build system when a file being installed comes from one +/// package but is being installed by another. +dest_builder: *std.Build, + +pub const base_id = .install_dir; + +pub const Options = struct { + source_dir: []const u8, + install_dir: InstallDir, + install_subdir: []const u8, + /// File paths which end in any of these suffixes will be excluded + /// from being installed. + exclude_extensions: []const []const u8 = &.{}, + /// File paths which end in any of these suffixes will result in + /// empty files being installed. This is mainly intended for large + /// test.zig files in order to prevent needless installation bloat. + /// However if the files were not present at all, then + /// `@import("test.zig")` would be a compile error. 
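+    /// For example (illustrative), passing `.blank_extensions = &.{"test.zig"}`
+    /// when installing a source tree keeps every `test.zig` present and
+    /// importable in the installed copy while skipping its contents.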
+ blank_extensions: []const []const u8 = &.{}, + + fn dupe(self: Options, b: *std.Build) Options { + return .{ + .source_dir = b.dupe(self.source_dir), + .install_dir = self.install_dir.dupe(b), + .install_subdir = b.dupe(self.install_subdir), + .exclude_extensions = b.dupeStrings(self.exclude_extensions), + .blank_extensions = b.dupeStrings(self.blank_extensions), + }; + } +}; + +pub fn init(owner: *std.Build, options: Options) InstallDirStep { + owner.pushInstalledFile(options.install_dir, options.install_subdir); + return .{ + .step = Step.init(.{ + .id = .install_dir, + .name = owner.fmt("install {s}/", .{options.source_dir}), + .owner = owner, + .makeFn = make, + }), + .options = options.dupe(owner), + .dest_builder = owner, + }; +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const self = @fieldParentPtr(InstallDirStep, "step", step); + const dest_builder = self.dest_builder; + const arena = dest_builder.allocator; + const dest_prefix = dest_builder.getInstallPath(self.options.install_dir, self.options.install_subdir); + const src_builder = self.step.owner; + var src_dir = src_builder.build_root.handle.openIterableDir(self.options.source_dir, .{}) catch |err| { + return step.fail("unable to open source directory '{}{s}': {s}", .{ + src_builder.build_root, self.options.source_dir, @errorName(err), + }); + }; + defer src_dir.close(); + var it = try src_dir.walk(arena); + var all_cached = true; + next_entry: while (try it.next()) |entry| { + for (self.options.exclude_extensions) |ext| { + if (mem.endsWith(u8, entry.path, ext)) { + continue :next_entry; + } + } + + // relative to src build root + const src_sub_path = try fs.path.join(arena, &.{ self.options.source_dir, entry.path }); + const dest_path = try fs.path.join(arena, &.{ dest_prefix, entry.path }); + const cwd = fs.cwd(); + + switch (entry.kind) { + .Directory => try cwd.makePath(dest_path), + .File => { + for (self.options.blank_extensions) |ext| { + if (mem.endsWith(u8, entry.path, ext)) { + try dest_builder.truncateFile(dest_path); + continue :next_entry; + } + } + + const prev_status = fs.Dir.updateFile( + src_builder.build_root.handle, + src_sub_path, + cwd, + dest_path, + .{}, + ) catch |err| { + return step.fail("unable to update file from '{}{s}' to '{s}': {s}", .{ + src_builder.build_root, src_sub_path, dest_path, @errorName(err), + }); + }; + all_cached = all_cached and prev_status == .fresh; + }, + else => continue, + } + } + + step.result_cached = all_cached; +} diff --git a/lib/std/Build/Step/InstallFile.zig b/lib/std/Build/Step/InstallFile.zig new file mode 100644 index 0000000000..b6b66fd1e0 --- /dev/null +++ b/lib/std/Build/Step/InstallFile.zig @@ -0,0 +1,57 @@ +const std = @import("std"); +const Step = std.Build.Step; +const FileSource = std.Build.FileSource; +const InstallDir = std.Build.InstallDir; +const InstallFileStep = @This(); +const assert = std.debug.assert; + +pub const base_id = .install_file; + +step: Step, +source: FileSource, +dir: InstallDir, +dest_rel_path: []const u8, +/// This is used by the build system when a file being installed comes from one +/// package but is being installed by another. 
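+/// The source path is resolved against `step.owner` (the package providing the
+/// file), while the install prefix comes from this builder.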
+dest_builder: *std.Build, + +pub fn create( + owner: *std.Build, + source: FileSource, + dir: InstallDir, + dest_rel_path: []const u8, +) *InstallFileStep { + assert(dest_rel_path.len != 0); + owner.pushInstalledFile(dir, dest_rel_path); + const self = owner.allocator.create(InstallFileStep) catch @panic("OOM"); + self.* = .{ + .step = Step.init(.{ + .id = base_id, + .name = owner.fmt("install {s} to {s}", .{ source.getDisplayName(), dest_rel_path }), + .owner = owner, + .makeFn = make, + }), + .source = source.dupe(owner), + .dir = dir.dupe(owner), + .dest_rel_path = owner.dupePath(dest_rel_path), + .dest_builder = owner, + }; + source.addStepDependencies(&self.step); + return self; +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const src_builder = step.owner; + const self = @fieldParentPtr(InstallFileStep, "step", step); + const dest_builder = self.dest_builder; + const full_src_path = self.source.getPath2(src_builder, step); + const full_dest_path = dest_builder.getInstallPath(self.dir, self.dest_rel_path); + const cwd = std.fs.cwd(); + const prev = std.fs.Dir.updateFile(cwd, full_src_path, cwd, full_dest_path, .{}) catch |err| { + return step.fail("unable to update file from '{s}' to '{s}': {s}", .{ + full_src_path, full_dest_path, @errorName(err), + }); + }; + step.result_cached = prev == .fresh; +} diff --git a/lib/std/Build/Step/ObjCopy.zig b/lib/std/Build/Step/ObjCopy.zig new file mode 100644 index 0000000000..608c56591f --- /dev/null +++ b/lib/std/Build/Step/ObjCopy.zig @@ -0,0 +1,122 @@ +const std = @import("std"); +const ObjCopyStep = @This(); + +const Allocator = std.mem.Allocator; +const ArenaAllocator = std.heap.ArenaAllocator; +const ArrayListUnmanaged = std.ArrayListUnmanaged; +const File = std.fs.File; +const InstallDir = std.Build.InstallDir; +const CompileStep = std.Build.CompileStep; +const Step = std.Build.Step; +const elf = std.elf; +const fs = std.fs; +const io = std.io; +const sort = std.sort; + +pub const base_id: Step.Id = .objcopy; + +pub const RawFormat = enum { + bin, + hex, +}; + +step: Step, +file_source: std.Build.FileSource, +basename: []const u8, +output_file: std.Build.GeneratedFile, + +format: ?RawFormat, +only_section: ?[]const u8, +pad_to: ?u64, + +pub const Options = struct { + basename: ?[]const u8 = null, + format: ?RawFormat = null, + only_section: ?[]const u8 = null, + pad_to: ?u64 = null, +}; + +pub fn create( + owner: *std.Build, + file_source: std.Build.FileSource, + options: Options, +) *ObjCopyStep { + const self = owner.allocator.create(ObjCopyStep) catch @panic("OOM"); + self.* = ObjCopyStep{ + .step = Step.init(.{ + .id = base_id, + .name = owner.fmt("objcopy {s}", .{file_source.getDisplayName()}), + .owner = owner, + .makeFn = make, + }), + .file_source = file_source, + .basename = options.basename orelse file_source.getDisplayName(), + .output_file = std.Build.GeneratedFile{ .step = &self.step }, + + .format = options.format, + .only_section = options.only_section, + .pad_to = options.pad_to, + }; + file_source.addStepDependencies(&self.step); + return self; +} + +pub fn getOutputSource(self: *const ObjCopyStep) std.Build.FileSource { + return .{ .generated = &self.output_file }; +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + const b = step.owner; + const self = @fieldParentPtr(ObjCopyStep, "step", step); + + var man = b.cache.obtain(); + defer man.deinit(); + + // Random bytes to make ObjCopyStep unique. 
Refresh this with new random + // bytes when ObjCopyStep implementation is modified incompatibly. + man.hash.add(@as(u32, 0xe18b7baf)); + + const full_src_path = self.file_source.getPath(b); + _ = try man.addFile(full_src_path, null); + man.hash.addOptionalBytes(self.only_section); + man.hash.addOptional(self.pad_to); + man.hash.addOptional(self.format); + + if (try step.cacheHit(&man)) { + // Cache hit, skip subprocess execution. + const digest = man.final(); + self.output_file.path = try b.cache_root.join(b.allocator, &.{ + "o", &digest, self.basename, + }); + return; + } + + const digest = man.final(); + const full_dest_path = try b.cache_root.join(b.allocator, &.{ "o", &digest, self.basename }); + const cache_path = "o" ++ fs.path.sep_str ++ digest; + b.cache_root.handle.makePath(cache_path) catch |err| { + return step.fail("unable to make path {s}: {s}", .{ cache_path, @errorName(err) }); + }; + + var argv = std.ArrayList([]const u8).init(b.allocator); + try argv.appendSlice(&.{ b.zig_exe, "objcopy" }); + + if (self.only_section) |only_section| { + try argv.appendSlice(&.{ "-j", only_section }); + } + if (self.pad_to) |pad_to| { + try argv.appendSlice(&.{ "--pad-to", b.fmt("{d}", .{pad_to}) }); + } + if (self.format) |format| switch (format) { + .bin => try argv.appendSlice(&.{ "-O", "binary" }), + .hex => try argv.appendSlice(&.{ "-O", "hex" }), + }; + + try argv.appendSlice(&.{ full_src_path, full_dest_path }); + + try argv.append("--listen=-"); + _ = try step.evalZigProcess(argv.items, prog_node); + + self.output_file.path = full_dest_path; + try man.writeManifest(); +} diff --git a/lib/std/Build/Step/Options.zig b/lib/std/Build/Step/Options.zig new file mode 100644 index 0000000000..101c284cf0 --- /dev/null +++ b/lib/std/Build/Step/Options.zig @@ -0,0 +1,421 @@ +const std = @import("std"); +const builtin = @import("builtin"); +const fs = std.fs; +const Step = std.Build.Step; +const GeneratedFile = std.Build.GeneratedFile; +const CompileStep = std.Build.CompileStep; +const FileSource = std.Build.FileSource; + +const OptionsStep = @This(); + +pub const base_id = .options; + +step: Step, +generated_file: GeneratedFile, + +contents: std.ArrayList(u8), +artifact_args: std.ArrayList(OptionArtifactArg), +file_source_args: std.ArrayList(OptionFileSourceArg), + +pub fn create(owner: *std.Build) *OptionsStep { + const self = owner.allocator.create(OptionsStep) catch @panic("OOM"); + self.* = .{ + .step = Step.init(.{ + .id = base_id, + .name = "options", + .owner = owner, + .makeFn = make, + }), + .generated_file = undefined, + .contents = std.ArrayList(u8).init(owner.allocator), + .artifact_args = std.ArrayList(OptionArtifactArg).init(owner.allocator), + .file_source_args = std.ArrayList(OptionFileSourceArg).init(owner.allocator), + }; + self.generated_file = .{ .step = &self.step }; + + return self; +} + +pub fn addOption(self: *OptionsStep, comptime T: type, name: []const u8, value: T) void { + return addOptionFallible(self, T, name, value) catch @panic("unhandled error"); +} + +fn addOptionFallible(self: *OptionsStep, comptime T: type, name: []const u8, value: T) !void { + const out = self.contents.writer(); + switch (T) { + []const []const u8 => { + try out.print("pub const {}: []const []const u8 = &[_][]const u8{{\n", .{std.zig.fmtId(name)}); + for (value) |slice| { + try out.print(" \"{}\",\n", .{std.zig.fmtEscapes(slice)}); + } + try out.writeAll("};\n"); + return; + }, + [:0]const u8 => { + try out.print("pub const {}: [:0]const u8 = \"{}\";\n", .{ std.zig.fmtId(name), 
std.zig.fmtEscapes(value) }); + return; + }, + []const u8 => { + try out.print("pub const {}: []const u8 = \"{}\";\n", .{ std.zig.fmtId(name), std.zig.fmtEscapes(value) }); + return; + }, + ?[:0]const u8 => { + try out.print("pub const {}: ?[:0]const u8 = ", .{std.zig.fmtId(name)}); + if (value) |payload| { + try out.print("\"{}\";\n", .{std.zig.fmtEscapes(payload)}); + } else { + try out.writeAll("null;\n"); + } + return; + }, + ?[]const u8 => { + try out.print("pub const {}: ?[]const u8 = ", .{std.zig.fmtId(name)}); + if (value) |payload| { + try out.print("\"{}\";\n", .{std.zig.fmtEscapes(payload)}); + } else { + try out.writeAll("null;\n"); + } + return; + }, + std.builtin.Version => { + try out.print( + \\pub const {}: @import("std").builtin.Version = .{{ + \\ .major = {d}, + \\ .minor = {d}, + \\ .patch = {d}, + \\}}; + \\ + , .{ + std.zig.fmtId(name), + + value.major, + value.minor, + value.patch, + }); + return; + }, + std.SemanticVersion => { + try out.print( + \\pub const {}: @import("std").SemanticVersion = .{{ + \\ .major = {d}, + \\ .minor = {d}, + \\ .patch = {d}, + \\ + , .{ + std.zig.fmtId(name), + + value.major, + value.minor, + value.patch, + }); + if (value.pre) |some| { + try out.print(" .pre = \"{}\",\n", .{std.zig.fmtEscapes(some)}); + } + if (value.build) |some| { + try out.print(" .build = \"{}\",\n", .{std.zig.fmtEscapes(some)}); + } + try out.writeAll("};\n"); + return; + }, + else => {}, + } + switch (@typeInfo(T)) { + .Enum => |enum_info| { + try out.print("pub const {} = enum {{\n", .{std.zig.fmtId(@typeName(T))}); + inline for (enum_info.fields) |field| { + try out.print(" {},\n", .{std.zig.fmtId(field.name)}); + } + try out.writeAll("};\n"); + try out.print("pub const {}: {s} = {s}.{s};\n", .{ + std.zig.fmtId(name), + std.zig.fmtId(@typeName(T)), + std.zig.fmtId(@typeName(T)), + std.zig.fmtId(@tagName(value)), + }); + return; + }, + else => {}, + } + try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(name), @typeName(T) }); + try printLiteral(out, value, 0); + try out.writeAll(";\n"); +} + +// TODO: non-recursive? +fn printLiteral(out: anytype, val: anytype, indent: u8) !void { + const T = @TypeOf(val); + switch (@typeInfo(T)) { + .Array => { + try out.print("{s} {{\n", .{@typeName(T)}); + for (val) |item| { + try out.writeByteNTimes(' ', indent + 4); + try printLiteral(out, item, indent + 4); + try out.writeAll(",\n"); + } + try out.writeByteNTimes(' ', indent); + try out.writeAll("}"); + }, + .Pointer => |p| { + if (p.size != .Slice) { + @compileError("Non-slice pointers are not yet supported in build options"); + } + try out.print("&[_]{s} {{\n", .{@typeName(p.child)}); + for (val) |item| { + try out.writeByteNTimes(' ', indent + 4); + try printLiteral(out, item, indent + 4); + try out.writeAll(",\n"); + } + try out.writeByteNTimes(' ', indent); + try out.writeAll("}"); + }, + .Optional => { + if (val) |inner| { + return printLiteral(out, inner, indent); + } else { + return out.writeAll("null"); + } + }, + .Void, + .Bool, + .Int, + .ComptimeInt, + .Float, + .Null, + => try out.print("{any}", .{val}), + else => @compileError(std.fmt.comptimePrint("`{s}` are not yet supported as build options", .{@tagName(@typeInfo(T))})), + } +} + +/// The value is the path in the cache dir. +/// Adds a dependency automatically. 
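+/// Illustrative use: `options.addOptionFileSource("data", some_file_source)`
+/// makes the generated options module expose
+/// `pub const data: []const u8 = "<path of that file in the cache dir>";`.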
+pub fn addOptionFileSource( + self: *OptionsStep, + name: []const u8, + source: FileSource, +) void { + self.file_source_args.append(.{ + .name = name, + .source = source.dupe(self.step.owner), + }) catch @panic("OOM"); + source.addStepDependencies(&self.step); +} + +/// The value is the path in the cache dir. +/// Adds a dependency automatically. +pub fn addOptionArtifact(self: *OptionsStep, name: []const u8, artifact: *CompileStep) void { + self.artifact_args.append(.{ .name = self.step.owner.dupe(name), .artifact = artifact }) catch @panic("OOM"); + self.step.dependOn(&artifact.step); +} + +pub fn createModule(self: *OptionsStep) *std.Build.Module { + return self.step.owner.createModule(.{ + .source_file = self.getSource(), + .dependencies = &.{}, + }); +} + +pub fn getSource(self: *OptionsStep) FileSource { + return .{ .generated = &self.generated_file }; +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + // This step completes so quickly that no progress is necessary. + _ = prog_node; + + const b = step.owner; + const self = @fieldParentPtr(OptionsStep, "step", step); + + for (self.artifact_args.items) |item| { + self.addOption( + []const u8, + item.name, + b.pathFromRoot(item.artifact.getOutputSource().getPath(b)), + ); + } + + for (self.file_source_args.items) |item| { + self.addOption( + []const u8, + item.name, + item.source.getPath(b), + ); + } + + const basename = "options.zig"; + + // Hash contents to file name. + var hash = b.cache.hash; + // Random bytes to make unique. Refresh this with new random bytes when + // implementation is modified in a non-backwards-compatible way. + hash.add(@as(u32, 0x38845ef8)); + hash.addBytes(self.contents.items); + const sub_path = "c" ++ fs.path.sep_str ++ hash.final() ++ fs.path.sep_str ++ basename; + + self.generated_file.path = try b.cache_root.join(b.allocator, &.{sub_path}); + + // Optimize for the hot path. Stat the file, and if it already exists, + // cache hit. + if (b.cache_root.handle.access(sub_path, .{})) |_| { + // This is the hot path, success. + step.result_cached = true; + return; + } else |outer_err| switch (outer_err) { + error.FileNotFound => { + const sub_dirname = fs.path.dirname(sub_path).?; + b.cache_root.handle.makePath(sub_dirname) catch |e| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.cache_root, sub_dirname, @errorName(e), + }); + }; + + const rand_int = std.crypto.random.int(u64); + const tmp_sub_path = "tmp" ++ fs.path.sep_str ++ + std.Build.hex64(rand_int) ++ fs.path.sep_str ++ + basename; + const tmp_sub_path_dirname = fs.path.dirname(tmp_sub_path).?; + + b.cache_root.handle.makePath(tmp_sub_path_dirname) catch |err| { + return step.fail("unable to make temporary directory '{}{s}': {s}", .{ + b.cache_root, tmp_sub_path_dirname, @errorName(err), + }); + }; + + b.cache_root.handle.writeFile(tmp_sub_path, self.contents.items) catch |err| { + return step.fail("unable to write options to '{}{s}': {s}", .{ + b.cache_root, tmp_sub_path, @errorName(err), + }); + }; + + b.cache_root.handle.rename(tmp_sub_path, sub_path) catch |err| switch (err) { + error.PathAlreadyExists => { + // Other process beat us to it. Clean up the temp file. 
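+                    // Losing this race is benign: the destination name embeds a
+                    // hash of the contents, so whatever the other process wrote
+                    // is byte-for-byte identical to what we were about to write.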
+ b.cache_root.handle.deleteFile(tmp_sub_path) catch |e| { + try step.addError("warning: unable to delete temp file '{}{s}': {s}", .{ + b.cache_root, tmp_sub_path, @errorName(e), + }); + }; + step.result_cached = true; + return; + }, + else => { + return step.fail("unable to rename options from '{}{s}' to '{}{s}': {s}", .{ + b.cache_root, tmp_sub_path, + b.cache_root, sub_path, + @errorName(err), + }); + }, + }; + }, + else => |e| return step.fail("unable to access options file '{}{s}': {s}", .{ + b.cache_root, sub_path, @errorName(e), + }), + } +} + +const OptionArtifactArg = struct { + name: []const u8, + artifact: *CompileStep, +}; + +const OptionFileSourceArg = struct { + name: []const u8, + source: FileSource, +}; + +test "OptionsStep" { + if (builtin.os.tag == .wasi) return error.SkipZigTest; + + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer arena.deinit(); + + const host = try std.zig.system.NativeTargetInfo.detect(.{}); + + var cache: std.Build.Cache = .{ + .gpa = arena.allocator(), + .manifest_dir = std.fs.cwd(), + }; + + var builder = try std.Build.create( + arena.allocator(), + "test", + .{ .path = "test", .handle = std.fs.cwd() }, + .{ .path = "test", .handle = std.fs.cwd() }, + .{ .path = "test", .handle = std.fs.cwd() }, + host, + &cache, + ); + defer builder.destroy(); + + const options = builder.addOptions(); + + // TODO this regressed at some point + //const KeywordEnum = enum { + // @"0.8.1", + //}; + + const nested_array = [2][2]u16{ + [2]u16{ 300, 200 }, + [2]u16{ 300, 200 }, + }; + const nested_slice: []const []const u16 = &[_][]const u16{ &nested_array[0], &nested_array[1] }; + + options.addOption(usize, "option1", 1); + options.addOption(?usize, "option2", null); + options.addOption(?usize, "option3", 3); + options.addOption(comptime_int, "option4", 4); + options.addOption([]const u8, "string", "zigisthebest"); + options.addOption(?[]const u8, "optional_string", null); + options.addOption([2][2]u16, "nested_array", nested_array); + options.addOption([]const []const u16, "nested_slice", nested_slice); + //options.addOption(KeywordEnum, "keyword_enum", .@"0.8.1"); + options.addOption(std.builtin.Version, "version", try std.builtin.Version.parse("0.1.2")); + options.addOption(std.SemanticVersion, "semantic_version", try std.SemanticVersion.parse("0.1.2-foo+bar")); + + try std.testing.expectEqualStrings( + \\pub const option1: usize = 1; + \\pub const option2: ?usize = null; + \\pub const option3: ?usize = 3; + \\pub const option4: comptime_int = 4; + \\pub const string: []const u8 = "zigisthebest"; + \\pub const optional_string: ?[]const u8 = null; + \\pub const nested_array: [2][2]u16 = [2][2]u16 { + \\ [2]u16 { + \\ 300, + \\ 200, + \\ }, + \\ [2]u16 { + \\ 300, + \\ 200, + \\ }, + \\}; + \\pub const nested_slice: []const []const u16 = &[_][]const u16 { + \\ &[_]u16 { + \\ 300, + \\ 200, + \\ }, + \\ &[_]u16 { + \\ 300, + \\ 200, + \\ }, + \\}; + //\\pub const KeywordEnum = enum { + //\\ @"0.8.1", + //\\}; + //\\pub const keyword_enum: KeywordEnum = KeywordEnum.@"0.8.1"; + \\pub const version: @import("std").builtin.Version = .{ + \\ .major = 0, + \\ .minor = 1, + \\ .patch = 2, + \\}; + \\pub const semantic_version: @import("std").SemanticVersion = .{ + \\ .major = 0, + \\ .minor = 1, + \\ .patch = 2, + \\ .pre = "foo", + \\ .build = "bar", + \\}; + \\ + , options.contents.items); + + _ = try std.zig.Ast.parse(arena.allocator(), try options.contents.toOwnedSliceSentinel(0), .zig); +} diff --git a/lib/std/Build/Step/RemoveDir.zig 
b/lib/std/Build/Step/RemoveDir.zig new file mode 100644 index 0000000000..59025a7e91 --- /dev/null +++ b/lib/std/Build/Step/RemoveDir.zig @@ -0,0 +1,42 @@ +const std = @import("std"); +const fs = std.fs; +const Step = std.Build.Step; +const RemoveDirStep = @This(); + +pub const base_id = .remove_dir; + +step: Step, +dir_path: []const u8, + +pub fn init(owner: *std.Build, dir_path: []const u8) RemoveDirStep { + return RemoveDirStep{ + .step = Step.init(.{ + .id = .remove_dir, + .name = owner.fmt("RemoveDir {s}", .{dir_path}), + .owner = owner, + .makeFn = make, + }), + .dir_path = owner.dupePath(dir_path), + }; +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + // TODO update progress node while walking file system. + // Should the standard library support this use case?? + _ = prog_node; + + const b = step.owner; + const self = @fieldParentPtr(RemoveDirStep, "step", step); + + b.build_root.handle.deleteTree(self.dir_path) catch |err| { + if (b.build_root.path) |base| { + return step.fail("unable to recursively delete path '{s}/{s}': {s}", .{ + base, self.dir_path, @errorName(err), + }); + } else { + return step.fail("unable to recursively delete path '{s}': {s}", .{ + self.dir_path, @errorName(err), + }); + } + }; +} diff --git a/lib/std/Build/Step/Run.zig b/lib/std/Build/Step/Run.zig new file mode 100644 index 0000000000..4e973cfd98 --- /dev/null +++ b/lib/std/Build/Step/Run.zig @@ -0,0 +1,1254 @@ +const std = @import("std"); +const builtin = @import("builtin"); +const Step = std.Build.Step; +const CompileStep = std.Build.CompileStep; +const WriteFileStep = std.Build.WriteFileStep; +const fs = std.fs; +const mem = std.mem; +const process = std.process; +const ArrayList = std.ArrayList; +const EnvMap = process.EnvMap; +const Allocator = mem.Allocator; +const ExecError = std.Build.ExecError; +const assert = std.debug.assert; + +const RunStep = @This(); + +pub const base_id: Step.Id = .run; + +step: Step, + +/// See also addArg and addArgs to modifying this directly +argv: ArrayList(Arg), + +/// Set this to modify the current working directory +/// TODO change this to a Build.Cache.Directory to better integrate with +/// future child process cwd API. +cwd: ?[]const u8, + +/// Override this field to modify the environment, or use setEnvironmentVariable +env_map: ?*EnvMap, + +/// Configures whether the RunStep is considered to have side-effects, and also +/// whether the RunStep will inherit stdio streams, forwarding them to the +/// parent process, in which case will require a global lock to prevent other +/// steps from interfering with stdio while the subprocess associated with this +/// RunStep is running. +/// If the RunStep is determined to not have side-effects, then execution will +/// be skipped if all output files are up-to-date and input files are +/// unchanged. +stdio: StdIo = .infer_from_args, +/// This field must be `null` if stdio is `inherit`. +stdin: ?[]const u8 = null, + +/// Additional file paths relative to build.zig that, when modified, indicate +/// that the RunStep should be re-executed. +/// If the RunStep is determined to have side-effects, this field is ignored +/// and the RunStep is always executed when it appears in the build graph. +extra_file_dependencies: []const []const u8 = &.{}, + +/// After adding an output argument, this step will by default rename itself +/// for a better display name in the build summary. +/// This can be disabled by setting this to false. 
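+/// For example (illustrative), adding an output file argument "out.bin" to a
+/// step named "my-tool" renames it to "my-tool (out.bin)".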
+rename_step_with_output_arg: bool = true, + +/// If this is true, a RunStep which is configured to check the output of the +/// executed binary will not fail the build if the binary cannot be executed +/// due to being for a foreign binary to the host system which is running the +/// build graph. +/// Command-line arguments such as -fqemu and -fwasmtime may affect whether a +/// binary is detected as foreign, as well as system configuration such as +/// Rosetta (macOS) and binfmt_misc (Linux). +/// If this RunStep is considered to have side-effects, then this flag does +/// nothing. +skip_foreign_checks: bool = false, + +/// If stderr or stdout exceeds this amount, the child process is killed and +/// the step fails. +max_stdio_size: usize = 10 * 1024 * 1024, + +captured_stdout: ?*Output = null, +captured_stderr: ?*Output = null, + +has_side_effects: bool = false, + +pub const StdIo = union(enum) { + /// Whether the RunStep has side-effects will be determined by whether or not one + /// of the args is an output file (added with `addOutputFileArg`). + /// If the RunStep is determined to have side-effects, this is the same as `inherit`. + /// The step will fail if the subprocess crashes or returns a non-zero exit code. + infer_from_args, + /// Causes the RunStep to be considered to have side-effects, and therefore + /// always execute when it appears in the build graph. + /// It also means that this step will obtain a global lock to prevent other + /// steps from running in the meantime. + /// The step will fail if the subprocess crashes or returns a non-zero exit code. + inherit, + /// Causes the RunStep to be considered to *not* have side-effects. The + /// process will be re-executed if any of the input dependencies are + /// modified. The exit code and standard I/O streams will be checked for + /// certain conditions, and the step will succeed or fail based on these + /// conditions. + /// Note that an explicit check for exit code 0 needs to be added to this + /// list if such a check is desirable. + check: std.ArrayList(Check), + /// This RunStep is running a zig unit test binary and will communicate + /// extra metadata over the IPC protocol. 
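+    /// Enabled via `enableTestRunnerMode`, which also appends `--listen=-` so
+    /// the child process serves that metadata over its stdio streams.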
+ zig_test, + + pub const Check = union(enum) { + expect_stderr_exact: []const u8, + expect_stderr_match: []const u8, + expect_stdout_exact: []const u8, + expect_stdout_match: []const u8, + expect_term: std.process.Child.Term, + }; +}; + +pub const Arg = union(enum) { + artifact: *CompileStep, + file_source: std.Build.FileSource, + directory_source: std.Build.FileSource, + bytes: []u8, + output: *Output, +}; + +pub const Output = struct { + generated_file: std.Build.GeneratedFile, + prefix: []const u8, + basename: []const u8, +}; + +pub fn create(owner: *std.Build, name: []const u8) *RunStep { + const self = owner.allocator.create(RunStep) catch @panic("OOM"); + self.* = .{ + .step = Step.init(.{ + .id = base_id, + .name = name, + .owner = owner, + .makeFn = make, + }), + .argv = ArrayList(Arg).init(owner.allocator), + .cwd = null, + .env_map = null, + }; + return self; +} + +pub fn setName(self: *RunStep, name: []const u8) void { + self.step.name = name; + self.rename_step_with_output_arg = false; +} + +pub fn enableTestRunnerMode(rs: *RunStep) void { + rs.stdio = .zig_test; + rs.addArgs(&.{"--listen=-"}); +} + +pub fn addArtifactArg(self: *RunStep, artifact: *CompileStep) void { + self.argv.append(Arg{ .artifact = artifact }) catch @panic("OOM"); + self.step.dependOn(&artifact.step); +} + +/// This provides file path as a command line argument to the command being +/// run, and returns a FileSource which can be used as inputs to other APIs +/// throughout the build system. +pub fn addOutputFileArg(rs: *RunStep, basename: []const u8) std.Build.FileSource { + return addPrefixedOutputFileArg(rs, "", basename); +} + +pub fn addPrefixedOutputFileArg( + rs: *RunStep, + prefix: []const u8, + basename: []const u8, +) std.Build.FileSource { + const b = rs.step.owner; + + const output = b.allocator.create(Output) catch @panic("OOM"); + output.* = .{ + .prefix = prefix, + .basename = basename, + .generated_file = .{ .step = &rs.step }, + }; + rs.argv.append(.{ .output = output }) catch @panic("OOM"); + + if (rs.rename_step_with_output_arg) { + rs.setName(b.fmt("{s} ({s})", .{ rs.step.name, basename })); + } + + return .{ .generated = &output.generated_file }; +} + +pub fn addFileSourceArg(self: *RunStep, file_source: std.Build.FileSource) void { + self.argv.append(.{ + .file_source = file_source.dupe(self.step.owner), + }) catch @panic("OOM"); + file_source.addStepDependencies(&self.step); +} + +pub fn addDirectorySourceArg(self: *RunStep, directory_source: std.Build.FileSource) void { + self.argv.append(.{ + .directory_source = directory_source.dupe(self.step.owner), + }) catch @panic("OOM"); + directory_source.addStepDependencies(&self.step); +} + +pub fn addArg(self: *RunStep, arg: []const u8) void { + self.argv.append(.{ .bytes = self.step.owner.dupe(arg) }) catch @panic("OOM"); +} + +pub fn addArgs(self: *RunStep, args: []const []const u8) void { + for (args) |arg| { + self.addArg(arg); + } +} + +pub fn clearEnvironment(self: *RunStep) void { + const b = self.step.owner; + const new_env_map = b.allocator.create(EnvMap) catch @panic("OOM"); + new_env_map.* = EnvMap.init(b.allocator); + self.env_map = new_env_map; +} + +pub fn addPathDir(self: *RunStep, search_path: []const u8) void { + const b = self.step.owner; + const env_map = getEnvMapInternal(self); + + const key = "PATH"; + var prev_path = env_map.get(key); + + if (prev_path) |pp| { + const new_path = b.fmt("{s}" ++ [1]u8{fs.path.delimiter} ++ "{s}", .{ pp, search_path }); + env_map.put(key, new_path) catch @panic("OOM"); + } else { + 
env_map.put(key, b.dupePath(search_path)) catch @panic("OOM"); + } +} + +pub fn getEnvMap(self: *RunStep) *EnvMap { + return getEnvMapInternal(self); +} + +fn getEnvMapInternal(self: *RunStep) *EnvMap { + const arena = self.step.owner.allocator; + return self.env_map orelse { + const env_map = arena.create(EnvMap) catch @panic("OOM"); + env_map.* = process.getEnvMap(arena) catch @panic("unhandled error"); + self.env_map = env_map; + return env_map; + }; +} + +pub fn setEnvironmentVariable(self: *RunStep, key: []const u8, value: []const u8) void { + const b = self.step.owner; + const env_map = self.getEnvMap(); + env_map.put(b.dupe(key), b.dupe(value)) catch @panic("unhandled error"); +} + +pub fn removeEnvironmentVariable(self: *RunStep, key: []const u8) void { + self.getEnvMap().remove(key); +} + +/// Adds a check for exact stderr match. Does not add any other checks. +pub fn expectStdErrEqual(self: *RunStep, bytes: []const u8) void { + const new_check: StdIo.Check = .{ .expect_stderr_exact = self.step.owner.dupe(bytes) }; + self.addCheck(new_check); +} + +/// Adds a check for exact stdout match as well as a check for exit code 0, if +/// there is not already an expected termination check. +pub fn expectStdOutEqual(self: *RunStep, bytes: []const u8) void { + const new_check: StdIo.Check = .{ .expect_stdout_exact = self.step.owner.dupe(bytes) }; + self.addCheck(new_check); + if (!self.hasTermCheck()) { + self.expectExitCode(0); + } +} + +pub fn expectExitCode(self: *RunStep, code: u8) void { + const new_check: StdIo.Check = .{ .expect_term = .{ .Exited = code } }; + self.addCheck(new_check); +} + +pub fn hasTermCheck(self: RunStep) bool { + for (self.stdio.check.items) |check| switch (check) { + .expect_term => return true, + else => continue, + }; + return false; +} + +pub fn addCheck(self: *RunStep, new_check: StdIo.Check) void { + switch (self.stdio) { + .infer_from_args => { + self.stdio = .{ .check = std.ArrayList(StdIo.Check).init(self.step.owner.allocator) }; + self.stdio.check.append(new_check) catch @panic("OOM"); + }, + .check => |*checks| checks.append(new_check) catch @panic("OOM"), + else => @panic("illegal call to addCheck: conflicting helper method calls. Suggest to directly set stdio field of RunStep instead"), + } +} + +pub fn captureStdErr(self: *RunStep) std.Build.FileSource { + assert(self.stdio != .inherit); + + if (self.captured_stderr) |output| return .{ .generated = &output.generated_file }; + + const output = self.step.owner.allocator.create(Output) catch @panic("OOM"); + output.* = .{ + .prefix = "", + .basename = "stderr", + .generated_file = .{ .step = &self.step }, + }; + self.captured_stderr = output; + return .{ .generated = &output.generated_file }; +} + +pub fn captureStdOut(self: *RunStep) std.Build.FileSource { + assert(self.stdio != .inherit); + + if (self.captured_stdout) |output| return .{ .generated = &output.generated_file }; + + const output = self.step.owner.allocator.create(Output) catch @panic("OOM"); + output.* = .{ + .prefix = "", + .basename = "stdout", + .generated_file = .{ .step = &self.step }, + }; + self.captured_stdout = output; + return .{ .generated = &output.generated_file }; +} + +/// Returns whether the RunStep has side effects *other than* updating the output arguments. 
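+/// With `.infer_from_args`, the step counts as side-effect-free only if at
+/// least one output argument or captured stream exists; `.inherit` always has
+/// side effects, and an explicit `has_side_effects = true` overrides everything.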
+fn hasSideEffects(self: RunStep) bool { + if (self.has_side_effects) return true; + return switch (self.stdio) { + .infer_from_args => !self.hasAnyOutputArgs(), + .inherit => true, + .check => false, + .zig_test => false, + }; +} + +fn hasAnyOutputArgs(self: RunStep) bool { + if (self.captured_stdout != null) return true; + if (self.captured_stderr != null) return true; + for (self.argv.items) |arg| switch (arg) { + .output => return true, + else => continue, + }; + return false; +} + +fn checksContainStdout(checks: []const StdIo.Check) bool { + for (checks) |check| switch (check) { + .expect_stderr_exact, + .expect_stderr_match, + .expect_term, + => continue, + + .expect_stdout_exact, + .expect_stdout_match, + => return true, + }; + return false; +} + +fn checksContainStderr(checks: []const StdIo.Check) bool { + for (checks) |check| switch (check) { + .expect_stdout_exact, + .expect_stdout_match, + .expect_term, + => continue, + + .expect_stderr_exact, + .expect_stderr_match, + => return true, + }; + return false; +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + const b = step.owner; + const arena = b.allocator; + const self = @fieldParentPtr(RunStep, "step", step); + const has_side_effects = self.hasSideEffects(); + + var argv_list = ArrayList([]const u8).init(arena); + var output_placeholders = ArrayList(struct { + index: usize, + output: *Output, + }).init(arena); + + var man = b.cache.obtain(); + defer man.deinit(); + + for (self.argv.items) |arg| { + switch (arg) { + .bytes => |bytes| { + try argv_list.append(bytes); + man.hash.addBytes(bytes); + }, + .file_source => |file| { + const file_path = file.getPath(b); + try argv_list.append(file_path); + _ = try man.addFile(file_path, null); + }, + .directory_source => |file| { + const file_path = file.getPath(b); + try argv_list.append(file_path); + man.hash.addBytes(file_path); + }, + .artifact => |artifact| { + if (artifact.target.isWindows()) { + // On Windows we don't have rpaths so we have to add .dll search paths to PATH + self.addPathForDynLibs(artifact); + } + const file_path = artifact.installed_path orelse + artifact.getOutputSource().getPath(b); + + try argv_list.append(file_path); + + _ = try man.addFile(file_path, null); + }, + .output => |output| { + man.hash.addBytes(output.prefix); + man.hash.addBytes(output.basename); + // Add a placeholder into the argument list because we need the + // manifest hash to be updated with all arguments before the + // object directory is computed. 
+ try argv_list.append(""); + try output_placeholders.append(.{ + .index = argv_list.items.len - 1, + .output = output, + }); + }, + } + } + + if (self.captured_stdout) |output| { + man.hash.addBytes(output.basename); + } + + if (self.captured_stderr) |output| { + man.hash.addBytes(output.basename); + } + + hashStdIo(&man.hash, self.stdio); + + if (has_side_effects) { + try runCommand(self, argv_list.items, has_side_effects, null, prog_node); + return; + } + + for (self.extra_file_dependencies) |file_path| { + _ = try man.addFile(b.pathFromRoot(file_path), null); + } + + if (try step.cacheHit(&man)) { + // cache hit, skip running command + const digest = man.final(); + for (output_placeholders.items) |placeholder| { + placeholder.output.generated_file.path = try b.cache_root.join(arena, &.{ + "o", &digest, placeholder.output.basename, + }); + } + + if (self.captured_stdout) |output| { + output.generated_file.path = try b.cache_root.join(arena, &.{ + "o", &digest, output.basename, + }); + } + + if (self.captured_stderr) |output| { + output.generated_file.path = try b.cache_root.join(arena, &.{ + "o", &digest, output.basename, + }); + } + + step.result_cached = true; + return; + } + + const digest = man.final(); + + for (output_placeholders.items) |placeholder| { + const output_components = .{ "o", &digest, placeholder.output.basename }; + const output_sub_path = try fs.path.join(arena, &output_components); + const output_sub_dir_path = fs.path.dirname(output_sub_path).?; + b.cache_root.handle.makePath(output_sub_dir_path) catch |err| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.cache_root, output_sub_dir_path, @errorName(err), + }); + }; + const output_path = try b.cache_root.join(arena, &output_components); + placeholder.output.generated_file.path = output_path; + const cli_arg = if (placeholder.output.prefix.len == 0) + output_path + else + b.fmt("{s}{s}", .{ placeholder.output.prefix, output_path }); + argv_list.items[placeholder.index] = cli_arg; + } + + try runCommand(self, argv_list.items, has_side_effects, &digest, prog_node); + + try step.writeManifest(&man); +} + +fn formatTerm( + term: ?std.process.Child.Term, + comptime fmt: []const u8, + options: std.fmt.FormatOptions, + writer: anytype, +) !void { + _ = fmt; + _ = options; + if (term) |t| switch (t) { + .Exited => |code| try writer.print("exited with code {}", .{code}), + .Signal => |sig| try writer.print("terminated with signal {}", .{sig}), + .Stopped => |sig| try writer.print("stopped with signal {}", .{sig}), + .Unknown => |code| try writer.print("terminated for unknown reason with code {}", .{code}), + } else { + try writer.writeAll("exited with any code"); + } +} +fn fmtTerm(term: ?std.process.Child.Term) std.fmt.Formatter(formatTerm) { + return .{ .data = term }; +} + +fn termMatches(expected: ?std.process.Child.Term, actual: std.process.Child.Term) bool { + return if (expected) |e| switch (e) { + .Exited => |expected_code| switch (actual) { + .Exited => |actual_code| expected_code == actual_code, + else => false, + }, + .Signal => |expected_sig| switch (actual) { + .Signal => |actual_sig| expected_sig == actual_sig, + else => false, + }, + .Stopped => |expected_sig| switch (actual) { + .Stopped => |actual_sig| expected_sig == actual_sig, + else => false, + }, + .Unknown => |expected_code| switch (actual) { + .Unknown => |actual_code| expected_code == actual_code, + else => false, + }, + } else switch (actual) { + .Exited => true, + else => false, + }; +} + +fn runCommand( + self: *RunStep, + argv: 
[]const []const u8, + has_side_effects: bool, + digest: ?*const [std.Build.Cache.hex_digest_len]u8, + prog_node: *std.Progress.Node, +) !void { + const step = &self.step; + const b = step.owner; + const arena = b.allocator; + + try step.handleChildProcUnsupported(self.cwd, argv); + try Step.handleVerbose2(step.owner, self.cwd, self.env_map, argv); + + const allow_skip = switch (self.stdio) { + .check, .zig_test => self.skip_foreign_checks, + else => false, + }; + + var interp_argv = std.ArrayList([]const u8).init(b.allocator); + defer interp_argv.deinit(); + + const result = spawnChildAndCollect(self, argv, has_side_effects, prog_node) catch |err| term: { + // InvalidExe: cpu arch mismatch + // FileNotFound: can happen with a wrong dynamic linker path + if (err == error.InvalidExe or err == error.FileNotFound) interpret: { + // TODO: learn the target from the binary directly rather than from + // relying on it being a CompileStep. This will make this logic + // work even for the edge case that the binary was produced by a + // third party. + const exe = switch (self.argv.items[0]) { + .artifact => |exe| exe, + else => break :interpret, + }; + switch (exe.kind) { + .exe, .@"test" => {}, + else => break :interpret, + } + + const need_cross_glibc = exe.target.isGnuLibC() and exe.is_linking_libc; + switch (b.host.getExternalExecutor(exe.target_info, .{ + .qemu_fixes_dl = need_cross_glibc and b.glibc_runtimes_dir != null, + .link_libc = exe.is_linking_libc, + })) { + .native, .rosetta => { + if (allow_skip) return error.MakeSkipped; + break :interpret; + }, + .wine => |bin_name| { + if (b.enable_wine) { + try interp_argv.append(bin_name); + try interp_argv.appendSlice(argv); + } else { + return failForeign(self, "-fwine", argv[0], exe); + } + }, + .qemu => |bin_name| { + if (b.enable_qemu) { + const glibc_dir_arg = if (need_cross_glibc) + b.glibc_runtimes_dir orelse + return failForeign(self, "--glibc-runtimes", argv[0], exe) + else + null; + + try interp_argv.append(bin_name); + + if (glibc_dir_arg) |dir| { + // TODO look into making this a call to `linuxTriple`. This + // needs the directory to be called "i686" rather than + // "x86" which is why we do it manually here. 
+ const fmt_str = "{s}" ++ fs.path.sep_str ++ "{s}-{s}-{s}"; + const cpu_arch = exe.target.getCpuArch(); + const os_tag = exe.target.getOsTag(); + const abi = exe.target.getAbi(); + const cpu_arch_name: []const u8 = if (cpu_arch == .x86) + "i686" + else + @tagName(cpu_arch); + const full_dir = try std.fmt.allocPrint(b.allocator, fmt_str, .{ + dir, cpu_arch_name, @tagName(os_tag), @tagName(abi), + }); + + try interp_argv.append("-L"); + try interp_argv.append(full_dir); + } + + try interp_argv.appendSlice(argv); + } else { + return failForeign(self, "-fqemu", argv[0], exe); + } + }, + .darling => |bin_name| { + if (b.enable_darling) { + try interp_argv.append(bin_name); + try interp_argv.appendSlice(argv); + } else { + return failForeign(self, "-fdarling", argv[0], exe); + } + }, + .wasmtime => |bin_name| { + if (b.enable_wasmtime) { + try interp_argv.append(bin_name); + try interp_argv.append("--dir=."); + try interp_argv.append(argv[0]); + try interp_argv.append("--"); + try interp_argv.appendSlice(argv[1..]); + } else { + return failForeign(self, "-fwasmtime", argv[0], exe); + } + }, + .bad_dl => |foreign_dl| { + if (allow_skip) return error.MakeSkipped; + + const host_dl = b.host.dynamic_linker.get() orelse "(none)"; + + return step.fail( + \\the host system is unable to execute binaries from the target + \\ because the host dynamic linker is '{s}', + \\ while the target dynamic linker is '{s}'. + \\ consider setting the dynamic linker or enabling skip_foreign_checks in the Run step + , .{ host_dl, foreign_dl }); + }, + .bad_os_or_cpu => { + if (allow_skip) return error.MakeSkipped; + + const host_name = try b.host.target.zigTriple(b.allocator); + const foreign_name = try exe.target.zigTriple(b.allocator); + + return step.fail("the host system ({s}) is unable to execute binaries from the target ({s})", .{ + host_name, foreign_name, + }); + }, + } + + if (exe.target.isWindows()) { + // On Windows we don't have rpaths so we have to add .dll search paths to PATH + self.addPathForDynLibs(exe); + } + + try Step.handleVerbose2(step.owner, self.cwd, self.env_map, interp_argv.items); + + break :term spawnChildAndCollect(self, interp_argv.items, has_side_effects, prog_node) catch |e| { + return step.fail("unable to spawn interpreter {s}: {s}", .{ + interp_argv.items[0], @errorName(e), + }); + }; + } + + return step.fail("unable to spawn {s}: {s}", .{ argv[0], @errorName(err) }); + }; + + step.result_duration_ns = result.elapsed_ns; + step.result_peak_rss = result.peak_rss; + step.test_results = result.stdio.test_results; + + // Capture stdout and stderr to GeneratedFile objects. 
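The executor dispatch above only falls back to an emulator when the corresponding feature flag is enabled on the builder; a build script can instead opt into skipping such checks. A sketch, under the assumption that the standard build runner's -fqemu/-fwine options are what populate `b.enable_qemu` and friends:

    const tests = b.addTest(.{
        .root_source_file = .{ .path = "src/main.zig" },
        .target = .{ .cpu_arch = .aarch64, .os_tag = .linux, .abi = .gnu },
    });
    const run_tests = b.addRunArtifact(tests);
    run_tests.skip_foreign_checks = true; // MakeSkipped rather than a failure when no emulator is available
    b.step("test", "Run unit tests").dependOn(&run_tests.step);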
+ const Stream = struct { + captured: ?*Output, + is_null: bool, + bytes: []const u8, + }; + for ([_]Stream{ + .{ + .captured = self.captured_stdout, + .is_null = result.stdio.stdout_null, + .bytes = result.stdio.stdout, + }, + .{ + .captured = self.captured_stderr, + .is_null = result.stdio.stderr_null, + .bytes = result.stdio.stderr, + }, + }) |stream| { + if (stream.captured) |output| { + assert(!stream.is_null); + + const output_components = .{ "o", digest.?, output.basename }; + const output_path = try b.cache_root.join(arena, &output_components); + output.generated_file.path = output_path; + + const sub_path = try fs.path.join(arena, &output_components); + const sub_path_dirname = fs.path.dirname(sub_path).?; + b.cache_root.handle.makePath(sub_path_dirname) catch |err| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.cache_root, sub_path_dirname, @errorName(err), + }); + }; + b.cache_root.handle.writeFile(sub_path, stream.bytes) catch |err| { + return step.fail("unable to write file '{}{s}': {s}", .{ + b.cache_root, sub_path, @errorName(err), + }); + }; + } + } + + const final_argv = if (interp_argv.items.len == 0) argv else interp_argv.items; + + switch (self.stdio) { + .check => |checks| for (checks.items) |check| switch (check) { + .expect_stderr_exact => |expected_bytes| { + assert(!result.stdio.stderr_null); + if (!mem.eql(u8, expected_bytes, result.stdio.stderr)) { + return step.fail( + \\ + \\========= expected this stderr: ========= + \\{s} + \\========= but found: ==================== + \\{s} + \\========= from the following command: === + \\{s} + , .{ + expected_bytes, + result.stdio.stderr, + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + }, + .expect_stderr_match => |match| { + assert(!result.stdio.stderr_null); + if (mem.indexOf(u8, result.stdio.stderr, match) == null) { + return step.fail( + \\ + \\========= expected to find in stderr: ========= + \\{s} + \\========= but stderr does not contain it: ===== + \\{s} + \\========= from the following command: ========= + \\{s} + , .{ + match, + result.stdio.stderr, + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + }, + .expect_stdout_exact => |expected_bytes| { + assert(!result.stdio.stdout_null); + if (!mem.eql(u8, expected_bytes, result.stdio.stdout)) { + return step.fail( + \\ + \\========= expected this stdout: ========= + \\{s} + \\========= but found: ==================== + \\{s} + \\========= from the following command: === + \\{s} + , .{ + expected_bytes, + result.stdio.stdout, + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + }, + .expect_stdout_match => |match| { + assert(!result.stdio.stdout_null); + if (mem.indexOf(u8, result.stdio.stdout, match) == null) { + return step.fail( + \\ + \\========= expected to find in stdout: ========= + \\{s} + \\========= but stdout does not contain it: ===== + \\{s} + \\========= from the following command: ========= + \\{s} + , .{ + match, + result.stdio.stdout, + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + }, + .expect_term => |expected_term| { + if (!termMatches(expected_term, result.term)) { + return step.fail("the following command {} (expected {}):\n{s}", .{ + fmtTerm(result.term), + fmtTerm(expected_term), + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + }, + }, + .zig_test => { + const prefix: []const u8 = p: { + if (result.stdio.test_metadata) |tm| { + if (tm.next_index <= tm.names.len) { + const name = tm.testName(tm.next_index - 1); + break :p b.fmt("while executing 
test '{s}', ", .{name}); + } + } + break :p ""; + }; + const expected_term: std.process.Child.Term = .{ .Exited = 0 }; + if (!termMatches(expected_term, result.term)) { + return step.fail("{s}the following command {} (expected {}):\n{s}", .{ + prefix, + fmtTerm(result.term), + fmtTerm(expected_term), + try Step.allocPrintCmd(arena, self.cwd, final_argv), + }); + } + if (!result.stdio.test_results.isSuccess()) { + return step.fail( + "{s}the following test command failed:\n{s}", + .{ prefix, try Step.allocPrintCmd(arena, self.cwd, final_argv) }, + ); + } + }, + else => { + try step.handleChildProcessTerm(result.term, self.cwd, final_argv); + }, + } +} + +const ChildProcResult = struct { + term: std.process.Child.Term, + elapsed_ns: u64, + peak_rss: usize, + + stdio: StdIoResult, +}; + +fn spawnChildAndCollect( + self: *RunStep, + argv: []const []const u8, + has_side_effects: bool, + prog_node: *std.Progress.Node, +) !ChildProcResult { + const b = self.step.owner; + const arena = b.allocator; + + var child = std.process.Child.init(argv, arena); + if (self.cwd) |cwd| { + child.cwd = b.pathFromRoot(cwd); + } else { + child.cwd = b.build_root.path; + child.cwd_dir = b.build_root.handle; + } + child.env_map = self.env_map orelse b.env_map; + child.request_resource_usage_statistics = true; + + child.stdin_behavior = switch (self.stdio) { + .infer_from_args => if (has_side_effects) .Inherit else .Ignore, + .inherit => .Inherit, + .check => .Ignore, + .zig_test => .Pipe, + }; + child.stdout_behavior = switch (self.stdio) { + .infer_from_args => if (has_side_effects) .Inherit else .Ignore, + .inherit => .Inherit, + .check => |checks| if (checksContainStdout(checks.items)) .Pipe else .Ignore, + .zig_test => .Pipe, + }; + child.stderr_behavior = switch (self.stdio) { + .infer_from_args => if (has_side_effects) .Inherit else .Pipe, + .inherit => .Inherit, + .check => .Pipe, + .zig_test => .Pipe, + }; + if (self.captured_stdout != null) child.stdout_behavior = .Pipe; + if (self.captured_stderr != null) child.stderr_behavior = .Pipe; + if (self.stdin != null) { + assert(child.stdin_behavior != .Inherit); + child.stdin_behavior = .Pipe; + } + + try child.spawn(); + var timer = try std.time.Timer.start(); + + const result = if (self.stdio == .zig_test) + evalZigTest(self, &child, prog_node) + else + evalGeneric(self, &child); + + const term = try child.wait(); + const elapsed_ns = timer.read(); + + return .{ + .stdio = try result, + .term = term, + .elapsed_ns = elapsed_ns, + .peak_rss = child.resource_usage_statistics.getMaxRss() orelse 0, + }; +} + +const StdIoResult = struct { + // These use boolean flags instead of optionals as a workaround for + // https://github.com/ziglang/zig/issues/14783 + stdout: []const u8, + stderr: []const u8, + stdout_null: bool, + stderr_null: bool, + test_results: Step.TestResults, + test_metadata: ?TestMetadata, +}; + +fn evalZigTest( + self: *RunStep, + child: *std.process.Child, + prog_node: *std.Progress.Node, +) !StdIoResult { + const gpa = self.step.owner.allocator; + const arena = self.step.owner.allocator; + + var poller = std.io.poll(gpa, enum { stdout, stderr }, .{ + .stdout = child.stdout.?, + .stderr = child.stderr.?, + }); + defer poller.deinit(); + + try sendMessage(child.stdin.?, .query_test_metadata); + + const Header = std.zig.Server.Message.Header; + + const stdout = poller.fifo(.stdout); + const stderr = poller.fifo(.stderr); + + var fail_count: u32 = 0; + var skip_count: u32 = 0; + var leak_count: u32 = 0; + var test_count: u32 = 0; + + var metadata: 
?TestMetadata = null; + + var sub_prog_node: ?std.Progress.Node = null; + defer if (sub_prog_node) |*n| n.end(); + + poll: while (true) { + while (stdout.readableLength() < @sizeOf(Header)) { + if (!(try poller.poll())) break :poll; + } + const header = stdout.reader().readStruct(Header) catch unreachable; + while (stdout.readableLength() < header.bytes_len) { + if (!(try poller.poll())) break :poll; + } + const body = stdout.readableSliceOfLen(header.bytes_len); + + switch (header.tag) { + .zig_version => { + if (!std.mem.eql(u8, builtin.zig_version_string, body)) { + return self.step.fail( + "zig version mismatch build runner vs compiler: '{s}' vs '{s}'", + .{ builtin.zig_version_string, body }, + ); + } + }, + .test_metadata => { + const TmHdr = std.zig.Server.Message.TestMetadata; + const tm_hdr = @ptrCast(*align(1) const TmHdr, body); + test_count = tm_hdr.tests_len; + + const names_bytes = body[@sizeOf(TmHdr)..][0 .. test_count * @sizeOf(u32)]; + const async_frame_lens_bytes = body[@sizeOf(TmHdr) + names_bytes.len ..][0 .. test_count * @sizeOf(u32)]; + const expected_panic_msgs_bytes = body[@sizeOf(TmHdr) + names_bytes.len + async_frame_lens_bytes.len ..][0 .. test_count * @sizeOf(u32)]; + const string_bytes = body[@sizeOf(TmHdr) + names_bytes.len + async_frame_lens_bytes.len + expected_panic_msgs_bytes.len ..][0..tm_hdr.string_bytes_len]; + + const names = std.mem.bytesAsSlice(u32, names_bytes); + const async_frame_lens = std.mem.bytesAsSlice(u32, async_frame_lens_bytes); + const expected_panic_msgs = std.mem.bytesAsSlice(u32, expected_panic_msgs_bytes); + const names_aligned = try arena.alloc(u32, names.len); + for (names_aligned, names) |*dest, src| dest.* = src; + + const async_frame_lens_aligned = try arena.alloc(u32, async_frame_lens.len); + for (async_frame_lens_aligned, async_frame_lens) |*dest, src| dest.* = src; + + const expected_panic_msgs_aligned = try arena.alloc(u32, expected_panic_msgs.len); + for (expected_panic_msgs_aligned, expected_panic_msgs) |*dest, src| dest.* = src; + + prog_node.setEstimatedTotalItems(names.len); + metadata = .{ + .string_bytes = try arena.dupe(u8, string_bytes), + .names = names_aligned, + .async_frame_lens = async_frame_lens_aligned, + .expected_panic_msgs = expected_panic_msgs_aligned, + .next_index = 0, + .prog_node = prog_node, + }; + + try requestNextTest(child.stdin.?, &metadata.?, &sub_prog_node); + }, + .test_results => { + const md = metadata.?; + + const TrHdr = std.zig.Server.Message.TestResults; + const tr_hdr = @ptrCast(*align(1) const TrHdr, body); + fail_count += @boolToInt(tr_hdr.flags.fail); + skip_count += @boolToInt(tr_hdr.flags.skip); + leak_count += @boolToInt(tr_hdr.flags.leak); + + if (tr_hdr.flags.fail or tr_hdr.flags.leak) { + const name = std.mem.sliceTo(md.string_bytes[md.names[tr_hdr.index]..], 0); + const msg = std.mem.trim(u8, stderr.readableSlice(0), "\n"); + const label = if (tr_hdr.flags.fail) "failed" else "leaked"; + if (msg.len > 0) { + try self.step.addError("'{s}' {s}: {s}", .{ name, label, msg }); + } else { + try self.step.addError("'{s}' {s}", .{ name, label }); + } + stderr.discard(msg.len); + } + + try requestNextTest(child.stdin.?, &metadata.?, &sub_prog_node); + }, + else => {}, // ignore other messages + } + + stdout.discard(body.len); + } + + if (stderr.readableLength() > 0) { + const msg = std.mem.trim(u8, try stderr.toOwnedSlice(), "\n"); + if (msg.len > 0) try self.step.result_error_msgs.append(arena, msg); + } + + // Send EOF to stdin. 
+ child.stdin.?.close(); + child.stdin = null; + + return .{ + .stdout = &.{}, + .stderr = &.{}, + .stdout_null = true, + .stderr_null = true, + .test_results = .{ + .test_count = test_count, + .fail_count = fail_count, + .skip_count = skip_count, + .leak_count = leak_count, + }, + .test_metadata = metadata, + }; +} + +const TestMetadata = struct { + names: []const u32, + async_frame_lens: []const u32, + expected_panic_msgs: []const u32, + string_bytes: []const u8, + next_index: u32, + prog_node: *std.Progress.Node, + + fn testName(tm: TestMetadata, index: u32) []const u8 { + return std.mem.sliceTo(tm.string_bytes[tm.names[index]..], 0); + } +}; + +fn requestNextTest(in: fs.File, metadata: *TestMetadata, sub_prog_node: *?std.Progress.Node) !void { + while (metadata.next_index < metadata.names.len) { + const i = metadata.next_index; + metadata.next_index += 1; + + if (metadata.async_frame_lens[i] != 0) continue; + if (metadata.expected_panic_msgs[i] != 0) continue; + + const name = metadata.testName(i); + if (sub_prog_node.*) |*n| n.end(); + sub_prog_node.* = metadata.prog_node.start(name, 0); + + try sendRunTestMessage(in, i); + return; + } else { + try sendMessage(in, .exit); + } +} + +fn sendMessage(file: std.fs.File, tag: std.zig.Client.Message.Tag) !void { + const header: std.zig.Client.Message.Header = .{ + .tag = tag, + .bytes_len = 0, + }; + try file.writeAll(std.mem.asBytes(&header)); +} + +fn sendRunTestMessage(file: std.fs.File, index: u32) !void { + const header: std.zig.Client.Message.Header = .{ + .tag = .run_test, + .bytes_len = 4, + }; + const full_msg = std.mem.asBytes(&header) ++ std.mem.asBytes(&index); + try file.writeAll(full_msg); +} + +fn evalGeneric(self: *RunStep, child: *std.process.Child) !StdIoResult { + const arena = self.step.owner.allocator; + + if (self.stdin) |stdin| { + child.stdin.?.writeAll(stdin) catch |err| { + return self.step.fail("unable to write stdin: {s}", .{@errorName(err)}); + }; + child.stdin.?.close(); + child.stdin = null; + } + + // These are not optionals, as a workaround for + // https://github.com/ziglang/zig/issues/14783 + var stdout_bytes: []const u8 = undefined; + var stderr_bytes: []const u8 = undefined; + var stdout_null = true; + var stderr_null = true; + + if (child.stdout) |stdout| { + if (child.stderr) |stderr| { + var poller = std.io.poll(arena, enum { stdout, stderr }, .{ + .stdout = stdout, + .stderr = stderr, + }); + defer poller.deinit(); + + while (try poller.poll()) { + if (poller.fifo(.stdout).count > self.max_stdio_size) + return error.StdoutStreamTooLong; + if (poller.fifo(.stderr).count > self.max_stdio_size) + return error.StderrStreamTooLong; + } + + stdout_bytes = try poller.fifo(.stdout).toOwnedSlice(); + stderr_bytes = try poller.fifo(.stderr).toOwnedSlice(); + stdout_null = false; + stderr_null = false; + } else { + stdout_bytes = try stdout.reader().readAllAlloc(arena, self.max_stdio_size); + stdout_null = false; + } + } else if (child.stderr) |stderr| { + stderr_bytes = try stderr.reader().readAllAlloc(arena, self.max_stdio_size); + stderr_null = false; + } + + if (!stderr_null and stderr_bytes.len > 0) { + // Treat stderr as an error message. 
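For completeness, a hypothetical fragment combining the stdin and capture facilities handled above (it assumes `stdin` is the optional byte-slice field set directly on the Run step, and that `filter_tool` and `consumer` are a *CompileStep and another Run step defined elsewhere):

    const run = b.addRunArtifact(filter_tool);
    run.stdin = "raw input\n";            // piped to the child, after which stdin is closed
    const filtered = run.captureStdOut(); // FileSource pointing at the captured output
    consumer.addFileSourceArg(filtered);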
+ const stderr_is_diagnostic = self.captured_stderr == null and switch (self.stdio) { + .check => |checks| !checksContainStderr(checks.items), + else => true, + }; + if (stderr_is_diagnostic) { + try self.step.result_error_msgs.append(arena, stderr_bytes); + } + } + + return .{ + .stdout = stdout_bytes, + .stderr = stderr_bytes, + .stdout_null = stdout_null, + .stderr_null = stderr_null, + .test_results = .{}, + .test_metadata = null, + }; +} + +fn addPathForDynLibs(self: *RunStep, artifact: *CompileStep) void { + const b = self.step.owner; + for (artifact.link_objects.items) |link_object| { + switch (link_object) { + .other_step => |other| { + if (other.target.isWindows() and other.isDynamicLibrary()) { + addPathDir(self, fs.path.dirname(other.getOutputSource().getPath(b)).?); + addPathForDynLibs(self, other); + } + }, + else => {}, + } + } +} + +fn failForeign( + self: *RunStep, + suggested_flag: []const u8, + argv0: []const u8, + exe: *CompileStep, +) error{ MakeFailed, MakeSkipped, OutOfMemory } { + switch (self.stdio) { + .check, .zig_test => { + if (self.skip_foreign_checks) + return error.MakeSkipped; + + const b = self.step.owner; + const host_name = try b.host.target.zigTriple(b.allocator); + const foreign_name = try exe.target.zigTriple(b.allocator); + + return self.step.fail( + \\unable to spawn foreign binary '{s}' ({s}) on host system ({s}) + \\ consider using {s} or enabling skip_foreign_checks in the Run step + , .{ argv0, foreign_name, host_name, suggested_flag }); + }, + else => { + return self.step.fail("unable to spawn foreign binary '{s}'", .{argv0}); + }, + } +} + +fn hashStdIo(hh: *std.Build.Cache.HashHelper, stdio: StdIo) void { + switch (stdio) { + .infer_from_args, .inherit, .zig_test => {}, + .check => |checks| for (checks.items) |check| { + hh.add(@as(std.meta.Tag(StdIo.Check), check)); + switch (check) { + .expect_stderr_exact, + .expect_stderr_match, + .expect_stdout_exact, + .expect_stdout_match, + => |s| hh.addBytes(s), + + .expect_term => |term| { + hh.add(@as(std.meta.Tag(std.process.Child.Term), term)); + switch (term) { + .Exited => |x| hh.add(x), + .Signal, .Stopped, .Unknown => |x| hh.add(x), + } + }, + } + }, + } +} diff --git a/lib/std/Build/Step/TranslateC.zig b/lib/std/Build/Step/TranslateC.zig new file mode 100644 index 0000000000..86727ea2f0 --- /dev/null +++ b/lib/std/Build/Step/TranslateC.zig @@ -0,0 +1,136 @@ +const std = @import("std"); +const Step = std.Build.Step; +const CompileStep = std.Build.CompileStep; +const CheckFileStep = std.Build.CheckFileStep; +const fs = std.fs; +const mem = std.mem; +const CrossTarget = std.zig.CrossTarget; + +const TranslateCStep = @This(); + +pub const base_id = .translate_c; + +step: Step, +source: std.Build.FileSource, +include_dirs: std.ArrayList([]const u8), +c_macros: std.ArrayList([]const u8), +out_basename: []const u8, +target: CrossTarget, +optimize: std.builtin.OptimizeMode, +output_file: std.Build.GeneratedFile, + +pub const Options = struct { + source_file: std.Build.FileSource, + target: CrossTarget, + optimize: std.builtin.OptimizeMode, +}; + +pub fn create(owner: *std.Build, options: Options) *TranslateCStep { + const self = owner.allocator.create(TranslateCStep) catch @panic("OOM"); + const source = options.source_file.dupe(owner); + self.* = TranslateCStep{ + .step = Step.init(.{ + .id = .translate_c, + .name = "translate-c", + .owner = owner, + .makeFn = make, + }), + .source = source, + .include_dirs = std.ArrayList([]const u8).init(owner.allocator), + .c_macros = std.ArrayList([]const 
u8).init(owner.allocator), + .out_basename = undefined, + .target = options.target, + .optimize = options.optimize, + .output_file = std.Build.GeneratedFile{ .step = &self.step }, + }; + source.addStepDependencies(&self.step); + return self; +} + +pub const AddExecutableOptions = struct { + name: ?[]const u8 = null, + version: ?std.builtin.Version = null, + target: ?CrossTarget = null, + optimize: ?std.builtin.Mode = null, + linkage: ?CompileStep.Linkage = null, +}; + +/// Creates a step to build an executable from the translated source. +pub fn addExecutable(self: *TranslateCStep, options: AddExecutableOptions) *CompileStep { + return self.step.owner.addExecutable(.{ + .root_source_file = .{ .generated = &self.output_file }, + .name = options.name orelse "translated_c", + .version = options.version, + .target = options.target orelse self.target, + .optimize = options.optimize orelse self.optimize, + .linkage = options.linkage, + }); +} + +pub fn addIncludeDir(self: *TranslateCStep, include_dir: []const u8) void { + self.include_dirs.append(self.step.owner.dupePath(include_dir)) catch @panic("OOM"); +} + +pub fn addCheckFile(self: *TranslateCStep, expected_matches: []const []const u8) *CheckFileStep { + return CheckFileStep.create( + self.step.owner, + .{ .generated = &self.output_file }, + .{ .expected_matches = expected_matches }, + ); +} + +/// If the value is omitted, it is set to 1. +/// `name` and `value` need not live longer than the function call. +pub fn defineCMacro(self: *TranslateCStep, name: []const u8, value: ?[]const u8) void { + const macro = std.Build.constructCMacro(self.step.owner.allocator, name, value); + self.c_macros.append(macro) catch @panic("OOM"); +} + +/// name_and_value looks like [name]=[value]. If the value is omitted, it is set to 1. +pub fn defineCMacroRaw(self: *TranslateCStep, name_and_value: []const u8) void { + self.c_macros.append(self.step.owner.dupe(name_and_value)) catch @panic("OOM"); +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + const b = step.owner; + const self = @fieldParentPtr(TranslateCStep, "step", step); + + var argv_list = std.ArrayList([]const u8).init(b.allocator); + try argv_list.append(b.zig_exe); + try argv_list.append("translate-c"); + try argv_list.append("-lc"); + + try argv_list.append("--listen=-"); + + if (!self.target.isNative()) { + try argv_list.append("-target"); + try argv_list.append(try self.target.zigTriple(b.allocator)); + } + + switch (self.optimize) { + .Debug => {}, // Skip since it's the default. + else => try argv_list.append(b.fmt("-O{s}", .{@tagName(self.optimize)})), + } + + for (self.include_dirs.items) |include_dir| { + try argv_list.append("-I"); + try argv_list.append(include_dir); + } + + for (self.c_macros.items) |c_macro| { + try argv_list.append("-D"); + try argv_list.append(c_macro); + } + + try argv_list.append(self.source.getPath(b)); + + const output_path = try step.evalZigProcess(argv_list.items, prog_node); + + self.out_basename = fs.path.basename(output_path); + const output_dir = fs.path.dirname(output_path).?; + + self.output_file.path = try fs.path.join( + b.allocator, + &[_][]const u8{ output_dir, self.out_basename }, + ); +} diff --git a/lib/std/Build/Step/WriteFile.zig b/lib/std/Build/Step/WriteFile.zig new file mode 100644 index 0000000000..68f7c37c6c --- /dev/null +++ b/lib/std/Build/Step/WriteFile.zig @@ -0,0 +1,291 @@ +//! WriteFileStep is primarily used to create a directory in an appropriate +//! 
location inside the local cache which has a set of files that have either +//! been generated during the build, or are copied from the source package. +//! +//! However, this step has an additional capability of writing data to paths +//! relative to the package root, effectively mutating the package's source +//! files. Be careful with the latter functionality; it should not be used +//! during the normal build process, but as a utility run by a developer with +//! intention to update source files, which will then be committed to version +//! control. +const std = @import("std"); +const Step = std.Build.Step; +const fs = std.fs; +const ArrayList = std.ArrayList; +const WriteFileStep = @This(); + +step: Step, +/// The elements here are pointers because we need stable pointers for the +/// GeneratedFile field. +files: std.ArrayListUnmanaged(*File), +output_source_files: std.ArrayListUnmanaged(OutputSourceFile), +generated_directory: std.Build.GeneratedFile, + +pub const base_id = .write_file; + +pub const File = struct { + generated_file: std.Build.GeneratedFile, + sub_path: []const u8, + contents: Contents, +}; + +pub const OutputSourceFile = struct { + contents: Contents, + sub_path: []const u8, +}; + +pub const Contents = union(enum) { + bytes: []const u8, + copy: std.Build.FileSource, +}; + +pub fn create(owner: *std.Build) *WriteFileStep { + const wf = owner.allocator.create(WriteFileStep) catch @panic("OOM"); + wf.* = .{ + .step = Step.init(.{ + .id = .write_file, + .name = "WriteFile", + .owner = owner, + .makeFn = make, + }), + .files = .{}, + .output_source_files = .{}, + .generated_directory = .{ .step = &wf.step }, + }; + return wf; +} + +pub fn add(wf: *WriteFileStep, sub_path: []const u8, bytes: []const u8) void { + const b = wf.step.owner; + const gpa = b.allocator; + const file = gpa.create(File) catch @panic("OOM"); + file.* = .{ + .generated_file = .{ .step = &wf.step }, + .sub_path = b.dupePath(sub_path), + .contents = .{ .bytes = b.dupe(bytes) }, + }; + wf.files.append(gpa, file) catch @panic("OOM"); + + wf.maybeUpdateName(); +} + +/// Place the file into the generated directory within the local cache, +/// along with all the rest of the files added to this step. The parameter +/// here is the destination path relative to the local cache directory +/// associated with this WriteFileStep. It may be a basename, or it may +/// include sub-directories, in which case this step will ensure the +/// required sub-path exists. +/// This is the option expected to be used most commonly with `addCopyFile`. +pub fn addCopyFile(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: []const u8) void { + const b = wf.step.owner; + const gpa = b.allocator; + const file = gpa.create(File) catch @panic("OOM"); + file.* = .{ + .generated_file = .{ .step = &wf.step }, + .sub_path = b.dupePath(sub_path), + .contents = .{ .copy = source }, + }; + wf.files.append(gpa, file) catch @panic("OOM"); + + wf.maybeUpdateName(); + source.addStepDependencies(&wf.step); +} + +/// A path relative to the package root. +/// Be careful with this because it updates source files. This should not be +/// used as part of the normal build process, but as a utility occasionally +/// run by a developer with intent to modify source files and then commit +/// those changes to version control. +/// A file added this way is not available with `getFileSource`. 
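For orientation, a hypothetical build.zig fragment exercising the WriteFile API declared above (it assumes the `addWriteFiles` convenience on `std.Build` is the usual constructor; `packer` is an invented *CompileStep):

    const wf = b.addWriteFiles();
    wf.add("version.zig", "pub const version = \"0.1.0\";\n");
    wf.addCopyFile(.{ .path = "assets/logo.png" }, "assets/logo.png");

    const run = b.addRunArtifact(packer);
    run.addDirectorySourceArg(wf.getDirectorySource()); // depends on the WriteFile step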
+pub fn addCopyFileToSource(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: []const u8) void { + const b = wf.step.owner; + wf.output_source_files.append(b.allocator, .{ + .contents = .{ .copy = source }, + .sub_path = sub_path, + }) catch @panic("OOM"); + source.addStepDependencies(&wf.step); +} + +/// A path relative to the package root. +/// Be careful with this because it updates source files. This should not be +/// used as part of the normal build process, but as a utility occasionally +/// run by a developer with intent to modify source files and then commit +/// those changes to version control. +/// A file added this way is not available with `getFileSource`. +pub fn addBytesToSource(wf: *WriteFileStep, bytes: []const u8, sub_path: []const u8) void { + const b = wf.step.owner; + wf.output_source_files.append(b.allocator, .{ + .contents = .{ .bytes = bytes }, + .sub_path = sub_path, + }) catch @panic("OOM"); +} + +/// Gets a file source for the given sub_path. If the file does not exist, returns `null`. +pub fn getFileSource(wf: *WriteFileStep, sub_path: []const u8) ?std.Build.FileSource { + for (wf.files.items) |file| { + if (std.mem.eql(u8, file.sub_path, sub_path)) { + return .{ .generated = &file.generated_file }; + } + } + return null; +} + +/// Returns a `FileSource` representing the base directory that contains all the +/// files from this `WriteFileStep`. +pub fn getDirectorySource(wf: *WriteFileStep) std.Build.FileSource { + return .{ .generated = &wf.generated_directory }; +} + +fn maybeUpdateName(wf: *WriteFileStep) void { + if (wf.files.items.len == 1) { + // First time adding a file; update name. + if (std.mem.eql(u8, wf.step.name, "WriteFile")) { + wf.step.name = wf.step.owner.fmt("WriteFile {s}", .{wf.files.items[0].sub_path}); + } + } +} + +fn make(step: *Step, prog_node: *std.Progress.Node) !void { + _ = prog_node; + const b = step.owner; + const wf = @fieldParentPtr(WriteFileStep, "step", step); + + // Writing to source files is kind of an extra capability of this + // WriteFileStep - arguably it should be a different step. But anyway here + // it is, it happens unconditionally and does not interact with the other + // files here. + var any_miss = false; + for (wf.output_source_files.items) |output_source_file| { + if (fs.path.dirname(output_source_file.sub_path)) |dirname| { + b.build_root.handle.makePath(dirname) catch |err| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.build_root, dirname, @errorName(err), + }); + }; + } + switch (output_source_file.contents) { + .bytes => |bytes| { + b.build_root.handle.writeFile(output_source_file.sub_path, bytes) catch |err| { + return step.fail("unable to write file '{}{s}': {s}", .{ + b.build_root, output_source_file.sub_path, @errorName(err), + }); + }; + any_miss = true; + }, + .copy => |file_source| { + const source_path = file_source.getPath(b); + const prev_status = fs.Dir.updateFile( + fs.cwd(), + source_path, + b.build_root.handle, + output_source_file.sub_path, + .{}, + ) catch |err| { + return step.fail("unable to update file from '{s}' to '{}{s}': {s}", .{ + source_path, b.build_root, output_source_file.sub_path, @errorName(err), + }); + }; + any_miss = any_miss or prev_status == .stale; + }, + } + } + + // The cache is used here not really as a way to speed things up - because writing + // the data to a file would probably be very fast - but as a way to find a canonical + // location to put build artifacts. 
+ + // If, for example, a hard-coded path was used as the location to put WriteFileStep + // files, then two WriteFileSteps executing in parallel might clobber each other. + + var man = b.cache.obtain(); + defer man.deinit(); + + // Random bytes to make WriteFileStep unique. Refresh this with + // new random bytes when WriteFileStep implementation is modified + // in a non-backwards-compatible way. + man.hash.add(@as(u32, 0xd767ee59)); + + for (wf.files.items) |file| { + man.hash.addBytes(file.sub_path); + switch (file.contents) { + .bytes => |bytes| { + man.hash.addBytes(bytes); + }, + .copy => |file_source| { + _ = try man.addFile(file_source.getPath(b), null); + }, + } + } + + if (try step.cacheHit(&man)) { + const digest = man.final(); + for (wf.files.items) |file| { + file.generated_file.path = try b.cache_root.join(b.allocator, &.{ + "o", &digest, file.sub_path, + }); + } + wf.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest }); + return; + } + + const digest = man.final(); + const cache_path = "o" ++ fs.path.sep_str ++ digest; + + wf.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest }); + + var cache_dir = b.cache_root.handle.makeOpenPath(cache_path, .{}) catch |err| { + return step.fail("unable to make path '{}{s}': {s}", .{ + b.cache_root, cache_path, @errorName(err), + }); + }; + defer cache_dir.close(); + + for (wf.files.items) |file| { + if (fs.path.dirname(file.sub_path)) |dirname| { + cache_dir.makePath(dirname) catch |err| { + return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{ + b.cache_root, cache_path, fs.path.sep, dirname, @errorName(err), + }); + }; + } + switch (file.contents) { + .bytes => |bytes| { + cache_dir.writeFile(file.sub_path, bytes) catch |err| { + return step.fail("unable to write file '{}{s}{c}{s}': {s}", .{ + b.cache_root, cache_path, fs.path.sep, file.sub_path, @errorName(err), + }); + }; + }, + .copy => |file_source| { + const source_path = file_source.getPath(b); + const prev_status = fs.Dir.updateFile( + fs.cwd(), + source_path, + cache_dir, + file.sub_path, + .{}, + ) catch |err| { + return step.fail("unable to update file from '{s}' to '{}{s}{c}{s}': {s}", .{ + source_path, + b.cache_root, + cache_path, + fs.path.sep, + file.sub_path, + @errorName(err), + }); + }; + // At this point we already will mark the step as a cache miss. + // But this is kind of a partial cache hit since individual + // file copies may be avoided. Oh well, this information is + // discarded. 
+ _ = prev_status; + }, + } + + file.generated_file.path = try b.cache_root.join(b.allocator, &.{ + cache_path, file.sub_path, + }); + } + + try step.writeManifest(&man); +} diff --git a/lib/std/Build/TranslateCStep.zig b/lib/std/Build/TranslateCStep.zig deleted file mode 100644 index f2dc23d950..0000000000 --- a/lib/std/Build/TranslateCStep.zig +++ /dev/null @@ -1,136 +0,0 @@ -const std = @import("../std.zig"); -const Step = std.Build.Step; -const CompileStep = std.Build.CompileStep; -const CheckFileStep = std.Build.CheckFileStep; -const fs = std.fs; -const mem = std.mem; -const CrossTarget = std.zig.CrossTarget; - -const TranslateCStep = @This(); - -pub const base_id = .translate_c; - -step: Step, -source: std.Build.FileSource, -include_dirs: std.ArrayList([]const u8), -c_macros: std.ArrayList([]const u8), -out_basename: []const u8, -target: CrossTarget, -optimize: std.builtin.OptimizeMode, -output_file: std.Build.GeneratedFile, - -pub const Options = struct { - source_file: std.Build.FileSource, - target: CrossTarget, - optimize: std.builtin.OptimizeMode, -}; - -pub fn create(owner: *std.Build, options: Options) *TranslateCStep { - const self = owner.allocator.create(TranslateCStep) catch @panic("OOM"); - const source = options.source_file.dupe(owner); - self.* = TranslateCStep{ - .step = Step.init(.{ - .id = .translate_c, - .name = "translate-c", - .owner = owner, - .makeFn = make, - }), - .source = source, - .include_dirs = std.ArrayList([]const u8).init(owner.allocator), - .c_macros = std.ArrayList([]const u8).init(owner.allocator), - .out_basename = undefined, - .target = options.target, - .optimize = options.optimize, - .output_file = std.Build.GeneratedFile{ .step = &self.step }, - }; - source.addStepDependencies(&self.step); - return self; -} - -pub const AddExecutableOptions = struct { - name: ?[]const u8 = null, - version: ?std.builtin.Version = null, - target: ?CrossTarget = null, - optimize: ?std.builtin.Mode = null, - linkage: ?CompileStep.Linkage = null, -}; - -/// Creates a step to build an executable from the translated source. -pub fn addExecutable(self: *TranslateCStep, options: AddExecutableOptions) *CompileStep { - return self.step.owner.addExecutable(.{ - .root_source_file = .{ .generated = &self.output_file }, - .name = options.name orelse "translated_c", - .version = options.version, - .target = options.target orelse self.target, - .optimize = options.optimize orelse self.optimize, - .linkage = options.linkage, - }); -} - -pub fn addIncludeDir(self: *TranslateCStep, include_dir: []const u8) void { - self.include_dirs.append(self.step.owner.dupePath(include_dir)) catch @panic("OOM"); -} - -pub fn addCheckFile(self: *TranslateCStep, expected_matches: []const []const u8) *CheckFileStep { - return CheckFileStep.create( - self.step.owner, - .{ .generated = &self.output_file }, - .{ .expected_matches = expected_matches }, - ); -} - -/// If the value is omitted, it is set to 1. -/// `name` and `value` need not live longer than the function call. -pub fn defineCMacro(self: *TranslateCStep, name: []const u8, value: ?[]const u8) void { - const macro = std.Build.constructCMacro(self.step.owner.allocator, name, value); - self.c_macros.append(macro) catch @panic("OOM"); -} - -/// name_and_value looks like [name]=[value]. If the value is omitted, it is set to 1. 
-pub fn defineCMacroRaw(self: *TranslateCStep, name_and_value: []const u8) void { - self.c_macros.append(self.step.owner.dupe(name_and_value)) catch @panic("OOM"); -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - const b = step.owner; - const self = @fieldParentPtr(TranslateCStep, "step", step); - - var argv_list = std.ArrayList([]const u8).init(b.allocator); - try argv_list.append(b.zig_exe); - try argv_list.append("translate-c"); - try argv_list.append("-lc"); - - try argv_list.append("--listen=-"); - - if (!self.target.isNative()) { - try argv_list.append("-target"); - try argv_list.append(try self.target.zigTriple(b.allocator)); - } - - switch (self.optimize) { - .Debug => {}, // Skip since it's the default. - else => try argv_list.append(b.fmt("-O{s}", .{@tagName(self.optimize)})), - } - - for (self.include_dirs.items) |include_dir| { - try argv_list.append("-I"); - try argv_list.append(include_dir); - } - - for (self.c_macros.items) |c_macro| { - try argv_list.append("-D"); - try argv_list.append(c_macro); - } - - try argv_list.append(self.source.getPath(b)); - - const output_path = try step.evalZigProcess(argv_list.items, prog_node); - - self.out_basename = fs.path.basename(output_path); - const output_dir = fs.path.dirname(output_path).?; - - self.output_file.path = try fs.path.join( - b.allocator, - &[_][]const u8{ output_dir, self.out_basename }, - ); -} diff --git a/lib/std/Build/WriteFileStep.zig b/lib/std/Build/WriteFileStep.zig deleted file mode 100644 index dee79af5be..0000000000 --- a/lib/std/Build/WriteFileStep.zig +++ /dev/null @@ -1,293 +0,0 @@ -//! WriteFileStep is primarily used to create a directory in an appropriate -//! location inside the local cache which has a set of files that have either -//! been generated during the build, or are copied from the source package. -//! -//! However, this step has an additional capability of writing data to paths -//! relative to the package root, effectively mutating the package's source -//! files. Be careful with the latter functionality; it should not be used -//! during the normal build process, but as a utility run by a developer with -//! intention to update source files, which will then be committed to version -//! control. - -step: Step, -/// The elements here are pointers because we need stable pointers for the -/// GeneratedFile field. 
-files: std.ArrayListUnmanaged(*File), -output_source_files: std.ArrayListUnmanaged(OutputSourceFile), -generated_directory: std.Build.GeneratedFile, - -pub const base_id = .write_file; - -pub const File = struct { - generated_file: std.Build.GeneratedFile, - sub_path: []const u8, - contents: Contents, -}; - -pub const OutputSourceFile = struct { - contents: Contents, - sub_path: []const u8, -}; - -pub const Contents = union(enum) { - bytes: []const u8, - copy: std.Build.FileSource, -}; - -pub fn create(owner: *std.Build) *WriteFileStep { - const wf = owner.allocator.create(WriteFileStep) catch @panic("OOM"); - wf.* = .{ - .step = Step.init(.{ - .id = .write_file, - .name = "WriteFile", - .owner = owner, - .makeFn = make, - }), - .files = .{}, - .output_source_files = .{}, - .generated_directory = .{ .step = &wf.step }, - }; - return wf; -} - -pub fn add(wf: *WriteFileStep, sub_path: []const u8, bytes: []const u8) void { - const b = wf.step.owner; - const gpa = b.allocator; - const file = gpa.create(File) catch @panic("OOM"); - file.* = .{ - .generated_file = .{ .step = &wf.step }, - .sub_path = b.dupePath(sub_path), - .contents = .{ .bytes = b.dupe(bytes) }, - }; - wf.files.append(gpa, file) catch @panic("OOM"); - - wf.maybeUpdateName(); -} - -/// Place the file into the generated directory within the local cache, -/// along with all the rest of the files added to this step. The parameter -/// here is the destination path relative to the local cache directory -/// associated with this WriteFileStep. It may be a basename, or it may -/// include sub-directories, in which case this step will ensure the -/// required sub-path exists. -/// This is the option expected to be used most commonly with `addCopyFile`. -pub fn addCopyFile(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: []const u8) void { - const b = wf.step.owner; - const gpa = b.allocator; - const file = gpa.create(File) catch @panic("OOM"); - file.* = .{ - .generated_file = .{ .step = &wf.step }, - .sub_path = b.dupePath(sub_path), - .contents = .{ .copy = source }, - }; - wf.files.append(gpa, file) catch @panic("OOM"); - - wf.maybeUpdateName(); - source.addStepDependencies(&wf.step); -} - -/// A path relative to the package root. -/// Be careful with this because it updates source files. This should not be -/// used as part of the normal build process, but as a utility occasionally -/// run by a developer with intent to modify source files and then commit -/// those changes to version control. -/// A file added this way is not available with `getFileSource`. -pub fn addCopyFileToSource(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: []const u8) void { - const b = wf.step.owner; - wf.output_source_files.append(b.allocator, .{ - .contents = .{ .copy = source }, - .sub_path = sub_path, - }) catch @panic("OOM"); - source.addStepDependencies(&wf.step); -} - -/// A path relative to the package root. -/// Be careful with this because it updates source files. This should not be -/// used as part of the normal build process, but as a utility occasionally -/// run by a developer with intent to modify source files and then commit -/// those changes to version control. -/// A file added this way is not available with `getFileSource`. 
-pub fn addBytesToSource(wf: *WriteFileStep, bytes: []const u8, sub_path: []const u8) void { - const b = wf.step.owner; - wf.output_source_files.append(b.allocator, .{ - .contents = .{ .bytes = bytes }, - .sub_path = sub_path, - }) catch @panic("OOM"); -} - -/// Gets a file source for the given sub_path. If the file does not exist, returns `null`. -pub fn getFileSource(wf: *WriteFileStep, sub_path: []const u8) ?std.Build.FileSource { - for (wf.files.items) |file| { - if (std.mem.eql(u8, file.sub_path, sub_path)) { - return .{ .generated = &file.generated_file }; - } - } - return null; -} - -/// Returns a `FileSource` representing the base directory that contains all the -/// files from this `WriteFileStep`. -pub fn getDirectorySource(wf: *WriteFileStep) std.Build.FileSource { - return .{ .generated = &wf.generated_directory }; -} - -fn maybeUpdateName(wf: *WriteFileStep) void { - if (wf.files.items.len == 1) { - // First time adding a file; update name. - if (std.mem.eql(u8, wf.step.name, "WriteFile")) { - wf.step.name = wf.step.owner.fmt("WriteFile {s}", .{wf.files.items[0].sub_path}); - } - } -} - -fn make(step: *Step, prog_node: *std.Progress.Node) !void { - _ = prog_node; - const b = step.owner; - const wf = @fieldParentPtr(WriteFileStep, "step", step); - - // Writing to source files is kind of an extra capability of this - // WriteFileStep - arguably it should be a different step. But anyway here - // it is, it happens unconditionally and does not interact with the other - // files here. - var any_miss = false; - for (wf.output_source_files.items) |output_source_file| { - if (fs.path.dirname(output_source_file.sub_path)) |dirname| { - b.build_root.handle.makePath(dirname) catch |err| { - return step.fail("unable to make path '{}{s}': {s}", .{ - b.build_root, dirname, @errorName(err), - }); - }; - } - switch (output_source_file.contents) { - .bytes => |bytes| { - b.build_root.handle.writeFile(output_source_file.sub_path, bytes) catch |err| { - return step.fail("unable to write file '{}{s}': {s}", .{ - b.build_root, output_source_file.sub_path, @errorName(err), - }); - }; - any_miss = true; - }, - .copy => |file_source| { - const source_path = file_source.getPath(b); - const prev_status = fs.Dir.updateFile( - fs.cwd(), - source_path, - b.build_root.handle, - output_source_file.sub_path, - .{}, - ) catch |err| { - return step.fail("unable to update file from '{s}' to '{}{s}': {s}", .{ - source_path, b.build_root, output_source_file.sub_path, @errorName(err), - }); - }; - any_miss = any_miss or prev_status == .stale; - }, - } - } - - // The cache is used here not really as a way to speed things up - because writing - // the data to a file would probably be very fast - but as a way to find a canonical - // location to put build artifacts. - - // If, for example, a hard-coded path was used as the location to put WriteFileStep - // files, then two WriteFileSteps executing in parallel might clobber each other. - - var man = b.cache.obtain(); - defer man.deinit(); - - // Random bytes to make WriteFileStep unique. Refresh this with - // new random bytes when WriteFileStep implementation is modified - // in a non-backwards-compatible way. 
- man.hash.add(@as(u32, 0xd767ee59)); - - for (wf.files.items) |file| { - man.hash.addBytes(file.sub_path); - switch (file.contents) { - .bytes => |bytes| { - man.hash.addBytes(bytes); - }, - .copy => |file_source| { - _ = try man.addFile(file_source.getPath(b), null); - }, - } - } - - if (try step.cacheHit(&man)) { - const digest = man.final(); - for (wf.files.items) |file| { - file.generated_file.path = try b.cache_root.join(b.allocator, &.{ - "o", &digest, file.sub_path, - }); - } - wf.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest }); - return; - } - - const digest = man.final(); - const cache_path = "o" ++ fs.path.sep_str ++ digest; - - wf.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest }); - - var cache_dir = b.cache_root.handle.makeOpenPath(cache_path, .{}) catch |err| { - return step.fail("unable to make path '{}{s}': {s}", .{ - b.cache_root, cache_path, @errorName(err), - }); - }; - defer cache_dir.close(); - - for (wf.files.items) |file| { - if (fs.path.dirname(file.sub_path)) |dirname| { - cache_dir.makePath(dirname) catch |err| { - return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{ - b.cache_root, cache_path, fs.path.sep, dirname, @errorName(err), - }); - }; - } - switch (file.contents) { - .bytes => |bytes| { - cache_dir.writeFile(file.sub_path, bytes) catch |err| { - return step.fail("unable to write file '{}{s}{c}{s}': {s}", .{ - b.cache_root, cache_path, fs.path.sep, file.sub_path, @errorName(err), - }); - }; - }, - .copy => |file_source| { - const source_path = file_source.getPath(b); - const prev_status = fs.Dir.updateFile( - fs.cwd(), - source_path, - cache_dir, - file.sub_path, - .{}, - ) catch |err| { - return step.fail("unable to update file from '{s}' to '{}{s}{c}{s}': {s}", .{ - source_path, - b.cache_root, - cache_path, - fs.path.sep, - file.sub_path, - @errorName(err), - }); - }; - // At this point we already will mark the step as a cache miss. - // But this is kind of a partial cache hit since individual - // file copies may be avoided. Oh well, this information is - // discarded. 
- _ = prev_status; - }, - } - - file.generated_file.path = try b.cache_root.join(b.allocator, &.{ - cache_path, file.sub_path, - }); - } - - try step.writeManifest(&man); -} - -const std = @import("../std.zig"); -const Step = std.Build.Step; -const fs = std.fs; -const ArrayList = std.ArrayList; - -const WriteFileStep = @This(); -- cgit v1.2.3 From 3f3b1a6808113fd5f9b2cec1033009cbb17dc969 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Wed, 3 May 2023 11:49:55 +0300 Subject: std.Build: use Step.* instead of *Step Follow up to 13eb7251d37759bd47403db304c6120c706fe353 --- build.zig | 12 +- lib/init-exe/build.zig | 2 +- lib/std/Build.zig | 150 +++++++++++++------------ lib/std/Build/Step.zig | 67 ++++++++--- lib/std/Build/Step/CheckFile.zig | 12 +- lib/std/Build/Step/CheckObject.zig | 28 ++--- lib/std/Build/Step/Compile.zig | 168 ++++++++++++++-------------- lib/std/Build/Step/ConfigHeader.zig | 26 ++--- lib/std/Build/Step/Fmt.zig | 8 +- lib/std/Build/Step/InstallArtifact.zig | 17 ++- lib/std/Build/Step/InstallFile.zig | 8 +- lib/std/Build/Step/ObjCopy.zig | 17 ++- lib/std/Build/Step/Options.zig | 25 ++--- lib/std/Build/Step/RemoveDir.zig | 8 +- lib/std/Build/Step/Run.zig | 108 +++++++++--------- lib/std/Build/Step/TranslateC.zig | 26 ++--- lib/std/Build/Step/WriteFile.zig | 38 +++---- test/link/macho/dead_strip/build.zig | 2 +- test/link/macho/dead_strip_dylibs/build.zig | 2 +- test/link/macho/headerpad/build.zig | 2 +- test/link/macho/search_strategy/build.zig | 2 +- test/link/macho/unwind_info/build.zig | 2 +- test/link/macho/uuid/build.zig | 3 +- test/src/Cases.zig | 2 +- test/src/StackTrace.zig | 2 +- test/standalone/install_raw_hex/build.zig | 1 - test/tests.zig | 2 +- 27 files changed, 379 insertions(+), 361 deletions(-) (limited to 'lib/std/Build.zig') diff --git a/build.zig b/build.zig index 78345ac9e4..208d06fe1d 100644 --- a/build.zig +++ b/build.zig @@ -533,7 +533,7 @@ fn addCompilerStep( b: *std.Build, optimize: std.builtin.OptimizeMode, target: std.zig.CrossTarget, -) *std.Build.CompileStep { +) *std.Build.Step.Compile { const exe = b.addExecutable(.{ .name = "zig", .root_source_file = .{ .path = "src/main.zig" }, @@ -561,7 +561,7 @@ const exe_cflags = [_][]const u8{ fn addCmakeCfgOptionsToExe( b: *std.Build, cfg: CMakeConfig, - exe: *std.Build.CompileStep, + exe: *std.Build.Step.Compile, use_zig_libcxx: bool, ) !void { if (exe.target.isDarwin()) { @@ -640,7 +640,7 @@ fn addCmakeCfgOptionsToExe( } } -fn addStaticLlvmOptionsToExe(exe: *std.Build.CompileStep) !void { +fn addStaticLlvmOptionsToExe(exe: *std.Build.Step.Compile) !void { // Adds the Zig C++ sources which both stage1 and stage2 need. 
// // We need this because otherwise zig_clang_cc1_main.cpp ends up pulling @@ -679,7 +679,7 @@ fn addStaticLlvmOptionsToExe(exe: *std.Build.CompileStep) !void { fn addCxxKnownPath( b: *std.Build, ctx: CMakeConfig, - exe: *std.Build.CompileStep, + exe: *std.Build.Step.Compile, objname: []const u8, errtxt: ?[]const u8, need_cpp_includes: bool, @@ -709,7 +709,7 @@ fn addCxxKnownPath( } } -fn addCMakeLibraryList(exe: *std.Build.CompileStep, list: []const u8) void { +fn addCMakeLibraryList(exe: *std.Build.Step.Compile, list: []const u8) void { var it = mem.tokenize(u8, list, ";"); while (it.next()) |lib| { if (mem.startsWith(u8, lib, "-l")) { @@ -723,7 +723,7 @@ fn addCMakeLibraryList(exe: *std.Build.CompileStep, list: []const u8) void { } const CMakeConfig = struct { - llvm_linkage: std.Build.CompileStep.Linkage, + llvm_linkage: std.Build.Step.Compile.Linkage, cmake_binary_dir: []const u8, cmake_prefix_path: []const u8, cmake_static_library_prefix: []const u8, diff --git a/lib/init-exe/build.zig b/lib/init-exe/build.zig index abf8654f0f..1221984190 100644 --- a/lib/init-exe/build.zig +++ b/lib/init-exe/build.zig @@ -29,7 +29,7 @@ pub fn build(b: *std.Build) void { // step when running `zig build`). b.installArtifact(exe); - // This *creates* a RunStep in the build graph, to be executed when another + // This *creates* a Run step in the build graph, to be executed when another // step is evaluated that depends on it. The next line below will establish // such a dependency. const run_cmd = b.addRunArtifact(exe); diff --git a/lib/std/Build.zig b/lib/std/Build.zig index bda50112b6..ca55d23937 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -21,27 +21,41 @@ const Build = @This(); pub const Cache = @import("Build/Cache.zig"); -/// deprecated: use `CompileStep`. -pub const LibExeObjStep = CompileStep; +/// deprecated: use `Step.Compile`. +pub const LibExeObjStep = Step.Compile; /// deprecated: use `Build`. pub const Builder = Build; -/// deprecated: use `InstallDirStep.Options` -pub const InstallDirectoryOptions = InstallDirStep.Options; +/// deprecated: use `Step.InstallDir.Options` +pub const InstallDirectoryOptions = Step.InstallDir.Options; pub const Step = @import("Build/Step.zig"); +/// deprecated: use `Step.CheckFile`. pub const CheckFileStep = @import("Build/Step/CheckFile.zig"); +/// deprecated: use `Step.CheckObject`. pub const CheckObjectStep = @import("Build/Step/CheckObject.zig"); +/// deprecated: use `Step.ConfigHeader`. pub const ConfigHeaderStep = @import("Build/Step/ConfigHeader.zig"); +/// deprecated: use `Step.Fmt`. pub const FmtStep = @import("Build/Step/Fmt.zig"); +/// deprecated: use `Step.InstallArtifact`. pub const InstallArtifactStep = @import("Build/Step/InstallArtifact.zig"); +/// deprecated: use `Step.InstallDir`. pub const InstallDirStep = @import("Build/Step/InstallDir.zig"); +/// deprecated: use `Step.InstallFile`. pub const InstallFileStep = @import("Build/Step/InstallFile.zig"); +/// deprecated: use `Step.ObjCopy`. pub const ObjCopyStep = @import("Build/Step/ObjCopy.zig"); +/// deprecated: use `Step.Compile`. pub const CompileStep = @import("Build/Step/Compile.zig"); +/// deprecated: use `Step.Options`. pub const OptionsStep = @import("Build/Step/Options.zig"); +/// deprecated: use `Step.RemoveDir`. pub const RemoveDirStep = @import("Build/Step/RemoveDir.zig"); +/// deprecated: use `Step.Run`. pub const RunStep = @import("Build/Step/Run.zig"); +/// deprecated: use `Step.TranslateC`. 
pub const TranslateCStep = @import("Build/Step/TranslateC.zig"); +/// deprecated: use `Step.WriteFile`. pub const WriteFileStep = @import("Build/Step/WriteFile.zig"); install_tls: TopLevelStep, @@ -442,8 +456,8 @@ pub fn resolveInstallPrefix(self: *Build, install_prefix: ?[]const u8, dir_list: self.h_dir = self.pathJoin(&h_list); } -pub fn addOptions(self: *Build) *OptionsStep { - return OptionsStep.create(self); +pub fn addOptions(self: *Build) *Step.Options { + return Step.Options.create(self); } pub const ExecutableOptions = struct { @@ -452,7 +466,7 @@ pub const ExecutableOptions = struct { version: ?std.builtin.Version = null, target: CrossTarget = .{}, optimize: std.builtin.Mode = .Debug, - linkage: ?CompileStep.Linkage = null, + linkage: ?Step.Compile.Linkage = null, max_rss: usize = 0, link_libc: ?bool = null, single_threaded: ?bool = null, @@ -460,8 +474,8 @@ pub const ExecutableOptions = struct { use_lld: ?bool = null, }; -pub fn addExecutable(b: *Build, options: ExecutableOptions) *CompileStep { - return CompileStep.create(b, .{ +pub fn addExecutable(b: *Build, options: ExecutableOptions) *Step.Compile { + return Step.Compile.create(b, .{ .name = options.name, .root_source_file = options.root_source_file, .version = options.version, @@ -489,8 +503,8 @@ pub const ObjectOptions = struct { use_lld: ?bool = null, }; -pub fn addObject(b: *Build, options: ObjectOptions) *CompileStep { - return CompileStep.create(b, .{ +pub fn addObject(b: *Build, options: ObjectOptions) *Step.Compile { + return Step.Compile.create(b, .{ .name = options.name, .root_source_file = options.root_source_file, .target = options.target, @@ -517,8 +531,8 @@ pub const SharedLibraryOptions = struct { use_lld: ?bool = null, }; -pub fn addSharedLibrary(b: *Build, options: SharedLibraryOptions) *CompileStep { - return CompileStep.create(b, .{ +pub fn addSharedLibrary(b: *Build, options: SharedLibraryOptions) *Step.Compile { + return Step.Compile.create(b, .{ .name = options.name, .root_source_file = options.root_source_file, .kind = .lib, @@ -547,8 +561,8 @@ pub const StaticLibraryOptions = struct { use_lld: ?bool = null, }; -pub fn addStaticLibrary(b: *Build, options: StaticLibraryOptions) *CompileStep { - return CompileStep.create(b, .{ +pub fn addStaticLibrary(b: *Build, options: StaticLibraryOptions) *Step.Compile { + return Step.Compile.create(b, .{ .name = options.name, .root_source_file = options.root_source_file, .kind = .lib, @@ -579,8 +593,8 @@ pub const TestOptions = struct { use_lld: ?bool = null, }; -pub fn addTest(b: *Build, options: TestOptions) *CompileStep { - return CompileStep.create(b, .{ +pub fn addTest(b: *Build, options: TestOptions) *Step.Compile { + return Step.Compile.create(b, .{ .name = options.name, .kind = .@"test", .root_source_file = options.root_source_file, @@ -604,8 +618,8 @@ pub const AssemblyOptions = struct { max_rss: usize = 0, }; -pub fn addAssembly(b: *Build, options: AssemblyOptions) *CompileStep { - const obj_step = CompileStep.create(b, .{ +pub fn addAssembly(b: *Build, options: AssemblyOptions) *Step.Compile { + const obj_step = Step.Compile.create(b, .{ .name = options.name, .kind = .obj, .root_source_file = null, @@ -657,25 +671,25 @@ fn moduleDependenciesToArrayHashMap(arena: Allocator, deps: []const ModuleDepend return result; } -/// Initializes a RunStep with argv, which must at least have the path to the +/// Initializes a `Step.Run` with argv, which must at least have the path to the /// executable. 
More command line arguments can be added with `addArg`, /// `addArgs`, and `addArtifactArg`. /// Be careful using this function, as it introduces a system dependency. -/// To run an executable built with zig build, see `CompileStep.run`. -pub fn addSystemCommand(self: *Build, argv: []const []const u8) *RunStep { +/// To run an executable built with zig build, see `Step.Compile.run`. +pub fn addSystemCommand(self: *Build, argv: []const []const u8) *Step.Run { assert(argv.len >= 1); - const run_step = RunStep.create(self, self.fmt("run {s}", .{argv[0]})); + const run_step = Step.Run.create(self, self.fmt("run {s}", .{argv[0]})); run_step.addArgs(argv); return run_step; } -/// Creates a `RunStep` with an executable built with `addExecutable`. -/// Add command line arguments with methods of `RunStep`. -pub fn addRunArtifact(b: *Build, exe: *CompileStep) *RunStep { +/// Creates a `Step.Run` with an executable built with `addExecutable`. +/// Add command line arguments with methods of `Step.Run`. +pub fn addRunArtifact(b: *Build, exe: *Step.Compile) *Step.Run { // It doesn't have to be native. We catch that if you actually try to run it. // Consider that this is declarative; the run step may not be run unless a user // option is supplied. - const run_step = RunStep.create(b, b.fmt("run {s}", .{exe.name})); + const run_step = Step.Run.create(b, b.fmt("run {s}", .{exe.name})); run_step.addArtifactArg(exe); if (exe.kind == .@"test") { @@ -696,14 +710,14 @@ pub fn addRunArtifact(b: *Build, exe: *CompileStep) *RunStep { /// when an option found in the input file is missing from `values`. pub fn addConfigHeader( b: *Build, - options: ConfigHeaderStep.Options, + options: Step.ConfigHeader.Options, values: anytype, -) *ConfigHeaderStep { +) *Step.ConfigHeader { var options_copy = options; if (options_copy.first_ret_addr == null) options_copy.first_ret_addr = @returnAddress(); - const config_header_step = ConfigHeaderStep.create(b, options_copy); + const config_header_step = Step.ConfigHeader.create(b, options_copy); config_header_step.addValues(values); return config_header_step; } @@ -734,28 +748,28 @@ pub fn dupePath(self: *Build, bytes: []const u8) []u8 { return the_copy; } -pub fn addWriteFile(self: *Build, file_path: []const u8, data: []const u8) *WriteFileStep { +pub fn addWriteFile(self: *Build, file_path: []const u8, data: []const u8) *Step.WriteFile { const write_file_step = self.addWriteFiles(); write_file_step.add(file_path, data); return write_file_step; } -pub fn addWriteFiles(b: *Build) *WriteFileStep { - return WriteFileStep.create(b); +pub fn addWriteFiles(b: *Build) *Step.WriteFile { + return Step.WriteFile.create(b); } -pub fn addRemoveDirTree(self: *Build, dir_path: []const u8) *RemoveDirStep { - const remove_dir_step = self.allocator.create(RemoveDirStep) catch @panic("OOM"); - remove_dir_step.* = RemoveDirStep.init(self, dir_path); +pub fn addRemoveDirTree(self: *Build, dir_path: []const u8) *Step.RemoveDir { + const remove_dir_step = self.allocator.create(Step.RemoveDir) catch @panic("OOM"); + remove_dir_step.* = Step.RemoveDir.init(self, dir_path); return remove_dir_step; } -pub fn addFmt(b: *Build, options: FmtStep.Options) *FmtStep { - return FmtStep.create(b, options); +pub fn addFmt(b: *Build, options: Step.Fmt.Options) *Step.Fmt { + return Step.Fmt.create(b, options); } -pub fn addTranslateC(self: *Build, options: TranslateCStep.Options) *TranslateCStep { - return TranslateCStep.create(self, options); +pub fn addTranslateC(self: *Build, options: Step.TranslateC.Options) 
*Step.TranslateC { + return Step.TranslateC.create(self, options); } pub fn getInstallStep(self: *Build) *Step { @@ -1213,12 +1227,12 @@ fn printCmd(ally: Allocator, cwd: ?[]const u8, argv: []const []const u8) void { std.debug.print("{s}\n", .{text}); } -pub fn installArtifact(self: *Build, artifact: *CompileStep) void { +pub fn installArtifact(self: *Build, artifact: *Step.Compile) void { self.getInstallStep().dependOn(&self.addInstallArtifact(artifact).step); } -pub fn addInstallArtifact(self: *Build, artifact: *CompileStep) *InstallArtifactStep { - return InstallArtifactStep.create(self, artifact); +pub fn addInstallArtifact(self: *Build, artifact: *Step.Compile) *Step.InstallArtifact { + return Step.InstallArtifact.create(self, artifact); } ///`dest_rel_path` is relative to prefix path @@ -1240,26 +1254,26 @@ pub fn installLibFile(self: *Build, src_path: []const u8, dest_rel_path: []const self.getInstallStep().dependOn(&self.addInstallFileWithDir(.{ .path = src_path }, .lib, dest_rel_path).step); } -pub fn addObjCopy(b: *Build, source: FileSource, options: ObjCopyStep.Options) *ObjCopyStep { - return ObjCopyStep.create(b, source, options); +pub fn addObjCopy(b: *Build, source: FileSource, options: Step.ObjCopy.Options) *Step.ObjCopy { + return Step.ObjCopy.create(b, source, options); } ///`dest_rel_path` is relative to install prefix path -pub fn addInstallFile(self: *Build, source: FileSource, dest_rel_path: []const u8) *InstallFileStep { +pub fn addInstallFile(self: *Build, source: FileSource, dest_rel_path: []const u8) *Step.InstallFile { return self.addInstallFileWithDir(source.dupe(self), .prefix, dest_rel_path); } ///`dest_rel_path` is relative to bin path -pub fn addInstallBinFile(self: *Build, source: FileSource, dest_rel_path: []const u8) *InstallFileStep { +pub fn addInstallBinFile(self: *Build, source: FileSource, dest_rel_path: []const u8) *Step.InstallFile { return self.addInstallFileWithDir(source.dupe(self), .bin, dest_rel_path); } ///`dest_rel_path` is relative to lib path -pub fn addInstallLibFile(self: *Build, source: FileSource, dest_rel_path: []const u8) *InstallFileStep { +pub fn addInstallLibFile(self: *Build, source: FileSource, dest_rel_path: []const u8) *Step.InstallFile { return self.addInstallFileWithDir(source.dupe(self), .lib, dest_rel_path); } -pub fn addInstallHeaderFile(b: *Build, src_path: []const u8, dest_rel_path: []const u8) *InstallFileStep { +pub fn addInstallHeaderFile(b: *Build, src_path: []const u8, dest_rel_path: []const u8) *Step.InstallFile { return b.addInstallFileWithDir(.{ .path = src_path }, .header, dest_rel_path); } @@ -1268,22 +1282,22 @@ pub fn addInstallFileWithDir( source: FileSource, install_dir: InstallDir, dest_rel_path: []const u8, -) *InstallFileStep { - return InstallFileStep.create(self, source.dupe(self), install_dir, dest_rel_path); +) *Step.InstallFile { + return Step.InstallFile.create(self, source.dupe(self), install_dir, dest_rel_path); } -pub fn addInstallDirectory(self: *Build, options: InstallDirectoryOptions) *InstallDirStep { - const install_step = self.allocator.create(InstallDirStep) catch @panic("OOM"); - install_step.* = InstallDirStep.init(self, options); +pub fn addInstallDirectory(self: *Build, options: InstallDirectoryOptions) *Step.InstallDir { + const install_step = self.allocator.create(Step.InstallDir) catch @panic("OOM"); + install_step.* = Step.InstallDir.init(self, options); return install_step; } pub fn addCheckFile( b: *Build, file_source: FileSource, - options: CheckFileStep.Options, -) 
*CheckFileStep { - return CheckFileStep.create(b, file_source, options); + options: Step.CheckFile.Options, +) *Step.CheckFile { + return Step.CheckFile.create(b, file_source, options); } pub fn pushInstalledFile(self: *Build, dir: InstallDir, dest_rel_path: []const u8) void { @@ -1453,10 +1467,10 @@ pub fn getInstallPath(self: *Build, dir: InstallDir, dest_rel_path: []const u8) pub const Dependency = struct { builder: *Build, - pub fn artifact(d: *Dependency, name: []const u8) *CompileStep { - var found: ?*CompileStep = null; + pub fn artifact(d: *Dependency, name: []const u8) *Step.Compile { + var found: ?*Step.Compile = null; for (d.builder.install_tls.step.dependencies.items) |dep_step| { - const inst = dep_step.cast(InstallArtifactStep) orelse continue; + const inst = dep_step.cast(Step.InstallArtifact) orelse continue; if (mem.eql(u8, inst.artifact.name, name)) { if (found != null) panic("artifact name '{s}' is ambiguous", .{name}); found = inst.artifact; @@ -1464,7 +1478,7 @@ pub const Dependency = struct { } return found orelse { for (d.builder.install_tls.step.dependencies.items) |dep_step| { - const inst = dep_step.cast(InstallArtifactStep) orelse continue; + const inst = dep_step.cast(Step.InstallArtifact) orelse continue; log.info("available artifact: '{s}'", .{inst.artifact.name}); } panic("unable to find artifact '{s}'", .{name}); @@ -1808,17 +1822,5 @@ pub fn hex64(x: u64) [16]u8 { } test { - _ = CheckFileStep; - _ = CheckObjectStep; - _ = FmtStep; - _ = InstallArtifactStep; - _ = InstallDirStep; - _ = InstallFileStep; - _ = ObjCopyStep; - _ = CompileStep; - _ = OptionsStep; - _ = RemoveDirStep; - _ = RunStep; - _ = TranslateCStep; - _ = WriteFileStep; + _ = Step; } diff --git a/lib/std/Build/Step.zig b/lib/std/Build/Step.zig index bdb500d99c..40c88df2b9 100644 --- a/lib/std/Build/Step.zig +++ b/lib/std/Build/Step.zig @@ -94,26 +94,41 @@ pub const Id = enum { pub fn Type(comptime id: Id) type { return switch (id) { .top_level => Build.TopLevelStep, - .compile => Build.CompileStep, - .install_artifact => Build.InstallArtifactStep, - .install_file => Build.InstallFileStep, - .install_dir => Build.InstallDirStep, - .remove_dir => Build.RemoveDirStep, - .fmt => Build.FmtStep, - .translate_c => Build.TranslateCStep, - .write_file => Build.WriteFileStep, - .run => Build.RunStep, - .check_file => Build.CheckFileStep, - .check_object => Build.CheckObjectStep, - .config_header => Build.ConfigHeaderStep, - .objcopy => Build.ObjCopyStep, - .options => Build.OptionsStep, + .compile => Compile, + .install_artifact => InstallArtifact, + .install_file => InstallFile, + .install_dir => InstallDir, + .remove_dir => RemoveDir, + .fmt => Fmt, + .translate_c => TranslateC, + .write_file => WriteFile, + .run => Run, + .check_file => CheckFile, + .check_object => CheckObject, + .config_header => ConfigHeader, + .objcopy => ObjCopy, + .options => Options, .custom => @compileError("no type available for custom step"), }; } }; -pub const Options = struct { +pub const CheckFile = @import("Step/CheckFile.zig"); +pub const CheckObject = @import("Step/CheckObject.zig"); +pub const ConfigHeader = @import("Step/ConfigHeader.zig"); +pub const Fmt = @import("Step/Fmt.zig"); +pub const InstallArtifact = @import("Step/InstallArtifact.zig"); +pub const InstallDir = @import("Step/InstallDir.zig"); +pub const InstallFile = @import("Step/InstallFile.zig"); +pub const ObjCopy = @import("Step/ObjCopy.zig"); +pub const Compile = @import("Step/Compile.zig"); +pub const Options = @import("Step/Options.zig"); +pub 
const RemoveDir = @import("Step/RemoveDir.zig"); +pub const Run = @import("Step/Run.zig"); +pub const TranslateC = @import("Step/TranslateC.zig"); +pub const WriteFile = @import("Step/WriteFile.zig"); + +pub const StepOptions = struct { id: Id, name: []const u8, owner: *Build, @@ -122,7 +137,7 @@ pub const Options = struct { max_rss: usize = 0, }; -pub fn init(options: Options) Step { +pub fn init(options: StepOptions) Step { const arena = options.owner.allocator; var addresses = [1]usize{0} ** n_debug_stack_frames; @@ -387,8 +402,8 @@ pub fn evalZigProcess( s.result_duration_ns = timer.read(); s.result_peak_rss = child.resource_usage_statistics.getMaxRss() orelse 0; - // Special handling for CompileStep that is expecting compile errors. - if (s.cast(Build.CompileStep)) |compile| switch (term) { + // Special handling for Compile step that is expecting compile errors. + if (s.cast(Compile)) |compile| switch (term) { .Exited => { // Note that the exit code may be 0 in this case due to the // compiler server protocol. @@ -535,3 +550,19 @@ pub fn writeManifest(s: *Step, man: *std.Build.Cache.Manifest) !void { }; } } + +test { + _ = CheckFile; + _ = CheckObject; + _ = Fmt; + _ = InstallArtifact; + _ = InstallDir; + _ = InstallFile; + _ = ObjCopy; + _ = Compile; + _ = Options; + _ = RemoveDir; + _ = Run; + _ = TranslateC; + _ = WriteFile; +} diff --git a/lib/std/Build/Step/CheckFile.zig b/lib/std/Build/Step/CheckFile.zig index ad8b1a25f0..dc359b5654 100644 --- a/lib/std/Build/Step/CheckFile.zig +++ b/lib/std/Build/Step/CheckFile.zig @@ -1,8 +1,8 @@ //! Fail the build step if a file does not match certain checks. //! TODO: make this more flexible, supporting more kinds of checks. //! TODO: generalize the code in std.testing.expectEqualStrings and make this -//! CheckFileStep produce those helpful diagnostics when there is not a match. -const CheckFileStep = @This(); +//! CheckFile step produce those helpful diagnostics when there is not a match. 
+const CheckFile = @This(); const std = @import("std"); const Step = std.Build.Step; const fs = std.fs; @@ -25,8 +25,8 @@ pub fn create( owner: *std.Build, source: std.Build.FileSource, options: Options, -) *CheckFileStep { - const self = owner.allocator.create(CheckFileStep) catch @panic("OOM"); +) *CheckFile { + const self = owner.allocator.create(CheckFile) catch @panic("OOM"); self.* = .{ .step = Step.init(.{ .id = .check_file, @@ -42,14 +42,14 @@ pub fn create( return self; } -pub fn setName(self: *CheckFileStep, name: []const u8) void { +pub fn setName(self: *CheckFile, name: []const u8) void { self.step.name = name; } fn make(step: *Step, prog_node: *std.Progress.Node) !void { _ = prog_node; const b = step.owner; - const self = @fieldParentPtr(CheckFileStep, "step", step); + const self = @fieldParentPtr(CheckFile, "step", step); const src_path = self.source.getPath(b); const contents = fs.cwd().readFileAlloc(b.allocator, src_path, self.max_bytes) catch |err| { diff --git a/lib/std/Build/Step/CheckObject.zig b/lib/std/Build/Step/CheckObject.zig index 431f74eccc..c77dc3de36 100644 --- a/lib/std/Build/Step/CheckObject.zig +++ b/lib/std/Build/Step/CheckObject.zig @@ -6,7 +6,7 @@ const math = std.math; const mem = std.mem; const testing = std.testing; -const CheckObjectStep = @This(); +const CheckObject = @This(); const Allocator = mem.Allocator; const Step = std.Build.Step; @@ -24,9 +24,9 @@ pub fn create( owner: *std.Build, source: std.Build.FileSource, obj_format: std.Target.ObjectFormat, -) *CheckObjectStep { +) *CheckObject { const gpa = owner.allocator; - const self = gpa.create(CheckObjectStep) catch @panic("OOM"); + const self = gpa.create(CheckObject) catch @panic("OOM"); self.* = .{ .step = Step.init(.{ .id = .check_file, @@ -47,11 +47,11 @@ pub fn create( /// TODO this doesn't actually compare, and there's no apparent reason for it /// to depend on the check object step. I don't see why this function should exist, /// the caller could just add the run step directly. -pub fn runAndCompare(self: *CheckObjectStep) *std.Build.RunStep { +pub fn runAndCompare(self: *CheckObject) *std.Build.Step.Run { const dependencies_len = self.step.dependencies.items.len; assert(dependencies_len > 0); const exe_step = self.step.dependencies.items[dependencies_len - 1]; - const exe = exe_step.cast(std.Build.CompileStep).?; + const exe = exe_step.cast(std.Build.Step.Compile).?; const run = self.step.owner.addRunArtifact(exe); run.skip_foreign_checks = true; run.step.dependOn(&self.step); @@ -274,15 +274,15 @@ const Check = struct { }; /// Creates a new sequence of actions with `phrase` as the first anchor searched phrase. -pub fn checkStart(self: *CheckObjectStep, phrase: []const u8) void { +pub fn checkStart(self: *CheckObject, phrase: []const u8) void { var new_check = Check.create(self.step.owner.allocator); new_check.match(.{ .string = self.step.owner.dupe(phrase) }); self.checks.append(new_check) catch @panic("OOM"); } -/// Adds another searched phrase to the latest created Check with `CheckObjectStep.checkStart(...)`. +/// Adds another searched phrase to the latest created Check with `CheckObject.checkStart(...)`. /// Asserts at least one check already exists. 
-pub fn checkNext(self: *CheckObjectStep, phrase: []const u8) void { +pub fn checkNext(self: *CheckObject, phrase: []const u8) void { assert(self.checks.items.len > 0); const last = &self.checks.items[self.checks.items.len - 1]; last.match(.{ .string = self.step.owner.dupe(phrase) }); @@ -291,7 +291,7 @@ pub fn checkNext(self: *CheckObjectStep, phrase: []const u8) void { /// Like `checkNext()` but takes an additional argument `FileSource` which will be /// resolved to a full search query in `make()`. pub fn checkNextFileSource( - self: *CheckObjectStep, + self: *CheckObject, phrase: []const u8, file_source: std.Build.FileSource, ) void { @@ -300,10 +300,10 @@ pub fn checkNextFileSource( last.match(.{ .string = self.step.owner.dupe(phrase), .file_source = file_source }); } -/// Adds another searched phrase to the latest created Check with `CheckObjectStep.checkStart(...)` +/// Adds another searched phrase to the latest created Check with `CheckObject.checkStart(...)` /// however ensures there is no matching phrase in the output. /// Asserts at least one check already exists. -pub fn checkNotPresent(self: *CheckObjectStep, phrase: []const u8) void { +pub fn checkNotPresent(self: *CheckObject, phrase: []const u8) void { assert(self.checks.items.len > 0); const last = &self.checks.items[self.checks.items.len - 1]; last.notPresent(.{ .string = self.step.owner.dupe(phrase) }); @@ -312,7 +312,7 @@ pub fn checkNotPresent(self: *CheckObjectStep, phrase: []const u8) void { /// Creates a new check checking specifically symbol table parsed and dumped from the object /// file. /// Issuing this check will force parsing and dumping of the symbol table. -pub fn checkInSymtab(self: *CheckObjectStep) void { +pub fn checkInSymtab(self: *CheckObject) void { self.dump_symtab = true; const symtab_label = switch (self.obj_format) { .macho => MachODumper.symtab_label, @@ -325,7 +325,7 @@ pub fn checkInSymtab(self: *CheckObjectStep) void { /// on the extracted variables. It will then compare the reduced program with the value of /// the expected variable. pub fn checkComputeCompare( - self: *CheckObjectStep, + self: *CheckObject, program: []const u8, expected: ComputeCompareExpected, ) void { @@ -338,7 +338,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void { _ = prog_node; const b = step.owner; const gpa = b.allocator; - const self = @fieldParentPtr(CheckObjectStep, "step", step); + const self = @fieldParentPtr(CheckObject, "step", step); const src_path = self.source.getPath(b); const contents = fs.cwd().readFileAllocOptions( diff --git a/lib/std/Build/Step/Compile.zig b/lib/std/Build/Step/Compile.zig index 7627c4e6d0..2371f49daf 100644 --- a/lib/std/Build/Step/Compile.zig +++ b/lib/std/Build/Step/Compile.zig @@ -18,14 +18,8 @@ const ExecError = std.Build.ExecError; const Module = std.Build.Module; const VcpkgRoot = std.Build.VcpkgRoot; const InstallDir = std.Build.InstallDir; -const InstallArtifactStep = std.Build.InstallArtifactStep; const GeneratedFile = std.Build.GeneratedFile; -const ObjCopyStep = std.Build.ObjCopyStep; -const CheckObjectStep = std.Build.CheckObjectStep; -const RunStep = std.Build.RunStep; -const OptionsStep = std.Build.OptionsStep; -const ConfigHeaderStep = std.Build.ConfigHeaderStep; -const CompileStep = @This(); +const Compile = @This(); pub const base_id: Step.Id = .compile; @@ -211,8 +205,8 @@ want_lto: ?bool = null, use_llvm: ?bool, use_lld: ?bool, -/// This is an advanced setting that can change the intent of this CompileStep. 
-/// If this slice has nonzero length, it means that this CompileStep exists to +/// This is an advanced setting that can change the intent of this Compile step. +/// If this slice has nonzero length, it means that this Compile step exists to /// check for compile errors and return *success* if they match, and failure /// otherwise. expect_errors: []const []const u8 = &.{}, @@ -242,7 +236,7 @@ pub const CSourceFile = struct { pub const LinkObject = union(enum) { static_path: FileSource, - other_step: *CompileStep, + other_step: *Compile, system_lib: SystemLib, assembly_file: FileSource, c_source_file: *CSourceFile, @@ -273,8 +267,8 @@ const FrameworkLinkInfo = struct { pub const IncludeDir = union(enum) { raw_path: []const u8, raw_path_system: []const u8, - other_step: *CompileStep, - config_header_step: *ConfigHeaderStep, + other_step: *Compile, + config_header_step: *Step.ConfigHeader, }; pub const Options = struct { @@ -319,7 +313,7 @@ pub const EmitOption = union(enum) { } }; -pub fn create(owner: *std.Build, options: Options) *CompileStep { +pub fn create(owner: *std.Build, options: Options) *Compile { const name = owner.dupe(options.name); const root_src: ?FileSource = if (options.root_source_file) |rsrc| rsrc.dupe(owner) else null; if (mem.indexOf(u8, name, "/") != null or mem.indexOf(u8, name, "\\") != null) { @@ -361,8 +355,8 @@ pub fn create(owner: *std.Build, options: Options) *CompileStep { .version = options.version, }) catch @panic("OOM"); - const self = owner.allocator.create(CompileStep) catch @panic("OOM"); - self.* = CompileStep{ + const self = owner.allocator.create(Compile) catch @panic("OOM"); + self.* = Compile{ .strip = null, .unwind_tables = null, .verbose_link = false, @@ -459,7 +453,7 @@ pub fn create(owner: *std.Build, options: Options) *CompileStep { return self; } -pub fn installHeader(cs: *CompileStep, src_path: []const u8, dest_rel_path: []const u8) void { +pub fn installHeader(cs: *Compile, src_path: []const u8, dest_rel_path: []const u8) void { const b = cs.step.owner; const install_file = b.addInstallHeaderFile(src_path, dest_rel_path); b.getInstallStep().dependOn(&install_file.step); @@ -472,8 +466,8 @@ pub const InstallConfigHeaderOptions = struct { }; pub fn installConfigHeader( - cs: *CompileStep, - config_header: *ConfigHeaderStep, + cs: *Compile, + config_header: *Step.ConfigHeader, options: InstallConfigHeaderOptions, ) void { const dest_rel_path = options.dest_rel_path orelse config_header.include_path; @@ -489,7 +483,7 @@ pub fn installConfigHeader( } pub fn installHeadersDirectory( - a: *CompileStep, + a: *Compile, src_dir_path: []const u8, dest_rel_path: []const u8, ) void { @@ -501,8 +495,8 @@ pub fn installHeadersDirectory( } pub fn installHeadersDirectoryOptions( - cs: *CompileStep, - options: std.Build.InstallDirStep.Options, + cs: *Compile, + options: std.Build.Step.InstallDir.Options, ) void { const b = cs.step.owner; const install_dir = b.addInstallDirectory(options); @@ -510,7 +504,7 @@ pub fn installHeadersDirectoryOptions( cs.installed_headers.append(&install_dir.step) catch @panic("OOM"); } -pub fn installLibraryHeaders(cs: *CompileStep, l: *CompileStep) void { +pub fn installLibraryHeaders(cs: *Compile, l: *Compile) void { assert(l.kind == .lib); const b = cs.step.owner; const install_step = b.getInstallStep(); @@ -533,7 +527,7 @@ pub fn installLibraryHeaders(cs: *CompileStep, l: *CompileStep) void { cs.installed_headers.appendSlice(l.installed_headers.items) catch @panic("OOM"); } -pub fn addObjCopy(cs: *CompileStep, options: 
ObjCopyStep.Options) *ObjCopyStep { +pub fn addObjCopy(cs: *Compile, options: Step.ObjCopy.Options) *Step.ObjCopy { const b = cs.step.owner; var copy = options; if (copy.basename == null) { @@ -554,34 +548,34 @@ pub const run = @compileError("deprecated; use std.Build.addRunArtifact"); /// which is undesirable when installing an artifact provided by a dependency package. pub const install = @compileError("deprecated; use std.Build.installArtifact"); -pub fn checkObject(self: *CompileStep) *CheckObjectStep { - return CheckObjectStep.create(self.step.owner, self.getOutputSource(), self.target_info.target.ofmt); +pub fn checkObject(self: *Compile) *Step.CheckObject { + return Step.CheckObject.create(self.step.owner, self.getOutputSource(), self.target_info.target.ofmt); } -pub fn setLinkerScriptPath(self: *CompileStep, source: FileSource) void { +pub fn setLinkerScriptPath(self: *Compile, source: FileSource) void { const b = self.step.owner; self.linker_script = source.dupe(b); source.addStepDependencies(&self.step); } -pub fn forceUndefinedSymbol(self: *CompileStep, symbol_name: []const u8) void { +pub fn forceUndefinedSymbol(self: *Compile, symbol_name: []const u8) void { const b = self.step.owner; self.force_undefined_symbols.put(b.dupe(symbol_name), {}) catch @panic("OOM"); } -pub fn linkFramework(self: *CompileStep, framework_name: []const u8) void { +pub fn linkFramework(self: *Compile, framework_name: []const u8) void { const b = self.step.owner; self.frameworks.put(b.dupe(framework_name), .{}) catch @panic("OOM"); } -pub fn linkFrameworkNeeded(self: *CompileStep, framework_name: []const u8) void { +pub fn linkFrameworkNeeded(self: *Compile, framework_name: []const u8) void { const b = self.step.owner; self.frameworks.put(b.dupe(framework_name), .{ .needed = true, }) catch @panic("OOM"); } -pub fn linkFrameworkWeak(self: *CompileStep, framework_name: []const u8) void { +pub fn linkFrameworkWeak(self: *Compile, framework_name: []const u8) void { const b = self.step.owner; self.frameworks.put(b.dupe(framework_name), .{ .weak = true, @@ -589,7 +583,7 @@ pub fn linkFrameworkWeak(self: *CompileStep, framework_name: []const u8) void { } /// Returns whether the library, executable, or object depends on a particular system library. 
-pub fn dependsOnSystemLibrary(self: CompileStep, name: []const u8) bool { +pub fn dependsOnSystemLibrary(self: Compile, name: []const u8) bool { if (isLibCLibrary(name)) { return self.is_linking_libc; } @@ -605,51 +599,51 @@ pub fn dependsOnSystemLibrary(self: CompileStep, name: []const u8) bool { return false; } -pub fn linkLibrary(self: *CompileStep, lib: *CompileStep) void { +pub fn linkLibrary(self: *Compile, lib: *Compile) void { assert(lib.kind == .lib); self.linkLibraryOrObject(lib); } -pub fn isDynamicLibrary(self: *CompileStep) bool { +pub fn isDynamicLibrary(self: *Compile) bool { return self.kind == .lib and self.linkage == Linkage.dynamic; } -pub fn isStaticLibrary(self: *CompileStep) bool { +pub fn isStaticLibrary(self: *Compile) bool { return self.kind == .lib and self.linkage != Linkage.dynamic; } -pub fn producesPdbFile(self: *CompileStep) bool { +pub fn producesPdbFile(self: *Compile) bool { if (!self.target.isWindows() and !self.target.isUefi()) return false; if (self.target.getObjectFormat() == .c) return false; if (self.strip == true) return false; return self.isDynamicLibrary() or self.kind == .exe or self.kind == .@"test"; } -pub fn linkLibC(self: *CompileStep) void { +pub fn linkLibC(self: *Compile) void { self.is_linking_libc = true; } -pub fn linkLibCpp(self: *CompileStep) void { +pub fn linkLibCpp(self: *Compile) void { self.is_linking_libcpp = true; } /// If the value is omitted, it is set to 1. /// `name` and `value` need not live longer than the function call. -pub fn defineCMacro(self: *CompileStep, name: []const u8, value: ?[]const u8) void { +pub fn defineCMacro(self: *Compile, name: []const u8, value: ?[]const u8) void { const b = self.step.owner; const macro = std.Build.constructCMacro(b.allocator, name, value); self.c_macros.append(macro) catch @panic("OOM"); } /// name_and_value looks like [name]=[value]. If the value is omitted, it is set to 1. -pub fn defineCMacroRaw(self: *CompileStep, name_and_value: []const u8) void { +pub fn defineCMacroRaw(self: *Compile, name_and_value: []const u8) void { const b = self.step.owner; self.c_macros.append(b.dupe(name_and_value)) catch @panic("OOM"); } /// This one has no integration with anything, it just puts -lname on the command line. /// Prefer to use `linkSystemLibrary` instead. -pub fn linkSystemLibraryName(self: *CompileStep, name: []const u8) void { +pub fn linkSystemLibraryName(self: *Compile, name: []const u8) void { const b = self.step.owner; self.link_objects.append(.{ .system_lib = .{ @@ -663,7 +657,7 @@ pub fn linkSystemLibraryName(self: *CompileStep, name: []const u8) void { /// This one has no integration with anything, it just puts -needed-lname on the command line. /// Prefer to use `linkSystemLibraryNeeded` instead. -pub fn linkSystemLibraryNeededName(self: *CompileStep, name: []const u8) void { +pub fn linkSystemLibraryNeededName(self: *Compile, name: []const u8) void { const b = self.step.owner; self.link_objects.append(.{ .system_lib = .{ @@ -677,7 +671,7 @@ pub fn linkSystemLibraryNeededName(self: *CompileStep, name: []const u8) void { /// Darwin-only. This one has no integration with anything, it just puts -weak-lname on the /// command line. Prefer to use `linkSystemLibraryWeak` instead. 
-pub fn linkSystemLibraryWeakName(self: *CompileStep, name: []const u8) void { +pub fn linkSystemLibraryWeakName(self: *Compile, name: []const u8) void { const b = self.step.owner; self.link_objects.append(.{ .system_lib = .{ @@ -691,7 +685,7 @@ pub fn linkSystemLibraryWeakName(self: *CompileStep, name: []const u8) void { /// This links against a system library, exclusively using pkg-config to find the library. /// Prefer to use `linkSystemLibrary` instead. -pub fn linkSystemLibraryPkgConfigOnly(self: *CompileStep, lib_name: []const u8) void { +pub fn linkSystemLibraryPkgConfigOnly(self: *Compile, lib_name: []const u8) void { const b = self.step.owner; self.link_objects.append(.{ .system_lib = .{ @@ -705,7 +699,7 @@ pub fn linkSystemLibraryPkgConfigOnly(self: *CompileStep, lib_name: []const u8) /// This links against a system library, exclusively using pkg-config to find the library. /// Prefer to use `linkSystemLibraryNeeded` instead. -pub fn linkSystemLibraryNeededPkgConfigOnly(self: *CompileStep, lib_name: []const u8) void { +pub fn linkSystemLibraryNeededPkgConfigOnly(self: *Compile, lib_name: []const u8) void { const b = self.step.owner; self.link_objects.append(.{ .system_lib = .{ @@ -719,7 +713,7 @@ pub fn linkSystemLibraryNeededPkgConfigOnly(self: *CompileStep, lib_name: []cons /// Run pkg-config for the given library name and parse the output, returning the arguments /// that should be passed to zig to link the given library. -fn runPkgConfig(self: *CompileStep, lib_name: []const u8) ![]const []const u8 { +fn runPkgConfig(self: *Compile, lib_name: []const u8) ![]const []const u8 { const b = self.step.owner; const pkg_name = match: { // First we have to map the library name to pkg config name. Unfortunately, @@ -813,19 +807,19 @@ fn runPkgConfig(self: *CompileStep, lib_name: []const u8) ![]const []const u8 { return zig_args.toOwnedSlice(); } -pub fn linkSystemLibrary(self: *CompileStep, name: []const u8) void { +pub fn linkSystemLibrary(self: *Compile, name: []const u8) void { self.linkSystemLibraryInner(name, .{}); } -pub fn linkSystemLibraryNeeded(self: *CompileStep, name: []const u8) void { +pub fn linkSystemLibraryNeeded(self: *Compile, name: []const u8) void { self.linkSystemLibraryInner(name, .{ .needed = true }); } -pub fn linkSystemLibraryWeak(self: *CompileStep, name: []const u8) void { +pub fn linkSystemLibraryWeak(self: *Compile, name: []const u8) void { self.linkSystemLibraryInner(name, .{ .weak = true }); } -fn linkSystemLibraryInner(self: *CompileStep, name: []const u8, opts: struct { +fn linkSystemLibraryInner(self: *Compile, name: []const u8, opts: struct { needed: bool = false, weak: bool = false, }) void { @@ -850,7 +844,7 @@ fn linkSystemLibraryInner(self: *CompileStep, name: []const u8, opts: struct { } /// Handy when you have many C/C++ source files and want them all to have the same flags. 
-pub fn addCSourceFiles(self: *CompileStep, files: []const []const u8, flags: []const []const u8) void { +pub fn addCSourceFiles(self: *Compile, files: []const []const u8, flags: []const []const u8) void { const b = self.step.owner; const c_source_files = b.allocator.create(CSourceFiles) catch @panic("OOM"); @@ -864,14 +858,14 @@ pub fn addCSourceFiles(self: *CompileStep, files: []const []const u8, flags: []c self.link_objects.append(.{ .c_source_files = c_source_files }) catch @panic("OOM"); } -pub fn addCSourceFile(self: *CompileStep, file: []const u8, flags: []const []const u8) void { +pub fn addCSourceFile(self: *Compile, file: []const u8, flags: []const []const u8) void { self.addCSourceFileSource(.{ .args = flags, .source = .{ .path = file }, }); } -pub fn addCSourceFileSource(self: *CompileStep, source: CSourceFile) void { +pub fn addCSourceFileSource(self: *Compile, source: CSourceFile) void { const b = self.step.owner; const c_source_file = b.allocator.create(CSourceFile) catch @panic("OOM"); c_source_file.* = source.dupe(b); @@ -879,85 +873,85 @@ pub fn addCSourceFileSource(self: *CompileStep, source: CSourceFile) void { source.source.addStepDependencies(&self.step); } -pub fn setVerboseLink(self: *CompileStep, value: bool) void { +pub fn setVerboseLink(self: *Compile, value: bool) void { self.verbose_link = value; } -pub fn setVerboseCC(self: *CompileStep, value: bool) void { +pub fn setVerboseCC(self: *Compile, value: bool) void { self.verbose_cc = value; } -pub fn overrideZigLibDir(self: *CompileStep, dir_path: []const u8) void { +pub fn overrideZigLibDir(self: *Compile, dir_path: []const u8) void { const b = self.step.owner; self.zig_lib_dir = b.dupePath(dir_path); } -pub fn setMainPkgPath(self: *CompileStep, dir_path: []const u8) void { +pub fn setMainPkgPath(self: *Compile, dir_path: []const u8) void { const b = self.step.owner; self.main_pkg_path = b.dupePath(dir_path); } -pub fn setLibCFile(self: *CompileStep, libc_file: ?FileSource) void { +pub fn setLibCFile(self: *Compile, libc_file: ?FileSource) void { const b = self.step.owner; self.libc_file = if (libc_file) |f| f.dupe(b) else null; } /// Returns the generated executable, library or object file. /// To run an executable built with zig build, use `run`, or create an install step and invoke it. -pub fn getOutputSource(self: *CompileStep) FileSource { +pub fn getOutputSource(self: *Compile) FileSource { return .{ .generated = &self.output_path_source }; } -pub fn getOutputDirectorySource(self: *CompileStep) FileSource { +pub fn getOutputDirectorySource(self: *Compile) FileSource { return .{ .generated = &self.output_dirname_source }; } /// Returns the generated import library. This function can only be called for libraries. -pub fn getOutputLibSource(self: *CompileStep) FileSource { +pub fn getOutputLibSource(self: *Compile) FileSource { assert(self.kind == .lib); return .{ .generated = &self.output_lib_path_source }; } /// Returns the generated header file. /// This function can only be called for libraries or object files which have `emit_h` set. -pub fn getOutputHSource(self: *CompileStep) FileSource { +pub fn getOutputHSource(self: *Compile) FileSource { assert(self.kind != .exe and self.kind != .@"test"); assert(self.emit_h); return .{ .generated = &self.output_h_path_source }; } /// Returns the generated PDB file. This function can only be called for Windows and UEFI. -pub fn getOutputPdbSource(self: *CompileStep) FileSource { +pub fn getOutputPdbSource(self: *Compile) FileSource { // TODO: Is this right? 
Isn't PDB for *any* PE/COFF file? assert(self.target.isWindows() or self.target.isUefi()); return .{ .generated = &self.output_pdb_path_source }; } -pub fn addAssemblyFile(self: *CompileStep, path: []const u8) void { +pub fn addAssemblyFile(self: *Compile, path: []const u8) void { const b = self.step.owner; self.link_objects.append(.{ .assembly_file = .{ .path = b.dupe(path) }, }) catch @panic("OOM"); } -pub fn addAssemblyFileSource(self: *CompileStep, source: FileSource) void { +pub fn addAssemblyFileSource(self: *Compile, source: FileSource) void { const b = self.step.owner; const source_duped = source.dupe(b); self.link_objects.append(.{ .assembly_file = source_duped }) catch @panic("OOM"); source_duped.addStepDependencies(&self.step); } -pub fn addObjectFile(self: *CompileStep, source_file: []const u8) void { +pub fn addObjectFile(self: *Compile, source_file: []const u8) void { self.addObjectFileSource(.{ .path = source_file }); } -pub fn addObjectFileSource(self: *CompileStep, source: FileSource) void { +pub fn addObjectFileSource(self: *Compile, source: FileSource) void { const b = self.step.owner; self.link_objects.append(.{ .static_path = source.dupe(b) }) catch @panic("OOM"); source.addStepDependencies(&self.step); } -pub fn addObject(self: *CompileStep, obj: *CompileStep) void { +pub fn addObject(self: *Compile, obj: *Compile) void { assert(obj.kind == .obj); self.linkLibraryOrObject(obj); } @@ -967,54 +961,54 @@ pub const addIncludeDir = @compileError("deprecated; use addIncludePath"); pub const addLibPath = @compileError("deprecated, use addLibraryPath"); pub const addFrameworkDir = @compileError("deprecated, use addFrameworkPath"); -pub fn addSystemIncludePath(self: *CompileStep, path: []const u8) void { +pub fn addSystemIncludePath(self: *Compile, path: []const u8) void { const b = self.step.owner; self.include_dirs.append(IncludeDir{ .raw_path_system = b.dupe(path) }) catch @panic("OOM"); } -pub fn addIncludePath(self: *CompileStep, path: []const u8) void { +pub fn addIncludePath(self: *Compile, path: []const u8) void { const b = self.step.owner; self.include_dirs.append(IncludeDir{ .raw_path = b.dupe(path) }) catch @panic("OOM"); } -pub fn addConfigHeader(self: *CompileStep, config_header: *ConfigHeaderStep) void { +pub fn addConfigHeader(self: *Compile, config_header: *Step.ConfigHeader) void { self.step.dependOn(&config_header.step); self.include_dirs.append(.{ .config_header_step = config_header }) catch @panic("OOM"); } -pub fn addLibraryPath(self: *CompileStep, path: []const u8) void { +pub fn addLibraryPath(self: *Compile, path: []const u8) void { const b = self.step.owner; self.lib_paths.append(.{ .path = b.dupe(path) }) catch @panic("OOM"); } -pub fn addLibraryPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { +pub fn addLibraryPathDirectorySource(self: *Compile, directory_source: FileSource) void { self.lib_paths.append(directory_source) catch @panic("OOM"); directory_source.addStepDependencies(&self.step); } -pub fn addRPath(self: *CompileStep, path: []const u8) void { +pub fn addRPath(self: *Compile, path: []const u8) void { const b = self.step.owner; self.rpaths.append(.{ .path = b.dupe(path) }) catch @panic("OOM"); } -pub fn addRPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { +pub fn addRPathDirectorySource(self: *Compile, directory_source: FileSource) void { self.rpaths.append(directory_source) catch @panic("OOM"); directory_source.addStepDependencies(&self.step); } -pub fn addFrameworkPath(self: 
*CompileStep, dir_path: []const u8) void { +pub fn addFrameworkPath(self: *Compile, dir_path: []const u8) void { const b = self.step.owner; self.framework_dirs.append(.{ .path = b.dupe(dir_path) }) catch @panic("OOM"); } -pub fn addFrameworkPathDirectorySource(self: *CompileStep, directory_source: FileSource) void { +pub fn addFrameworkPathDirectorySource(self: *Compile, directory_source: FileSource) void { self.framework_dirs.append(directory_source) catch @panic("OOM"); directory_source.addStepDependencies(&self.step); } /// Adds a module to be used with `@import` and exposing it in the current /// package's module table using `name`. -pub fn addModule(cs: *CompileStep, name: []const u8, module: *Module) void { +pub fn addModule(cs: *Compile, name: []const u8, module: *Module) void { const b = cs.step.owner; cs.modules.put(b.dupe(name), module) catch @panic("OOM"); @@ -1025,17 +1019,17 @@ pub fn addModule(cs: *CompileStep, name: []const u8, module: *Module) void { /// Adds a module to be used with `@import` without exposing it in the current /// package's module table. -pub fn addAnonymousModule(cs: *CompileStep, name: []const u8, options: std.Build.CreateModuleOptions) void { +pub fn addAnonymousModule(cs: *Compile, name: []const u8, options: std.Build.CreateModuleOptions) void { const b = cs.step.owner; const module = b.createModule(options); return addModule(cs, name, module); } -pub fn addOptions(cs: *CompileStep, module_name: []const u8, options: *OptionsStep) void { +pub fn addOptions(cs: *Compile, module_name: []const u8, options: *Step.Options) void { addModule(cs, module_name, options.createModule()); } -fn addRecursiveBuildDeps(cs: *CompileStep, module: *Module, done: *std.AutoHashMap(*Module, void)) !void { +fn addRecursiveBuildDeps(cs: *Compile, module: *Module, done: *std.AutoHashMap(*Module, void)) !void { if (done.contains(module)) return; try done.put(module, {}); module.source_file.addStepDependencies(&cs.step); @@ -1046,7 +1040,7 @@ fn addRecursiveBuildDeps(cs: *CompileStep, module: *Module, done: *std.AutoHashM /// If Vcpkg was found on the system, it will be added to include and lib /// paths for the specified target. 
-pub fn addVcpkgPaths(self: *CompileStep, linkage: CompileStep.Linkage) !void { +pub fn addVcpkgPaths(self: *Compile, linkage: Compile.Linkage) !void { const b = self.step.owner; // Ideally in the Unattempted case we would call the function recursively // after findVcpkgRoot and have only one switch statement, but the compiler @@ -1082,7 +1076,7 @@ pub fn addVcpkgPaths(self: *CompileStep, linkage: CompileStep.Linkage) !void { } } -pub fn setExecCmd(self: *CompileStep, args: []const ?[]const u8) void { +pub fn setExecCmd(self: *Compile, args: []const ?[]const u8) void { const b = self.step.owner; assert(self.kind == .@"test"); const duped_args = b.allocator.alloc(?[]u8, args.len) catch @panic("OOM"); @@ -1092,7 +1086,7 @@ pub fn setExecCmd(self: *CompileStep, args: []const ?[]const u8) void { self.exec_cmd_args = duped_args; } -fn linkLibraryOrObject(self: *CompileStep, other: *CompileStep) void { +fn linkLibraryOrObject(self: *Compile, other: *Compile) void { self.step.dependOn(&other.step); self.link_objects.append(.{ .other_step = other }) catch @panic("OOM"); self.include_dirs.append(.{ .other_step = other }) catch @panic("OOM"); @@ -1103,7 +1097,7 @@ fn linkLibraryOrObject(self: *CompileStep, other: *CompileStep) void { } fn appendModuleArgs( - cs: *CompileStep, + cs: *Compile, zig_args: *ArrayList([]const u8), ) error{OutOfMemory}!void { const b = cs.step.owner; @@ -1214,7 +1208,7 @@ fn constructDepString( fn make(step: *Step, prog_node: *std.Progress.Node) !void { const b = step.owner; - const self = @fieldParentPtr(CompileStep, "step", step); + const self = @fieldParentPtr(Compile, "step", step); if (self.root_src == null and self.link_objects.items.len == 0) { return step.fail("the linker needs one or more objects to link", .{}); @@ -2088,7 +2082,7 @@ const TransitiveDeps = struct { } } - fn addInner(td: *TransitiveDeps, other: *CompileStep, dyn: bool) !void { + fn addInner(td: *TransitiveDeps, other: *Compile, dyn: bool) !void { // Inherit dependency on libc and libc++ td.is_linking_libcpp = td.is_linking_libcpp or other.is_linking_libcpp; td.is_linking_libc = td.is_linking_libc or other.is_linking_libc; @@ -2128,7 +2122,7 @@ const TransitiveDeps = struct { } }; -fn checkCompileErrors(self: *CompileStep) !void { +fn checkCompileErrors(self: *Compile) !void { // Clear this field so that it does not get printed by the build runner. 
const actual_eb = self.step.result_error_bundle; self.step.result_error_bundle = std.zig.ErrorBundle.empty; diff --git a/lib/std/Build/Step/ConfigHeader.zig b/lib/std/Build/Step/ConfigHeader.zig index 6bfe28ae62..a17784c96a 100644 --- a/lib/std/Build/Step/ConfigHeader.zig +++ b/lib/std/Build/Step/ConfigHeader.zig @@ -1,5 +1,5 @@ const std = @import("std"); -const ConfigHeaderStep = @This(); +const ConfigHeader = @This(); const Step = std.Build.Step; pub const Style = union(enum) { @@ -48,8 +48,8 @@ pub const Options = struct { first_ret_addr: ?usize = null, }; -pub fn create(owner: *std.Build, options: Options) *ConfigHeaderStep { - const self = owner.allocator.create(ConfigHeaderStep) catch @panic("OOM"); +pub fn create(owner: *std.Build, options: Options) *ConfigHeader { + const self = owner.allocator.create(ConfigHeader) catch @panic("OOM"); var include_path: []const u8 = "config.h"; @@ -93,21 +93,21 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeaderStep { return self; } -pub fn addValues(self: *ConfigHeaderStep, values: anytype) void { +pub fn addValues(self: *ConfigHeader, values: anytype) void { return addValuesInner(self, values) catch @panic("OOM"); } -pub fn getFileSource(self: *ConfigHeaderStep) std.Build.FileSource { +pub fn getFileSource(self: *ConfigHeader) std.Build.FileSource { return .{ .generated = &self.output_file }; } -fn addValuesInner(self: *ConfigHeaderStep, values: anytype) !void { +fn addValuesInner(self: *ConfigHeader, values: anytype) !void { inline for (@typeInfo(@TypeOf(values)).Struct.fields) |field| { try putValue(self, field.name, field.type, @field(values, field.name)); } } -fn putValue(self: *ConfigHeaderStep, field_name: []const u8, comptime T: type, v: T) !void { +fn putValue(self: *ConfigHeader, field_name: []const u8, comptime T: type, v: T) !void { switch (@typeInfo(T)) { .Null => { try self.values.put(field_name, .undef); @@ -151,31 +151,31 @@ fn putValue(self: *ConfigHeaderStep, field_name: []const u8, comptime T: type, v else => {}, } - @compileError("unsupported ConfigHeaderStep value type: " ++ @typeName(T)); + @compileError("unsupported ConfigHeader value type: " ++ @typeName(T)); }, - else => @compileError("unsupported ConfigHeaderStep value type: " ++ @typeName(T)), + else => @compileError("unsupported ConfigHeader value type: " ++ @typeName(T)), } } fn make(step: *Step, prog_node: *std.Progress.Node) !void { _ = prog_node; const b = step.owner; - const self = @fieldParentPtr(ConfigHeaderStep, "step", step); + const self = @fieldParentPtr(ConfigHeader, "step", step); const gpa = b.allocator; const arena = b.allocator; var man = b.cache.obtain(); defer man.deinit(); - // Random bytes to make ConfigHeaderStep unique. Refresh this with new - // random bytes when ConfigHeaderStep implementation is modified in a + // Random bytes to make ConfigHeader unique. Refresh this with new + // random bytes when ConfigHeader implementation is modified in a // non-backwards-compatible way. 
man.hash.add(@as(u32, 0xdef08d23)); var output = std.ArrayList(u8).init(gpa); defer output.deinit(); - const header_text = "This file was generated by ConfigHeaderStep using the Zig Build System."; + const header_text = "This file was generated by ConfigHeader using the Zig Build System."; const c_generated_line = "/* " ++ header_text ++ " */\n"; const asm_generated_line = "; " ++ header_text ++ "\n"; diff --git a/lib/std/Build/Step/Fmt.zig b/lib/std/Build/Step/Fmt.zig index 23d5d9e3ff..8e8cc51c0d 100644 --- a/lib/std/Build/Step/Fmt.zig +++ b/lib/std/Build/Step/Fmt.zig @@ -3,7 +3,7 @@ //! * Check mode: fail the step if a non-conforming file is found. const std = @import("std"); const Step = std.Build.Step; -const FmtStep = @This(); +const Fmt = @This(); step: Step, paths: []const []const u8, @@ -19,8 +19,8 @@ pub const Options = struct { check: bool = false, }; -pub fn create(owner: *std.Build, options: Options) *FmtStep { - const self = owner.allocator.create(FmtStep) catch @panic("OOM"); +pub fn create(owner: *std.Build, options: Options) *Fmt { + const self = owner.allocator.create(Fmt) catch @panic("OOM"); const name = if (options.check) "zig fmt --check" else "zig fmt"; self.* = .{ .step = Step.init(.{ @@ -47,7 +47,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void { const b = step.owner; const arena = b.allocator; - const self = @fieldParentPtr(FmtStep, "step", step); + const self = @fieldParentPtr(Fmt, "step", step); var argv: std.ArrayListUnmanaged([]const u8) = .{}; try argv.ensureUnusedCapacity(arena, 2 + 1 + self.paths.len + 2 * self.exclude_paths.len); diff --git a/lib/std/Build/Step/InstallArtifact.zig b/lib/std/Build/Step/InstallArtifact.zig index fa357a9ae9..9552a44440 100644 --- a/lib/std/Build/Step/InstallArtifact.zig +++ b/lib/std/Build/Step/InstallArtifact.zig @@ -1,24 +1,23 @@ const std = @import("std"); const Step = std.Build.Step; -const CompileStep = std.Build.CompileStep; const InstallDir = std.Build.InstallDir; -const InstallArtifactStep = @This(); +const InstallArtifact = @This(); const fs = std.fs; pub const base_id = .install_artifact; step: Step, -artifact: *CompileStep, +artifact: *Step.Compile, dest_dir: InstallDir, pdb_dir: ?InstallDir, h_dir: ?InstallDir, /// If non-null, adds additional path components relative to dest_dir, and -/// overrides the basename of the CompileStep. +/// overrides the basename of the Compile step. 
dest_sub_path: ?[]const u8, -pub fn create(owner: *std.Build, artifact: *CompileStep) *InstallArtifactStep { - const self = owner.allocator.create(InstallArtifactStep) catch @panic("OOM"); - self.* = InstallArtifactStep{ +pub fn create(owner: *std.Build, artifact: *Step.Compile) *InstallArtifact { + const self = owner.allocator.create(InstallArtifact) catch @panic("OOM"); + self.* = InstallArtifact{ .step = Step.init(.{ .id = base_id, .name = owner.fmt("install {s}", .{artifact.name}), @@ -66,7 +65,7 @@ pub fn create(owner: *std.Build, artifact: *CompileStep) *InstallArtifactStep { fn make(step: *Step, prog_node: *std.Progress.Node) !void { _ = prog_node; - const self = @fieldParentPtr(InstallArtifactStep, "step", step); + const self = @fieldParentPtr(InstallArtifact, "step", step); const src_builder = self.artifact.step.owner; const dest_builder = step.owner; @@ -90,7 +89,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void { self.artifact.version != null and self.artifact.target.wantSharedLibSymLinks()) { - try CompileStep.doAtomicSymLinks(step, full_dest_path, self.artifact.major_only_filename.?, self.artifact.name_only_filename.?); + try Step.Compile.doAtomicSymLinks(step, full_dest_path, self.artifact.major_only_filename.?, self.artifact.name_only_filename.?); } if (self.artifact.isDynamicLibrary() and self.artifact.target.isWindows() and diff --git a/lib/std/Build/Step/InstallFile.zig b/lib/std/Build/Step/InstallFile.zig index b6b66fd1e0..784685dc3a 100644 --- a/lib/std/Build/Step/InstallFile.zig +++ b/lib/std/Build/Step/InstallFile.zig @@ -2,7 +2,7 @@ const std = @import("std"); const Step = std.Build.Step; const FileSource = std.Build.FileSource; const InstallDir = std.Build.InstallDir; -const InstallFileStep = @This(); +const InstallFile = @This(); const assert = std.debug.assert; pub const base_id = .install_file; @@ -20,10 +20,10 @@ pub fn create( source: FileSource, dir: InstallDir, dest_rel_path: []const u8, -) *InstallFileStep { +) *InstallFile { assert(dest_rel_path.len != 0); owner.pushInstalledFile(dir, dest_rel_path); - const self = owner.allocator.create(InstallFileStep) catch @panic("OOM"); + const self = owner.allocator.create(InstallFile) catch @panic("OOM"); self.* = .{ .step = Step.init(.{ .id = base_id, @@ -43,7 +43,7 @@ pub fn create( fn make(step: *Step, prog_node: *std.Progress.Node) !void { _ = prog_node; const src_builder = step.owner; - const self = @fieldParentPtr(InstallFileStep, "step", step); + const self = @fieldParentPtr(InstallFile, "step", step); const dest_builder = self.dest_builder; const full_src_path = self.source.getPath2(src_builder, step); const full_dest_path = dest_builder.getInstallPath(self.dir, self.dest_rel_path); diff --git a/lib/std/Build/Step/ObjCopy.zig b/lib/std/Build/Step/ObjCopy.zig index 608c56591f..a8a0dafaef 100644 --- a/lib/std/Build/Step/ObjCopy.zig +++ b/lib/std/Build/Step/ObjCopy.zig @@ -1,12 +1,11 @@ const std = @import("std"); -const ObjCopyStep = @This(); +const ObjCopy = @This(); const Allocator = std.mem.Allocator; const ArenaAllocator = std.heap.ArenaAllocator; const ArrayListUnmanaged = std.ArrayListUnmanaged; const File = std.fs.File; const InstallDir = std.Build.InstallDir; -const CompileStep = std.Build.CompileStep; const Step = std.Build.Step; const elf = std.elf; const fs = std.fs; @@ -40,9 +39,9 @@ pub fn create( owner: *std.Build, file_source: std.Build.FileSource, options: Options, -) *ObjCopyStep { - const self = owner.allocator.create(ObjCopyStep) catch @panic("OOM"); - self.* = ObjCopyStep{ +) 
*ObjCopy { + const self = owner.allocator.create(ObjCopy) catch @panic("OOM"); + self.* = ObjCopy{ .step = Step.init(.{ .id = base_id, .name = owner.fmt("objcopy {s}", .{file_source.getDisplayName()}), @@ -61,19 +60,19 @@ pub fn create( return self; } -pub fn getOutputSource(self: *const ObjCopyStep) std.Build.FileSource { +pub fn getOutputSource(self: *const ObjCopy) std.Build.FileSource { return .{ .generated = &self.output_file }; } fn make(step: *Step, prog_node: *std.Progress.Node) !void { const b = step.owner; - const self = @fieldParentPtr(ObjCopyStep, "step", step); + const self = @fieldParentPtr(ObjCopy, "step", step); var man = b.cache.obtain(); defer man.deinit(); - // Random bytes to make ObjCopyStep unique. Refresh this with new random - // bytes when ObjCopyStep implementation is modified incompatibly. + // Random bytes to make ObjCopy unique. Refresh this with new random + // bytes when ObjCopy implementation is modified incompatibly. man.hash.add(@as(u32, 0xe18b7baf)); const full_src_path = self.file_source.getPath(b); diff --git a/lib/std/Build/Step/Options.zig b/lib/std/Build/Step/Options.zig index 101c284cf0..cc7152a81e 100644 --- a/lib/std/Build/Step/Options.zig +++ b/lib/std/Build/Step/Options.zig @@ -3,10 +3,9 @@ const builtin = @import("builtin"); const fs = std.fs; const Step = std.Build.Step; const GeneratedFile = std.Build.GeneratedFile; -const CompileStep = std.Build.CompileStep; const FileSource = std.Build.FileSource; -const OptionsStep = @This(); +const Options = @This(); pub const base_id = .options; @@ -17,8 +16,8 @@ contents: std.ArrayList(u8), artifact_args: std.ArrayList(OptionArtifactArg), file_source_args: std.ArrayList(OptionFileSourceArg), -pub fn create(owner: *std.Build) *OptionsStep { - const self = owner.allocator.create(OptionsStep) catch @panic("OOM"); +pub fn create(owner: *std.Build) *Options { + const self = owner.allocator.create(Options) catch @panic("OOM"); self.* = .{ .step = Step.init(.{ .id = base_id, @@ -36,11 +35,11 @@ pub fn create(owner: *std.Build) *OptionsStep { return self; } -pub fn addOption(self: *OptionsStep, comptime T: type, name: []const u8, value: T) void { +pub fn addOption(self: *Options, comptime T: type, name: []const u8, value: T) void { return addOptionFallible(self, T, name, value) catch @panic("unhandled error"); } -fn addOptionFallible(self: *OptionsStep, comptime T: type, name: []const u8, value: T) !void { +fn addOptionFallible(self: *Options, comptime T: type, name: []const u8, value: T) !void { const out = self.contents.writer(); switch (T) { []const []const u8 => { @@ -189,7 +188,7 @@ fn printLiteral(out: anytype, val: anytype, indent: u8) !void { /// The value is the path in the cache dir. /// Adds a dependency automatically. pub fn addOptionFileSource( - self: *OptionsStep, + self: *Options, name: []const u8, source: FileSource, ) void { @@ -202,19 +201,19 @@ pub fn addOptionFileSource( /// The value is the path in the cache dir. /// Adds a dependency automatically. 
-pub fn addOptionArtifact(self: *OptionsStep, name: []const u8, artifact: *CompileStep) void { +pub fn addOptionArtifact(self: *Options, name: []const u8, artifact: *Step.Compile) void { self.artifact_args.append(.{ .name = self.step.owner.dupe(name), .artifact = artifact }) catch @panic("OOM"); self.step.dependOn(&artifact.step); } -pub fn createModule(self: *OptionsStep) *std.Build.Module { +pub fn createModule(self: *Options) *std.Build.Module { return self.step.owner.createModule(.{ .source_file = self.getSource(), .dependencies = &.{}, }); } -pub fn getSource(self: *OptionsStep) FileSource { +pub fn getSource(self: *Options) FileSource { return .{ .generated = &self.generated_file }; } @@ -223,7 +222,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void { _ = prog_node; const b = step.owner; - const self = @fieldParentPtr(OptionsStep, "step", step); + const self = @fieldParentPtr(Options, "step", step); for (self.artifact_args.items) |item| { self.addOption( @@ -314,7 +313,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void { const OptionArtifactArg = struct { name: []const u8, - artifact: *CompileStep, + artifact: *Step.Compile, }; const OptionFileSourceArg = struct { @@ -322,7 +321,7 @@ const OptionFileSourceArg = struct { source: FileSource, }; -test "OptionsStep" { +test Options { if (builtin.os.tag == .wasi) return error.SkipZigTest; var arena = std.heap.ArenaAllocator.init(std.testing.allocator); diff --git a/lib/std/Build/Step/RemoveDir.zig b/lib/std/Build/Step/RemoveDir.zig index 59025a7e91..7666dd2a7d 100644 --- a/lib/std/Build/Step/RemoveDir.zig +++ b/lib/std/Build/Step/RemoveDir.zig @@ -1,15 +1,15 @@ const std = @import("std"); const fs = std.fs; const Step = std.Build.Step; -const RemoveDirStep = @This(); +const RemoveDir = @This(); pub const base_id = .remove_dir; step: Step, dir_path: []const u8, -pub fn init(owner: *std.Build, dir_path: []const u8) RemoveDirStep { - return RemoveDirStep{ +pub fn init(owner: *std.Build, dir_path: []const u8) RemoveDir { + return RemoveDir{ .step = Step.init(.{ .id = .remove_dir, .name = owner.fmt("RemoveDir {s}", .{dir_path}), @@ -26,7 +26,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void { _ = prog_node; const b = step.owner; - const self = @fieldParentPtr(RemoveDirStep, "step", step); + const self = @fieldParentPtr(RemoveDir, "step", step); b.build_root.handle.deleteTree(self.dir_path) catch |err| { if (b.build_root.path) |base| { diff --git a/lib/std/Build/Step/Run.zig b/lib/std/Build/Step/Run.zig index 4e973cfd98..c506e23f90 100644 --- a/lib/std/Build/Step/Run.zig +++ b/lib/std/Build/Step/Run.zig @@ -1,8 +1,6 @@ const std = @import("std"); const builtin = @import("builtin"); const Step = std.Build.Step; -const CompileStep = std.Build.CompileStep; -const WriteFileStep = std.Build.WriteFileStep; const fs = std.fs; const mem = std.mem; const process = std.process; @@ -12,7 +10,7 @@ const Allocator = mem.Allocator; const ExecError = std.Build.ExecError; const assert = std.debug.assert; -const RunStep = @This(); +const Run = @This(); pub const base_id: Step.Id = .run; @@ -29,12 +27,12 @@ cwd: ?[]const u8, /// Override this field to modify the environment, or use setEnvironmentVariable env_map: ?*EnvMap, -/// Configures whether the RunStep is considered to have side-effects, and also -/// whether the RunStep will inherit stdio streams, forwarding them to the +/// Configures whether the Run step is considered to have side-effects, and also +/// whether the Run step will inherit stdio streams, forwarding them 
to the /// parent process, in which case will require a global lock to prevent other /// steps from interfering with stdio while the subprocess associated with this -/// RunStep is running. -/// If the RunStep is determined to not have side-effects, then execution will +/// Run step is running. +/// If the Run step is determined to not have side-effects, then execution will /// be skipped if all output files are up-to-date and input files are /// unchanged. stdio: StdIo = .infer_from_args, @@ -42,9 +40,9 @@ stdio: StdIo = .infer_from_args, stdin: ?[]const u8 = null, /// Additional file paths relative to build.zig that, when modified, indicate -/// that the RunStep should be re-executed. -/// If the RunStep is determined to have side-effects, this field is ignored -/// and the RunStep is always executed when it appears in the build graph. +/// that the Run step should be re-executed. +/// If the Run step is determined to have side-effects, this field is ignored +/// and the Run step is always executed when it appears in the build graph. extra_file_dependencies: []const []const u8 = &.{}, /// After adding an output argument, this step will by default rename itself @@ -52,14 +50,14 @@ extra_file_dependencies: []const []const u8 = &.{}, /// This can be disabled by setting this to false. rename_step_with_output_arg: bool = true, -/// If this is true, a RunStep which is configured to check the output of the +/// If this is true, a Run step which is configured to check the output of the /// executed binary will not fail the build if the binary cannot be executed /// due to being for a foreign binary to the host system which is running the /// build graph. /// Command-line arguments such as -fqemu and -fwasmtime may affect whether a /// binary is detected as foreign, as well as system configuration such as /// Rosetta (macOS) and binfmt_misc (Linux). -/// If this RunStep is considered to have side-effects, then this flag does +/// If this Run step is considered to have side-effects, then this flag does /// nothing. skip_foreign_checks: bool = false, @@ -73,18 +71,18 @@ captured_stderr: ?*Output = null, has_side_effects: bool = false, pub const StdIo = union(enum) { - /// Whether the RunStep has side-effects will be determined by whether or not one + /// Whether the Run step has side-effects will be determined by whether or not one /// of the args is an output file (added with `addOutputFileArg`). - /// If the RunStep is determined to have side-effects, this is the same as `inherit`. + /// If the Run step is determined to have side-effects, this is the same as `inherit`. /// The step will fail if the subprocess crashes or returns a non-zero exit code. infer_from_args, - /// Causes the RunStep to be considered to have side-effects, and therefore + /// Causes the Run step to be considered to have side-effects, and therefore /// always execute when it appears in the build graph. /// It also means that this step will obtain a global lock to prevent other /// steps from running in the meantime. /// The step will fail if the subprocess crashes or returns a non-zero exit code. inherit, - /// Causes the RunStep to be considered to *not* have side-effects. The + /// Causes the Run step to be considered to *not* have side-effects. The /// process will be re-executed if any of the input dependencies are /// modified. 
The exit code and standard I/O streams will be checked for /// certain conditions, and the step will succeed or fail based on these @@ -92,7 +90,7 @@ pub const StdIo = union(enum) { /// Note that an explicit check for exit code 0 needs to be added to this /// list if such a check is desirable. check: std.ArrayList(Check), - /// This RunStep is running a zig unit test binary and will communicate + /// This Run step is running a zig unit test binary and will communicate /// extra metadata over the IPC protocol. zig_test, @@ -106,7 +104,7 @@ pub const StdIo = union(enum) { }; pub const Arg = union(enum) { - artifact: *CompileStep, + artifact: *Step.Compile, file_source: std.Build.FileSource, directory_source: std.Build.FileSource, bytes: []u8, @@ -119,8 +117,8 @@ pub const Output = struct { basename: []const u8, }; -pub fn create(owner: *std.Build, name: []const u8) *RunStep { - const self = owner.allocator.create(RunStep) catch @panic("OOM"); +pub fn create(owner: *std.Build, name: []const u8) *Run { + const self = owner.allocator.create(Run) catch @panic("OOM"); self.* = .{ .step = Step.init(.{ .id = base_id, @@ -135,17 +133,17 @@ pub fn create(owner: *std.Build, name: []const u8) *RunStep { return self; } -pub fn setName(self: *RunStep, name: []const u8) void { +pub fn setName(self: *Run, name: []const u8) void { self.step.name = name; self.rename_step_with_output_arg = false; } -pub fn enableTestRunnerMode(rs: *RunStep) void { +pub fn enableTestRunnerMode(rs: *Run) void { rs.stdio = .zig_test; rs.addArgs(&.{"--listen=-"}); } -pub fn addArtifactArg(self: *RunStep, artifact: *CompileStep) void { +pub fn addArtifactArg(self: *Run, artifact: *Step.Compile) void { self.argv.append(Arg{ .artifact = artifact }) catch @panic("OOM"); self.step.dependOn(&artifact.step); } @@ -153,12 +151,12 @@ pub fn addArtifactArg(self: *RunStep, artifact: *CompileStep) void { /// This provides file path as a command line argument to the command being /// run, and returns a FileSource which can be used as inputs to other APIs /// throughout the build system. 
-pub fn addOutputFileArg(rs: *RunStep, basename: []const u8) std.Build.FileSource { +pub fn addOutputFileArg(rs: *Run, basename: []const u8) std.Build.FileSource { return addPrefixedOutputFileArg(rs, "", basename); } pub fn addPrefixedOutputFileArg( - rs: *RunStep, + rs: *Run, prefix: []const u8, basename: []const u8, ) std.Build.FileSource { @@ -179,38 +177,38 @@ pub fn addPrefixedOutputFileArg( return .{ .generated = &output.generated_file }; } -pub fn addFileSourceArg(self: *RunStep, file_source: std.Build.FileSource) void { +pub fn addFileSourceArg(self: *Run, file_source: std.Build.FileSource) void { self.argv.append(.{ .file_source = file_source.dupe(self.step.owner), }) catch @panic("OOM"); file_source.addStepDependencies(&self.step); } -pub fn addDirectorySourceArg(self: *RunStep, directory_source: std.Build.FileSource) void { +pub fn addDirectorySourceArg(self: *Run, directory_source: std.Build.FileSource) void { self.argv.append(.{ .directory_source = directory_source.dupe(self.step.owner), }) catch @panic("OOM"); directory_source.addStepDependencies(&self.step); } -pub fn addArg(self: *RunStep, arg: []const u8) void { +pub fn addArg(self: *Run, arg: []const u8) void { self.argv.append(.{ .bytes = self.step.owner.dupe(arg) }) catch @panic("OOM"); } -pub fn addArgs(self: *RunStep, args: []const []const u8) void { +pub fn addArgs(self: *Run, args: []const []const u8) void { for (args) |arg| { self.addArg(arg); } } -pub fn clearEnvironment(self: *RunStep) void { +pub fn clearEnvironment(self: *Run) void { const b = self.step.owner; const new_env_map = b.allocator.create(EnvMap) catch @panic("OOM"); new_env_map.* = EnvMap.init(b.allocator); self.env_map = new_env_map; } -pub fn addPathDir(self: *RunStep, search_path: []const u8) void { +pub fn addPathDir(self: *Run, search_path: []const u8) void { const b = self.step.owner; const env_map = getEnvMapInternal(self); @@ -225,11 +223,11 @@ pub fn addPathDir(self: *RunStep, search_path: []const u8) void { } } -pub fn getEnvMap(self: *RunStep) *EnvMap { +pub fn getEnvMap(self: *Run) *EnvMap { return getEnvMapInternal(self); } -fn getEnvMapInternal(self: *RunStep) *EnvMap { +fn getEnvMapInternal(self: *Run) *EnvMap { const arena = self.step.owner.allocator; return self.env_map orelse { const env_map = arena.create(EnvMap) catch @panic("OOM"); @@ -239,25 +237,25 @@ fn getEnvMapInternal(self: *RunStep) *EnvMap { }; } -pub fn setEnvironmentVariable(self: *RunStep, key: []const u8, value: []const u8) void { +pub fn setEnvironmentVariable(self: *Run, key: []const u8, value: []const u8) void { const b = self.step.owner; const env_map = self.getEnvMap(); env_map.put(b.dupe(key), b.dupe(value)) catch @panic("unhandled error"); } -pub fn removeEnvironmentVariable(self: *RunStep, key: []const u8) void { +pub fn removeEnvironmentVariable(self: *Run, key: []const u8) void { self.getEnvMap().remove(key); } /// Adds a check for exact stderr match. Does not add any other checks. -pub fn expectStdErrEqual(self: *RunStep, bytes: []const u8) void { +pub fn expectStdErrEqual(self: *Run, bytes: []const u8) void { const new_check: StdIo.Check = .{ .expect_stderr_exact = self.step.owner.dupe(bytes) }; self.addCheck(new_check); } /// Adds a check for exact stdout match as well as a check for exit code 0, if /// there is not already an expected termination check. 
-pub fn expectStdOutEqual(self: *RunStep, bytes: []const u8) void { +pub fn expectStdOutEqual(self: *Run, bytes: []const u8) void { const new_check: StdIo.Check = .{ .expect_stdout_exact = self.step.owner.dupe(bytes) }; self.addCheck(new_check); if (!self.hasTermCheck()) { @@ -265,12 +263,12 @@ pub fn expectStdOutEqual(self: *RunStep, bytes: []const u8) void { } } -pub fn expectExitCode(self: *RunStep, code: u8) void { +pub fn expectExitCode(self: *Run, code: u8) void { const new_check: StdIo.Check = .{ .expect_term = .{ .Exited = code } }; self.addCheck(new_check); } -pub fn hasTermCheck(self: RunStep) bool { +pub fn hasTermCheck(self: Run) bool { for (self.stdio.check.items) |check| switch (check) { .expect_term => return true, else => continue, @@ -278,18 +276,18 @@ pub fn hasTermCheck(self: RunStep) bool { return false; } -pub fn addCheck(self: *RunStep, new_check: StdIo.Check) void { +pub fn addCheck(self: *Run, new_check: StdIo.Check) void { switch (self.stdio) { .infer_from_args => { self.stdio = .{ .check = std.ArrayList(StdIo.Check).init(self.step.owner.allocator) }; self.stdio.check.append(new_check) catch @panic("OOM"); }, .check => |*checks| checks.append(new_check) catch @panic("OOM"), - else => @panic("illegal call to addCheck: conflicting helper method calls. Suggest to directly set stdio field of RunStep instead"), + else => @panic("illegal call to addCheck: conflicting helper method calls. Suggest to directly set stdio field of Run instead"), } } -pub fn captureStdErr(self: *RunStep) std.Build.FileSource { +pub fn captureStdErr(self: *Run) std.Build.FileSource { assert(self.stdio != .inherit); if (self.captured_stderr) |output| return .{ .generated = &output.generated_file }; @@ -304,7 +302,7 @@ pub fn captureStdErr(self: *RunStep) std.Build.FileSource { return .{ .generated = &output.generated_file }; } -pub fn captureStdOut(self: *RunStep) std.Build.FileSource { +pub fn captureStdOut(self: *Run) std.Build.FileSource { assert(self.stdio != .inherit); if (self.captured_stdout) |output| return .{ .generated = &output.generated_file }; @@ -319,8 +317,8 @@ pub fn captureStdOut(self: *RunStep) std.Build.FileSource { return .{ .generated = &output.generated_file }; } -/// Returns whether the RunStep has side effects *other than* updating the output arguments. -fn hasSideEffects(self: RunStep) bool { +/// Returns whether the Run step has side effects *other than* updating the output arguments. 
+fn hasSideEffects(self: Run) bool { if (self.has_side_effects) return true; return switch (self.stdio) { .infer_from_args => !self.hasAnyOutputArgs(), @@ -330,7 +328,7 @@ fn hasSideEffects(self: RunStep) bool { }; } -fn hasAnyOutputArgs(self: RunStep) bool { +fn hasAnyOutputArgs(self: Run) bool { if (self.captured_stdout != null) return true; if (self.captured_stderr != null) return true; for (self.argv.items) |arg| switch (arg) { @@ -371,7 +369,7 @@ fn checksContainStderr(checks: []const StdIo.Check) bool { fn make(step: *Step, prog_node: *std.Progress.Node) !void { const b = step.owner; const arena = b.allocator; - const self = @fieldParentPtr(RunStep, "step", step); + const self = @fieldParentPtr(Run, "step", step); const has_side_effects = self.hasSideEffects(); var argv_list = ArrayList([]const u8).init(arena); @@ -541,7 +539,7 @@ fn termMatches(expected: ?std.process.Child.Term, actual: std.process.Child.Term } fn runCommand( - self: *RunStep, + self: *Run, argv: []const []const u8, has_side_effects: bool, digest: ?*const [std.Build.Cache.hex_digest_len]u8, @@ -567,7 +565,7 @@ fn runCommand( // FileNotFound: can happen with a wrong dynamic linker path if (err == error.InvalidExe or err == error.FileNotFound) interpret: { // TODO: learn the target from the binary directly rather than from - // relying on it being a CompileStep. This will make this logic + // relying on it being a Compile step. This will make this logic // work even for the edge case that the binary was produced by a // third party. const exe = switch (self.argv.items[0]) { @@ -862,7 +860,7 @@ const ChildProcResult = struct { }; fn spawnChildAndCollect( - self: *RunStep, + self: *Run, argv: []const []const u8, has_side_effects: bool, prog_node: *std.Progress.Node, @@ -936,7 +934,7 @@ const StdIoResult = struct { }; fn evalZigTest( - self: *RunStep, + self: *Run, child: *std.process.Child, prog_node: *std.Progress.Node, ) !StdIoResult { @@ -1121,7 +1119,7 @@ fn sendRunTestMessage(file: std.fs.File, index: u32) !void { try file.writeAll(full_msg); } -fn evalGeneric(self: *RunStep, child: *std.process.Child) !StdIoResult { +fn evalGeneric(self: *Run, child: *std.process.Child) !StdIoResult { const arena = self.step.owner.allocator; if (self.stdin) |stdin| { @@ -1188,7 +1186,7 @@ fn evalGeneric(self: *RunStep, child: *std.process.Child) !StdIoResult { }; } -fn addPathForDynLibs(self: *RunStep, artifact: *CompileStep) void { +fn addPathForDynLibs(self: *Run, artifact: *Step.Compile) void { const b = self.step.owner; for (artifact.link_objects.items) |link_object| { switch (link_object) { @@ -1204,10 +1202,10 @@ fn addPathForDynLibs(self: *RunStep, artifact: *CompileStep) void { } fn failForeign( - self: *RunStep, + self: *Run, suggested_flag: []const u8, argv0: []const u8, - exe: *CompileStep, + exe: *Step.Compile, ) error{ MakeFailed, MakeSkipped, OutOfMemory } { switch (self.stdio) { .check, .zig_test => { diff --git a/lib/std/Build/Step/TranslateC.zig b/lib/std/Build/Step/TranslateC.zig index 86727ea2f0..0c7ddc4720 100644 --- a/lib/std/Build/Step/TranslateC.zig +++ b/lib/std/Build/Step/TranslateC.zig @@ -1,12 +1,10 @@ const std = @import("std"); const Step = std.Build.Step; -const CompileStep = std.Build.CompileStep; -const CheckFileStep = std.Build.CheckFileStep; const fs = std.fs; const mem = std.mem; const CrossTarget = std.zig.CrossTarget; -const TranslateCStep = @This(); +const TranslateC = @This(); pub const base_id = .translate_c; @@ -25,10 +23,10 @@ pub const Options = struct { optimize: 
std.builtin.OptimizeMode, }; -pub fn create(owner: *std.Build, options: Options) *TranslateCStep { - const self = owner.allocator.create(TranslateCStep) catch @panic("OOM"); +pub fn create(owner: *std.Build, options: Options) *TranslateC { + const self = owner.allocator.create(TranslateC) catch @panic("OOM"); const source = options.source_file.dupe(owner); - self.* = TranslateCStep{ + self.* = TranslateC{ .step = Step.init(.{ .id = .translate_c, .name = "translate-c", @@ -52,11 +50,11 @@ pub const AddExecutableOptions = struct { version: ?std.builtin.Version = null, target: ?CrossTarget = null, optimize: ?std.builtin.Mode = null, - linkage: ?CompileStep.Linkage = null, + linkage: ?Step.Compile.Linkage = null, }; /// Creates a step to build an executable from the translated source. -pub fn addExecutable(self: *TranslateCStep, options: AddExecutableOptions) *CompileStep { +pub fn addExecutable(self: *TranslateC, options: AddExecutableOptions) *Step.Compile { return self.step.owner.addExecutable(.{ .root_source_file = .{ .generated = &self.output_file }, .name = options.name orelse "translated_c", @@ -67,12 +65,12 @@ pub fn addExecutable(self: *TranslateCStep, options: AddExecutableOptions) *Comp }); } -pub fn addIncludeDir(self: *TranslateCStep, include_dir: []const u8) void { +pub fn addIncludeDir(self: *TranslateC, include_dir: []const u8) void { self.include_dirs.append(self.step.owner.dupePath(include_dir)) catch @panic("OOM"); } -pub fn addCheckFile(self: *TranslateCStep, expected_matches: []const []const u8) *CheckFileStep { - return CheckFileStep.create( +pub fn addCheckFile(self: *TranslateC, expected_matches: []const []const u8) *Step.CheckFile { + return Step.CheckFile.create( self.step.owner, .{ .generated = &self.output_file }, .{ .expected_matches = expected_matches }, @@ -81,19 +79,19 @@ pub fn addCheckFile(self: *TranslateCStep, expected_matches: []const []const u8) /// If the value is omitted, it is set to 1. /// `name` and `value` need not live longer than the function call. -pub fn defineCMacro(self: *TranslateCStep, name: []const u8, value: ?[]const u8) void { +pub fn defineCMacro(self: *TranslateC, name: []const u8, value: ?[]const u8) void { const macro = std.Build.constructCMacro(self.step.owner.allocator, name, value); self.c_macros.append(macro) catch @panic("OOM"); } /// name_and_value looks like [name]=[value]. If the value is omitted, it is set to 1. -pub fn defineCMacroRaw(self: *TranslateCStep, name_and_value: []const u8) void { +pub fn defineCMacroRaw(self: *TranslateC, name_and_value: []const u8) void { self.c_macros.append(self.step.owner.dupe(name_and_value)) catch @panic("OOM"); } fn make(step: *Step, prog_node: *std.Progress.Node) !void { const b = step.owner; - const self = @fieldParentPtr(TranslateCStep, "step", step); + const self = @fieldParentPtr(TranslateC, "step", step); var argv_list = std.ArrayList([]const u8).init(b.allocator); try argv_list.append(b.zig_exe); diff --git a/lib/std/Build/Step/WriteFile.zig b/lib/std/Build/Step/WriteFile.zig index 68f7c37c6c..0d817e7430 100644 --- a/lib/std/Build/Step/WriteFile.zig +++ b/lib/std/Build/Step/WriteFile.zig @@ -1,4 +1,4 @@ -//! WriteFileStep is primarily used to create a directory in an appropriate +//! WriteFile is primarily used to create a directory in an appropriate //! location inside the local cache which has a set of files that have either //! been generated during the build, or are copied from the source package. //! 
@@ -12,7 +12,7 @@ const std = @import("std"); const Step = std.Build.Step; const fs = std.fs; const ArrayList = std.ArrayList; -const WriteFileStep = @This(); +const WriteFile = @This(); step: Step, /// The elements here are pointers because we need stable pointers for the @@ -39,8 +39,8 @@ pub const Contents = union(enum) { copy: std.Build.FileSource, }; -pub fn create(owner: *std.Build) *WriteFileStep { - const wf = owner.allocator.create(WriteFileStep) catch @panic("OOM"); +pub fn create(owner: *std.Build) *WriteFile { + const wf = owner.allocator.create(WriteFile) catch @panic("OOM"); wf.* = .{ .step = Step.init(.{ .id = .write_file, @@ -55,7 +55,7 @@ pub fn create(owner: *std.Build) *WriteFileStep { return wf; } -pub fn add(wf: *WriteFileStep, sub_path: []const u8, bytes: []const u8) void { +pub fn add(wf: *WriteFile, sub_path: []const u8, bytes: []const u8) void { const b = wf.step.owner; const gpa = b.allocator; const file = gpa.create(File) catch @panic("OOM"); @@ -72,11 +72,11 @@ pub fn add(wf: *WriteFileStep, sub_path: []const u8, bytes: []const u8) void { /// Place the file into the generated directory within the local cache, /// along with all the rest of the files added to this step. The parameter /// here is the destination path relative to the local cache directory -/// associated with this WriteFileStep. It may be a basename, or it may +/// associated with this WriteFile. It may be a basename, or it may /// include sub-directories, in which case this step will ensure the /// required sub-path exists. /// This is the option expected to be used most commonly with `addCopyFile`. -pub fn addCopyFile(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: []const u8) void { +pub fn addCopyFile(wf: *WriteFile, source: std.Build.FileSource, sub_path: []const u8) void { const b = wf.step.owner; const gpa = b.allocator; const file = gpa.create(File) catch @panic("OOM"); @@ -97,7 +97,7 @@ pub fn addCopyFile(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: [ /// run by a developer with intent to modify source files and then commit /// those changes to version control. /// A file added this way is not available with `getFileSource`. -pub fn addCopyFileToSource(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: []const u8) void { +pub fn addCopyFileToSource(wf: *WriteFile, source: std.Build.FileSource, sub_path: []const u8) void { const b = wf.step.owner; wf.output_source_files.append(b.allocator, .{ .contents = .{ .copy = source }, @@ -112,7 +112,7 @@ pub fn addCopyFileToSource(wf: *WriteFileStep, source: std.Build.FileSource, sub /// run by a developer with intent to modify source files and then commit /// those changes to version control. /// A file added this way is not available with `getFileSource`. -pub fn addBytesToSource(wf: *WriteFileStep, bytes: []const u8, sub_path: []const u8) void { +pub fn addBytesToSource(wf: *WriteFile, bytes: []const u8, sub_path: []const u8) void { const b = wf.step.owner; wf.output_source_files.append(b.allocator, .{ .contents = .{ .bytes = bytes }, @@ -121,7 +121,7 @@ pub fn addBytesToSource(wf: *WriteFileStep, bytes: []const u8, sub_path: []const } /// Gets a file source for the given sub_path. If the file does not exist, returns `null`. 
-pub fn getFileSource(wf: *WriteFileStep, sub_path: []const u8) ?std.Build.FileSource { +pub fn getFileSource(wf: *WriteFile, sub_path: []const u8) ?std.Build.FileSource { for (wf.files.items) |file| { if (std.mem.eql(u8, file.sub_path, sub_path)) { return .{ .generated = &file.generated_file }; @@ -131,12 +131,12 @@ pub fn getFileSource(wf: *WriteFileStep, sub_path: []const u8) ?std.Build.FileSo } /// Returns a `FileSource` representing the base directory that contains all the -/// files from this `WriteFileStep`. -pub fn getDirectorySource(wf: *WriteFileStep) std.Build.FileSource { +/// files from this `WriteFile`. +pub fn getDirectorySource(wf: *WriteFile) std.Build.FileSource { return .{ .generated = &wf.generated_directory }; } -fn maybeUpdateName(wf: *WriteFileStep) void { +fn maybeUpdateName(wf: *WriteFile) void { if (wf.files.items.len == 1) { // First time adding a file; update name. if (std.mem.eql(u8, wf.step.name, "WriteFile")) { @@ -148,10 +148,10 @@ fn maybeUpdateName(wf: *WriteFileStep) void { fn make(step: *Step, prog_node: *std.Progress.Node) !void { _ = prog_node; const b = step.owner; - const wf = @fieldParentPtr(WriteFileStep, "step", step); + const wf = @fieldParentPtr(WriteFile, "step", step); // Writing to source files is kind of an extra capability of this - // WriteFileStep - arguably it should be a different step. But anyway here + // WriteFile - arguably it should be a different step. But anyway here // it is, it happens unconditionally and does not interact with the other // files here. var any_miss = false; @@ -194,14 +194,14 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void { // the data to a file would probably be very fast - but as a way to find a canonical // location to put build artifacts. - // If, for example, a hard-coded path was used as the location to put WriteFileStep - // files, then two WriteFileSteps executing in parallel might clobber each other. + // If, for example, a hard-coded path was used as the location to put WriteFile + // files, then two WriteFiles executing in parallel might clobber each other. var man = b.cache.obtain(); defer man.deinit(); - // Random bytes to make WriteFileStep unique. Refresh this with - // new random bytes when WriteFileStep implementation is modified + // Random bytes to make WriteFile unique. Refresh this with + // new random bytes when WriteFile implementation is modified // in a non-backwards-compatible way. 
man.hash.add(@as(u32, 0xd767ee59)); diff --git a/test/link/macho/dead_strip/build.zig b/test/link/macho/dead_strip/build.zig index 4c739b3d8c..9d00bad9e0 100644 --- a/test/link/macho/dead_strip/build.zig +++ b/test/link/macho/dead_strip/build.zig @@ -42,7 +42,7 @@ fn createScenario( optimize: std.builtin.OptimizeMode, target: std.zig.CrossTarget, name: []const u8, -) *std.Build.CompileStep { +) *std.Build.Step.Compile { const exe = b.addExecutable(.{ .name = name, .optimize = optimize, diff --git a/test/link/macho/dead_strip_dylibs/build.zig b/test/link/macho/dead_strip_dylibs/build.zig index 47e53f853e..ec073e183a 100644 --- a/test/link/macho/dead_strip_dylibs/build.zig +++ b/test/link/macho/dead_strip_dylibs/build.zig @@ -46,7 +46,7 @@ fn createScenario( b: *std.Build, optimize: std.builtin.OptimizeMode, name: []const u8, -) *std.Build.CompileStep { +) *std.Build.Step.Compile { const exe = b.addExecutable(.{ .name = name, .optimize = optimize, diff --git a/test/link/macho/headerpad/build.zig b/test/link/macho/headerpad/build.zig index 22cfcc90ec..99edfe72fa 100644 --- a/test/link/macho/headerpad/build.zig +++ b/test/link/macho/headerpad/build.zig @@ -104,7 +104,7 @@ fn simpleExe( b: *std.Build, optimize: std.builtin.OptimizeMode, name: []const u8, -) *std.Build.CompileStep { +) *std.Build.Step.Compile { const exe = b.addExecutable(.{ .name = name, .optimize = optimize, diff --git a/test/link/macho/search_strategy/build.zig b/test/link/macho/search_strategy/build.zig index 4777629c8b..853c471969 100644 --- a/test/link/macho/search_strategy/build.zig +++ b/test/link/macho/search_strategy/build.zig @@ -46,7 +46,7 @@ fn createScenario( optimize: std.builtin.OptimizeMode, target: std.zig.CrossTarget, name: []const u8, -) *std.Build.CompileStep { +) *std.Build.Step.Compile { const static = b.addStaticLibrary(.{ .name = name, .optimize = optimize, diff --git a/test/link/macho/unwind_info/build.zig b/test/link/macho/unwind_info/build.zig index 4ace2a4e96..96b5f6cacc 100644 --- a/test/link/macho/unwind_info/build.zig +++ b/test/link/macho/unwind_info/build.zig @@ -65,7 +65,7 @@ fn createScenario( optimize: std.builtin.OptimizeMode, target: std.zig.CrossTarget, name: []const u8, -) *std.Build.CompileStep { +) *std.Build.Step.Compile { const exe = b.addExecutable(.{ .name = name, .optimize = optimize, diff --git a/test/link/macho/uuid/build.zig b/test/link/macho/uuid/build.zig index df58aeacb7..0072825f46 100644 --- a/test/link/macho/uuid/build.zig +++ b/test/link/macho/uuid/build.zig @@ -1,5 +1,4 @@ const std = @import("std"); -const CompileStep = std.Build.CompileStep; const FileSource = std.Build.FileSource; const Step = std.Build.Step; @@ -60,7 +59,7 @@ fn simpleDylib( b: *std.Build, optimize: std.builtin.OptimizeMode, target: std.zig.CrossTarget, -) *std.Build.CompileStep { +) *std.Build.Step.Compile { const dylib = b.addSharedLibrary(.{ .name = "test", .version = .{ .major = 1, .minor = 0 }, diff --git a/test/src/Cases.zig b/test/src/Cases.zig index 4b023f45b0..0451079a0e 100644 --- a/test/src/Cases.zig +++ b/test/src/Cases.zig @@ -465,7 +465,7 @@ pub fn lowerToBuildSteps( parent_step: *std.Build.Step, opt_test_filter: ?[]const u8, cases_dir_path: []const u8, - incremental_exe: *std.Build.CompileStep, + incremental_exe: *std.Build.Step.Compile, ) void { for (self.incremental_cases.items) |incr_case| { if (opt_test_filter) |test_filter| { diff --git a/test/src/StackTrace.zig b/test/src/StackTrace.zig index c32720a210..0d0b7155e6 100644 --- a/test/src/StackTrace.zig +++ 
b/test/src/StackTrace.zig @@ -3,7 +3,7 @@ step: *Step, test_index: usize, test_filter: ?[]const u8, optimize_modes: []const OptimizeMode, -check_exe: *std.Build.CompileStep, +check_exe: *std.Build.Step.Compile, const Expect = [@typeInfo(OptimizeMode).Enum.fields.len][]const u8; diff --git a/test/standalone/install_raw_hex/build.zig b/test/standalone/install_raw_hex/build.zig index b34bb01378..c05490a3e5 100644 --- a/test/standalone/install_raw_hex/build.zig +++ b/test/standalone/install_raw_hex/build.zig @@ -1,6 +1,5 @@ const builtin = @import("builtin"); const std = @import("std"); -const CheckFileStep = std.Build.CheckFileStep; pub fn build(b: *std.Build) void { const test_step = b.step("test", "Test it"); diff --git a/test/tests.zig b/test/tests.zig index 7ec1aaaa65..641914aabe 100644 --- a/test/tests.zig +++ b/test/tests.zig @@ -1132,7 +1132,7 @@ pub fn addCases( b: *std.Build, parent_step: *Step, opt_test_filter: ?[]const u8, - check_case_exe: *std.Build.CompileStep, + check_case_exe: *std.Build.Step.Compile, ) !void { const arena = b.allocator; const gpa = b.allocator; -- cgit v1.2.3 From 815e53b147a321d0bdb47dc008aa8181f57175ac Mon Sep 17 00:00:00 2001 From: Ryan Liptak Date: Thu, 4 May 2023 18:05:40 -0700 Subject: Update all std.mem.tokenize calls to their appropriate function Everywhere that can now use `tokenizeScalar` should get a nice little performance boost. --- build.zig | 8 ++++---- lib/std/Build.zig | 2 +- lib/std/Build/Cache.zig | 4 ++-- lib/std/Build/Step/CheckObject.zig | 8 ++++---- lib/std/Build/Step/Compile.zig | 6 +++--- lib/std/Build/Step/ConfigHeader.zig | 4 ++-- lib/std/child_process.zig | 4 ++-- lib/std/fs.zig | 2 +- lib/std/fs/path.zig | 26 ++++++++++++------------ lib/std/http/Client.zig | 4 ++-- lib/std/http/Server.zig | 4 ++-- lib/std/net.zig | 6 +++--- lib/std/os.zig | 2 +- lib/std/process.zig | 2 +- lib/std/zig/system/NativePaths.zig | 10 ++++----- lib/std/zig/system/NativeTargetInfo.zig | 4 ++-- src/arch/x86_64/CodeGen.zig | 6 +++--- src/glibc.zig | 2 +- src/libc_installation.zig | 8 ++++---- src/link/Plan9.zig | 2 +- src/print_zir.zig | 2 +- test/behavior/bugs/6456.zig | 2 +- test/src/Cases.zig | 4 ++-- tools/generate_linux_syscalls.zig | 36 ++++++++++++++++----------------- 24 files changed, 79 insertions(+), 79 deletions(-) (limited to 'lib/std/Build.zig') diff --git a/build.zig b/build.zig index 208d06fe1d..21b323df56 100644 --- a/build.zig +++ b/build.zig @@ -284,7 +284,7 @@ pub fn build(b: *std.Build) !void { // That means we also have to rely on stage1 compiled c++ files. We parse config.h to find // the information passed on to us from cmake. 
if (cfg.cmake_prefix_path.len > 0) { - var it = mem.tokenize(u8, cfg.cmake_prefix_path, ";"); + var it = mem.tokenizeScalar(u8, cfg.cmake_prefix_path, ';'); while (it.next()) |path| { b.addSearchPrefix(path); } @@ -687,7 +687,7 @@ fn addCxxKnownPath( if (!std.process.can_spawn) return error.RequiredLibraryNotFound; const path_padded = b.exec(&.{ ctx.cxx_compiler, b.fmt("-print-file-name={s}", .{objname}) }); - var tokenizer = mem.tokenize(u8, path_padded, "\r\n"); + var tokenizer = mem.tokenizeAny(u8, path_padded, "\r\n"); const path_unpadded = tokenizer.next().?; if (mem.eql(u8, path_unpadded, objname)) { if (errtxt) |msg| { @@ -710,7 +710,7 @@ fn addCxxKnownPath( } fn addCMakeLibraryList(exe: *std.Build.Step.Compile, list: []const u8) void { - var it = mem.tokenize(u8, list, ";"); + var it = mem.tokenizeScalar(u8, list, ';'); while (it.next()) |lib| { if (mem.startsWith(u8, lib, "-l")) { exe.linkSystemLibrary(lib["-l".len..]); @@ -855,7 +855,7 @@ fn parseConfigH(b: *std.Build, config_h_text: []const u8) ?CMakeConfig { // .prefix = ZIG_LLVM_LINK_MODE parsed manually below }; - var lines_it = mem.tokenize(u8, config_h_text, "\r\n"); + var lines_it = mem.tokenizeAny(u8, config_h_text, "\r\n"); while (lines_it.next()) |line| { inline for (mappings) |mapping| { if (mem.startsWith(u8, line, mapping.prefix)) { diff --git a/lib/std/Build.zig b/lib/std/Build.zig index ca55d23937..4ab5db5c70 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -1358,7 +1358,7 @@ pub fn findProgram(self: *Build, names: []const []const u8, paths: []const []con if (fs.path.isAbsolute(name)) { return name; } - var it = mem.tokenize(u8, PATH, &[_]u8{fs.path.delimiter}); + var it = mem.tokenizeScalar(u8, PATH, fs.path.delimiter); while (it.next()) |path| { const full_path = self.pathJoin(&.{ path, diff --git a/lib/std/Build/Cache.zig b/lib/std/Build/Cache.zig index 17429c0370..7709e5e26c 100644 --- a/lib/std/Build/Cache.zig +++ b/lib/std/Build/Cache.zig @@ -434,7 +434,7 @@ pub const Manifest = struct { const input_file_count = self.files.items.len; var any_file_changed = false; - var line_iter = mem.tokenize(u8, file_contents, "\n"); + var line_iter = mem.tokenizeScalar(u8, file_contents, '\n'); var idx: usize = 0; if (if (line_iter.next()) |line| !std.mem.eql(u8, line, manifest_header) else true) { if (try self.upgradeToExclusiveLock()) continue; @@ -463,7 +463,7 @@ pub const Manifest = struct { break :blk new; }; - var iter = mem.tokenize(u8, line, " "); + var iter = mem.tokenizeScalar(u8, line, ' '); const size = iter.next() orelse return error.InvalidFormat; const inode = iter.next() orelse return error.InvalidFormat; const mtime_nsec_str = iter.next() orelse return error.InvalidFormat; diff --git a/lib/std/Build/Step/CheckObject.zig b/lib/std/Build/Step/CheckObject.zig index c77dc3de36..24ebfef388 100644 --- a/lib/std/Build/Step/CheckObject.zig +++ b/lib/std/Build/Step/CheckObject.zig @@ -103,8 +103,8 @@ const Action = struct { assert(act.tag == .match or act.tag == .not_present); const phrase = act.phrase.resolve(b, step); var candidate_var: ?struct { name: []const u8, value: u64 } = null; - var hay_it = mem.tokenize(u8, mem.trim(u8, haystack, " "), " "); - var needle_it = mem.tokenize(u8, mem.trim(u8, phrase, " "), " "); + var hay_it = mem.tokenizeScalar(u8, mem.trim(u8, haystack, " "), ' '); + var needle_it = mem.tokenizeScalar(u8, mem.trim(u8, phrase, " "), ' '); while (needle_it.next()) |needle_tok| { const hay_tok = hay_it.next() orelse return false; @@ -155,7 +155,7 @@ const Action = struct { var 
op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa); var values = std.ArrayList(u64).init(gpa); - var it = mem.tokenize(u8, phrase, " "); + var it = mem.tokenizeScalar(u8, phrase, ' '); while (it.next()) |next| { if (mem.eql(u8, next, "+")) { try op_stack.append(.add); @@ -365,7 +365,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void { var vars = std.StringHashMap(u64).init(gpa); for (self.checks.items) |chk| { - var it = mem.tokenize(u8, output, "\r\n"); + var it = mem.tokenizeAny(u8, output, "\r\n"); for (chk.actions.items) |act| { switch (act.tag) { .match => { diff --git a/lib/std/Build/Step/Compile.zig b/lib/std/Build/Step/Compile.zig index 2371f49daf..6a05adc1a6 100644 --- a/lib/std/Build/Step/Compile.zig +++ b/lib/std/Build/Step/Compile.zig @@ -777,7 +777,7 @@ fn runPkgConfig(self: *Compile, lib_name: []const u8) ![]const []const u8 { var zig_args = ArrayList([]const u8).init(b.allocator); defer zig_args.deinit(); - var it = mem.tokenize(u8, stdout, " \r\n\t"); + var it = mem.tokenizeAny(u8, stdout, " \r\n\t"); while (it.next()) |tok| { if (mem.eql(u8, tok, "-I")) { const dir = it.next() orelse return error.PkgConfigInvalidOutput; @@ -2017,10 +2017,10 @@ fn execPkgConfigList(self: *std.Build, out_code: *u8) (PkgConfigError || ExecErr const stdout = try self.execAllowFail(&[_][]const u8{ "pkg-config", "--list-all" }, out_code, .Ignore); var list = ArrayList(PkgConfigPkg).init(self.allocator); errdefer list.deinit(); - var line_it = mem.tokenize(u8, stdout, "\r\n"); + var line_it = mem.tokenizeAny(u8, stdout, "\r\n"); while (line_it.next()) |line| { if (mem.trim(u8, line, " \t").len == 0) continue; - var tok_it = mem.tokenize(u8, line, " \t"); + var tok_it = mem.tokenizeAny(u8, line, " \t"); try list.append(PkgConfigPkg{ .name = tok_it.next() orelse return error.PkgConfigInvalidOutput, .desc = tok_it.rest(), diff --git a/lib/std/Build/Step/ConfigHeader.zig b/lib/std/Build/Step/ConfigHeader.zig index f6939e0e38..cd97367218 100644 --- a/lib/std/Build/Step/ConfigHeader.zig +++ b/lib/std/Build/Step/ConfigHeader.zig @@ -257,7 +257,7 @@ fn render_autoconf( try output.appendSlice("\n"); continue; } - var it = std.mem.tokenize(u8, line[1..], " \t\r"); + var it = std.mem.tokenizeAny(u8, line[1..], " \t\r"); const undef = it.next().?; if (!std.mem.eql(u8, undef, "undef")) { try output.appendSlice(line); @@ -304,7 +304,7 @@ fn render_cmake( try output.appendSlice("\n"); continue; } - var it = std.mem.tokenize(u8, line[1..], " \t\r"); + var it = std.mem.tokenizeAny(u8, line[1..], " \t\r"); const cmakedefine = it.next().?; if (!std.mem.eql(u8, cmakedefine, "cmakedefine") and !std.mem.eql(u8, cmakedefine, "cmakedefine01")) diff --git a/lib/std/child_process.zig b/lib/std/child_process.zig index daaa1689bc..d94f5ea000 100644 --- a/lib/std/child_process.zig +++ b/lib/std/child_process.zig @@ -850,7 +850,7 @@ pub const ChildProcess = struct { return original_err; } - var it = mem.tokenize(u16, PATH, &[_]u16{';'}); + var it = mem.tokenizeScalar(u16, PATH, ';'); while (it.next()) |search_path| { dir_buf.clearRetainingCapacity(); try dir_buf.appendSlice(self.allocator, search_path); @@ -1067,7 +1067,7 @@ fn windowsCreateProcessPathExt( // Now we know that at least *a* file matching the wildcard exists, we can loop // through PATHEXT in order and exec any that exist - var ext_it = mem.tokenize(u16, pathext, &[_]u16{';'}); + var ext_it = mem.tokenizeScalar(u16, pathext, ';'); while (ext_it.next()) |ext| { if (!windowsCreateProcessSupportsExtension(ext)) continue; diff --git 
a/lib/std/fs.zig b/lib/std/fs.zig index 7327a3a913..5aeea8a4aa 100644 --- a/lib/std/fs.zig +++ b/lib/std/fs.zig @@ -3021,7 +3021,7 @@ pub fn selfExePath(out_buffer: []u8) SelfExePathError![]u8 { } else if (argv0.len != 0) { // argv[0] is not empty (and not a path): search it inside PATH const PATH = std.os.getenvZ("PATH") orelse return error.FileNotFound; - var path_it = mem.tokenize(u8, PATH, &[_]u8{path.delimiter}); + var path_it = mem.tokenizeScalar(u8, PATH, path.delimiter); while (path_it.next()) |a_path| { var resolved_path_buf: [MAX_PATH_BYTES - 1:0]u8 = undefined; const resolved_path = std.fmt.bufPrintZ(&resolved_path_buf, "{s}/{s}", .{ diff --git a/lib/std/fs/path.zig b/lib/std/fs/path.zig index 4c320ae5cf..e7a28a7615 100644 --- a/lib/std/fs/path.zig +++ b/lib/std/fs/path.zig @@ -358,7 +358,7 @@ pub fn windowsParsePath(path: []const u8) WindowsPath { return relative_path; } - var it = mem.tokenize(u8, path, &[_]u8{this_sep}); + var it = mem.tokenizeScalar(u8, path, this_sep); _ = (it.next() orelse return relative_path); _ = (it.next() orelse return relative_path); return WindowsPath{ @@ -420,8 +420,8 @@ fn networkShareServersEql(ns1: []const u8, ns2: []const u8) bool { const sep1 = ns1[0]; const sep2 = ns2[0]; - var it1 = mem.tokenize(u8, ns1, &[_]u8{sep1}); - var it2 = mem.tokenize(u8, ns2, &[_]u8{sep2}); + var it1 = mem.tokenizeScalar(u8, ns1, sep1); + var it2 = mem.tokenizeScalar(u8, ns2, sep2); // TODO ASCII is wrong, we actually need full unicode support to compare paths. return ascii.eqlIgnoreCase(it1.next().?, it2.next().?); @@ -441,8 +441,8 @@ fn compareDiskDesignators(kind: WindowsPath.Kind, p1: []const u8, p2: []const u8 const sep1 = p1[0]; const sep2 = p2[0]; - var it1 = mem.tokenize(u8, p1, &[_]u8{sep1}); - var it2 = mem.tokenize(u8, p2, &[_]u8{sep2}); + var it1 = mem.tokenizeScalar(u8, p1, sep1); + var it2 = mem.tokenizeScalar(u8, p2, sep2); // TODO ASCII is wrong, we actually need full unicode support to compare paths. return ascii.eqlIgnoreCase(it1.next().?, it2.next().?) and ascii.eqlIgnoreCase(it1.next().?, it2.next().?); @@ -535,7 +535,7 @@ pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) ![]u8 { break :l disk_designator.len; }, .NetworkShare => { - var it = mem.tokenize(u8, paths[first_index], "/\\"); + var it = mem.tokenizeAny(u8, paths[first_index], "/\\"); const server_name = it.next().?; const other_name = it.next().?; @@ -570,7 +570,7 @@ pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) ![]u8 { if (!correct_disk_designator) { continue; } - var it = mem.tokenize(u8, p[parsed.disk_designator.len..], "/\\"); + var it = mem.tokenizeAny(u8, p[parsed.disk_designator.len..], "/\\"); while (it.next()) |component| { if (mem.eql(u8, component, ".")) { continue; @@ -657,7 +657,7 @@ pub fn resolvePosix(allocator: Allocator, paths: []const []const u8) Allocator.E negative_count = 0; result.clearRetainingCapacity(); } - var it = mem.tokenize(u8, p, "/"); + var it = mem.tokenizeScalar(u8, p, '/'); while (it.next()) |component| { if (mem.eql(u8, component, ".")) { continue; @@ -1078,8 +1078,8 @@ pub fn relativeWindows(allocator: Allocator, from: []const u8, to: []const u8) ! 
return resolved_to; } - var from_it = mem.tokenize(u8, resolved_from, "/\\"); - var to_it = mem.tokenize(u8, resolved_to, "/\\"); + var from_it = mem.tokenizeAny(u8, resolved_from, "/\\"); + var to_it = mem.tokenizeAny(u8, resolved_to, "/\\"); while (true) { const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest()); const to_rest = to_it.rest(); @@ -1102,7 +1102,7 @@ pub fn relativeWindows(allocator: Allocator, from: []const u8, to: []const u8) ! result_index += 3; } - var rest_it = mem.tokenize(u8, to_rest, "/\\"); + var rest_it = mem.tokenizeAny(u8, to_rest, "/\\"); while (rest_it.next()) |to_component| { result[result_index] = '\\'; result_index += 1; @@ -1124,8 +1124,8 @@ pub fn relativePosix(allocator: Allocator, from: []const u8, to: []const u8) ![] const resolved_to = try resolvePosix(allocator, &[_][]const u8{ cwd, to }); defer allocator.free(resolved_to); - var from_it = mem.tokenize(u8, resolved_from, "/"); - var to_it = mem.tokenize(u8, resolved_to, "/"); + var from_it = mem.tokenizeScalar(u8, resolved_from, '/'); + var to_it = mem.tokenizeScalar(u8, resolved_to, '/'); while (true) { const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest()); const to_rest = to_it.rest(); diff --git a/lib/std/http/Client.zig b/lib/std/http/Client.zig index 023bdd28bc..5626864ceb 100644 --- a/lib/std/http/Client.zig +++ b/lib/std/http/Client.zig @@ -386,7 +386,7 @@ pub const Response = struct { }; pub fn parse(res: *Response, bytes: []const u8, trailing: bool) ParseError!void { - var it = mem.tokenize(u8, bytes[0 .. bytes.len - 4], "\r\n"); + var it = mem.tokenizeAny(u8, bytes[0 .. bytes.len - 4], "\r\n"); const first_line = it.next() orelse return error.HttpHeadersInvalid; if (first_line.len < 12) @@ -412,7 +412,7 @@ pub const Response = struct { else => {}, } - var line_it = mem.tokenize(u8, line, ": "); + var line_it = mem.tokenizeAny(u8, line, ": "); const header_name = line_it.next() orelse return error.HttpHeadersInvalid; const header_value = line_it.rest(); diff --git a/lib/std/http/Server.zig b/lib/std/http/Server.zig index 6b5db6725f..51ab6c086b 100644 --- a/lib/std/http/Server.zig +++ b/lib/std/http/Server.zig @@ -231,7 +231,7 @@ pub const Request = struct { }; pub fn parse(req: *Request, bytes: []const u8) ParseError!void { - var it = mem.tokenize(u8, bytes[0 .. bytes.len - 4], "\r\n"); + var it = mem.tokenizeAny(u8, bytes[0 .. 
bytes.len - 4], "\r\n"); const first_line = it.next() orelse return error.HttpHeadersInvalid; if (first_line.len < 10) @@ -265,7 +265,7 @@ pub const Request = struct { else => {}, } - var line_it = mem.tokenize(u8, line, ": "); + var line_it = mem.tokenizeAny(u8, line, ": "); const header_name = line_it.next() orelse return error.HttpHeadersInvalid; const header_value = line_it.rest(); diff --git a/lib/std/net.zig b/lib/std/net.zig index 57e50a7349..4360cc29f4 100644 --- a/lib/std/net.zig +++ b/lib/std/net.zig @@ -1266,7 +1266,7 @@ fn linuxLookupNameFromHosts( var split_it = mem.split(u8, line, "#"); const no_comment_line = split_it.first(); - var line_it = mem.tokenize(u8, no_comment_line, " \t"); + var line_it = mem.tokenizeAny(u8, no_comment_line, " \t"); const ip_text = line_it.next() orelse continue; var first_name_text: ?[]const u8 = null; while (line_it.next()) |name_text| { @@ -1346,7 +1346,7 @@ fn linuxLookupNameFromDnsSearch( @memcpy(canon.items, canon_name); try canon.append('.'); - var tok_it = mem.tokenize(u8, search, " \t"); + var tok_it = mem.tokenizeAny(u8, search, " \t"); while (tok_it.next()) |tok| { canon.shrinkRetainingCapacity(canon_name.len + 1); try canon.appendSlice(tok); @@ -1468,7 +1468,7 @@ fn getResolvConf(allocator: mem.Allocator, rc: *ResolvConf) !void { var split = mem.split(u8, line, "#"); break :no_comment_line split.first(); }; - var line_it = mem.tokenize(u8, no_comment_line, " \t"); + var line_it = mem.tokenizeAny(u8, no_comment_line, " \t"); const token = line_it.next() orelse continue; if (mem.eql(u8, token, "options")) { diff --git a/lib/std/os.zig b/lib/std/os.zig index 779e913230..eac79690b5 100644 --- a/lib/std/os.zig +++ b/lib/std/os.zig @@ -1878,7 +1878,7 @@ pub fn execvpeZ_expandArg0( // Use of MAX_PATH_BYTES here is valid as the path_buf will be passed // directly to the operating system in execveZ. 
var path_buf: [MAX_PATH_BYTES]u8 = undefined; - var it = mem.tokenize(u8, PATH, ":"); + var it = mem.tokenizeScalar(u8, PATH, ':'); var seen_eacces = false; var err: ExecveError = error.FileNotFound; diff --git a/lib/std/process.zig b/lib/std/process.zig index 504f9075eb..c33fd92db6 100644 --- a/lib/std/process.zig +++ b/lib/std/process.zig @@ -1200,7 +1200,7 @@ fn totalSystemMemoryLinux() !usize { var buf: [50]u8 = undefined; const amt = try file.read(&buf); if (amt != 50) return error.Unexpected; - var it = std.mem.tokenize(u8, buf[0..amt], " \n"); + var it = std.mem.tokenizeAny(u8, buf[0..amt], " \n"); const label = it.next().?; if (!std.mem.eql(u8, label, "MemTotal:")) return error.Unexpected; const int_text = it.next() orelse return error.Unexpected; diff --git a/lib/std/zig/system/NativePaths.zig b/lib/std/zig/system/NativePaths.zig index 70c795b0cf..368e3e062d 100644 --- a/lib/std/zig/system/NativePaths.zig +++ b/lib/std/zig/system/NativePaths.zig @@ -31,7 +31,7 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths defer allocator.free(nix_cflags_compile); is_nix = true; - var it = mem.tokenize(u8, nix_cflags_compile, " "); + var it = mem.tokenizeScalar(u8, nix_cflags_compile, ' '); while (true) { const word = it.next() orelse break; if (mem.eql(u8, word, "-isystem")) { @@ -62,7 +62,7 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths defer allocator.free(nix_ldflags); is_nix = true; - var it = mem.tokenize(u8, nix_ldflags, " "); + var it = mem.tokenizeScalar(u8, nix_ldflags, ' '); while (true) { const word = it.next() orelse break; if (mem.eql(u8, word, "-rpath")) { @@ -147,21 +147,21 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths // We use os.getenv here since this part won't be executed on // windows, to get rid of unnecessary error handling. 
if (std.os.getenv("C_INCLUDE_PATH")) |c_include_path| { - var it = mem.tokenize(u8, c_include_path, ":"); + var it = mem.tokenizeScalar(u8, c_include_path, ':'); while (it.next()) |dir| { try self.addIncludeDir(dir); } } if (std.os.getenv("CPLUS_INCLUDE_PATH")) |cplus_include_path| { - var it = mem.tokenize(u8, cplus_include_path, ":"); + var it = mem.tokenizeScalar(u8, cplus_include_path, ':'); while (it.next()) |dir| { try self.addIncludeDir(dir); } } if (std.os.getenv("LIBRARY_PATH")) |library_path| { - var it = mem.tokenize(u8, library_path, ":"); + var it = mem.tokenizeScalar(u8, library_path, ':'); while (it.next()) |dir| { try self.addLibDir(dir); } diff --git a/lib/std/zig/system/NativeTargetInfo.zig b/lib/std/zig/system/NativeTargetInfo.zig index 539ad96365..808a1bda8d 100644 --- a/lib/std/zig/system/NativeTargetInfo.zig +++ b/lib/std/zig/system/NativeTargetInfo.zig @@ -354,7 +354,7 @@ fn detectAbiAndDynamicLinker( const newline = mem.indexOfScalar(u8, buffer[0..len], '\n') orelse break :blk file; const line = buffer[0..newline]; if (!mem.startsWith(u8, line, "#!")) break :blk file; - var it = mem.tokenize(u8, line[2..], " "); + var it = mem.tokenizeScalar(u8, line[2..], ' '); file_name = it.next() orelse return defaultAbiAndDynamicLinker(cpu, os, cross_target); file.close(); } @@ -811,7 +811,7 @@ pub fn abiAndDynamicLinkerFromFile( const strtab = strtab_buf[0..strtab_read_len]; const rpath_list = mem.sliceTo(strtab, 0); - var it = mem.tokenize(u8, rpath_list, ":"); + var it = mem.tokenizeScalar(u8, rpath_list, ':'); while (it.next()) |rpath| { if (glibcVerFromRPath(rpath)) |ver| { result.target.os.version_range.linux.glibc = ver; diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig index 2dc1cc8ee4..be09a33bde 100644 --- a/src/arch/x86_64/CodeGen.zig +++ b/src/arch/x86_64/CodeGen.zig @@ -8409,9 +8409,9 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void { } const asm_source = mem.sliceAsBytes(self.air.extra[extra_i..])[0..extra.data.source_len]; - var line_it = mem.tokenize(u8, asm_source, "\n\r;"); + var line_it = mem.tokenizeAny(u8, asm_source, "\n\r;"); while (line_it.next()) |line| { - var mnem_it = mem.tokenize(u8, line, " \t"); + var mnem_it = mem.tokenizeAny(u8, line, " \t"); const mnem_str = mnem_it.next() orelse continue; if (mem.startsWith(u8, mnem_str, "#")) continue; @@ -8435,7 +8435,7 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void { return self.fail("Invalid mnemonic: '{s}'", .{mnem_str}); } }; - var op_it = mem.tokenize(u8, mnem_it.rest(), ","); + var op_it = mem.tokenizeScalar(u8, mnem_it.rest(), ','); var ops = [1]encoder.Instruction.Operand{.none} ** 4; for (&ops) |*op| { const op_str = mem.trim(u8, op_it.next() orelse break, " \t"); diff --git a/src/glibc.zig b/src/glibc.zig index 327e4f4bb9..00787381f4 100644 --- a/src/glibc.zig +++ b/src/glibc.zig @@ -109,7 +109,7 @@ pub fn loadMetaData(gpa: Allocator, contents: []const u8) LoadMetaDataError!*ABI const target_name = mem.sliceTo(contents[index..], 0); index += target_name.len + 1; - var component_it = mem.tokenize(u8, target_name, "-"); + var component_it = mem.tokenizeScalar(u8, target_name, '-'); const arch_name = component_it.next() orelse { log.err("abilists: expected arch name", .{}); return error.ZigInstallationCorrupt; diff --git a/src/libc_installation.zig b/src/libc_installation.zig index da877e1291..a62da6b9c7 100644 --- a/src/libc_installation.zig +++ b/src/libc_installation.zig @@ -60,7 +60,7 @@ pub const LibCInstallation = struct { const contents = try 
std.fs.cwd().readFileAlloc(allocator, libc_file, std.math.maxInt(usize)); defer allocator.free(contents); - var it = std.mem.tokenize(u8, contents, "\n"); + var it = std.mem.tokenizeScalar(u8, contents, '\n'); while (it.next()) |line| { if (line.len == 0 or line[0] == '#') continue; var line_it = std.mem.split(u8, line, "="); @@ -293,7 +293,7 @@ pub const LibCInstallation = struct { }, } - var it = std.mem.tokenize(u8, exec_res.stderr, "\n\r"); + var it = std.mem.tokenizeAny(u8, exec_res.stderr, "\n\r"); var search_paths = std.ArrayList([]const u8).init(allocator); defer search_paths.deinit(); while (it.next()) |line| { @@ -613,7 +613,7 @@ fn ccPrintFileName(args: CCPrintFileNameOptions) ![:0]u8 { }, } - var it = std.mem.tokenize(u8, exec_res.stdout, "\n\r"); + var it = std.mem.tokenizeAny(u8, exec_res.stdout, "\n\r"); const line = it.next() orelse return error.LibCRuntimeNotFound; // When this command fails, it returns exit code 0 and duplicates the input file name. // So we detect failure by checking if the output matches exactly the input. @@ -692,7 +692,7 @@ fn appendCcExe(args: *std.ArrayList([]const u8), skip_cc_env_var: bool) !void { return; }; // Respect space-separated flags to the C compiler. - var it = std.mem.tokenize(u8, cc_env_var, " "); + var it = std.mem.tokenizeScalar(u8, cc_env_var, ' '); while (it.next()) |arg| { try args.append(arg); } diff --git a/src/link/Plan9.zig b/src/link/Plan9.zig index bef06d1c87..f8ac4e09c1 100644 --- a/src/link/Plan9.zig +++ b/src/link/Plan9.zig @@ -264,7 +264,7 @@ fn putFn(self: *Plan9, decl_index: Module.Decl.Index, out: FnDeclOutput) !void { fn addPathComponents(self: *Plan9, path: []const u8, a: *std.ArrayList(u8)) !void { const sep = std.fs.path.sep; - var it = std.mem.tokenize(u8, path, &.{sep}); + var it = std.mem.tokenizeScalar(u8, path, sep); while (it.next()) |component| { if (self.file_segments.get(component)) |num| { try a.writer().writeIntBig(u16, num); diff --git a/src/print_zir.zig b/src/print_zir.zig index f5e84fcf5b..6ded52ae9f 100644 --- a/src/print_zir.zig +++ b/src/print_zir.zig @@ -2581,7 +2581,7 @@ const Writer = struct { fn writeDocComment(self: *Writer, stream: anytype, doc_comment_index: u32) !void { if (doc_comment_index != 0) { const doc_comment = self.code.nullTerminatedString(doc_comment_index); - var it = std.mem.tokenize(u8, doc_comment, "\n"); + var it = std.mem.tokenizeScalar(u8, doc_comment, '\n'); while (it.next()) |doc_line| { try stream.writeByteNTimes(' ', self.indent); try stream.print("///{s}\n", .{doc_line}); diff --git a/test/behavior/bugs/6456.zig b/test/behavior/bugs/6456.zig index 1eef9c7f75..297c9c7423 100644 --- a/test/behavior/bugs/6456.zig +++ b/test/behavior/bugs/6456.zig @@ -18,7 +18,7 @@ test "issue 6456" { comptime { var fields: []const StructField = &[0]StructField{}; - var it = std.mem.tokenize(u8, text, "\n"); + var it = std.mem.tokenizeScalar(u8, text, '\n'); while (it.next()) |name| { fields = fields ++ &[_]StructField{StructField{ .alignment = 0, diff --git a/test/src/Cases.zig b/test/src/Cases.zig index 68ecebc7bd..aa5369af93 100644 --- a/test/src/Cases.zig +++ b/test/src/Cases.zig @@ -846,7 +846,7 @@ const TestManifest = struct { const actual_start = start orelse return error.MissingTestManifest; const manifest_bytes = bytes[actual_start..end]; - var it = std.mem.tokenize(u8, manifest_bytes, "\r\n"); + var it = std.mem.tokenizeAny(u8, manifest_bytes, "\r\n"); // First line is the test type const tt: Type = blk: { @@ -923,7 +923,7 @@ const TestManifest = struct { fn trailing(self: 
TestManifest) TrailingIterator { return .{ - .inner = std.mem.tokenize(u8, self.trailing_bytes, "\r\n"), + .inner = std.mem.tokenizeAny(u8, self.trailing_bytes, "\r\n"), }; } diff --git a/tools/generate_linux_syscalls.zig b/tools/generate_linux_syscalls.zig index 11b18ae3bf..32e287b434 100644 --- a/tools/generate_linux_syscalls.zig +++ b/tools/generate_linux_syscalls.zig @@ -51,11 +51,11 @@ pub fn main() !void { try writer.writeAll("pub const X86 = enum(usize) {\n"); const table = try linux_dir.readFile("arch/x86/entry/syscalls/syscall_32.tbl", buf); - var lines = mem.tokenize(u8, table, "\n"); + var lines = mem.tokenizeScalar(u8, table, '\n'); while (lines.next()) |line| { if (line[0] == '#') continue; - var fields = mem.tokenize(u8, line, " \t"); + var fields = mem.tokenizeAny(u8, line, " \t"); const number = fields.next() orelse return error.Incomplete; // abi is always i386 _ = fields.next() orelse return error.Incomplete; @@ -70,11 +70,11 @@ pub fn main() !void { try writer.writeAll("pub const X64 = enum(usize) {\n"); const table = try linux_dir.readFile("arch/x86/entry/syscalls/syscall_64.tbl", buf); - var lines = mem.tokenize(u8, table, "\n"); + var lines = mem.tokenizeScalar(u8, table, '\n'); while (lines.next()) |line| { if (line[0] == '#') continue; - var fields = mem.tokenize(u8, line, " \t"); + var fields = mem.tokenizeAny(u8, line, " \t"); const number = fields.next() orelse return error.Incomplete; const abi = fields.next() orelse return error.Incomplete; // The x32 abi syscalls are always at the end. @@ -96,11 +96,11 @@ pub fn main() !void { ); const table = try linux_dir.readFile("arch/arm/tools/syscall.tbl", buf); - var lines = mem.tokenize(u8, table, "\n"); + var lines = mem.tokenizeScalar(u8, table, '\n'); while (lines.next()) |line| { if (line[0] == '#') continue; - var fields = mem.tokenize(u8, line, " \t"); + var fields = mem.tokenizeAny(u8, line, " \t"); const number = fields.next() orelse return error.Incomplete; const abi = fields.next() orelse return error.Incomplete; if (mem.eql(u8, abi, "oabi")) continue; @@ -127,11 +127,11 @@ pub fn main() !void { { try writer.writeAll("pub const Sparc64 = enum(usize) {\n"); const table = try linux_dir.readFile("arch/sparc/kernel/syscalls/syscall.tbl", buf); - var lines = mem.tokenize(u8, table, "\n"); + var lines = mem.tokenizeScalar(u8, table, '\n'); while (lines.next()) |line| { if (line[0] == '#') continue; - var fields = mem.tokenize(u8, line, " \t"); + var fields = mem.tokenizeAny(u8, line, " \t"); const number = fields.next() orelse return error.Incomplete; const abi = fields.next() orelse return error.Incomplete; if (mem.eql(u8, abi, "32")) continue; @@ -151,11 +151,11 @@ pub fn main() !void { ); const table = try linux_dir.readFile("arch/mips/kernel/syscalls/syscall_o32.tbl", buf); - var lines = mem.tokenize(u8, table, "\n"); + var lines = mem.tokenizeScalar(u8, table, '\n'); while (lines.next()) |line| { if (line[0] == '#') continue; - var fields = mem.tokenize(u8, line, " \t"); + var fields = mem.tokenizeAny(u8, line, " \t"); const number = fields.next() orelse return error.Incomplete; // abi is always o32 _ = fields.next() orelse return error.Incomplete; @@ -176,11 +176,11 @@ pub fn main() !void { ); const table = try linux_dir.readFile("arch/mips/kernel/syscalls/syscall_n64.tbl", buf); - var lines = mem.tokenize(u8, table, "\n"); + var lines = mem.tokenizeScalar(u8, table, '\n'); while (lines.next()) |line| { if (line[0] == '#') continue; - var fields = mem.tokenize(u8, line, " \t"); + var fields = 
mem.tokenizeAny(u8, line, " \t"); const number = fields.next() orelse return error.Incomplete; // abi is always n64 _ = fields.next() orelse return error.Incomplete; @@ -197,11 +197,11 @@ pub fn main() !void { const table = try linux_dir.readFile("arch/powerpc/kernel/syscalls/syscall.tbl", buf); var list_64 = std.ArrayList(u8).init(allocator); - var lines = mem.tokenize(u8, table, "\n"); + var lines = mem.tokenizeScalar(u8, table, '\n'); while (lines.next()) |line| { if (line[0] == '#') continue; - var fields = mem.tokenize(u8, line, " \t"); + var fields = mem.tokenizeAny(u8, line, " \t"); const number = fields.next() orelse return error.Incomplete; const abi = fields.next() orelse return error.Incomplete; const name = fields.next() orelse return error.Incomplete; @@ -277,9 +277,9 @@ pub fn main() !void { }, }; - var lines = mem.tokenize(u8, defines, "\n"); + var lines = mem.tokenizeScalar(u8, defines, '\n'); loop: while (lines.next()) |line| { - var fields = mem.tokenize(u8, line, " \t"); + var fields = mem.tokenizeAny(u8, line, " \t"); const cmd = fields.next() orelse return error.Incomplete; if (!mem.eql(u8, cmd, "#define")) continue; const define = fields.next() orelse return error.Incomplete; @@ -339,9 +339,9 @@ pub fn main() !void { }, }; - var lines = mem.tokenize(u8, defines, "\n"); + var lines = mem.tokenizeScalar(u8, defines, '\n'); loop: while (lines.next()) |line| { - var fields = mem.tokenize(u8, line, " \t"); + var fields = mem.tokenizeAny(u8, line, " \t"); const cmd = fields.next() orelse return error.Incomplete; if (!mem.eql(u8, cmd, "#define")) continue; const define = fields.next() orelse return error.Incomplete; -- cgit v1.2.3 From 728ce2d7c18e23ca6c36d86f4ee1ea4ce3ac81e2 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 16 May 2023 20:00:47 -0700 Subject: tweaks to --build-id * build.zig: the result of b.option() can be assigned directly in many cases thanks to the return type being an optional * std.Build: make the build system aware of the std.Build.Step.Compile.BuildId type when used as an option. - remove extraneous newlines in error logs * simplify caching logic * simplify hexstring parsing tests and use a doc test * simplify hashing logic. don't use an optional when the `none` tag already provides this meaning. 
* CLI: fix incorrect linker arg parsing --- build.zig | 7 +- lib/std/Build.zig | 62 ++++++++++++------ lib/std/Build/Cache.zig | 4 ++ lib/std/Build/Step/Compile.zig | 141 ++++++++++++++++------------------------- src/Compilation.zig | 9 ++- src/link.zig | 2 +- src/link/Elf.zig | 21 +++--- src/link/Wasm.zig | 50 +++++++-------- src/main.zig | 37 ++++++----- 9 files changed, 169 insertions(+), 164 deletions(-) (limited to 'lib/std/Build.zig') diff --git a/build.zig b/build.zig index 62f1d86441..a75269083f 100644 --- a/build.zig +++ b/build.zig @@ -167,8 +167,11 @@ pub fn build(b: *std.Build) !void { exe.sanitize_thread = sanitize_thread; exe.entitlements = entitlements; - if (b.option([]const u8, "build-id", "Include a build id note")) |build_id| - exe.build_id = try std.Build.CompileStep.BuildId.parse(b.allocator, build_id); + exe.build_id = b.option( + std.Build.Step.Compile.BuildId, + "build-id", + "Request creation of '.note.gnu.build-id' section", + ); b.installArtifact(exe); diff --git a/lib/std/Build.zig b/lib/std/Build.zig index ca55d23937..7ef504851e 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -181,6 +181,7 @@ const TypeId = enum { @"enum", string, list, + build_id, }; const TopLevelStep = struct { @@ -832,13 +833,13 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_ } else if (mem.eql(u8, s, "false")) { return false; } else { - log.err("Expected -D{s} to be a boolean, but received '{s}'\n", .{ name, s }); + log.err("Expected -D{s} to be a boolean, but received '{s}'", .{ name, s }); self.markInvalidUserInput(); return null; } }, .list, .map => { - log.err("Expected -D{s} to be a boolean, but received a {s}.\n", .{ + log.err("Expected -D{s} to be a boolean, but received a {s}.", .{ name, @tagName(option_ptr.value), }); self.markInvalidUserInput(); @@ -847,7 +848,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_ }, .int => switch (option_ptr.value) { .flag, .list, .map => { - log.err("Expected -D{s} to be an integer, but received a {s}.\n", .{ + log.err("Expected -D{s} to be an integer, but received a {s}.", .{ name, @tagName(option_ptr.value), }); self.markInvalidUserInput(); @@ -856,12 +857,12 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_ .scalar => |s| { const n = std.fmt.parseInt(T, s, 10) catch |err| switch (err) { error.Overflow => { - log.err("-D{s} value {s} cannot fit into type {s}.\n", .{ name, s, @typeName(T) }); + log.err("-D{s} value {s} cannot fit into type {s}.", .{ name, s, @typeName(T) }); self.markInvalidUserInput(); return null; }, else => { - log.err("Expected -D{s} to be an integer of type {s}.\n", .{ name, @typeName(T) }); + log.err("Expected -D{s} to be an integer of type {s}.", .{ name, @typeName(T) }); self.markInvalidUserInput(); return null; }, @@ -871,7 +872,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_ }, .float => switch (option_ptr.value) { .flag, .map, .list => { - log.err("Expected -D{s} to be a float, but received a {s}.\n", .{ + log.err("Expected -D{s} to be a float, but received a {s}.", .{ name, @tagName(option_ptr.value), }); self.markInvalidUserInput(); @@ -879,7 +880,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_ }, .scalar => |s| { const n = std.fmt.parseFloat(T, s) catch { - log.err("Expected -D{s} to be a float of type {s}.\n", .{ name, @typeName(T) }); + log.err("Expected -D{s} to be a float of type {s}.", .{ name, @typeName(T) }); 
self.markInvalidUserInput(); return null; }; @@ -888,7 +889,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_ }, .@"enum" => switch (option_ptr.value) { .flag, .map, .list => { - log.err("Expected -D{s} to be an enum, but received a {s}.\n", .{ + log.err("Expected -D{s} to be an enum, but received a {s}.", .{ name, @tagName(option_ptr.value), }); self.markInvalidUserInput(); @@ -898,7 +899,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_ if (std.meta.stringToEnum(T, s)) |enum_lit| { return enum_lit; } else { - log.err("Expected -D{s} to be of type {s}.\n", .{ name, @typeName(T) }); + log.err("Expected -D{s} to be of type {s}.", .{ name, @typeName(T) }); self.markInvalidUserInput(); return null; } @@ -906,7 +907,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_ }, .string => switch (option_ptr.value) { .flag, .list, .map => { - log.err("Expected -D{s} to be a string, but received a {s}.\n", .{ + log.err("Expected -D{s} to be a string, but received a {s}.", .{ name, @tagName(option_ptr.value), }); self.markInvalidUserInput(); @@ -914,9 +915,27 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_ }, .scalar => |s| return s, }, + .build_id => switch (option_ptr.value) { + .flag, .map, .list => { + log.err("Expected -D{s} to be an enum, but received a {s}.", .{ + name, @tagName(option_ptr.value), + }); + self.markInvalidUserInput(); + return null; + }, + .scalar => |s| { + if (Step.Compile.BuildId.parse(s)) |build_id| { + return build_id; + } else |err| { + log.err("unable to parse option '-D{s}': {s}", .{ name, @errorName(err) }); + self.markInvalidUserInput(); + return null; + } + }, + }, .list => switch (option_ptr.value) { .flag, .map => { - log.err("Expected -D{s} to be a list, but received a {s}.\n", .{ + log.err("Expected -D{s} to be a list, but received a {s}.", .{ name, @tagName(option_ptr.value), }); self.markInvalidUserInput(); @@ -1183,15 +1202,18 @@ pub fn addUserInputFlag(self: *Build, name_raw: []const u8) !bool { } fn typeToEnum(comptime T: type) TypeId { - return switch (@typeInfo(T)) { - .Int => .int, - .Float => .float, - .Bool => .bool, - .Enum => .@"enum", - else => switch (T) { - []const u8 => .string, - []const []const u8 => .list, - else => @compileError("Unsupported type: " ++ @typeName(T)), + return switch (T) { + Step.Compile.BuildId => .build_id, + else => return switch (@typeInfo(T)) { + .Int => .int, + .Float => .float, + .Bool => .bool, + .Enum => .@"enum", + else => switch (T) { + []const u8 => .string, + []const []const u8 => .list, + else => @compileError("Unsupported type: " ++ @typeName(T)), + }, }, }; } diff --git a/lib/std/Build/Cache.zig b/lib/std/Build/Cache.zig index 17429c0370..e991aff5b5 100644 --- a/lib/std/Build/Cache.zig +++ b/lib/std/Build/Cache.zig @@ -235,6 +235,10 @@ pub const HashHelper = struct { .none => {}, } }, + std.Build.Step.Compile.BuildId => switch (x) { + .none, .fast, .uuid, .sha1, .md5 => hh.add(std.meta.activeTag(x)), + .hexstring => |hex_string| hh.addBytes(hex_string.toSlice()), + }, else => switch (@typeInfo(@TypeOf(x))) { .Bool, .Int, .Enum, .Array => hh.addBytes(mem.asBytes(&x)), else => @compileError("unable to hash type " ++ @typeName(@TypeOf(x))), diff --git a/lib/std/Build/Step/Compile.zig b/lib/std/Build/Step/Compile.zig index 25492eb73d..d0a2d69bfe 100644 --- a/lib/std/Build/Step/Compile.zig +++ b/lib/std/Build/Step/Compile.zig @@ -294,27 +294,41 @@ pub const BuildId = union(enum) { 
uuid, sha1, md5, - hexstring: []const u8, + hexstring: HexString, + + pub fn eql(a: BuildId, b: BuildId) bool { + const a_tag = std.meta.activeTag(a); + const b_tag = std.meta.activeTag(b); + if (a_tag != b_tag) return false; + return switch (a) { + .none, .fast, .uuid, .sha1, .md5 => true, + .hexstring => |a_hexstring| mem.eql(u8, a_hexstring.toSlice(), b.hexstring.toSlice()), + }; + } - pub fn hash(self: BuildId, hasher: anytype) void { - switch (self) { - .none, .fast, .uuid, .sha1, .md5 => { - hasher.update(@tagName(self)); - }, - .hexstring => |str| { - hasher.update("0x"); - hasher.update(str); - }, + pub const HexString = struct { + bytes: [32]u8, + len: u8, + + /// Result is byte values, *not* hex-encoded. + pub fn toSlice(hs: *const HexString) []const u8 { + return hs.bytes[0..hs.len]; } + }; + + /// Input is byte values, *not* hex-encoded. + /// Asserts `bytes` fits inside `HexString` + pub fn initHexString(bytes: []const u8) BuildId { + var result: BuildId = .{ .hexstring = .{ + .bytes = undefined, + .len = @intCast(u8, bytes.len), + } }; + @memcpy(result.hexstring.bytes[0..bytes.len], bytes); + return result; } - // parses the incoming BuildId. If returns a hexstring, it is allocated - // by the provided allocator. - pub fn parse(allocator: std.mem.Allocator, text: []const u8) error{ - InvalidHexInt, - InvalidBuildId, - OutOfMemory, - }!BuildId { + /// Converts UTF-8 text to a `BuildId`. + pub fn parse(text: []const u8) !BuildId { if (mem.eql(u8, text, "none")) { return .none; } else if (mem.eql(u8, text, "fast")) { @@ -326,27 +340,27 @@ pub const BuildId = union(enum) { } else if (mem.eql(u8, text, "md5")) { return .md5; } else if (mem.startsWith(u8, text, "0x")) { - var clean_hex_string = try allocator.alloc(u8, text.len); - errdefer allocator.free(clean_hex_string); - - var i: usize = 0; - for (text["0x".len..]) |c| { - if (std.ascii.isHex(c)) { - clean_hex_string[i] = c; - i += 1; - } else if (c == '-' or c == ':') { - continue; - } else { - return error.InvalidHexInt; - } - } - if (i < text.len) - _ = allocator.resize(clean_hex_string, i); - - return BuildId{ .hexstring = clean_hex_string[0..i] }; + var result: BuildId = .{ .hexstring = undefined }; + const slice = try std.fmt.hexToBytes(&result.hexstring.bytes, text[2..]); + result.hexstring.len = @intCast(u8, slice.len); + return result; } + return error.InvalidBuildIdStyle; + } + + test parse { + try std.testing.expectEqual(BuildId.md5, try parse("md5")); + try std.testing.expectEqual(BuildId.none, try parse("none")); + try std.testing.expectEqual(BuildId.fast, try parse("fast")); + try std.testing.expectEqual(BuildId.uuid, try parse("uuid")); + try std.testing.expectEqual(BuildId.sha1, try parse("sha1")); + try std.testing.expectEqual(BuildId.sha1, try parse("tree")); - return error.InvalidBuildId; + try std.testing.expect(BuildId.initHexString("").eql(try parse("0x"))); + try std.testing.expect(BuildId.initHexString("\x12\x34\x56").eql(try parse("0x123456"))); + try std.testing.expectError(error.InvalidLength, parse("0x12-34")); + try std.testing.expectError(error.InvalidCharacter, parse("0xfoobbb")); + try std.testing.expectError(error.InvalidBuildIdStyle, parse("yaddaxxx")); } }; @@ -1872,11 +1886,13 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void { try addFlag(&zig_args, "valgrind", self.valgrind_support); try addFlag(&zig_args, "each-lib-rpath", self.each_lib_rpath); + if (self.build_id) |build_id| { - const fmt_str = "--build-id={s}{s}"; try zig_args.append(switch (build_id) { - .hexstring => |str| 
try std.fmt.allocPrint(b.allocator, fmt_str, .{ "0x", str }), - .none, .fast, .uuid, .sha1, .md5 => try std.fmt.allocPrint(b.allocator, fmt_str, .{ "", @tagName(build_id) }), + .hexstring => |hs| b.fmt("--build-id=0x{s}", .{ + std.fmt.fmtSliceHexLower(hs.toSlice()), + }), + .none, .fast, .uuid, .sha1, .md5 => b.fmt("--build-id={s}", .{@tagName(build_id)}), }); } @@ -2243,50 +2259,3 @@ fn checkCompileErrors(self: *Compile) !void { \\========================================= , .{ expected_generated.items, actual_stderr }); } - -const testing = std.testing; - -test "BuildId.parse" { - const tests = &[_]struct { - []const u8, - ?BuildId, - ?anyerror, - }{ - .{ "0x", BuildId{ .hexstring = "" }, null }, - .{ "0x12-34:", BuildId{ .hexstring = "1234" }, null }, - .{ "0x123456", BuildId{ .hexstring = "123456" }, null }, - .{ "md5", .md5, null }, - .{ "none", .none, null }, - .{ "fast", .fast, null }, - .{ "uuid", .uuid, null }, - .{ "sha1", .sha1, null }, - .{ "tree", .sha1, null }, - .{ "0xfoobbb", null, error.InvalidHexInt }, - .{ "yaddaxxx", null, error.InvalidBuildId }, - }; - - for (tests) |tt| { - const input = tt[0]; - const expected = tt[1]; - const expected_err = tt[2]; - - _ = (if (expected_err) |err| { - try testing.expectError(err, BuildId.parse(testing.allocator, input)); - } else blk: { - const actual = BuildId.parse(testing.allocator, input) catch |e| break :blk e; - switch (expected.?) { - .hexstring => |expected_str| { - try testing.expectEqualStrings(expected_str, actual.hexstring); - testing.allocator.free(actual.hexstring); - }, - else => try testing.expectEqual(expected.?, actual), - } - }) catch |e| { - std.log.err( - "BuildId.parse failed on {s}: expected {} got {!}", - .{ input, expected.?, e }, - ); - return e; - }; - } -} diff --git a/src/Compilation.zig b/src/Compilation.zig index 5a547346a5..de09a78c77 100644 --- a/src/Compilation.zig +++ b/src/Compilation.zig @@ -798,6 +798,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation { const unwind_tables = options.want_unwind_tables orelse (link_libunwind or target_util.needUnwindTables(options.target)); const link_eh_frame_hdr = options.link_eh_frame_hdr or unwind_tables; + const build_id = options.build_id orelse .none; // Make a decision on whether to use LLD or our own linker. 
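// [Editorial aside, not part of the patch] To illustrate the build.zig bullet of this
// commit message: because `b.option` now understands the BuildId type and
// `Step.Compile.build_id` is optional, a downstream build.zig can assign the option
// result directly. This is only a hedged sketch; the executable name and source path
// are assumptions, not taken from this repository.
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});
    const exe = b.addExecutable(.{
        .name = "demo",
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });
    // `-Dbuild-id=fast`, `-Dbuild-id=sha1` or `-Dbuild-id=0x1234abcd` are parsed
    // through Step.Compile.BuildId.parse via the new `.build_id` TypeId branch.
    exe.build_id = b.option(std.Build.Step.Compile.BuildId, "build-id", "Request a '.note.gnu.build-id' section");
    b.installArtifact(exe);
}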
const use_lld = options.use_lld orelse blk: { @@ -828,7 +829,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation { options.output_mode == .Lib or options.linker_script != null or options.version_script != null or options.emit_implib != null or - options.build_id != null or + build_id != .none or options.symbol_wrap_set.count() > 0) { break :blk true; @@ -1514,7 +1515,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation { .skip_linker_dependencies = options.skip_linker_dependencies, .parent_compilation_link_libc = options.parent_compilation_link_libc, .each_lib_rpath = options.each_lib_rpath orelse options.is_native_os, - .build_id = options.build_id, + .build_id = build_id, .cache_mode = cache_mode, .disable_lld_caching = options.disable_lld_caching or cache_mode == .whole, .subsystem = options.subsystem, @@ -2269,9 +2270,7 @@ fn addNonIncrementalStuffToCacheManifest(comp: *Compilation, man: *Cache.Manifes man.hash.addListOfBytes(comp.bin_file.options.rpath_list); man.hash.addListOfBytes(comp.bin_file.options.symbol_wrap_set.keys()); man.hash.add(comp.bin_file.options.each_lib_rpath); - if (comp.bin_file.options.build_id) |build_id| { - build_id.hash(&man.hash.hasher); - } + man.hash.add(comp.bin_file.options.build_id); man.hash.add(comp.bin_file.options.skip_linker_dependencies); man.hash.add(comp.bin_file.options.z_nodelete); man.hash.add(comp.bin_file.options.z_notext); diff --git a/src/link.zig b/src/link.zig index eccf389d05..79ac33b892 100644 --- a/src/link.zig +++ b/src/link.zig @@ -158,7 +158,7 @@ pub const Options = struct { skip_linker_dependencies: bool, parent_compilation_link_libc: bool, each_lib_rpath: bool, - build_id: ?BuildId, + build_id: BuildId, disable_lld_caching: bool, is_test: bool, hash_style: HashStyle, diff --git a/src/link/Elf.zig b/src/link/Elf.zig index 289c687270..f90f4ebd46 100644 --- a/src/link/Elf.zig +++ b/src/link/Elf.zig @@ -1399,8 +1399,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v man.hash.add(self.base.options.each_lib_rpath); if (self.base.options.output_mode == .Exe) { man.hash.add(stack_size); - if (self.base.options.build_id) |build_id| - build_id.hash(&man.hash.hasher); + man.hash.add(self.base.options.build_id); } man.hash.addListOfBytes(self.base.options.symbol_wrap_set.keys()); man.hash.add(self.base.options.skip_linker_dependencies); @@ -1543,12 +1542,18 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v try argv.append("-z"); try argv.append(try std.fmt.allocPrint(arena, "stack-size={d}", .{stack_size})); - if (self.base.options.build_id) |build_id| { - const fmt_str = "--build-id={s}{s}"; - try argv.append(switch (build_id) { - .hexstring => |str| try std.fmt.allocPrint(arena, fmt_str, .{ "0x", str }), - .none, .fast, .uuid, .sha1, .md5 => try std.fmt.allocPrint(arena, fmt_str, .{ "", @tagName(build_id) }), - }); + switch (self.base.options.build_id) { + .none => {}, + .fast, .uuid, .sha1, .md5 => { + try argv.append(try std.fmt.allocPrint(arena, "--build-id={s}", .{ + @tagName(self.base.options.build_id), + })); + }, + .hexstring => |hs| { + try argv.append(try std.fmt.allocPrint(arena, "--build-id=0x{s}", .{ + std.fmt.fmtSliceHexLower(hs.toSlice()), + })); + }, } } diff --git a/src/link/Wasm.zig b/src/link/Wasm.zig index 9396377c73..cd9c44d656 100644 --- a/src/link/Wasm.zig +++ b/src/link/Wasm.zig @@ -3163,8 +3163,7 @@ fn linkWithZld(wasm: *Wasm, comp: *Compilation, prog_node: *std.Progress.Node) l try 
man.addOptionalFile(compiler_rt_path); man.hash.addOptionalBytes(options.entry); man.hash.addOptional(options.stack_size_override); - if (wasm.base.options.build_id) |build_id| - build_id.hash(&man.hash.hasher); + man.hash.add(wasm.base.options.build_id); man.hash.add(options.import_memory); man.hash.add(options.import_table); man.hash.add(options.export_table); @@ -3798,27 +3797,29 @@ fn writeToFile( if (!wasm.base.options.strip) { // The build id must be computed on the main sections only, // so we have to do it now, before the debug sections. - if (wasm.base.options.build_id) |build_id| { - switch (build_id) { - .none => {}, - .fast => { - var id: [16]u8 = undefined; - std.crypto.hash.sha3.TurboShake128(null).hash(binary_bytes.items, &id, .{}); - var uuid: [36]u8 = undefined; - _ = try std.fmt.bufPrint(&uuid, "{s}-{s}-{s}-{s}-{s}", .{ - std.fmt.fmtSliceHexLower(id[0..4]), - std.fmt.fmtSliceHexLower(id[4..6]), - std.fmt.fmtSliceHexLower(id[6..8]), - std.fmt.fmtSliceHexLower(id[8..10]), - std.fmt.fmtSliceHexLower(id[10..]), - }); - try emitBuildIdSection(&binary_bytes, &uuid); - }, - .hexstring => |str| { - try emitBuildIdSection(&binary_bytes, str); - }, - else => |mode| log.err("build-id '{s}' is not supported for WASM", .{@tagName(mode)}), - } + switch (wasm.base.options.build_id) { + .none => {}, + .fast => { + var id: [16]u8 = undefined; + std.crypto.hash.sha3.TurboShake128(null).hash(binary_bytes.items, &id, .{}); + var uuid: [36]u8 = undefined; + _ = try std.fmt.bufPrint(&uuid, "{s}-{s}-{s}-{s}-{s}", .{ + std.fmt.fmtSliceHexLower(id[0..4]), + std.fmt.fmtSliceHexLower(id[4..6]), + std.fmt.fmtSliceHexLower(id[6..8]), + std.fmt.fmtSliceHexLower(id[8..10]), + std.fmt.fmtSliceHexLower(id[10..]), + }); + try emitBuildIdSection(&binary_bytes, &uuid); + }, + .hexstring => |hs| { + var buffer: [32 * 2]u8 = undefined; + const str = std.fmt.bufPrint(&buffer, "{s}", .{ + std.fmt.fmtSliceHexLower(hs.toSlice()), + }) catch unreachable; + try emitBuildIdSection(&binary_bytes, str); + }, + else => |mode| log.err("build-id '{s}' is not supported for WASM", .{@tagName(mode)}), } // if (wasm.dwarf) |*dwarf| { @@ -4211,8 +4212,7 @@ fn linkWithLLD(wasm: *Wasm, comp: *Compilation, prog_node: *std.Progress.Node) ! 
try man.addOptionalFile(compiler_rt_path); man.hash.addOptionalBytes(wasm.base.options.entry); man.hash.addOptional(wasm.base.options.stack_size_override); - if (wasm.base.options.build_id) |build_id| - build_id.hash(&man.hash.hasher); + man.hash.add(wasm.base.options.build_id); man.hash.add(wasm.base.options.import_memory); man.hash.add(wasm.base.options.import_table); man.hash.add(wasm.base.options.export_table); diff --git a/src/main.zig b/src/main.zig index 93199f9566..aa7d587983 100644 --- a/src/main.zig +++ b/src/main.zig @@ -494,7 +494,10 @@ const usage_build_generic = \\ -fno-each-lib-rpath Prevent adding rpath for each used dynamic library \\ -fallow-shlib-undefined Allows undefined symbols in shared libraries \\ -fno-allow-shlib-undefined Disallows undefined symbols in shared libraries - \\ --build-id[=style] Generate a build ID note + \\ --build-id[=style] At a minor link-time expense, coordinates stripped binaries + \\ fast, uuid, sha1, md5 with debug symbols via a '.note.gnu.build-id' section + \\ 0x[hexstring] Maximum 32 bytes + \\ none (default) Disable build-id \\ --eh-frame-hdr Enable C++ exception handling by passing --eh-frame-hdr to linker \\ --emit-relocs Enable output of relocation sections for post build tools \\ -z [arg] Set linker extension flags @@ -1445,11 +1448,11 @@ fn buildOutputType( } else if (mem.eql(u8, arg, "--build-id")) { build_id = .fast; } else if (mem.startsWith(u8, arg, "--build-id=")) { - const value = arg["--build-id=".len..]; - build_id = BuildId.parse(arena, value) catch |err| switch (err) { - error.InvalidHexInt => fatal("failed to parse hex value {s}", .{value}), - error.InvalidBuildId => fatal("invalid --build-id={s}", .{value}), - error.OutOfMemory => fatal("OOM", .{}), + const style = arg["--build-id=".len..]; + build_id = BuildId.parse(style) catch |err| { + fatal("unable to parse --build-id style '{s}': {s}", .{ + style, @errorName(err), + }); }; } else if (mem.eql(u8, arg, "--debug-compile-errors")) { if (!crash_report.is_enabled) { @@ -1689,7 +1692,14 @@ fn buildOutputType( if (mem.indexOfScalar(u8, linker_arg, '=')) |equals_pos| { const key = linker_arg[0..equals_pos]; const value = linker_arg[equals_pos + 1 ..]; - if (mem.eql(u8, key, "--sort-common")) { + if (mem.eql(u8, key, "--build-id")) { + build_id = BuildId.parse(value) catch |err| { + fatal("unable to parse --build-id style '{s}': {s}", .{ + value, @errorName(err), + }); + }; + continue; + } else if (mem.eql(u8, key, "--sort-common")) { // this ignores --sort=common=; ignoring plain --sort-common // is done below. 
continue; @@ -1699,7 +1709,9 @@ fn buildOutputType( continue; } } - if (mem.eql(u8, linker_arg, "--as-needed")) { + if (mem.eql(u8, linker_arg, "--build-id")) { + build_id = .fast; + } else if (mem.eql(u8, linker_arg, "--as-needed")) { needed = false; } else if (mem.eql(u8, linker_arg, "--no-as-needed")) { needed = true; @@ -1731,15 +1743,6 @@ fn buildOutputType( search_strategy = .paths_first; } else if (mem.eql(u8, linker_arg, "-search_dylibs_first")) { search_strategy = .dylibs_first; - } else if (mem.eql(u8, linker_arg, "--build-id")) { - build_id = .fast; - } else if (mem.startsWith(u8, linker_arg, "--build-id=")) { - const value = linker_arg["--build-id=".len..]; - build_id = BuildId.parse(arena, value) catch |err| switch (err) { - error.InvalidHexInt => fatal("failed to parse hex value {s}", .{value}), - error.InvalidBuildId => fatal("invalid --build-id={s}", .{value}), - error.OutOfMemory => fatal("OOM", .{}), - }; } else { try linker_args.append(linker_arg); } -- cgit v1.2.3 From f65e8c78621c90c9b9932903a9ed99c973dbcf63 Mon Sep 17 00:00:00 2001 From: mlugg Date: Sat, 22 Apr 2023 18:15:19 +0100 Subject: Deduplicate uses of the same package across dependencies --- lib/std/Build.zig | 16 ++++++++++++++++ src/Package.zig | 17 ++++++++++------- 2 files changed, 26 insertions(+), 7 deletions(-) (limited to 'lib/std/Build.zig') diff --git a/lib/std/Build.zig b/lib/std/Build.zig index 7ef504851e..d97a5c5d7a 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -124,6 +124,9 @@ host: NativeTargetInfo, dep_prefix: []const u8 = "", modules: std.StringArrayHashMap(*Module), +/// A map from build root dirs to the corresponding `*Dependency`. This is shared with all child +/// `Build`s. +initialized_deps: *std.StringHashMap(*Dependency), pub const ExecError = error{ ReadFailure, @@ -209,6 +212,9 @@ pub fn create( const env_map = try allocator.create(EnvMap); env_map.* = try process.getEnvMap(allocator); + const initialized_deps = try allocator.create(std.StringHashMap(*Dependency)); + initialized_deps.* = std.StringHashMap(*Dependency).init(allocator); + const self = try allocator.create(Build); self.* = .{ .zig_exe = zig_exe, @@ -261,6 +267,7 @@ pub fn create( .args = null, .host = host, .modules = std.StringArrayHashMap(*Module).init(allocator), + .initialized_deps = initialized_deps, }; try self.top_level_steps.put(allocator, self.install_tls.step.name, &self.install_tls); try self.top_level_steps.put(allocator, self.uninstall_tls.step.name, &self.uninstall_tls); @@ -345,6 +352,7 @@ fn createChildOnly(parent: *Build, dep_name: []const u8, build_root: Cache.Direc .host = parent.host, .dep_prefix = parent.fmt("{s}{s}.", .{ parent.dep_prefix, dep_name }), .modules = std.StringArrayHashMap(*Module).init(allocator), + .initialized_deps = parent.initialized_deps, }; try child.top_level_steps.put(allocator, child.install_tls.step.name, &child.install_tls); try child.top_level_steps.put(allocator, child.uninstall_tls.step.name, &child.uninstall_tls); @@ -1560,6 +1568,11 @@ pub fn dependencyInner( comptime build_zig: type, args: anytype, ) *Dependency { + if (b.initialized_deps.get(build_root_string)) |dep| { + // TODO: check args are the same + return dep; + } + const build_root: std.Build.Cache.Directory = .{ .path = build_root_string, .handle = std.fs.cwd().openDir(build_root_string, .{}) catch |err| { @@ -1578,6 +1591,9 @@ pub fn dependencyInner( const dep = b.allocator.create(Dependency) catch @panic("OOM"); dep.* = .{ .builder = sub_builder }; + + b.initialized_deps.put(build_root_string, 
dep) catch @panic("OOM"); + return dep; } diff --git a/src/Package.zig b/src/Package.zig index f84f0a8a1b..8a2875667a 100644 --- a/src/Package.zig +++ b/src/Package.zig @@ -216,7 +216,7 @@ pub const build_zig_basename = "build.zig"; pub fn fetchAndAddDependencies( pkg: *Package, - root_pkg: *Package, + deps_pkg: *Package, arena: Allocator, thread_pool: *ThreadPool, http_client: *std.http.Client, @@ -272,7 +272,6 @@ pub fn fetchAndAddDependencies( .error_bundle = error_bundle, }; - var any_error = false; const deps_list = manifest.dependencies.values(); for (manifest.dependencies.keys(), 0..) |name, i| { const dep = deps_list[i]; @@ -292,7 +291,7 @@ pub fn fetchAndAddDependencies( ); try sub_pkg.fetchAndAddDependencies( - root_pkg, + deps_pkg, arena, thread_pool, http_client, @@ -307,14 +306,18 @@ pub fn fetchAndAddDependencies( ); try pkg.add(gpa, name, sub_pkg); - try root_pkg.add(gpa, fqn, sub_pkg); + if (deps_pkg.table.get(dep.hash.?)) |other_sub| { + // This should be the same package (and hence module) since it's the same hash + // TODO: dedup multiple versions of the same package + assert(other_sub == sub_pkg); + } else { + try deps_pkg.add(gpa, dep.hash.?, sub_pkg); + } try dependencies_source.writer().print(" pub const {s} = @import(\"{}\");\n", .{ - std.zig.fmtId(fqn), std.zig.fmtEscapes(fqn), + std.zig.fmtId(fqn), std.zig.fmtEscapes(dep.hash.?), }); } - - if (any_error) return error.InvalidBuildManifestFile; } pub fn createFilePkg( -- cgit v1.2.3 From 39c2eee285f820282dedba4404cac1009a5ae2d6 Mon Sep 17 00:00:00 2001 From: Linus Groh Date: Sat, 20 May 2023 22:30:02 +0100 Subject: std.debug: Rename TTY.Color enum values to snake case --- lib/build_runner.zig | 60 +++++++++++++++++++++---------------------- lib/std/Build.zig | 12 ++++----- lib/std/debug.zig | 62 ++++++++++++++++++++++----------------------- lib/std/testing.zig | 8 +++--- lib/std/zig/ErrorBundle.zig | 32 +++++++++++------------ 5 files changed, 87 insertions(+), 87 deletions(-) (limited to 'lib/std/Build.zig') diff --git a/lib/build_runner.zig b/lib/build_runner.zig index 42903b82f3..7eec164871 100644 --- a/lib/build_runner.zig +++ b/lib/build_runner.zig @@ -476,9 +476,9 @@ fn runStepNames( if (run.enable_summary != false) { const total_count = success_count + failure_count + pending_count + skipped_count; - ttyconf.setColor(stderr, .Cyan) catch {}; + ttyconf.setColor(stderr, .cyan) catch {}; stderr.writeAll("Build Summary:") catch {}; - ttyconf.setColor(stderr, .Reset) catch {}; + ttyconf.setColor(stderr, .reset) catch {}; stderr.writer().print(" {d}/{d} steps succeeded", .{ success_count, total_count }) catch {}; if (skipped_count > 0) stderr.writer().print("; {d} skipped", .{skipped_count}) catch {}; if (failure_count > 0) stderr.writer().print("; {d} failed", .{failure_count}) catch {}; @@ -489,9 +489,9 @@ fn runStepNames( if (test_leak_count > 0) stderr.writer().print("; {d} leaked", .{test_leak_count}) catch {}; if (run.enable_summary == null) { - ttyconf.setColor(stderr, .Dim) catch {}; + ttyconf.setColor(stderr, .dim) catch {}; stderr.writeAll(" (disable with -fno-summary)") catch {}; - ttyconf.setColor(stderr, .Reset) catch {}; + ttyconf.setColor(stderr, .reset) catch {}; } stderr.writeAll("\n") catch {}; @@ -560,7 +560,7 @@ fn printTreeStep( const first = step_stack.swapRemove(s); try printPrefix(parent_node, stderr, ttyconf); - if (!first) try ttyconf.setColor(stderr, .Dim); + if (!first) try ttyconf.setColor(stderr, .dim); if (parent_node.parent != null) { if (parent_node.last) { try 
stderr.writeAll(switch (ttyconf) { @@ -586,28 +586,28 @@ fn printTreeStep( .running => unreachable, .dependency_failure => { - try ttyconf.setColor(stderr, .Dim); + try ttyconf.setColor(stderr, .dim); try stderr.writeAll(" transitive failure\n"); - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); }, .success => { - try ttyconf.setColor(stderr, .Green); + try ttyconf.setColor(stderr, .green); if (s.result_cached) { try stderr.writeAll(" cached"); } else if (s.test_results.test_count > 0) { const pass_count = s.test_results.passCount(); try stderr.writer().print(" {d} passed", .{pass_count}); if (s.test_results.skip_count > 0) { - try ttyconf.setColor(stderr, .Yellow); + try ttyconf.setColor(stderr, .yellow); try stderr.writer().print(" {d} skipped", .{s.test_results.skip_count}); } } else { try stderr.writeAll(" success"); } - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); if (s.result_duration_ns) |ns| { - try ttyconf.setColor(stderr, .Dim); + try ttyconf.setColor(stderr, .dim); if (ns >= std.time.ns_per_min) { try stderr.writer().print(" {d}m", .{ns / std.time.ns_per_min}); } else if (ns >= std.time.ns_per_s) { @@ -619,11 +619,11 @@ fn printTreeStep( } else { try stderr.writer().print(" {d}ns", .{ns}); } - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); } if (s.result_peak_rss != 0) { const rss = s.result_peak_rss; - try ttyconf.setColor(stderr, .Dim); + try ttyconf.setColor(stderr, .dim); if (rss >= 1000_000_000) { try stderr.writer().print(" MaxRSS:{d}G", .{rss / 1000_000_000}); } else if (rss >= 1000_000) { @@ -633,57 +633,57 @@ fn printTreeStep( } else { try stderr.writer().print(" MaxRSS:{d}B", .{rss}); } - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); } try stderr.writeAll("\n"); }, .skipped => { - try ttyconf.setColor(stderr, .Yellow); + try ttyconf.setColor(stderr, .yellow); try stderr.writeAll(" skipped\n"); - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); }, .failure => { if (s.result_error_bundle.errorMessageCount() > 0) { - try ttyconf.setColor(stderr, .Red); + try ttyconf.setColor(stderr, .red); try stderr.writer().print(" {d} errors\n", .{ s.result_error_bundle.errorMessageCount(), }); - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); } else if (!s.test_results.isSuccess()) { try stderr.writer().print(" {d}/{d} passed", .{ s.test_results.passCount(), s.test_results.test_count, }); if (s.test_results.fail_count > 0) { try stderr.writeAll(", "); - try ttyconf.setColor(stderr, .Red); + try ttyconf.setColor(stderr, .red); try stderr.writer().print("{d} failed", .{ s.test_results.fail_count, }); - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); } if (s.test_results.skip_count > 0) { try stderr.writeAll(", "); - try ttyconf.setColor(stderr, .Yellow); + try ttyconf.setColor(stderr, .yellow); try stderr.writer().print("{d} skipped", .{ s.test_results.skip_count, }); - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); } if (s.test_results.leak_count > 0) { try stderr.writeAll(", "); - try ttyconf.setColor(stderr, .Red); + try ttyconf.setColor(stderr, .red); try stderr.writer().print("{d} leaked", .{ s.test_results.leak_count, }); - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); } try stderr.writeAll("\n"); } else { - try ttyconf.setColor(stderr, .Red); + try ttyconf.setColor(stderr, .red); try stderr.writeAll(" 
failure\n"); - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); } }, } @@ -703,7 +703,7 @@ fn printTreeStep( s.dependencies.items.len, }); } - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); } } @@ -819,13 +819,13 @@ fn workerMakeOneStep( for (s.result_error_msgs.items) |msg| { // Sometimes it feels like you just can't catch a break. Finally, // with Zig, you can. - ttyconf.setColor(stderr, .Bold) catch break; + ttyconf.setColor(stderr, .bold) catch break; stderr.writeAll(s.owner.dep_prefix) catch break; stderr.writeAll(s.name) catch break; stderr.writeAll(": ") catch break; - ttyconf.setColor(stderr, .Red) catch break; + ttyconf.setColor(stderr, .red) catch break; stderr.writeAll("error: ") catch break; - ttyconf.setColor(stderr, .Reset) catch break; + ttyconf.setColor(stderr, .reset) catch break; stderr.writeAll(msg) catch break; stderr.writeAll("\n") catch break; } diff --git a/lib/std/Build.zig b/lib/std/Build.zig index d97a5c5d7a..b36e815f72 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -1713,9 +1713,9 @@ fn dumpBadGetPathHelp( }); const tty_config = std.debug.detectTTYConfig(stderr); - tty_config.setColor(w, .Red) catch {}; + tty_config.setColor(w, .red) catch {}; try stderr.writeAll(" The step was created by this stack trace:\n"); - tty_config.setColor(w, .Reset) catch {}; + tty_config.setColor(w, .reset) catch {}; const debug_info = std.debug.getSelfDebugInfo() catch |err| { try w.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}); @@ -1727,9 +1727,9 @@ fn dumpBadGetPathHelp( return; }; if (asking_step) |as| { - tty_config.setColor(w, .Red) catch {}; + tty_config.setColor(w, .red) catch {}; try stderr.writeAll(" The step that is missing a dependency on the above step was created by this stack trace:\n"); - tty_config.setColor(w, .Reset) catch {}; + tty_config.setColor(w, .reset) catch {}; std.debug.writeStackTrace(as.getStackTrace(), w, ally, debug_info, tty_config) catch |err| { try stderr.writer().print("Unable to dump stack trace: {s}\n", .{@errorName(err)}); @@ -1737,9 +1737,9 @@ fn dumpBadGetPathHelp( }; } - tty_config.setColor(w, .Red) catch {}; + tty_config.setColor(w, .red) catch {}; try stderr.writeAll(" Hope that helps. Proceeding to panic.\n"); - tty_config.setColor(w, .Reset) catch {}; + tty_config.setColor(w, .reset) catch {}; } /// Allocates a new string for assigning a value to a named macro. diff --git a/lib/std/debug.zig b/lib/std/debug.zig index 005c2b5404..d98cf8f27d 100644 --- a/lib/std/debug.zig +++ b/lib/std/debug.zig @@ -421,9 +421,9 @@ pub fn writeStackTrace( if (stack_trace.index > stack_trace.instruction_addresses.len) { const dropped_frames = stack_trace.index - stack_trace.instruction_addresses.len; - tty_config.setColor(out_stream, .Bold) catch {}; + tty_config.setColor(out_stream, .bold) catch {}; try out_stream.print("({d} additional stack frames skipped...)\n", .{dropped_frames}); - tty_config.setColor(out_stream, .Reset) catch {}; + tty_config.setColor(out_stream, .reset) catch {}; } } @@ -655,14 +655,14 @@ pub fn writeCurrentStackTraceWindows( /// for debugging purposes, such as coloring text, etc. 
pub const TTY = struct { pub const Color = enum { - Red, - Green, - Yellow, - Cyan, - White, - Dim, - Bold, - Reset, + red, + green, + yellow, + cyan, + white, + dim, + bold, + reset, }; pub const Config = union(enum) { @@ -680,26 +680,26 @@ pub const TTY = struct { .no_color => return, .escape_codes => { const color_string = switch (color) { - .Red => "\x1b[31;1m", - .Green => "\x1b[32;1m", - .Yellow => "\x1b[33;1m", - .Cyan => "\x1b[36;1m", - .White => "\x1b[37;1m", - .Bold => "\x1b[1m", - .Dim => "\x1b[2m", - .Reset => "\x1b[0m", + .red => "\x1b[31;1m", + .green => "\x1b[32;1m", + .yellow => "\x1b[33;1m", + .cyan => "\x1b[36;1m", + .white => "\x1b[37;1m", + .bold => "\x1b[1m", + .dim => "\x1b[2m", + .reset => "\x1b[0m", }; try out_stream.writeAll(color_string); }, .windows_api => |ctx| if (native_os == .windows) { const attributes = switch (color) { - .Red => windows.FOREGROUND_RED | windows.FOREGROUND_INTENSITY, - .Green => windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY, - .Yellow => windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY, - .Cyan => windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY, - .White, .Bold => windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY, - .Dim => windows.FOREGROUND_INTENSITY, - .Reset => ctx.reset_attributes, + .red => windows.FOREGROUND_RED | windows.FOREGROUND_INTENSITY, + .green => windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY, + .yellow => windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY, + .cyan => windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY, + .white, .bold => windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY, + .dim => windows.FOREGROUND_INTENSITY, + .reset => ctx.reset_attributes, }; try windows.SetConsoleTextAttribute(ctx.handle, attributes); } else { @@ -831,7 +831,7 @@ fn printLineInfo( comptime printLineFromFile: anytype, ) !void { nosuspend { - try tty_config.setColor(out_stream, .Bold); + try tty_config.setColor(out_stream, .bold); if (line_info) |*li| { try out_stream.print("{s}:{d}:{d}", .{ li.file_name, li.line, li.column }); @@ -839,11 +839,11 @@ fn printLineInfo( try out_stream.writeAll("???:?:?"); } - try tty_config.setColor(out_stream, .Reset); + try tty_config.setColor(out_stream, .reset); try out_stream.writeAll(": "); - try tty_config.setColor(out_stream, .Dim); + try tty_config.setColor(out_stream, .dim); try out_stream.print("0x{x} in {s} ({s})", .{ address, symbol_name, compile_unit_name }); - try tty_config.setColor(out_stream, .Reset); + try tty_config.setColor(out_stream, .reset); try out_stream.writeAll("\n"); // Show the matching source code line if possible @@ -854,9 +854,9 @@ fn printLineInfo( const space_needed = @intCast(usize, li.column - 1); try out_stream.writeByteNTimes(' ', space_needed); - try tty_config.setColor(out_stream, .Green); + try tty_config.setColor(out_stream, .green); try out_stream.writeAll("^"); - try tty_config.setColor(out_stream, .Reset); + try tty_config.setColor(out_stream, .reset); } try out_stream.writeAll("\n"); } else |err| switch (err) { diff --git a/lib/std/testing.zig b/lib/std/testing.zig index 2857ebdbd3..8576ec0c83 100644 --- a/lib/std/testing.zig +++ b/lib/std/testing.zig @@ -387,9 +387,9 @@ fn SliceDiffer(comptime T: type) type { for (self.expected, 0..) 
|value, i| { var full_index = self.start_index + i; const diff = if (i < self.actual.len) !std.meta.eql(self.actual[i], value) else true; - if (diff) try self.ttyconf.setColor(writer, .Red); + if (diff) try self.ttyconf.setColor(writer, .red); try writer.print("[{}]: {any}\n", .{ full_index, value }); - if (diff) try self.ttyconf.setColor(writer, .Reset); + if (diff) try self.ttyconf.setColor(writer, .reset); } } }; @@ -427,9 +427,9 @@ const BytesDiffer = struct { } fn writeByteDiff(self: BytesDiffer, writer: anytype, comptime fmt: []const u8, byte: u8, diff: bool) !void { - if (diff) try self.ttyconf.setColor(writer, .Red); + if (diff) try self.ttyconf.setColor(writer, .red); try writer.print(fmt, .{byte}); - if (diff) try self.ttyconf.setColor(writer, .Reset); + if (diff) try self.ttyconf.setColor(writer, .reset); } const ChunkIterator = struct { diff --git a/lib/std/zig/ErrorBundle.zig b/lib/std/zig/ErrorBundle.zig index ffe748203e..f74d82273a 100644 --- a/lib/std/zig/ErrorBundle.zig +++ b/lib/std/zig/ErrorBundle.zig @@ -163,7 +163,7 @@ pub fn renderToStdErr(eb: ErrorBundle, options: RenderOptions) void { pub fn renderToWriter(eb: ErrorBundle, options: RenderOptions, writer: anytype) anyerror!void { for (eb.getMessages()) |err_msg| { - try renderErrorMessageToWriter(eb, options, err_msg, writer, "error", .Red, 0); + try renderErrorMessageToWriter(eb, options, err_msg, writer, "error", .red, 0); } if (options.include_log_text) { @@ -191,7 +191,7 @@ fn renderErrorMessageToWriter( if (err_msg.src_loc != .none) { const src = eb.extraData(SourceLocation, @enumToInt(err_msg.src_loc)); try counting_stderr.writeByteNTimes(' ', indent); - try ttyconf.setColor(stderr, .Bold); + try ttyconf.setColor(stderr, .bold); try counting_stderr.print("{s}:{d}:{d}: ", .{ eb.nullTerminatedString(src.data.src_path), src.data.line + 1, @@ -203,17 +203,17 @@ fn renderErrorMessageToWriter( // This is the length of the part before the error message: // e.g. 
"file.zig:4:5: error: " const prefix_len = @intCast(usize, counting_stderr.context.bytes_written); - try ttyconf.setColor(stderr, .Reset); - try ttyconf.setColor(stderr, .Bold); + try ttyconf.setColor(stderr, .reset); + try ttyconf.setColor(stderr, .bold); if (err_msg.count == 1) { try writeMsg(eb, err_msg, stderr, prefix_len); try stderr.writeByte('\n'); } else { try writeMsg(eb, err_msg, stderr, prefix_len); - try ttyconf.setColor(stderr, .Dim); + try ttyconf.setColor(stderr, .dim); try stderr.print(" ({d} times)\n", .{err_msg.count}); } - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); if (src.data.source_line != 0 and options.include_source_line) { const line = eb.nullTerminatedString(src.data.source_line); for (line) |b| switch (b) { @@ -226,19 +226,19 @@ fn renderErrorMessageToWriter( // -1 since span.main includes the caret const after_caret = src.data.span_end - src.data.span_main -| 1; try stderr.writeByteNTimes(' ', src.data.column - before_caret); - try ttyconf.setColor(stderr, .Green); + try ttyconf.setColor(stderr, .green); try stderr.writeByteNTimes('~', before_caret); try stderr.writeByte('^'); try stderr.writeByteNTimes('~', after_caret); try stderr.writeByte('\n'); - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); } for (eb.getNotes(err_msg_index)) |note| { - try renderErrorMessageToWriter(eb, options, note, stderr, "note", .Cyan, indent); + try renderErrorMessageToWriter(eb, options, note, stderr, "note", .cyan, indent); } if (src.data.reference_trace_len > 0 and options.include_reference_trace) { - try ttyconf.setColor(stderr, .Reset); - try ttyconf.setColor(stderr, .Dim); + try ttyconf.setColor(stderr, .reset); + try ttyconf.setColor(stderr, .dim); try stderr.print("referenced by:\n", .{}); var ref_index = src.end; for (0..src.data.reference_trace_len) |_| { @@ -266,25 +266,25 @@ fn renderErrorMessageToWriter( } } try stderr.writeByte('\n'); - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); } } else { try ttyconf.setColor(stderr, color); try stderr.writeByteNTimes(' ', indent); try stderr.writeAll(kind); try stderr.writeAll(": "); - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); const msg = eb.nullTerminatedString(err_msg.msg); if (err_msg.count == 1) { try stderr.print("{s}\n", .{msg}); } else { try stderr.print("{s}", .{msg}); - try ttyconf.setColor(stderr, .Dim); + try ttyconf.setColor(stderr, .dim); try stderr.print(" ({d} times)\n", .{err_msg.count}); } - try ttyconf.setColor(stderr, .Reset); + try ttyconf.setColor(stderr, .reset); for (eb.getNotes(err_msg_index)) |note| { - try renderErrorMessageToWriter(eb, options, note, stderr, "note", .Cyan, indent + 4); + try renderErrorMessageToWriter(eb, options, note, stderr, "note", .cyan, indent + 4); } } } -- cgit v1.2.3 From 0f6fa3f20b3b28958921bd63a9a9d96468455e9c Mon Sep 17 00:00:00 2001 From: Linus Groh Date: Sun, 21 May 2023 14:27:28 +0100 Subject: std: Move std.debug.{TTY.Config,detectTTYConfig} to std.io.tty Also get rid of the TTY wrapper struct, which was exlusively used as a namespace - this is done by the tty.zig root struct now. detectTTYConfig has been renamed to just detectConfig, which is enough given the new namespace. Additionally, a doc comment had been added. 
--- lib/build_runner.zig | 12 ++-- lib/std/Build.zig | 2 +- lib/std/Build/Step.zig | 2 +- lib/std/builtin.zig | 2 +- lib/std/debug.zig | 137 ++++---------------------------------------- lib/std/io.zig | 2 + lib/std/io/tty.zig | 121 ++++++++++++++++++++++++++++++++++++++ lib/std/testing.zig | 8 +-- lib/std/zig/ErrorBundle.zig | 4 +- src/main.zig | 4 +- test/src/Cases.zig | 2 +- 11 files changed, 152 insertions(+), 144 deletions(-) create mode 100644 lib/std/io/tty.zig (limited to 'lib/std/Build.zig') diff --git a/lib/build_runner.zig b/lib/build_runner.zig index 7eec164871..a09ec2cf1f 100644 --- a/lib/build_runner.zig +++ b/lib/build_runner.zig @@ -333,7 +333,7 @@ const Run = struct { claimed_rss: usize, enable_summary: ?bool, - ttyconf: std.debug.TTY.Config, + ttyconf: std.io.tty.Config, stderr: std.fs.File, }; @@ -535,7 +535,7 @@ const PrintNode = struct { last: bool = false, }; -fn printPrefix(node: *PrintNode, stderr: std.fs.File, ttyconf: std.debug.TTY.Config) !void { +fn printPrefix(node: *PrintNode, stderr: std.fs.File, ttyconf: std.io.tty.Config) !void { const parent = node.parent orelse return; if (parent.parent == null) return; try printPrefix(parent, stderr, ttyconf); @@ -553,7 +553,7 @@ fn printTreeStep( b: *std.Build, s: *Step, stderr: std.fs.File, - ttyconf: std.debug.TTY.Config, + ttyconf: std.io.tty.Config, parent_node: *PrintNode, step_stack: *std.AutoArrayHashMapUnmanaged(*Step, void), ) !void { @@ -1026,15 +1026,15 @@ fn cleanExit() void { const Color = enum { auto, off, on }; -fn get_tty_conf(color: Color, stderr: std.fs.File) std.debug.TTY.Config { +fn get_tty_conf(color: Color, stderr: std.fs.File) std.io.tty.Config { return switch (color) { - .auto => std.debug.detectTTYConfig(stderr), + .auto => std.io.tty.detectConfig(stderr), .on => .escape_codes, .off => .no_color, }; } -fn renderOptions(ttyconf: std.debug.TTY.Config) std.zig.ErrorBundle.RenderOptions { +fn renderOptions(ttyconf: std.io.tty.Config) std.zig.ErrorBundle.RenderOptions { return .{ .ttyconf = ttyconf, .include_source_line = ttyconf != .no_color, diff --git a/lib/std/Build.zig b/lib/std/Build.zig index b36e815f72..bb642b5e66 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -1712,7 +1712,7 @@ fn dumpBadGetPathHelp( s.name, }); - const tty_config = std.debug.detectTTYConfig(stderr); + const tty_config = std.io.tty.detectConfig(stderr); tty_config.setColor(w, .red) catch {}; try stderr.writeAll(" The step was created by this stack trace:\n"); tty_config.setColor(w, .reset) catch {}; diff --git a/lib/std/Build/Step.zig b/lib/std/Build/Step.zig index 40c88df2b9..a0d7a6a296 100644 --- a/lib/std/Build/Step.zig +++ b/lib/std/Build/Step.zig @@ -237,7 +237,7 @@ pub fn dump(step: *Step) void { const stderr = std.io.getStdErr(); const w = stderr.writer(); - const tty_config = std.debug.detectTTYConfig(stderr); + const tty_config = std.io.tty.detectConfig(stderr); const debug_info = std.debug.getSelfDebugInfo() catch |err| { w.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{ @errorName(err), diff --git a/lib/std/builtin.zig b/lib/std/builtin.zig index 56fab05d88..710aaefd5a 100644 --- a/lib/std/builtin.zig +++ b/lib/std/builtin.zig @@ -51,7 +51,7 @@ pub const StackTrace = struct { const debug_info = std.debug.getSelfDebugInfo() catch |err| { return writer.print("\nUnable to print stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}); }; - const tty_config = std.debug.detectTTYConfig(std.io.getStdErr()); + const tty_config = std.io.tty.detectConfig(std.io.getStdErr()); 
try writer.writeAll("\n"); std.debug.writeStackTrace(self, writer, arena.allocator(), debug_info, tty_config) catch |err| { try writer.print("Unable to print stack trace: {s}\n", .{@errorName(err)}); diff --git a/lib/std/debug.zig b/lib/std/debug.zig index d98cf8f27d..08407023d6 100644 --- a/lib/std/debug.zig +++ b/lib/std/debug.zig @@ -5,7 +5,6 @@ const mem = std.mem; const io = std.io; const os = std.os; const fs = std.fs; -const process = std.process; const testing = std.testing; const elf = std.elf; const DW = std.dwarf; @@ -109,31 +108,6 @@ pub fn getSelfDebugInfo() !*DebugInfo { } } -pub fn detectTTYConfig(file: std.fs.File) TTY.Config { - if (builtin.os.tag == .wasi) { - // Per https://github.com/WebAssembly/WASI/issues/162 ANSI codes - // aren't currently supported. - return .no_color; - } else if (process.hasEnvVarConstant("ZIG_DEBUG_COLOR")) { - return .escape_codes; - } else if (process.hasEnvVarConstant("NO_COLOR")) { - return .no_color; - } else if (file.supportsAnsiEscapeCodes()) { - return .escape_codes; - } else if (native_os == .windows and file.isTty()) { - var info: windows.CONSOLE_SCREEN_BUFFER_INFO = undefined; - if (windows.kernel32.GetConsoleScreenBufferInfo(file.handle, &info) != windows.TRUE) { - // TODO: Should this return an error instead? - return .no_color; - } - return .{ .windows_api = .{ - .handle = file.handle, - .reset_attributes = info.wAttributes, - } }; - } - return .no_color; -} - /// Tries to print the current stack trace to stderr, unbuffered, and ignores any error returned. /// TODO multithreaded awareness pub fn dumpCurrentStackTrace(start_addr: ?usize) void { @@ -154,7 +128,7 @@ pub fn dumpCurrentStackTrace(start_addr: ?usize) void { stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return; return; }; - writeCurrentStackTrace(stderr, debug_info, detectTTYConfig(io.getStdErr()), start_addr) catch |err| { + writeCurrentStackTrace(stderr, debug_info, io.tty.detectConfig(io.getStdErr()), start_addr) catch |err| { stderr.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch return; return; }; @@ -182,7 +156,7 @@ pub fn dumpStackTraceFromBase(bp: usize, ip: usize) void { stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return; return; }; - const tty_config = detectTTYConfig(io.getStdErr()); + const tty_config = io.tty.detectConfig(io.getStdErr()); if (native_os == .windows) { writeCurrentStackTraceWindows(stderr, debug_info, tty_config, ip) catch return; return; @@ -265,7 +239,7 @@ pub fn dumpStackTrace(stack_trace: std.builtin.StackTrace) void { stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return; return; }; - writeStackTrace(stack_trace, stderr, getDebugInfoAllocator(), debug_info, detectTTYConfig(io.getStdErr())) catch |err| { + writeStackTrace(stack_trace, stderr, getDebugInfoAllocator(), debug_info, io.tty.detectConfig(io.getStdErr())) catch |err| { stderr.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch return; return; }; @@ -403,7 +377,7 @@ pub fn writeStackTrace( out_stream: anytype, allocator: mem.Allocator, debug_info: *DebugInfo, - tty_config: TTY.Config, + tty_config: io.tty.Config, ) !void { _ = allocator; if (builtin.strip_debug_info) return error.MissingDebugInfo; @@ -562,7 +536,7 @@ pub const StackIterator = struct { pub fn writeCurrentStackTrace( out_stream: anytype, debug_info: *DebugInfo, - tty_config: TTY.Config, + tty_config: 
io.tty.Config, start_addr: ?usize, ) !void { if (native_os == .windows) { @@ -634,7 +608,7 @@ pub noinline fn walkStackWindows(addresses: []usize) usize { pub fn writeCurrentStackTraceWindows( out_stream: anytype, debug_info: *DebugInfo, - tty_config: TTY.Config, + tty_config: io.tty.Config, start_addr: ?usize, ) !void { var addr_buf: [1024]usize = undefined; @@ -651,95 +625,6 @@ pub fn writeCurrentStackTraceWindows( } } -/// Provides simple functionality for manipulating the terminal in some way, -/// for debugging purposes, such as coloring text, etc. -pub const TTY = struct { - pub const Color = enum { - red, - green, - yellow, - cyan, - white, - dim, - bold, - reset, - }; - - pub const Config = union(enum) { - no_color, - escape_codes, - windows_api: if (native_os == .windows) WindowsContext else void, - - pub const WindowsContext = struct { - handle: File.Handle, - reset_attributes: u16, - }; - - pub fn setColor(conf: Config, out_stream: anytype, color: Color) !void { - nosuspend switch (conf) { - .no_color => return, - .escape_codes => { - const color_string = switch (color) { - .red => "\x1b[31;1m", - .green => "\x1b[32;1m", - .yellow => "\x1b[33;1m", - .cyan => "\x1b[36;1m", - .white => "\x1b[37;1m", - .bold => "\x1b[1m", - .dim => "\x1b[2m", - .reset => "\x1b[0m", - }; - try out_stream.writeAll(color_string); - }, - .windows_api => |ctx| if (native_os == .windows) { - const attributes = switch (color) { - .red => windows.FOREGROUND_RED | windows.FOREGROUND_INTENSITY, - .green => windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY, - .yellow => windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY, - .cyan => windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY, - .white, .bold => windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY, - .dim => windows.FOREGROUND_INTENSITY, - .reset => ctx.reset_attributes, - }; - try windows.SetConsoleTextAttribute(ctx.handle, attributes); - } else { - unreachable; - }, - }; - } - - pub fn writeDEC(conf: Config, writer: anytype, codepoint: u8) !void { - const bytes = switch (conf) { - .no_color, .windows_api => switch (codepoint) { - 0x50...0x5e => @as(*const [1]u8, &codepoint), - 0x6a => "+", // ┘ - 0x6b => "+", // ┐ - 0x6c => "+", // ┌ - 0x6d => "+", // └ - 0x6e => "+", // ┼ - 0x71 => "-", // ─ - 0x74 => "+", // ├ - 0x75 => "+", // ┤ - 0x76 => "+", // ┴ - 0x77 => "+", // ┬ - 0x78 => "|", // │ - else => " ", // TODO - }, - .escape_codes => switch (codepoint) { - // Here we avoid writing the DEC beginning sequence and - // ending sequence in separate syscalls by putting the - // beginning and ending sequence into the same string - // literals, to prevent terminals ending up in bad states - // in case a crash happens between syscalls. 
- inline 0x50...0x7f => |x| "\x1B\x28\x30" ++ [1]u8{x} ++ "\x1B\x28\x42", - else => unreachable, - }, - }; - return writer.writeAll(bytes); - } - }; -}; - fn machoSearchSymbols(symbols: []const MachoSymbol, address: usize) ?*const MachoSymbol { var min: usize = 0; var max: usize = symbols.len - 1; @@ -785,7 +670,7 @@ test "machoSearchSymbols" { try testing.expectEqual(&symbols[2], machoSearchSymbols(&symbols, 5000).?); } -fn printUnknownSource(debug_info: *DebugInfo, out_stream: anytype, address: usize, tty_config: TTY.Config) !void { +fn printUnknownSource(debug_info: *DebugInfo, out_stream: anytype, address: usize, tty_config: io.tty.Config) !void { const module_name = debug_info.getModuleNameForAddress(address); return printLineInfo( out_stream, @@ -798,7 +683,7 @@ fn printUnknownSource(debug_info: *DebugInfo, out_stream: anytype, address: usiz ); } -pub fn printSourceAtAddress(debug_info: *DebugInfo, out_stream: anytype, address: usize, tty_config: TTY.Config) !void { +pub fn printSourceAtAddress(debug_info: *DebugInfo, out_stream: anytype, address: usize, tty_config: io.tty.Config) !void { const module = debug_info.getModuleForAddress(address) catch |err| switch (err) { error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, out_stream, address, tty_config), else => return err, @@ -827,7 +712,7 @@ fn printLineInfo( address: usize, symbol_name: []const u8, compile_unit_name: []const u8, - tty_config: TTY.Config, + tty_config: io.tty.Config, comptime printLineFromFile: anytype, ) !void { nosuspend { @@ -2193,7 +2078,7 @@ test "manage resources correctly" { const writer = std.io.null_writer; var di = try openSelfDebugInfo(testing.allocator); defer di.deinit(); - try printSourceAtAddress(&di, writer, showMyTrace(), detectTTYConfig(std.io.getStdErr())); + try printSourceAtAddress(&di, writer, showMyTrace(), io.tty.detectConfig(std.io.getStdErr())); } noinline fn showMyTrace() usize { @@ -2253,7 +2138,7 @@ pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize pub fn dump(t: @This()) void { if (!enabled) return; - const tty_config = detectTTYConfig(std.io.getStdErr()); + const tty_config = io.tty.detectConfig(std.io.getStdErr()); const stderr = io.getStdErr().writer(); const end = @min(t.index, size); const debug_info = getSelfDebugInfo() catch |err| { diff --git a/lib/std/io.zig b/lib/std/io.zig index d95997f853..f6d893c7dd 100644 --- a/lib/std/io.zig +++ b/lib/std/io.zig @@ -155,6 +155,8 @@ pub const BufferedAtomicFile = @import("io/buffered_atomic_file.zig").BufferedAt pub const StreamSource = @import("io/stream_source.zig").StreamSource; +pub const tty = @import("io/tty.zig"); + /// A Writer that doesn't write to anything. pub const null_writer = @as(NullWriter, .{ .context = {} }); diff --git a/lib/std/io/tty.zig b/lib/std/io/tty.zig new file mode 100644 index 0000000000..ea1c52db00 --- /dev/null +++ b/lib/std/io/tty.zig @@ -0,0 +1,121 @@ +const std = @import("std"); +const builtin = @import("builtin"); +const File = std.fs.File; +const process = std.process; +const windows = std.os.windows; +const native_os = builtin.os.tag; + +/// Detect suitable TTY configuration options for the given file (commonly stdout/stderr). +/// This includes feature checks for ANSI escape codes and the Windows console API, as well as +/// respecting the `NO_COLOR` environment variable. 
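+///
+/// A minimal usage sketch (illustrative only; `writer` is assumed to be a writer for the same file):
+///
+///     const config = std.io.tty.detectConfig(std.io.getStdErr());
+///     try config.setColor(writer, .red);
+///     try writer.writeAll("error: ");
+///     try config.setColor(writer, .reset);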
+pub fn detectConfig(file: File) Config { + if (builtin.os.tag == .wasi) { + // Per https://github.com/WebAssembly/WASI/issues/162 ANSI codes + // aren't currently supported. + return .no_color; + } else if (process.hasEnvVarConstant("ZIG_DEBUG_COLOR")) { + return .escape_codes; + } else if (process.hasEnvVarConstant("NO_COLOR")) { + return .no_color; + } else if (file.supportsAnsiEscapeCodes()) { + return .escape_codes; + } else if (native_os == .windows and file.isTty()) { + var info: windows.CONSOLE_SCREEN_BUFFER_INFO = undefined; + if (windows.kernel32.GetConsoleScreenBufferInfo(file.handle, &info) != windows.TRUE) { + // TODO: Should this return an error instead? + return .no_color; + } + return .{ .windows_api = .{ + .handle = file.handle, + .reset_attributes = info.wAttributes, + } }; + } + return .no_color; +} + +pub const Color = enum { + red, + green, + yellow, + cyan, + white, + dim, + bold, + reset, +}; + +/// Provides simple functionality for manipulating the terminal in some way, +/// such as coloring text, etc. +pub const Config = union(enum) { + no_color, + escape_codes, + windows_api: if (native_os == .windows) WindowsContext else void, + + pub const WindowsContext = struct { + handle: File.Handle, + reset_attributes: u16, + }; + + pub fn setColor(conf: Config, out_stream: anytype, color: Color) !void { + nosuspend switch (conf) { + .no_color => return, + .escape_codes => { + const color_string = switch (color) { + .red => "\x1b[31;1m", + .green => "\x1b[32;1m", + .yellow => "\x1b[33;1m", + .cyan => "\x1b[36;1m", + .white => "\x1b[37;1m", + .bold => "\x1b[1m", + .dim => "\x1b[2m", + .reset => "\x1b[0m", + }; + try out_stream.writeAll(color_string); + }, + .windows_api => |ctx| if (native_os == .windows) { + const attributes = switch (color) { + .red => windows.FOREGROUND_RED | windows.FOREGROUND_INTENSITY, + .green => windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY, + .yellow => windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY, + .cyan => windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY, + .white, .bold => windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY, + .dim => windows.FOREGROUND_INTENSITY, + .reset => ctx.reset_attributes, + }; + try windows.SetConsoleTextAttribute(ctx.handle, attributes); + } else { + unreachable; + }, + }; + } + + pub fn writeDEC(conf: Config, writer: anytype, codepoint: u8) !void { + const bytes = switch (conf) { + .no_color, .windows_api => switch (codepoint) { + 0x50...0x5e => @as(*const [1]u8, &codepoint), + 0x6a => "+", // ┘ + 0x6b => "+", // ┐ + 0x6c => "+", // ┌ + 0x6d => "+", // └ + 0x6e => "+", // ┼ + 0x71 => "-", // ─ + 0x74 => "+", // ├ + 0x75 => "+", // ┤ + 0x76 => "+", // ┴ + 0x77 => "+", // ┬ + 0x78 => "|", // │ + else => " ", // TODO + }, + .escape_codes => switch (codepoint) { + // Here we avoid writing the DEC beginning sequence and + // ending sequence in separate syscalls by putting the + // beginning and ending sequence into the same string + // literals, to prevent terminals ending up in bad states + // in case a crash happens between syscalls. 
+ inline 0x50...0x7f => |x| "\x1B\x28\x30" ++ [1]u8{x} ++ "\x1B\x28\x42", + else => unreachable, + }, + }; + return writer.writeAll(bytes); + } +}; diff --git a/lib/std/testing.zig b/lib/std/testing.zig index 8576ec0c83..7986c50eaf 100644 --- a/lib/std/testing.zig +++ b/lib/std/testing.zig @@ -279,7 +279,7 @@ test "expectApproxEqRel" { /// This function is intended to be used only in tests. When the two slices are not /// equal, prints diagnostics to stderr to show exactly how they are not equal (with /// the differences highlighted in red), then returns a test failure error. -/// The colorized output is optional and controlled by the return of `std.debug.detectTTYConfig()`. +/// The colorized output is optional and controlled by the return of `std.io.tty.detectConfig()`. /// If your inputs are UTF-8 encoded strings, consider calling `expectEqualStrings` instead. pub fn expectEqualSlices(comptime T: type, expected: []const T, actual: []const T) !void { if (expected.ptr == actual.ptr and expected.len == actual.len) { @@ -312,7 +312,7 @@ pub fn expectEqualSlices(comptime T: type, expected: []const T, actual: []const const actual_window = actual[window_start..@min(actual.len, window_start + max_window_size)]; const actual_truncated = window_start + actual_window.len < actual.len; - const ttyconf = std.debug.detectTTYConfig(std.io.getStdErr()); + const ttyconf = std.io.tty.detectConfig(std.io.getStdErr()); var differ = if (T == u8) BytesDiffer{ .expected = expected_window, .actual = actual_window, @@ -379,7 +379,7 @@ fn SliceDiffer(comptime T: type) type { start_index: usize, expected: []const T, actual: []const T, - ttyconf: std.debug.TTY.Config, + ttyconf: std.io.tty.Config, const Self = @This(); @@ -398,7 +398,7 @@ fn SliceDiffer(comptime T: type) type { const BytesDiffer = struct { expected: []const u8, actual: []const u8, - ttyconf: std.debug.TTY.Config, + ttyconf: std.io.tty.Config, pub fn write(self: BytesDiffer, writer: anytype) !void { var expected_iterator = ChunkIterator{ .bytes = self.expected }; diff --git a/lib/std/zig/ErrorBundle.zig b/lib/std/zig/ErrorBundle.zig index f74d82273a..46b5799807 100644 --- a/lib/std/zig/ErrorBundle.zig +++ b/lib/std/zig/ErrorBundle.zig @@ -148,7 +148,7 @@ pub fn nullTerminatedString(eb: ErrorBundle, index: usize) [:0]const u8 { } pub const RenderOptions = struct { - ttyconf: std.debug.TTY.Config, + ttyconf: std.io.tty.Config, include_reference_trace: bool = true, include_source_line: bool = true, include_log_text: bool = true, @@ -181,7 +181,7 @@ fn renderErrorMessageToWriter( err_msg_index: MessageIndex, stderr: anytype, kind: []const u8, - color: std.debug.TTY.Color, + color: std.io.tty.Color, indent: usize, ) anyerror!void { const ttyconf = options.ttyconf; diff --git a/src/main.zig b/src/main.zig index 650741e5e4..afda88cebd 100644 --- a/src/main.zig +++ b/src/main.zig @@ -6044,9 +6044,9 @@ const ClangSearchSanitizer = struct { }; }; -fn get_tty_conf(color: Color) std.debug.TTY.Config { +fn get_tty_conf(color: Color) std.io.tty.Config { return switch (color) { - .auto => std.debug.detectTTYConfig(std.io.getStdErr()), + .auto => std.io.tty.detectConfig(std.io.getStdErr()), .on => .escape_codes, .off => .no_color, }; diff --git a/test/src/Cases.zig b/test/src/Cases.zig index 63dd2fd3da..08568d0dd6 100644 --- a/test/src/Cases.zig +++ b/test/src/Cases.zig @@ -1354,7 +1354,7 @@ fn runOneCase( defer all_errors.deinit(allocator); if (all_errors.errorMessageCount() > 0) { all_errors.renderToStdErr(.{ - .ttyconf = 
std.debug.detectTTYConfig(std.io.getStdErr()), + .ttyconf = std.io.tty.detectConfig(std.io.getStdErr()), }); // TODO print generated C code return error.UnexpectedCompileErrors; -- cgit v1.2.3 From 5744ceedb8ea4b3e5906175033f634b17287f3ca Mon Sep 17 00:00:00 2001 From: Mason Remaley Date: Wed, 24 May 2023 14:26:07 -0700 Subject: Fixes `WriteFile.getFileSource` failure on Windows (#15730) --- lib/std/Build.zig | 2 +- lib/std/Build/Step/WriteFile.zig | 23 +++++++++-------------- test/src/Cases.zig | 8 +++++--- test/src/CompareOutput.zig | 10 ++++------ test/src/StackTrace.zig | 5 ++--- test/src/run_translated_c.zig | 4 ++-- test/src/translate_c.zig | 4 ++-- test/tests.zig | 2 +- 8 files changed, 26 insertions(+), 32 deletions(-) (limited to 'lib/std/Build.zig') diff --git a/lib/std/Build.zig b/lib/std/Build.zig index bb642b5e66..bf0c74bd64 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -759,7 +759,7 @@ pub fn dupePath(self: *Build, bytes: []const u8) []u8 { pub fn addWriteFile(self: *Build, file_path: []const u8, data: []const u8) *Step.WriteFile { const write_file_step = self.addWriteFiles(); - write_file_step.add(file_path, data); + _ = write_file_step.add(file_path, data); return write_file_step; } diff --git a/lib/std/Build/Step/WriteFile.zig b/lib/std/Build/Step/WriteFile.zig index 0d817e7430..0448aa8d2a 100644 --- a/lib/std/Build/Step/WriteFile.zig +++ b/lib/std/Build/Step/WriteFile.zig @@ -27,6 +27,10 @@ pub const File = struct { generated_file: std.Build.GeneratedFile, sub_path: []const u8, contents: Contents, + + pub fn getFileSource(self: *File) std.Build.FileSource { + return .{ .generated = &self.generated_file }; + } }; pub const OutputSourceFile = struct { @@ -55,7 +59,7 @@ pub fn create(owner: *std.Build) *WriteFile { return wf; } -pub fn add(wf: *WriteFile, sub_path: []const u8, bytes: []const u8) void { +pub fn add(wf: *WriteFile, sub_path: []const u8, bytes: []const u8) std.Build.FileSource { const b = wf.step.owner; const gpa = b.allocator; const file = gpa.create(File) catch @panic("OOM"); @@ -65,8 +69,8 @@ pub fn add(wf: *WriteFile, sub_path: []const u8, bytes: []const u8) void { .contents = .{ .bytes = b.dupe(bytes) }, }; wf.files.append(gpa, file) catch @panic("OOM"); - wf.maybeUpdateName(); + return file.getFileSource(); } /// Place the file into the generated directory within the local cache, @@ -76,7 +80,7 @@ pub fn add(wf: *WriteFile, sub_path: []const u8, bytes: []const u8) void { /// include sub-directories, in which case this step will ensure the /// required sub-path exists. /// This is the option expected to be used most commonly with `addCopyFile`. -pub fn addCopyFile(wf: *WriteFile, source: std.Build.FileSource, sub_path: []const u8) void { +pub fn addCopyFile(wf: *WriteFile, source: std.Build.FileSource, sub_path: []const u8) std.Build.FileSource { const b = wf.step.owner; const gpa = b.allocator; const file = gpa.create(File) catch @panic("OOM"); @@ -89,6 +93,7 @@ pub fn addCopyFile(wf: *WriteFile, source: std.Build.FileSource, sub_path: []con wf.maybeUpdateName(); source.addStepDependencies(&wf.step); + return file.getFileSource(); } /// A path relative to the package root. @@ -96,7 +101,6 @@ pub fn addCopyFile(wf: *WriteFile, source: std.Build.FileSource, sub_path: []con /// used as part of the normal build process, but as a utility occasionally /// run by a developer with intent to modify source files and then commit /// those changes to version control. -/// A file added this way is not available with `getFileSource`. 
pub fn addCopyFileToSource(wf: *WriteFile, source: std.Build.FileSource, sub_path: []const u8) void { const b = wf.step.owner; wf.output_source_files.append(b.allocator, .{ @@ -111,7 +115,6 @@ pub fn addCopyFileToSource(wf: *WriteFile, source: std.Build.FileSource, sub_pat /// used as part of the normal build process, but as a utility occasionally /// run by a developer with intent to modify source files and then commit /// those changes to version control. -/// A file added this way is not available with `getFileSource`. pub fn addBytesToSource(wf: *WriteFile, bytes: []const u8, sub_path: []const u8) void { const b = wf.step.owner; wf.output_source_files.append(b.allocator, .{ @@ -120,15 +123,7 @@ pub fn addBytesToSource(wf: *WriteFile, bytes: []const u8, sub_path: []const u8) }) catch @panic("OOM"); } -/// Gets a file source for the given sub_path. If the file does not exist, returns `null`. -pub fn getFileSource(wf: *WriteFile, sub_path: []const u8) ?std.Build.FileSource { - for (wf.files.items) |file| { - if (std.mem.eql(u8, file.sub_path, sub_path)) { - return .{ .generated = &file.generated_file }; - } - } - return null; -} +pub const getFileSource = @compileError("Deprecated; use the return value from add()/addCopyFile(), or use files[i].getFileSource()"); /// Returns a `FileSource` representing the base directory that contains all the /// files from this `WriteFile`. diff --git a/test/src/Cases.zig b/test/src/Cases.zig index 08568d0dd6..e4f3d532ce 100644 --- a/test/src/Cases.zig +++ b/test/src/Cases.zig @@ -494,10 +494,12 @@ pub fn lowerToBuildSteps( } const writefiles = b.addWriteFiles(); + var file_sources = std.StringHashMap(std.Build.FileSource).init(b.allocator); + defer file_sources.deinit(); for (update.files.items) |file| { - writefiles.add(file.path, file.src); + file_sources.put(file.path, writefiles.add(file.path, file.src)) catch @panic("OOM"); } - const root_source_file = writefiles.getFileSource(update.files.items[0].path).?; + const root_source_file = writefiles.files.items[0].getFileSource(); const artifact = if (case.is_test) b.addTest(.{ .root_source_file = root_source_file, @@ -540,7 +542,7 @@ pub fn lowerToBuildSteps( for (case.deps.items) |dep| { artifact.addAnonymousModule(dep.name, .{ - .source_file = writefiles.getFileSource(dep.path).?, + .source_file = file_sources.get(dep.path).?, }); } diff --git a/test/src/CompareOutput.zig b/test/src/CompareOutput.zig index fb89082def..d2f7a23089 100644 --- a/test/src/CompareOutput.zig +++ b/test/src/CompareOutput.zig @@ -82,7 +82,7 @@ pub fn addCase(self: *CompareOutput, case: TestCase) void { const write_src = b.addWriteFiles(); for (case.sources.items) |src_file| { - write_src.add(src_file.filename, src_file.source); + _ = write_src.add(src_file.filename, src_file.source); } switch (case.special) { @@ -99,7 +99,7 @@ pub fn addCase(self: *CompareOutput, case: TestCase) void { .target = .{}, .optimize = .Debug, }); - exe.addAssemblyFileSource(write_src.getFileSource(case.sources.items[0].filename).?); + exe.addAssemblyFileSource(write_src.files.items[0].getFileSource()); const run = b.addRunArtifact(exe); run.setName(annotated_case_name); @@ -117,10 +117,9 @@ pub fn addCase(self: *CompareOutput, case: TestCase) void { if (mem.indexOf(u8, annotated_case_name, filter) == null) continue; } - const basename = case.sources.items[0].filename; const exe = b.addExecutable(.{ .name = "test", - .root_source_file = write_src.getFileSource(basename).?, + .root_source_file = write_src.files.items[0].getFileSource(), .optimize = 
optimize, .target = .{}, }); @@ -144,10 +143,9 @@ pub fn addCase(self: *CompareOutput, case: TestCase) void { if (mem.indexOf(u8, annotated_case_name, filter) == null) return; } - const basename = case.sources.items[0].filename; const exe = b.addExecutable(.{ .name = "test", - .root_source_file = write_src.getFileSource(basename).?, + .root_source_file = write_src.files.items[0].getFileSource(), .target = .{}, .optimize = .Debug, }); diff --git a/test/src/StackTrace.zig b/test/src/StackTrace.zig index 0d0b7155e6..0552b419c0 100644 --- a/test/src/StackTrace.zig +++ b/test/src/StackTrace.zig @@ -72,11 +72,10 @@ fn addExpect( if (mem.indexOf(u8, annotated_case_name, filter) == null) return; } - const src_basename = "source.zig"; - const write_src = b.addWriteFile(src_basename, source); + const write_src = b.addWriteFile("source.zig", source); const exe = b.addExecutable(.{ .name = "test", - .root_source_file = write_src.getFileSource(src_basename).?, + .root_source_file = write_src.files.items[0].getFileSource(), .optimize = optimize_mode, .target = .{}, }); diff --git a/test/src/run_translated_c.zig b/test/src/run_translated_c.zig index 946e56f512..42db8b19c5 100644 --- a/test/src/run_translated_c.zig +++ b/test/src/run_translated_c.zig @@ -82,10 +82,10 @@ pub const RunTranslatedCContext = struct { const write_src = b.addWriteFiles(); for (case.sources.items) |src_file| { - write_src.add(src_file.filename, src_file.source); + _ = write_src.add(src_file.filename, src_file.source); } const translate_c = b.addTranslateC(.{ - .source_file = write_src.getFileSource(case.sources.items[0].filename).?, + .source_file = write_src.files.items[0].getFileSource(), .target = .{}, .optimize = .Debug, }); diff --git a/test/src/translate_c.zig b/test/src/translate_c.zig index e275ee57ee..a1f312d623 100644 --- a/test/src/translate_c.zig +++ b/test/src/translate_c.zig @@ -104,11 +104,11 @@ pub const TranslateCContext = struct { const write_src = b.addWriteFiles(); for (case.sources.items) |src_file| { - write_src.add(src_file.filename, src_file.source); + _ = write_src.add(src_file.filename, src_file.source); } const translate_c = b.addTranslateC(.{ - .source_file = write_src.getFileSource(case.sources.items[0].filename).?, + .source_file = write_src.files.items[0].getFileSource(), .target = case.target, .optimize = .Debug, }); diff --git a/test/tests.zig b/test/tests.zig index 641914aabe..5912ceb907 100644 --- a/test/tests.zig +++ b/test/tests.zig @@ -759,7 +759,7 @@ pub fn addCliTests(b: *std.Build) *Step { "-fno-emit-bin", "-fno-emit-h", "-fstrip", "-OReleaseFast", }); - run.addFileSourceArg(writefile.getFileSource("example.zig").?); + run.addFileSourceArg(writefile.files.items[0].getFileSource()); const example_s = run.addPrefixedOutputFileArg("-femit-asm=", "example.s"); const checkfile = b.addCheckFile(example_s, .{ -- cgit v1.2.3 From 6e84f469904a24615a6721265a88ad8dcb4ed83a Mon Sep 17 00:00:00 2001 From: r00ster91 Date: Tue, 21 Feb 2023 18:39:22 +0100 Subject: std: replace builtin.Version with SemanticVersion --- build.zig | 4 +- lib/std/Build.zig | 8 +- lib/std/Build/Cache.zig | 2 +- lib/std/Build/Step/Compile.zig | 4 +- lib/std/Build/Step/Options.zig | 23 ----- lib/std/Build/Step/TranslateC.zig | 2 +- lib/std/SemanticVersion.zig | 24 ++++- lib/std/builtin.zig | 133 ------------------------- lib/std/c.zig | 2 +- lib/std/crypto/tlcsprng.zig | 1 + lib/std/os.zig | 18 ++-- lib/std/os/test.zig | 2 +- lib/std/target.zig | 36 +++---- lib/std/zig.zig | 2 +- lib/std/zig/CrossTarget.zig | 65 +++++++++--- 
lib/std/zig/system/NativeTargetInfo.zig | 22 ++-- lib/std/zig/system/darwin.zig | 3 +- lib/std/zig/system/darwin/macos.zig | 47 +++++---- src/Compilation.zig | 4 +- src/codegen/spirv/spec.zig | 2 +- src/glibc.zig | 4 +- src/link.zig | 4 +- src/link/Elf.zig | 2 +- src/link/MachO/load_commands.zig | 12 +-- src/main.zig | 13 ++- src/target.zig | 14 +-- test/link/macho/dylib/build.zig | 2 +- test/link/macho/needed_library/build.zig | 2 +- test/link/macho/search_strategy/build.zig | 2 +- test/link/macho/tls/build.zig | 2 +- test/link/macho/uuid/build.zig | 2 +- test/standalone/load_dynamic_library/build.zig | 2 +- test/standalone/shared_library/build.zig | 2 +- tools/gen_spirv_spec.zig | 2 +- 34 files changed, 189 insertions(+), 280 deletions(-) (limited to 'lib/std/Build.zig') diff --git a/build.zig b/build.zig index 9cfebebc56..28bc528772 100644 --- a/build.zig +++ b/build.zig @@ -9,7 +9,7 @@ const fs = std.fs; const InstallDirectoryOptions = std.Build.InstallDirectoryOptions; const assert = std.debug.assert; -const zig_version = std.builtin.Version{ .major = 0, .minor = 11, .patch = 0 }; +const zig_version = std.SemanticVersion{ .major = 0, .minor = 11, .patch = 0 }; const stack_size = 32 * 1024 * 1024; pub fn build(b: *std.Build) !void { @@ -242,7 +242,7 @@ pub fn build(b: *std.Build) !void { const commit_height = it.next().?; const commit_id = it.next().?; - const ancestor_ver = try std.builtin.Version.parse(tagged_ancestor); + const ancestor_ver = try std.SemanticVersion.parse(tagged_ancestor); if (zig_version.order(ancestor_ver) != .gt) { std.debug.print("Zig version '{}' must be greater than tagged ancestor '{}'\n", .{ zig_version, ancestor_ver }); std.process.exit(1); diff --git a/lib/std/Build.zig b/lib/std/Build.zig index 6ea7153c0d..c569e0074a 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -472,7 +472,7 @@ pub fn addOptions(self: *Build) *Step.Options { pub const ExecutableOptions = struct { name: []const u8, root_source_file: ?FileSource = null, - version: ?std.builtin.Version = null, + version: ?std.SemanticVersion = null, target: CrossTarget = .{}, optimize: std.builtin.Mode = .Debug, linkage: ?Step.Compile.Linkage = null, @@ -530,7 +530,7 @@ pub fn addObject(b: *Build, options: ObjectOptions) *Step.Compile { pub const SharedLibraryOptions = struct { name: []const u8, root_source_file: ?FileSource = null, - version: ?std.builtin.Version = null, + version: ?std.SemanticVersion = null, target: CrossTarget, optimize: std.builtin.Mode, max_rss: usize = 0, @@ -562,7 +562,7 @@ pub const StaticLibraryOptions = struct { root_source_file: ?FileSource = null, target: CrossTarget, optimize: std.builtin.Mode, - version: ?std.builtin.Version = null, + version: ?std.SemanticVersion = null, max_rss: usize = 0, link_libc: ?bool = null, single_threaded: ?bool = null, @@ -592,7 +592,7 @@ pub const TestOptions = struct { root_source_file: FileSource, target: CrossTarget = .{}, optimize: std.builtin.Mode = .Debug, - version: ?std.builtin.Version = null, + version: ?std.SemanticVersion = null, max_rss: usize = 0, filter: ?[]const u8 = null, test_runner: ?[]const u8 = null, diff --git a/lib/std/Build/Cache.zig b/lib/std/Build/Cache.zig index a1518f7c6a..3b7f180ae8 100644 --- a/lib/std/Build/Cache.zig +++ b/lib/std/Build/Cache.zig @@ -212,7 +212,7 @@ pub const HashHelper = struct { /// Convert the input value into bytes and record it as a dependency of the process being cached. 
pub fn add(hh: *HashHelper, x: anytype) void { switch (@TypeOf(x)) { - std.builtin.Version => { + std.SemanticVersion => { hh.add(x.major); hh.add(x.minor); hh.add(x.patch); diff --git a/lib/std/Build/Step/Compile.zig b/lib/std/Build/Step/Compile.zig index 092fdf7e63..89576c15fa 100644 --- a/lib/std/Build/Step/Compile.zig +++ b/lib/std/Build/Step/Compile.zig @@ -32,7 +32,7 @@ linker_script: ?FileSource = null, version_script: ?[]const u8 = null, out_filename: []const u8, linkage: ?Linkage = null, -version: ?std.builtin.Version, +version: ?std.SemanticVersion, kind: Kind, major_only_filename: ?[]const u8, name_only_filename: ?[]const u8, @@ -278,7 +278,7 @@ pub const Options = struct { optimize: std.builtin.Mode, kind: Kind, linkage: ?Linkage = null, - version: ?std.builtin.Version = null, + version: ?std.SemanticVersion = null, max_rss: usize = 0, filter: ?[]const u8 = null, test_runner: ?[]const u8 = null, diff --git a/lib/std/Build/Step/Options.zig b/lib/std/Build/Step/Options.zig index cc7152a81e..52821a5951 100644 --- a/lib/std/Build/Step/Options.zig +++ b/lib/std/Build/Step/Options.zig @@ -76,23 +76,6 @@ fn addOptionFallible(self: *Options, comptime T: type, name: []const u8, value: } return; }, - std.builtin.Version => { - try out.print( - \\pub const {}: @import("std").builtin.Version = .{{ - \\ .major = {d}, - \\ .minor = {d}, - \\ .patch = {d}, - \\}}; - \\ - , .{ - std.zig.fmtId(name), - - value.major, - value.minor, - value.patch, - }); - return; - }, std.SemanticVersion => { try out.print( \\pub const {}: @import("std").SemanticVersion = .{{ @@ -367,7 +350,6 @@ test Options { options.addOption([2][2]u16, "nested_array", nested_array); options.addOption([]const []const u16, "nested_slice", nested_slice); //options.addOption(KeywordEnum, "keyword_enum", .@"0.8.1"); - options.addOption(std.builtin.Version, "version", try std.builtin.Version.parse("0.1.2")); options.addOption(std.SemanticVersion, "semantic_version", try std.SemanticVersion.parse("0.1.2-foo+bar")); try std.testing.expectEqualStrings( @@ -401,11 +383,6 @@ test Options { //\\ @"0.8.1", //\\}; //\\pub const keyword_enum: KeywordEnum = KeywordEnum.@"0.8.1"; - \\pub const version: @import("std").builtin.Version = .{ - \\ .major = 0, - \\ .minor = 1, - \\ .patch = 2, - \\}; \\pub const semantic_version: @import("std").SemanticVersion = .{ \\ .major = 0, \\ .minor = 1, diff --git a/lib/std/Build/Step/TranslateC.zig b/lib/std/Build/Step/TranslateC.zig index 0c7ddc4720..ced249b3f2 100644 --- a/lib/std/Build/Step/TranslateC.zig +++ b/lib/std/Build/Step/TranslateC.zig @@ -47,7 +47,7 @@ pub fn create(owner: *std.Build, options: Options) *TranslateC { pub const AddExecutableOptions = struct { name: ?[]const u8 = null, - version: ?std.builtin.Version = null, + version: ?std.SemanticVersion = null, target: ?CrossTarget = null, optimize: ?std.builtin.Mode = null, linkage: ?Step.Compile.Linkage = null, diff --git a/lib/std/SemanticVersion.zig b/lib/std/SemanticVersion.zig index 4d505b4e30..4fa1d47c40 100644 --- a/lib/std/SemanticVersion.zig +++ b/lib/std/SemanticVersion.zig @@ -1,4 +1,4 @@ -//! A software version formatted according to the Semantic Version 2 specification. +//! A software version formatted according to the Semantic Versioning 2.0.0 specification. //! //! See: https://semver.org @@ -167,7 +167,7 @@ const expect = std.testing.expect; const expectError = std.testing.expectError; test "SemanticVersion format" { - // Test vectors are from https://github.com/semver/semver.org/issues/59#issuecomment-390854010. 
+ // Many of these test strings are from https://github.com/semver/semver.org/issues/59#issuecomment-390854010. // Valid version strings should be accepted. for ([_][]const u8{ @@ -200,6 +200,8 @@ test "SemanticVersion format" { "1.2.3----R-S.12.9.1--.12+meta", "1.2.3----RC-SNAPSHOT.12.9.1--.12", "1.0.0+0.build.1-rc.10000aaa-kk-0.1", + "5.4.0-1018-raspi", + "5.7.123", }) |valid| try std.testing.expectFmt(valid, "{}", .{try parse(valid)}); // Invalid version strings should be rejected. @@ -244,6 +246,24 @@ test "SemanticVersion format" { "+justmeta", "9.8.7+meta+meta", "9.8.7-whatever+meta+meta", + "2.6.32.11-svn21605", + "2.11.2(0.329/5/3)", + "2.13-DEVELOPMENT", + "2.3-35", + "1a.4", + "3.b1.0", + "1.4beta", + "2.7.pre", + "0..3", + "8.008.", + "01...", + "55", + "foobar", + "", + "-1", + "+4", + ".", + "....3", }) |invalid| try expectError(error.InvalidVersion, parse(invalid)); // Valid version string that may overflow. diff --git a/lib/std/builtin.zig b/lib/std/builtin.zig index b449080e86..54781e4465 100644 --- a/lib/std/builtin.zig +++ b/lib/std/builtin.zig @@ -483,139 +483,6 @@ pub const WasiExecModel = enum { reactor, }; -/// This data structure is used by the Zig language code generation and -/// therefore must be kept in sync with the compiler implementation. -pub const Version = struct { - major: u32, - minor: u32, - patch: u32 = 0, - - pub const Range = struct { - min: Version, - max: Version, - - pub fn includesVersion(self: Range, ver: Version) bool { - if (self.min.order(ver) == .gt) return false; - if (self.max.order(ver) == .lt) return false; - return true; - } - - /// Checks if system is guaranteed to be at least `version` or older than `version`. - /// Returns `null` if a runtime check is required. - pub fn isAtLeast(self: Range, ver: Version) ?bool { - if (self.min.order(ver) != .lt) return true; - if (self.max.order(ver) == .lt) return false; - return null; - } - }; - - pub fn order(lhs: Version, rhs: Version) std.math.Order { - if (lhs.major < rhs.major) return .lt; - if (lhs.major > rhs.major) return .gt; - if (lhs.minor < rhs.minor) return .lt; - if (lhs.minor > rhs.minor) return .gt; - if (lhs.patch < rhs.patch) return .lt; - if (lhs.patch > rhs.patch) return .gt; - return .eq; - } - - pub fn parse(text: []const u8) !Version { - var end: usize = 0; - while (end < text.len) : (end += 1) { - const c = text[end]; - if (!std.ascii.isDigit(c) and c != '.') break; - } - // found no digits or '.' 
before unexpected character - if (end == 0) return error.InvalidVersion; - - var it = std.mem.splitScalar(u8, text[0..end], '.'); - // substring is not empty, first call will succeed - const major = it.first(); - if (major.len == 0) return error.InvalidVersion; - const minor = it.next() orelse "0"; - // ignore 'patch' if 'minor' is invalid - const patch = if (minor.len == 0) "0" else (it.next() orelse "0"); - - return Version{ - .major = try std.fmt.parseUnsigned(u32, major, 10), - .minor = try std.fmt.parseUnsigned(u32, if (minor.len == 0) "0" else minor, 10), - .patch = try std.fmt.parseUnsigned(u32, if (patch.len == 0) "0" else patch, 10), - }; - } - - pub fn format( - self: Version, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - out_stream: anytype, - ) !void { - _ = options; - if (fmt.len == 0) { - if (self.patch == 0) { - if (self.minor == 0) { - return std.fmt.format(out_stream, "{d}", .{self.major}); - } else { - return std.fmt.format(out_stream, "{d}.{d}", .{ self.major, self.minor }); - } - } else { - return std.fmt.format(out_stream, "{d}.{d}.{d}", .{ self.major, self.minor, self.patch }); - } - } else { - std.fmt.invalidFmtError(fmt, self); - } - } -}; - -test "Version.parse" { - @setEvalBranchQuota(3000); - try testVersionParse(); - comptime (try testVersionParse()); -} - -fn testVersionParse() !void { - const f = struct { - fn eql(text: []const u8, v1: u32, v2: u32, v3: u32) !void { - const v = try Version.parse(text); - try std.testing.expect(v.major == v1 and v.minor == v2 and v.patch == v3); - } - - fn err(text: []const u8, expected_err: anyerror) !void { - _ = Version.parse(text) catch |actual_err| { - if (actual_err == expected_err) return; - return actual_err; - }; - return error.Unreachable; - } - }; - - try f.eql("2.6.32.11-svn21605", 2, 6, 32); // Debian PPC - try f.eql("2.11.2(0.329/5/3)", 2, 11, 2); // MinGW - try f.eql("5.4.0-1018-raspi", 5, 4, 0); // Ubuntu - try f.eql("5.7.12_3", 5, 7, 12); // Void - try f.eql("2.13-DEVELOPMENT", 2, 13, 0); // DragonFly - try f.eql("2.3-35", 2, 3, 0); - try f.eql("1a.4", 1, 0, 0); - try f.eql("3.b1.0", 3, 0, 0); - try f.eql("1.4beta", 1, 4, 0); - try f.eql("2.7.pre", 2, 7, 0); - try f.eql("0..3", 0, 0, 0); - try f.eql("8.008.", 8, 8, 0); - try f.eql("01...", 1, 0, 0); - try f.eql("55", 55, 0, 0); - try f.eql("4294967295.0.1", 4294967295, 0, 1); - try f.eql("429496729_6", 429496729, 0, 0); - - try f.err("foobar", error.InvalidVersion); - try f.err("", error.InvalidVersion); - try f.err("-1", error.InvalidVersion); - try f.err("+4", error.InvalidVersion); - try f.err(".", error.InvalidVersion); - try f.err("....3", error.InvalidVersion); - try f.err("4294967296", error.Overflow); - try f.err("5000877755", error.Overflow); - // error.InvalidCharacter is not possible anymore -} - /// This data structure is used by the Zig language code generation and /// therefore must be kept in sync with the compiler implementation. pub const CallModifier = enum { diff --git a/lib/std/c.zig b/lib/std/c.zig index 7cc4adf815..c0ee29445a 100644 --- a/lib/std/c.zig +++ b/lib/std/c.zig @@ -20,7 +20,7 @@ pub const Tokenizer = tokenizer.Tokenizer; /// If linking gnu libc (glibc), the `ok` value will be true if the target /// version is greater than or equal to `glibc_version`. /// If linking a libc other than these, returns `false`. 
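+/// For example (illustrative), `std.c.versionCheck(.{ .major = 2, .minor = 25, .patch = 0 }).ok`
+/// is `true` when linking glibc 2.25 or newer.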
-pub fn versionCheck(comptime glibc_version: std.builtin.Version) type { +pub fn versionCheck(comptime glibc_version: std.SemanticVersion) type { return struct { pub const ok = blk: { if (!builtin.link_libc) break :blk false; diff --git a/lib/std/crypto/tlcsprng.zig b/lib/std/crypto/tlcsprng.zig index ac706e5f6a..54a30cfaba 100644 --- a/lib/std/crypto/tlcsprng.zig +++ b/lib/std/crypto/tlcsprng.zig @@ -38,6 +38,7 @@ const want_fork_safety = os_has_fork and !os_has_arc4random and const maybe_have_wipe_on_fork = builtin.os.isAtLeast(.linux, .{ .major = 4, .minor = 14, + .patch = 0, }) orelse true; const is_haiku = builtin.os.tag == .haiku; diff --git a/lib/std/os.zig b/lib/std/os.zig index 4699d0186c..802bb1d8df 100644 --- a/lib/std/os.zig +++ b/lib/std/os.zig @@ -488,7 +488,7 @@ pub fn getrandom(buffer: []u8) GetRandomError!void { if (builtin.os.tag == .linux or builtin.os.tag == .freebsd) { var buf = buffer; const use_c = builtin.os.tag != .linux or - std.c.versionCheck(std.builtin.Version{ .major = 2, .minor = 25, .patch = 0 }).ok; + std.c.versionCheck(std.SemanticVersion{ .major = 2, .minor = 25, .patch = 0 }).ok; while (buf.len != 0) { const res = if (use_c) blk: { @@ -5272,7 +5272,7 @@ pub fn getFdPath(fd: fd_t, out_buffer: *[MAX_PATH_BYTES]u8) RealPathError![]u8 { return target; }, .freebsd => { - if (comptime builtin.os.version_range.semver.max.order(.{ .major = 13, .minor = 0 }) == .gt) { + if (comptime builtin.os.version_range.semver.max.order(.{ .major = 13, .minor = 0, .patch = 0 }) == .gt) { var kfile: system.kinfo_file = undefined; kfile.structsize = system.KINFO_FILE_SIZE; switch (errno(system.fcntl(fd, system.F.KINFO, @ptrToInt(&kfile)))) { @@ -5325,7 +5325,7 @@ pub fn getFdPath(fd: fd_t, out_buffer: *[MAX_PATH_BYTES]u8) RealPathError![]u8 { } }, .dragonfly => { - if (comptime builtin.os.version_range.semver.max.order(.{ .major = 6, .minor = 0 }) == .lt) { + if (comptime builtin.os.version_range.semver.max.order(.{ .major = 6, .minor = 0, .patch = 0 }) == .lt) { @compileError("querying for canonical path of a handle is unsupported on this host"); } @memset(out_buffer[0..MAX_PATH_BYTES], 0); @@ -5339,7 +5339,7 @@ pub fn getFdPath(fd: fd_t, out_buffer: *[MAX_PATH_BYTES]u8) RealPathError![]u8 { return out_buffer[0..len]; }, .netbsd => { - if (comptime builtin.os.version_range.semver.max.order(.{ .major = 10, .minor = 0 }) == .lt) { + if (comptime builtin.os.version_range.semver.max.order(.{ .major = 10, .minor = 0, .patch = 0 }) == .lt) { @compileError("querying for canonical path of a handle is unsupported on this host"); } @memset(out_buffer[0..MAX_PATH_BYTES], 0); @@ -6152,9 +6152,9 @@ pub fn sendfile( .linux => sf: { // sendfile() first appeared in Linux 2.2, glibc 2.1. const call_sf = comptime if (builtin.link_libc) - std.c.versionCheck(.{ .major = 2, .minor = 1 }).ok + std.c.versionCheck(.{ .major = 2, .minor = 1, .patch = 0 }).ok else - builtin.os.version_range.linux.range.max.order(.{ .major = 2, .minor = 2 }) != .lt; + builtin.os.version_range.linux.range.max.order(.{ .major = 2, .minor = 2, .patch = 0 }) != .lt; if (!call_sf) break :sf; if (headers.len != 0) { @@ -6453,8 +6453,8 @@ var has_copy_file_range_syscall = std.atomic.Atomic(bool).init(true); /// /// Maximum offsets on Linux and FreeBSD are `math.maxInt(i64)`. 
pub fn copy_file_range(fd_in: fd_t, off_in: u64, fd_out: fd_t, off_out: u64, len: usize, flags: u32) CopyFileRangeError!usize { - if ((comptime builtin.os.isAtLeast(.freebsd, .{ .major = 13, .minor = 0 }) orelse false) or - ((comptime builtin.os.isAtLeast(.linux, .{ .major = 4, .minor = 5 }) orelse false and + if ((comptime builtin.os.isAtLeast(.freebsd, .{ .major = 13, .minor = 0, .patch = 0 }) orelse false) or + ((comptime builtin.os.isAtLeast(.linux, .{ .major = 4, .minor = 5, .patch = 0 }) orelse false and std.c.versionCheck(.{ .major = 2, .minor = 27, .patch = 0 }).ok) and has_copy_file_range_syscall.load(.Monotonic))) { @@ -6787,7 +6787,7 @@ pub fn memfd_createZ(name: [*:0]const u8, flags: u32) MemFdCreateError!fd_t { } }, .freebsd => { - if (comptime builtin.os.version_range.semver.max.order(.{ .major = 13, .minor = 0 }) == .lt) + if (comptime builtin.os.version_range.semver.max.order(.{ .major = 13, .minor = 0, .patch = 0 }) == .lt) @compileError("memfd_create is unavailable on FreeBSD < 13.0"); const rc = system.memfd_create(name, flags); switch (errno(rc)) { diff --git a/lib/std/os/test.zig b/lib/std/os/test.zig index e7b66c0d55..59575e0109 100644 --- a/lib/std/os/test.zig +++ b/lib/std/os/test.zig @@ -541,7 +541,7 @@ test "memfd_create" { switch (native_os) { .linux => {}, .freebsd => { - if (comptime builtin.os.version_range.semver.max.order(.{ .major = 13, .minor = 0 }) == .lt) + if (comptime builtin.os.version_range.semver.max.order(.{ .major = 13, .minor = 0, .patch = 0 }) == .lt) return error.SkipZigTest; }, else => return error.SkipZigTest, diff --git a/lib/std/target.zig b/lib/std/target.zig index 4c7bcfc37a..995edd02f7 100644 --- a/lib/std/target.zig +++ b/lib/std/target.zig @@ -1,7 +1,7 @@ const std = @import("std.zig"); const builtin = @import("builtin"); const mem = std.mem; -const Version = std.builtin.Version; +const Version = std.SemanticVersion; /// TODO Nearly all the functions in this namespace would be /// better off if https://github.com/ziglang/zig/issues/425 @@ -272,75 +272,75 @@ pub const Target = struct { .freebsd => return .{ .semver = Version.Range{ - .min = .{ .major = 12, .minor = 0 }, - .max = .{ .major = 13, .minor = 1 }, + .min = .{ .major = 12, .minor = 0, .patch = 0 }, + .max = .{ .major = 13, .minor = 1, .patch = 0 }, }, }, .macos => return switch (arch) { .aarch64 => VersionRange{ .semver = .{ .min = .{ .major = 11, .minor = 7, .patch = 1 }, - .max = .{ .major = 13, .minor = 3 }, + .max = .{ .major = 13, .minor = 3, .patch = 0 }, }, }, .x86_64 => VersionRange{ .semver = .{ .min = .{ .major = 11, .minor = 7, .patch = 1 }, - .max = .{ .major = 13, .minor = 3 }, + .max = .{ .major = 13, .minor = 3, .patch = 0 }, }, }, else => unreachable, }, .ios => return .{ .semver = .{ - .min = .{ .major = 12, .minor = 0 }, + .min = .{ .major = 12, .minor = 0, .patch = 0 }, .max = .{ .major = 13, .minor = 4, .patch = 0 }, }, }, .watchos => return .{ .semver = .{ - .min = .{ .major = 6, .minor = 0 }, + .min = .{ .major = 6, .minor = 0, .patch = 0 }, .max = .{ .major = 6, .minor = 2, .patch = 0 }, }, }, .tvos => return .{ .semver = .{ - .min = .{ .major = 13, .minor = 0 }, + .min = .{ .major = 13, .minor = 0, .patch = 0 }, .max = .{ .major = 13, .minor = 4, .patch = 0 }, }, }, .netbsd => return .{ .semver = .{ - .min = .{ .major = 8, .minor = 0 }, - .max = .{ .major = 10, .minor = 0 }, + .min = .{ .major = 8, .minor = 0, .patch = 0 }, + .max = .{ .major = 10, .minor = 0, .patch = 0 }, }, }, .openbsd => return .{ .semver = .{ - .min = .{ .major = 6, .minor = 8 }, - 
.max = .{ .major = 7, .minor = 2 }, + .min = .{ .major = 6, .minor = 8, .patch = 0 }, + .max = .{ .major = 7, .minor = 2, .patch = 0 }, }, }, .dragonfly => return .{ .semver = .{ - .min = .{ .major = 5, .minor = 8 }, - .max = .{ .major = 6, .minor = 4 }, + .min = .{ .major = 5, .minor = 8, .patch = 0 }, + .max = .{ .major = 6, .minor = 4, .patch = 0 }, }, }, .solaris => return .{ .semver = .{ - .min = .{ .major = 5, .minor = 11 }, - .max = .{ .major = 5, .minor = 11 }, + .min = .{ .major = 5, .minor = 11, .patch = 0 }, + .max = .{ .major = 5, .minor = 11, .patch = 0 }, }, }, .linux => return .{ .linux = .{ .range = .{ - .min = .{ .major = 3, .minor = 16 }, + .min = .{ .major = 3, .minor = 16, .patch = 0 }, .max = .{ .major = 5, .minor = 10, .patch = 81 }, }, - .glibc = .{ .major = 2, .minor = 19 }, + .glibc = .{ .major = 2, .minor = 19, .patch = 0 }, }, }, diff --git a/lib/std/zig.zig b/lib/std/zig.zig index 98edeabd10..fe6d2ec120 100644 --- a/lib/std/zig.zig +++ b/lib/std/zig.zig @@ -108,7 +108,7 @@ pub const BinNameOptions = struct { target: std.Target, output_mode: std.builtin.OutputMode, link_mode: ?std.builtin.LinkMode = null, - version: ?std.builtin.Version = null, + version: ?std.SemanticVersion = null, }; /// Returns the standard file system basename of a binary generated by the Zig compiler. diff --git a/lib/std/zig/CrossTarget.zig b/lib/std/zig/CrossTarget.zig index 6432c733c6..13219888b2 100644 --- a/lib/std/zig/CrossTarget.zig +++ b/lib/std/zig/CrossTarget.zig @@ -33,7 +33,7 @@ os_version_max: ?OsVersion = null, /// `null` means default when cross compiling, or native when os_tag is native. /// If `isGnuLibC()` is `false`, this must be `null` and is ignored. -glibc_version: ?SemVer = null, +glibc_version: ?SemanticVersion = null, /// `null` means the native C ABI, if `os_tag` is native, otherwise it means the default C ABI. abi: ?Target.Abi = null, @@ -61,11 +61,11 @@ pub const CpuModel = union(enum) { pub const OsVersion = union(enum) { none: void, - semver: SemVer, + semver: SemanticVersion, windows: Target.Os.WindowsVersion, }; -pub const SemVer = std.builtin.Version; +pub const SemanticVersion = std.SemanticVersion; pub const DynamicLinker = Target.DynamicLinker; @@ -266,9 +266,8 @@ pub fn parse(args: ParseOptions) !CrossTarget { const abi_ver_text = abi_it.rest(); if (abi_it.next() != null) { if (result.isGnuLibC()) { - result.glibc_version = SemVer.parse(abi_ver_text) catch |err| switch (err) { + result.glibc_version = parseVersion(abi_ver_text) catch |err| switch (err) { error.Overflow => return error.InvalidAbiVersion, - error.InvalidCharacter => return error.InvalidAbiVersion, error.InvalidVersion => return error.InvalidAbiVersion, }; } else { @@ -353,6 +352,31 @@ pub fn parseCpuArch(args: ParseOptions) ?Target.Cpu.Arch { } } +/// Parses a version with an omitted patch component, such as "1.0", +/// which SemanticVersion.parse is not capable of. 
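+/// For example (sketch): "3.16" parses as `.{ .major = 3, .minor = 16, .patch = 0 }`, while a
+/// lone "3" or a fourth component such as "1.2.3.4" is rejected with `error.InvalidVersion`.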
+fn parseVersion(ver: []const u8) !SemanticVersion { + const parseVersionComponent = struct { + fn parseVersionComponent(component: []const u8) !usize { + return std.fmt.parseUnsigned(usize, component, 10) catch |err| { + switch (err) { + error.InvalidCharacter => return error.InvalidVersion, + error.Overflow => return error.Overflow, + } + }; + } + }.parseVersionComponent; + var version_components = mem.split(u8, ver, "."); + const major = version_components.first(); + const minor = version_components.next() orelse return error.InvalidVersion; + const patch = version_components.next() orelse "0"; + if (version_components.next() != null) return error.InvalidVersion; + return .{ + .major = try parseVersionComponent(major), + .minor = try parseVersionComponent(minor), + .patch = try parseVersionComponent(patch), + }; +} + /// TODO deprecated, use `std.zig.system.NativeTargetInfo.detect`. pub fn getCpu(self: CrossTarget) Target.Cpu { switch (self.cpu_model) { @@ -534,6 +558,16 @@ pub fn isNative(self: CrossTarget) bool { return self.isNativeCpu() and self.isNativeOs() and self.isNativeAbi(); } +/// Formats a version with the patch component omitted if it is zero, +/// unlike SemanticVersion.format which formats all its version components regardless. +fn formatVersion(version: SemanticVersion, writer: anytype) !void { + if (version.patch == 0) { + try writer.print("{d}.{d}", .{ version.major, version.minor }); + } else { + try writer.print("{d}.{d}.{d}", .{ version.major, version.minor, version.patch }); + } +} + pub fn zigTriple(self: CrossTarget, allocator: mem.Allocator) error{OutOfMemory}![]u8 { if (self.isNative()) { return allocator.dupe(u8, "native"); @@ -552,20 +586,27 @@ pub fn zigTriple(self: CrossTarget, allocator: mem.Allocator) error{OutOfMemory} if (self.os_version_min != null or self.os_version_max != null) { switch (self.getOsVersionMin()) { .none => {}, - .semver => |v| try result.writer().print(".{}", .{v}), + .semver => |v| { + try result.writer().writeAll("."); + try formatVersion(v, result.writer()); + }, .windows => |v| try result.writer().print("{s}", .{v}), } } if (self.os_version_max) |max| { switch (max) { .none => {}, - .semver => |v| try result.writer().print("...{}", .{v}), + .semver => |v| { + try result.writer().writeAll("..."); + try formatVersion(v, result.writer()); + }, .windows => |v| try result.writer().print("..{s}", .{v}), } } if (self.glibc_version) |v| { - try result.writer().print("-{s}.{}", .{ @tagName(self.getAbi()), v }); + try result.writer().print("-{s}.", .{@tagName(self.getAbi())}); + try formatVersion(v, result.writer()); } else if (self.abi) |abi| { try result.writer().print("-{s}", .{@tagName(abi)}); } @@ -630,7 +671,7 @@ pub fn isGnuLibC(self: CrossTarget) bool { pub fn setGnuLibCVersion(self: *CrossTarget, major: u32, minor: u32, patch: u32) void { assert(self.isGnuLibC()); - self.glibc_version = SemVer{ .major = major, .minor = minor, .patch = patch }; + self.glibc_version = SemanticVersion{ .major = major, .minor = minor, .patch = patch }; } pub fn getObjectFormat(self: CrossTarget) Target.ObjectFormat { @@ -709,17 +750,15 @@ fn parseOs(result: *CrossTarget, diags: *ParseOptions.Diagnostics, text: []const var range_it = mem.splitSequence(u8, version_text, "..."); const min_text = range_it.next().?; - const min_ver = SemVer.parse(min_text) catch |err| switch (err) { + const min_ver = parseVersion(min_text) catch |err| switch (err) { error.Overflow => return error.InvalidOperatingSystemVersion, - error.InvalidCharacter => return 
error.InvalidOperatingSystemVersion, error.InvalidVersion => return error.InvalidOperatingSystemVersion, }; result.os_version_min = .{ .semver = min_ver }; const max_text = range_it.next() orelse return; - const max_ver = SemVer.parse(max_text) catch |err| switch (err) { + const max_ver = parseVersion(max_text) catch |err| switch (err) { error.Overflow => return error.InvalidOperatingSystemVersion, - error.InvalidCharacter => return error.InvalidOperatingSystemVersion, error.InvalidVersion => return error.InvalidOperatingSystemVersion, }; result.os_version_max = .{ .semver = max_ver }; diff --git a/lib/std/zig/system/NativeTargetInfo.zig b/lib/std/zig/system/NativeTargetInfo.zig index cddaea2295..2daac4881d 100644 --- a/lib/std/zig/system/NativeTargetInfo.zig +++ b/lib/std/zig/system/NativeTargetInfo.zig @@ -43,24 +43,22 @@ pub fn detect(cross_target: CrossTarget) DetectError!NativeTargetInfo { const release = mem.sliceTo(&uts.release, 0); // The release field sometimes has a weird format, // `Version.parse` will attempt to find some meaningful interpretation. - if (std.builtin.Version.parse(release)) |ver| { + if (std.SemanticVersion.parse(release)) |ver| { os.version_range.linux.range.min = ver; os.version_range.linux.range.max = ver; } else |err| switch (err) { error.Overflow => {}, - error.InvalidCharacter => {}, error.InvalidVersion => {}, } }, .solaris => { const uts = std.os.uname(); const release = mem.sliceTo(&uts.release, 0); - if (std.builtin.Version.parse(release)) |ver| { + if (std.SemanticVersion.parse(release)) |ver| { os.version_range.semver.min = ver; os.version_range.semver.max = ver; } else |err| switch (err) { error.Overflow => {}, - error.InvalidCharacter => {}, error.InvalidVersion => {}, } }, @@ -144,7 +142,7 @@ pub fn detect(cross_target: CrossTarget) DetectError!NativeTargetInfo { error.Unexpected => return error.OSVersionDetectionFail, }; - if (std.builtin.Version.parse(buf[0 .. len - 1])) |ver| { + if (std.SemanticVersion.parse(buf[0 .. 
len - 1])) |ver| { os.version_range.semver.min = ver; os.version_range.semver.max = ver; } else |_| { @@ -390,7 +388,7 @@ fn detectAbiAndDynamicLinker( }; } -fn glibcVerFromRPath(rpath: []const u8) !std.builtin.Version { +fn glibcVerFromRPath(rpath: []const u8) !std.SemanticVersion { var dir = fs.cwd().openDir(rpath, .{}) catch |err| switch (err) { error.NameTooLong => unreachable, error.InvalidUtf8 => unreachable, @@ -471,7 +469,7 @@ fn glibcVerFromRPath(rpath: []const u8) !std.builtin.Version { }; } -fn glibcVerFromSoFile(file: fs.File) !std.builtin.Version { +fn glibcVerFromSoFile(file: fs.File) !std.SemanticVersion { var hdr_buf: [@sizeOf(elf.Elf64_Ehdr)]u8 align(@alignOf(elf.Elf64_Ehdr)) = undefined; _ = try preadMin(file, &hdr_buf, 0, hdr_buf.len); const hdr32 = @ptrCast(*elf.Elf32_Ehdr, &hdr_buf); @@ -557,13 +555,12 @@ fn glibcVerFromSoFile(file: fs.File) !std.builtin.Version { const dynstr_bytes = buf[0..dynstr_size]; _ = try preadMin(file, dynstr_bytes, dynstr.offset, dynstr_bytes.len); var it = mem.splitScalar(u8, dynstr_bytes, 0); - var max_ver: std.builtin.Version = .{ .major = 2, .minor = 2, .patch = 5 }; + var max_ver: std.SemanticVersion = .{ .major = 2, .minor = 2, .patch = 5 }; while (it.next()) |s| { if (mem.startsWith(u8, s, "GLIBC_2.")) { const chopped = s["GLIBC_".len..]; - const ver = std.builtin.Version.parse(chopped) catch |err| switch (err) { + const ver = std.SemanticVersion.parse(chopped) catch |err| switch (err) { error.Overflow => return error.InvalidGnuLibCVersion, - error.InvalidCharacter => return error.InvalidGnuLibCVersion, error.InvalidVersion => return error.InvalidGnuLibCVersion, }; switch (ver.order(max_ver)) { @@ -575,7 +572,7 @@ fn glibcVerFromSoFile(file: fs.File) !std.builtin.Version { return max_ver; } -fn glibcVerFromLinkName(link_name: []const u8, prefix: []const u8) !std.builtin.Version { +fn glibcVerFromLinkName(link_name: []const u8, prefix: []const u8) !std.SemanticVersion { // example: "libc-2.3.4.so" // example: "libc-2.27.so" // example: "ld-2.33.so" @@ -585,9 +582,8 @@ fn glibcVerFromLinkName(link_name: []const u8, prefix: []const u8) !std.builtin. } // chop off "libc-" and ".so" const link_name_chopped = link_name[prefix.len .. 
link_name.len - suffix.len];
-    return std.builtin.Version.parse(link_name_chopped) catch |err| switch (err) {
+    return std.SemanticVersion.parse(link_name_chopped) catch |err| switch (err) {
         error.Overflow => return error.InvalidGnuLibCVersion,
-        error.InvalidCharacter => return error.InvalidGnuLibCVersion,
         error.InvalidVersion => return error.InvalidGnuLibCVersion,
     };
 }
diff --git a/lib/std/zig/system/darwin.zig b/lib/std/zig/system/darwin.zig
index fbddaa799a..05762ffc54 100644
--- a/lib/std/zig/system/darwin.zig
+++ b/lib/std/zig/system/darwin.zig
@@ -2,7 +2,7 @@ const std = @import("std");
 const mem = std.mem;
 const Allocator = mem.Allocator;
 const Target = std.Target;
-const Version = std.builtin.Version;
+const Version = std.SemanticVersion;
 
 pub const macos = @import("darwin/macos.zig");
 
@@ -69,6 +69,7 @@ pub fn getDarwinSDK(allocator: Allocator, target: Target) ?DarwinSDK {
         const version = Version.parse(raw_version) catch Version{
             .major = 0,
             .minor = 0,
+            .patch = 0,
         };
         break :version version;
     };
diff --git a/lib/std/zig/system/darwin/macos.zig b/lib/std/zig/system/darwin/macos.zig
index eef2f77a62..6717b1319f 100644
--- a/lib/std/zig/system/darwin/macos.zig
+++ b/lib/std/zig/system/darwin/macos.zig
@@ -74,20 +74,39 @@ pub fn detect(target_os: *Target.Os) !void {
     return error.OSVersionDetectionFail;
 }
 
-fn parseSystemVersion(buf: []const u8) !std.builtin.Version {
+fn parseSystemVersion(buf: []const u8) !std.SemanticVersion {
     var svt = SystemVersionTokenizer{ .bytes = buf };
     try svt.skipUntilTag(.start, "dict");
     while (true) {
         try svt.skipUntilTag(.start, "key");
         const content = try svt.expectContent();
         try svt.skipUntilTag(.end, "key");
-        if (std.mem.eql(u8, content, "ProductVersion")) break;
+        if (mem.eql(u8, content, "ProductVersion")) break;
     }
     try svt.skipUntilTag(.start, "string");
     const ver = try svt.expectContent();
     try svt.skipUntilTag(.end, "string");
 
-    return std.builtin.Version.parse(ver);
+    const parseVersionComponent = struct {
+        fn parseVersionComponent(component: []const u8) !usize {
+            return std.fmt.parseUnsigned(usize, component, 10) catch |err| {
+                switch (err) {
+                    error.InvalidCharacter => return error.InvalidVersion,
+                    error.Overflow => return error.Overflow,
+                }
+            };
+        }
+    }.parseVersionComponent;
+    var version_components = mem.split(u8, ver, ".");
+    const major = version_components.first();
+    const minor = version_components.next() orelse return error.InvalidVersion;
+    const patch = version_components.next() orelse "0";
+    if (version_components.next() != null) return error.InvalidVersion;
+    return .{
+        .major = try parseVersionComponent(major),
+        .minor = try parseVersionComponent(minor),
+        .patch = try parseVersionComponent(patch),
+    };
 }
 
 const SystemVersionTokenizer = struct {
@@ -246,7 +265,7 @@ const SystemVersionTokenizer = struct {
         while (try self.next()) |tok| {
             switch (tok) {
                 .tag => |tag| {
-                    if (tag.kind == kind and std.mem.eql(u8, tag.name, name)) return;
+                    if (tag.kind == kind and mem.eql(u8, tag.name, name)) return;
                 },
                 else => {},
             }
@@ -297,7 +316,7 @@ test "detect" {
             \\
             \\
             ,
-            .{ .major = 10, .minor = 3 },
+            .{ .major = 10, .minor = 3, .patch = 0 },
         },
         .{
             \\
@@ -361,7 +380,7 @@ test "detect" {
             \\
             \\
             ,
-            .{ .major = 11, .minor = 0 },
+            .{ .major = 11, .minor = 0, .patch = 0 },
         },
         .{
             \\
@@ -383,27 +402,17 @@ test "detect" {
             \\
             \\
             ,
-            .{ .major = 11, .minor = 1 },
+            .{ .major = 11, .minor = 1, .patch = 0 },
         },
     };
 
     inline for (cases) |case| {
         const ver0 = try parseSystemVersion(case[0]);
-        const ver1: std.builtin.Version = case[1];
-        try testVersionEquality(ver1, ver0);
+        const ver1: std.SemanticVersion = case[1];
+        try testing.expectEqual(@as(std.math.Order, .eq), ver0.order(ver1));
     }
 }
 
-fn testVersionEquality(expected: std.builtin.Version, got: std.builtin.Version) !void {
-    var b_expected: [64]u8 = undefined;
-    const s_expected: []const u8 = try std.fmt.bufPrint(b_expected[0..], "{}", .{expected});
-
-    var b_got: [64]u8 = undefined;
-    const s_got: []const u8 = try std.fmt.bufPrint(b_got[0..], "{}", .{got});
-
-    try testing.expectEqualStrings(s_expected, s_got);
-}
-
 pub fn detectNativeCpuAndFeatures() ?Target.Cpu {
     var cpu_family: std.c.CPUFAMILY = undefined;
     var len: usize = @sizeOf(std.c.CPUFAMILY);
diff --git a/src/Compilation.zig b/src/Compilation.zig
index 662dcdc408..739b747e32 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -615,8 +615,8 @@ pub const InitOptions = struct {
     stack_size_override: ?u64 = null,
     image_base_override: ?u64 = null,
     self_exe_path: ?[]const u8 = null,
-    version: ?std.builtin.Version = null,
-    compatibility_version: ?std.builtin.Version = null,
+    version: ?std.SemanticVersion = null,
+    compatibility_version: ?std.SemanticVersion = null,
     libc_installation: ?*const LibCInstallation = null,
     machine_code_model: std.builtin.CodeModel = .default,
     clang_preprocessor_mode: ClangPreprocessorMode = .no,
diff --git a/src/codegen/spirv/spec.zig b/src/codegen/spirv/spec.zig
index 60d16461cb..f73487f41f 100644
--- a/src/codegen/spirv/spec.zig
+++ b/src/codegen/spirv/spec.zig
@@ -1,6 +1,6 @@
 //! This file is auto-generated by tools/gen_spirv_spec.zig.
 
-const Version = @import("std").builtin.Version;
+const Version = @import("std").SemanticVersion;
 
 pub const Word = u32;
 pub const IdResult = struct {
diff --git a/src/glibc.zig b/src/glibc.zig
index 4ab00eeed9..bb38c2c987 100644
--- a/src/glibc.zig
+++ b/src/glibc.zig
@@ -5,7 +5,7 @@ const log = std.log;
 const fs = std.fs;
 const path = fs.path;
 const assert = std.debug.assert;
-const Version = std.builtin.Version;
+const Version = std.SemanticVersion;
 
 const target_util = @import("target.zig");
 const Compilation = @import("Compilation.zig");
@@ -172,7 +172,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile, prog_node: *std.Progr
 
     const target = comp.getTarget();
     const target_ver = target.os.version_range.linux.glibc;
-    const start_old_init_fini = target_ver.order(.{ .major = 2, .minor = 33 }) != .gt;
+    const start_old_init_fini = target_ver.order(.{ .major = 2, .minor = 33, .patch = 0 }) != .gt;
 
     // In all cases in this function, we add the C compiler flags to
     // cache_exempt_flags rather than extra_flags, because these arguments
diff --git a/src/link.zig b/src/link.zig
index c184f7ed7c..9458bd6c0a 100644
--- a/src/link.zig
+++ b/src/link.zig
@@ -197,8 +197,8 @@ pub const Options = struct {
     /// __real_symbol.
     symbol_wrap_set: std.StringArrayHashMapUnmanaged(void),
 
-    version: ?std.builtin.Version,
-    compatibility_version: ?std.builtin.Version,
+    version: ?std.SemanticVersion,
+    compatibility_version: ?std.SemanticVersion,
     libc_installation: ?*const LibCInstallation,
 
     dwarf_format: ?std.dwarf.Format,
diff --git a/src/link/Elf.zig b/src/link/Elf.zig
index e0d0dfc75f..b2e7f57211 100644
--- a/src/link/Elf.zig
+++ b/src/link/Elf.zig
@@ -3414,7 +3414,7 @@ const CsuObjects = struct {
             if (result.crtn) |*obj| obj.* = try fs.path.join(arena, &[_][]const u8{ crt_dir_path, obj.* });
 
             var gccv: []const u8 = undefined;
-            if (link_options.target.os.version_range.semver.isAtLeast(.{ .major = 5, .minor = 4 }) orelse true) {
+            if (link_options.target.os.version_range.semver.isAtLeast(.{ .major = 5, .minor = 4, .patch = 0 }) orelse true) {
                 gccv = "gcc80";
             } else {
                 gccv = "gcc54";
diff --git a/src/link/MachO/load_commands.zig b/src/link/MachO/load_commands.zig
index 5111f53f2a..eb582e2222 100644
--- a/src/link/MachO/load_commands.zig
+++ b/src/link/MachO/load_commands.zig
@@ -204,12 +204,12 @@ pub fn writeDylibIdLC(gpa: Allocator, options: *const link.Options, lc_writer: a
     const emit = options.emit.?;
     const install_name = options.install_name orelse try emit.directory.join(gpa, &.{emit.sub_path});
     defer if (options.install_name == null) gpa.free(install_name);
-    const curr = options.version orelse std.builtin.Version{
+    const curr = options.version orelse std.SemanticVersion{
         .major = 1,
         .minor = 0,
         .patch = 0,
     };
-    const compat = options.compatibility_version orelse std.builtin.Version{
+    const compat = options.compatibility_version orelse std.SemanticVersion{
         .major = 1,
         .minor = 0,
         .patch = 0,
@@ -217,8 +217,8 @@ pub fn writeDylibIdLC(gpa: Allocator, options: *const link.Options, lc_writer: a
     try writeDylibLC(.{
         .cmd = .ID_DYLIB,
         .name = install_name,
-        .current_version = curr.major << 16 | curr.minor << 8 | curr.patch,
-        .compatibility_version = compat.major << 16 | compat.minor << 8 | compat.patch,
+        .current_version = @intCast(u32, curr.major << 16 | curr.minor << 8 | curr.patch),
+        .compatibility_version = @intCast(u32, compat.major << 16 | compat.minor << 8 | compat.patch),
     }, lc_writer);
 }
 
@@ -275,12 +275,12 @@ pub fn writeBuildVersionLC(options: *const link.Options, lc_writer: anytype) !vo
     const cmdsize = @sizeOf(macho.build_version_command) + @sizeOf(macho.build_tool_version);
     const platform_version = blk: {
         const ver = options.target.os.version_range.semver.min;
-        const platform_version = ver.major << 16 | ver.minor << 8;
+        const platform_version = @intCast(u32, ver.major << 16 | ver.minor << 8);
         break :blk platform_version;
     };
     const sdk_version = if (options.native_darwin_sdk) |sdk| blk: {
         const ver = sdk.version;
-        const sdk_version = ver.major << 16 | ver.minor << 8;
+        const sdk_version = @intCast(u32, ver.major << 16 | ver.minor << 8);
         break :blk sdk_version;
     } else platform_version;
     const is_simulator_abi = options.target.abi == .simulator;
diff --git a/src/main.zig b/src/main.zig
index b245b357ca..2f56cad133 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -724,9 +724,9 @@ fn buildOutputType(
     var dll_export_fns: ?bool = null;
    var single_threaded: ?bool = null;
     var root_src_file: ?[]const u8 = null;
-    var version: std.builtin.Version = .{ .major = 0, .minor = 0, .patch = 0 };
+    var version: std.SemanticVersion = .{ .major = 0, .minor = 0, .patch = 0 };
     var have_version = false;
-    var compatibility_version: ?std.builtin.Version = null;
+    var compatibility_version: ?std.SemanticVersion = null;
     var strip: ?bool = null;
     var formatted_panics: ?bool = null;
     var function_sections = false;
@@ -1121,7 +1121,7 @@ fn buildOutputType(
                 try cssan.addIncludePath(.iframework, arg, args_iter.nextOrFatal(), false);
             } else if (mem.eql(u8, arg, "--version")) {
                 const next_arg = args_iter.nextOrFatal();
-                version = std.builtin.Version.parse(next_arg) catch |err| {
+                version = std.SemanticVersion.parse(next_arg) catch |err| {
                     fatal("unable to parse --version '{s}': {s}", .{ next_arg, @errorName(err) });
                 };
                 have_version = true;
@@ -2152,12 +2152,12 @@ fn buildOutputType(
                 try system_libs.put(linker_args_it.nextOrFatal(), .{ .weak = true });
             } else if (mem.eql(u8, arg, "-compatibility_version")) {
                 const compat_version = linker_args_it.nextOrFatal();
-                compatibility_version = std.builtin.Version.parse(compat_version) catch |err| {
+                compatibility_version = std.SemanticVersion.parse(compat_version) catch |err| {
                     fatal("unable to parse -compatibility_version '{s}': {s}", .{ compat_version, @errorName(err) });
                 };
             } else if (mem.eql(u8, arg, "-current_version")) {
                 const curr_version = linker_args_it.nextOrFatal();
-                version = std.builtin.Version.parse(curr_version) catch |err| {
+                version = std.SemanticVersion.parse(curr_version) catch |err| {
                     fatal("unable to parse -current_version '{s}': {s}", .{ curr_version, @errorName(err) });
                 };
                 have_version = true;
@@ -2207,10 +2207,9 @@ fn buildOutputType(
             } else if (mem.startsWith(u8, arg, "/version:")) {
                 var split_it = mem.splitBackwardsScalar(u8, arg, ':');
                 const version_arg = split_it.first();
-                version = std.builtin.Version.parse(version_arg) catch |err| {
+                version = std.SemanticVersion.parse(version_arg) catch |err| {
                     fatal("unable to parse /version '{s}': {s}", .{ arg, @errorName(err) });
                 };
-
                 have_version = true;
             } else {
                 fatal("unsupported linker arg: {s}", .{arg});
diff --git a/src/target.zig b/src/target.zig
index ac78d27c1a..2d27869cf6 100644
--- a/src/target.zig
+++ b/src/target.zig
@@ -6,7 +6,7 @@ pub const ArchOsAbi = struct {
     arch: std.Target.Cpu.Arch,
     os: std.Target.Os.Tag,
     abi: std.Target.Abi,
-    os_ver: ?std.builtin.Version = null,
+    os_ver: ?std.SemanticVersion = null,
 };
 
 pub const available_libcs = [_]ArchOsAbi{
@@ -16,9 +16,9 @@ pub const available_libcs = [_]ArchOsAbi{
     .{ .arch = .aarch64, .os = .linux, .abi = .gnu },
     .{ .arch = .aarch64, .os = .linux, .abi = .musl },
     .{ .arch = .aarch64, .os = .windows, .abi = .gnu },
-    .{ .arch = .aarch64, .os = .macos, .abi = .none, .os_ver = .{ .major = 11, .minor = 0 } },
-    .{ .arch = .aarch64, .os = .macos, .abi = .none, .os_ver = .{ .major = 12, .minor = 0 } },
-    .{ .arch = .aarch64, .os = .macos, .abi = .none, .os_ver = .{ .major = 13, .minor = 0 } },
+    .{ .arch = .aarch64, .os = .macos, .abi = .none, .os_ver = .{ .major = 11, .minor = 0, .patch = 0 } },
+    .{ .arch = .aarch64, .os = .macos, .abi = .none, .os_ver = .{ .major = 12, .minor = 0, .patch = 0 } },
+    .{ .arch = .aarch64, .os = .macos, .abi = .none, .os_ver = .{ .major = 13, .minor = 0, .patch = 0 } },
     .{ .arch = .armeb, .os = .linux, .abi = .gnueabi },
     .{ .arch = .armeb, .os = .linux, .abi = .gnueabihf },
     .{ .arch = .armeb, .os = .linux, .abi = .musleabi },
@@ -71,9 +71,9 @@ pub const available_libcs = [_]ArchOsAbi{
     .{ .arch = .x86_64, .os = .linux, .abi = .gnux32 },
     .{ .arch = .x86_64, .os = .linux, .abi = .musl },
     .{ .arch = .x86_64, .os = .windows, .abi = .gnu },
-    .{ .arch = .x86_64, .os = .macos, .abi = .none, .os_ver = .{ .major = 11, .minor = 0 } },
-    .{ .arch = .x86_64, .os = .macos, .abi = .none, .os_ver = .{ .major = 12, .minor = 0 } },
-    .{ .arch = .x86_64, .os = .macos, .abi = .none, .os_ver = .{ .major = 13, .minor = 0 } },
+    .{ .arch = .x86_64, .os = .macos, .abi = .none, .os_ver = .{ .major = 11, .minor = 0, .patch = 0 } },
+    .{ .arch = .x86_64, .os = .macos, .abi = .none, .os_ver = .{ .major = 12, .minor = 0, .patch = 0 } },
+    .{ .arch = .x86_64, .os = .macos, .abi = .none, .os_ver = .{ .major = 13, .minor = 0, .patch = 0 } },
 };
 
 pub fn libCGenericName(target: std.Target) [:0]const u8 {
diff --git a/test/link/macho/dylib/build.zig b/test/link/macho/dylib/build.zig
index fe294f3333..5fbf73dd1b 100644
--- a/test/link/macho/dylib/build.zig
+++ b/test/link/macho/dylib/build.zig
@@ -17,7 +17,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
 
     const dylib = b.addSharedLibrary(.{
         .name = "a",
-        .version = .{ .major = 1, .minor = 0 },
+        .version = .{ .major = 1, .minor = 0, .patch = 0 },
         .optimize = optimize,
         .target = target,
     });
diff --git a/test/link/macho/needed_library/build.zig b/test/link/macho/needed_library/build.zig
index 7b56572cc3..d39166c18b 100644
--- a/test/link/macho/needed_library/build.zig
+++ b/test/link/macho/needed_library/build.zig
@@ -17,7 +17,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
 
     const dylib = b.addSharedLibrary(.{
         .name = "a",
-        .version = .{ .major = 1, .minor = 0 },
+        .version = .{ .major = 1, .minor = 0, .patch = 0 },
         .optimize = optimize,
         .target = target,
     });
diff --git a/test/link/macho/search_strategy/build.zig b/test/link/macho/search_strategy/build.zig
index 4b52d9aa0a..336ca593b1 100644
--- a/test/link/macho/search_strategy/build.zig
+++ b/test/link/macho/search_strategy/build.zig
@@ -61,7 +61,7 @@ fn createScenario(
 
     const dylib = b.addSharedLibrary(.{
         .name = name,
-        .version = .{ .major = 1, .minor = 0 },
+        .version = .{ .major = 1, .minor = 0, .patch = 0 },
         .optimize = optimize,
         .target = target,
     });
diff --git a/test/link/macho/tls/build.zig b/test/link/macho/tls/build.zig
index f155f514f8..555fe207c5 100644
--- a/test/link/macho/tls/build.zig
+++ b/test/link/macho/tls/build.zig
@@ -17,7 +17,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
 
     const lib = b.addSharedLibrary(.{
         .name = "a",
-        .version = .{ .major = 1, .minor = 0 },
+        .version = .{ .major = 1, .minor = 0, .patch = 0 },
         .optimize = optimize,
         .target = target,
     });
diff --git a/test/link/macho/uuid/build.zig b/test/link/macho/uuid/build.zig
index 0072825f46..f2ef6b33ec 100644
--- a/test/link/macho/uuid/build.zig
+++ b/test/link/macho/uuid/build.zig
@@ -62,7 +62,7 @@ fn simpleDylib(
 ) *std.Build.Step.Compile {
     const dylib = b.addSharedLibrary(.{
         .name = "test",
-        .version = .{ .major = 1, .minor = 0 },
+        .version = .{ .major = 1, .minor = 0, .patch = 0 },
         .optimize = optimize,
         .target = target,
     });
diff --git a/test/standalone/load_dynamic_library/build.zig b/test/standalone/load_dynamic_library/build.zig
index 6dec8de7ae..a711704e46 100644
--- a/test/standalone/load_dynamic_library/build.zig
+++ b/test/standalone/load_dynamic_library/build.zig
@@ -13,7 +13,7 @@ pub fn build(b: *std.Build) void {
     const lib = b.addSharedLibrary(.{
         .name = "add",
         .root_source_file = .{ .path = "add.zig" },
-        .version = .{ .major = 1, .minor = 0 },
+        .version = .{ .major = 1, .minor = 0, .patch = 0 },
         .optimize = optimize,
         .target = target,
     });
diff --git a/test/standalone/shared_library/build.zig b/test/standalone/shared_library/build.zig
index 377bf81862..3034d89017 100644
--- a/test/standalone/shared_library/build.zig
+++ b/test/standalone/shared_library/build.zig
@@ -9,7 +9,7 @@ pub fn build(b: *std.Build) void {
     const lib = b.addSharedLibrary(.{
         .name = "mathtest",
         .root_source_file = .{ .path = "mathtest.zig" },
-        .version = .{ .major = 1, .minor = 0 },
+        .version = .{ .major = 1, .minor = 0, .patch = 0 },
         .target = target,
         .optimize = optimize,
     });
diff --git a/tools/gen_spirv_spec.zig b/tools/gen_spirv_spec.zig
index 28a5ed7ded..b48e3834a2 100644
--- a/tools/gen_spirv_spec.zig
+++ b/tools/gen_spirv_spec.zig
@@ -76,7 +76,7 @@ fn render(writer: anytype, allocator: Allocator, registry: g.CoreRegistry) !void
     try writer.writeAll(
         \\//! This file is auto-generated by tools/gen_spirv_spec.zig.
         \\
-        \\const Version = @import("std").builtin.Version;
+        \\const Version = @import("std").SemanticVersion;
         \\
         \\pub const Word = u32;
         \\pub const IdResult = struct{
-- 
cgit v1.2.3