eliminate stderr usage in std.Build make() functions
* Eliminate all uses of `std.debug.print` in make() functions, instead
  properly using the step failure reporting mechanism.
* Introduce the concept of skipped build steps. These do not cause the
  build to fail, and they still allow their dependants to run.
* RunStep gains a new flag, `skip_foreign_checks`, which causes the
  RunStep to be skipped if stdio mode is `check` and the binary cannot
  be executed because it is a foreign executable (see the usage sketch
  after this list).
  - RunStep is improved to automatically use known interpreters to
    execute binaries when possible (integrating with flags such as
    -fqemu and -fwasmtime). It only does this after attempting a native
    execution and receiving an "exec file format" error.
  - Update RunStep to use an ArrayList for the checks rather than the
    previous ad-hoc reallocation/copying mechanism.
  - `expectStdOutEqual` now also implicitly adds an exit_code==0 check
    if there is not already an expected termination. This matches the
    previously expected behavior of the older API and can be overridden
    by directly setting the checks array.
* Add `dest_sub_path` to `InstallArtifactStep`, which allows choosing an
  arbitrary subdirectory relative to the prefix, as well as overriding
  the basename.
  - Delete the custom InstallWithRename step that I found deep in the
    test/ directory.
* WriteFileStep will now update its step display name after the first
  file is added.
* Add missing stdout checks to various standalone test case build
  scripts.
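
For example, a standalone test's build.zig can now express "run this
artifact, and skip the check when it would have to run on a foreign
target" like this (a minimal sketch based on the test scripts touched by
this commit; `exe` stands for any previously created executable
CompileStep and the expected output is a placeholder):

    const run = b.addRunArtifact(exe);
    // Skip (rather than fail) the step when the host cannot execute the
    // foreign binary and no interpreter such as qemu/wasmtime/wine is enabled.
    run.skip_foreign_checks = true;
    // Adds an exact stdout check and, implicitly, an exit_code == 0 check.
    run.expectStdOutEqual("Hello!\n");
    test_step.dependOn(&run.step);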
parent 9bf63b0996
commit dcec4d55e3
20 changed files with 554 additions and 353 deletions
@@ -385,7 +385,7 @@ pub fn build(b: *std.Build) !void {
    const optimization_modes = chosen_opt_modes_buf[0..chosen_mode_index];

    const fmt_include_paths = &.{ "doc", "lib", "src", "test", "tools", "build.zig" };
    const fmt_exclude_paths = &.{ "test/cases" };
    const fmt_exclude_paths = &.{"test/cases"};
    const check_fmt = b.addFmt(.{
        .paths = fmt_include_paths,
        .exclude_paths = fmt_exclude_paths,

@@ -402,7 +402,6 @@ pub fn build(b: *std.Build) !void {
    const do_fmt_step = b.step("fmt", "Modify source files in place to have conforming formatting");
    do_fmt_step.dependOn(&do_fmt.step);

    test_step.dependOn(tests.addPkgTests(
        b,
        test_filter,
@@ -357,6 +357,7 @@ fn runStepNames(
    }

    var success_count: usize = 0;
    var skipped_count: usize = 0;
    var failure_count: usize = 0;
    var pending_count: usize = 0;
    var total_compile_errors: usize = 0;

@@ -379,6 +380,7 @@ fn runStepNames(
            },
            .dependency_failure => pending_count += 1,
            .success => success_count += 1,
            .skipped => skipped_count += 1,
            .failure => {
                failure_count += 1;
                const compile_errors_len = s.result_error_bundle.errorMessageCount();

@@ -395,13 +397,13 @@ fn runStepNames(
    if (failure_count == 0 and enable_summary != true) return cleanExit();

    if (enable_summary != false) {
        const total_count = success_count + failure_count + pending_count;
        const total_count = success_count + failure_count + pending_count + skipped_count;
        ttyconf.setColor(stderr, .Cyan) catch {};
        stderr.writeAll("Build Summary:") catch {};
        ttyconf.setColor(stderr, .Reset) catch {};
        stderr.writer().print(" {d}/{d} steps succeeded; {d} failed", .{
            success_count, total_count, failure_count,
        }) catch {};
        stderr.writer().print(" {d}/{d} steps succeeded", .{ success_count, total_count }) catch {};
        if (skipped_count > 0) stderr.writer().print("; {d} skipped", .{skipped_count}) catch {};
        if (failure_count > 0) stderr.writer().print("; {d} failed", .{failure_count}) catch {};

        if (enable_summary == null) {
            ttyconf.setColor(stderr, .Dim) catch {};
@ -503,6 +505,12 @@ fn printTreeStep(
|
|||
try ttyconf.setColor(stderr, .Reset);
|
||||
},
|
||||
|
||||
.skipped => {
|
||||
try ttyconf.setColor(stderr, .Yellow);
|
||||
try stderr.writeAll(" skipped\n");
|
||||
try ttyconf.setColor(stderr, .Reset);
|
||||
},
|
||||
|
||||
.failure => {
|
||||
try ttyconf.setColor(stderr, .Red);
|
||||
if (s.result_error_bundle.errorMessageCount() > 0) {
|
||||
|
|
@@ -569,6 +577,7 @@ fn checkForDependencyLoop(
        .running => unreachable,
        .success => unreachable,
        .failure => unreachable,
        .skipped => unreachable,
    }
}
@@ -587,7 +596,7 @@ fn workerMakeOneStep(
    // queue this step up again when dependencies are met.
    for (s.dependencies.items) |dep| {
        switch (@atomicLoad(Step.State, &dep.state, .SeqCst)) {
            .success => continue,
            .success, .skipped => continue,
            .failure, .dependency_failure => {
                @atomicStore(Step.State, &s.state, .dependency_failure, .SeqCst);
                return;

@@ -639,13 +648,15 @@ fn workerMakeOneStep(
        }
    }

    make_result catch |err| {
        assert(err == error.MakeFailed);
        @atomicStore(Step.State, &s.state, .failure, .SeqCst);
        return;
    };

    @atomicStore(Step.State, &s.state, .success, .SeqCst);
    if (make_result) |_| {
        @atomicStore(Step.State, &s.state, .success, .SeqCst);
    } else |err| switch (err) {
        error.MakeFailed => {
            @atomicStore(Step.State, &s.state, .failure, .SeqCst);
            return;
        },
        error.MakeSkipped => @atomicStore(Step.State, &s.state, .skipped, .SeqCst),
    }

    // Successful completion of a step, so we queue up its dependants as well.
    for (s.dependants.items) |dep| {
@@ -42,15 +42,14 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {

    for (self.expected_matches) |expected_match| {
        if (mem.indexOf(u8, contents, expected_match) == null) {
            std.debug.print(
            return step.fail(
                \\
                \\========= Expected to find: ===================
                \\========= expected to find: ===================
                \\{s}
                \\========= But file does not contain it: =======
                \\========= but file does not contain it: =======
                \\{s}
                \\
            , .{ expected_match, contents });
            return error.TestFailed;
        }
    }
}
@ -133,7 +133,8 @@ const Action = struct {
|
|||
/// Will return true if the `phrase` is correctly parsed into an RPN program and
|
||||
/// its reduced, computed value compares using `op` with the expected value, either
|
||||
/// a literal or another extracted variable.
|
||||
fn computeCmp(act: Action, gpa: Allocator, global_vars: anytype) !bool {
|
||||
fn computeCmp(act: Action, step: *Step, global_vars: anytype) !bool {
|
||||
const gpa = step.owner.allocator;
|
||||
var op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa);
|
||||
var values = std.ArrayList(u64).init(gpa);
|
||||
|
||||
|
|
@ -150,11 +151,11 @@ const Action = struct {
|
|||
} else {
|
||||
const val = std.fmt.parseInt(u64, next, 0) catch blk: {
|
||||
break :blk global_vars.get(next) orelse {
|
||||
std.debug.print(
|
||||
try step.addError(
|
||||
\\
|
||||
\\========= Variable was not extracted: ===========
|
||||
\\========= variable was not extracted: ===========
|
||||
\\{s}
|
||||
\\
|
||||
\\=================================================
|
||||
, .{next});
|
||||
return error.UnknownVariable;
|
||||
};
|
||||
|
|
@ -186,11 +187,11 @@ const Action = struct {
|
|||
|
||||
const exp_value = switch (act.expected.?.value) {
|
||||
.variable => |name| global_vars.get(name) orelse {
|
||||
std.debug.print(
|
||||
try step.addError(
|
||||
\\
|
||||
\\========= Variable was not extracted: ===========
|
||||
\\========= variable was not extracted: ===========
|
||||
\\{s}
|
||||
\\
|
||||
\\=================================================
|
||||
, .{name});
|
||||
return error.UnknownVariable;
|
||||
},
|
||||
|
|
@ -323,14 +324,12 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
);
|
||||
|
||||
const output = switch (self.obj_format) {
|
||||
.macho => try MachODumper.parseAndDump(contents, .{
|
||||
.gpa = gpa,
|
||||
.macho => try MachODumper.parseAndDump(step, contents, .{
|
||||
.dump_symtab = self.dump_symtab,
|
||||
}),
|
||||
.elf => @panic("TODO elf parser"),
|
||||
.coff => @panic("TODO coff parser"),
|
||||
.wasm => try WasmDumper.parseAndDump(contents, .{
|
||||
.gpa = gpa,
|
||||
.wasm => try WasmDumper.parseAndDump(step, contents, .{
|
||||
.dump_symtab = self.dump_symtab,
|
||||
}),
|
||||
else => unreachable,
|
||||
|
|
@ -346,54 +345,50 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
while (it.next()) |line| {
|
||||
if (try act.match(line, &vars)) break;
|
||||
} else {
|
||||
std.debug.print(
|
||||
return step.fail(
|
||||
\\
|
||||
\\========= Expected to find: ==========================
|
||||
\\========= expected to find: ==========================
|
||||
\\{s}
|
||||
\\========= But parsed file does not contain it: =======
|
||||
\\========= but parsed file does not contain it: =======
|
||||
\\{s}
|
||||
\\
|
||||
\\======================================================
|
||||
, .{ act.phrase, output });
|
||||
return error.TestFailed;
|
||||
}
|
||||
},
|
||||
.not_present => {
|
||||
while (it.next()) |line| {
|
||||
if (try act.match(line, &vars)) {
|
||||
std.debug.print(
|
||||
return step.fail(
|
||||
\\
|
||||
\\========= Expected not to find: ===================
|
||||
\\========= expected not to find: ===================
|
||||
\\{s}
|
||||
\\========= But parsed file does contain it: ========
|
||||
\\========= but parsed file does contain it: ========
|
||||
\\{s}
|
||||
\\
|
||||
\\===================================================
|
||||
, .{ act.phrase, output });
|
||||
return error.TestFailed;
|
||||
}
|
||||
}
|
||||
},
|
||||
.compute_cmp => {
|
||||
const res = act.computeCmp(gpa, vars) catch |err| switch (err) {
|
||||
const res = act.computeCmp(step, vars) catch |err| switch (err) {
|
||||
error.UnknownVariable => {
|
||||
std.debug.print(
|
||||
\\========= From parsed file: =====================
|
||||
return step.fail(
|
||||
\\========= from parsed file: =====================
|
||||
\\{s}
|
||||
\\
|
||||
\\=================================================
|
||||
, .{output});
|
||||
return error.TestFailed;
|
||||
},
|
||||
else => |e| return e,
|
||||
};
|
||||
if (!res) {
|
||||
std.debug.print(
|
||||
return step.fail(
|
||||
\\
|
||||
\\========= Comparison failed for action: ===========
|
||||
\\========= comparison failed for action: ===========
|
||||
\\{s} {}
|
||||
\\========= From parsed file: =======================
|
||||
\\========= from parsed file: =======================
|
||||
\\{s}
|
||||
\\
|
||||
\\===================================================
|
||||
, .{ act.phrase, act.expected.?, output });
|
||||
return error.TestFailed;
|
||||
}
|
||||
},
|
||||
}
|
||||
|
|
@ -402,7 +397,6 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
}
|
||||
|
||||
const Opts = struct {
|
||||
gpa: ?Allocator = null,
|
||||
dump_symtab: bool = false,
|
||||
};
|
||||
|
||||
|
|
@ -410,8 +404,8 @@ const MachODumper = struct {
|
|||
const LoadCommandIterator = macho.LoadCommandIterator;
|
||||
const symtab_label = "symtab";
|
||||
|
||||
fn parseAndDump(bytes: []align(@alignOf(u64)) const u8, opts: Opts) ![]const u8 {
|
||||
const gpa = opts.gpa orelse unreachable; // MachO dumper requires an allocator
|
||||
fn parseAndDump(step: *Step, bytes: []align(@alignOf(u64)) const u8, opts: Opts) ![]const u8 {
|
||||
const gpa = step.owner.allocator;
|
||||
var stream = std.io.fixedBufferStream(bytes);
|
||||
const reader = stream.reader();
|
||||
|
||||
|
|
@ -693,8 +687,8 @@ const MachODumper = struct {
|
|||
const WasmDumper = struct {
|
||||
const symtab_label = "symbols";
|
||||
|
||||
fn parseAndDump(bytes: []const u8, opts: Opts) ![]const u8 {
|
||||
const gpa = opts.gpa orelse unreachable; // Wasm dumper requires an allocator
|
||||
fn parseAndDump(step: *Step, bytes: []const u8, opts: Opts) ![]const u8 {
|
||||
const gpa = step.owner.allocator;
|
||||
if (opts.dump_symtab) {
|
||||
@panic("TODO: Implement symbol table parsing and dumping");
|
||||
}
|
||||
|
|
@ -715,20 +709,24 @@ const WasmDumper = struct {
|
|||
const writer = output.writer();
|
||||
|
||||
while (reader.readByte()) |current_byte| {
|
||||
const section = std.meta.intToEnum(std.wasm.Section, current_byte) catch |err| {
|
||||
std.debug.print("Found invalid section id '{d}'\n", .{current_byte});
|
||||
return err;
|
||||
const section = std.meta.intToEnum(std.wasm.Section, current_byte) catch {
|
||||
return step.fail("Found invalid section id '{d}'", .{current_byte});
|
||||
};
|
||||
|
||||
const section_length = try std.leb.readULEB128(u32, reader);
|
||||
try parseAndDumpSection(section, bytes[fbs.pos..][0..section_length], writer);
|
||||
try parseAndDumpSection(step, section, bytes[fbs.pos..][0..section_length], writer);
|
||||
fbs.pos += section_length;
|
||||
} else |_| {} // reached end of stream
|
||||
|
||||
return output.toOwnedSlice();
|
||||
}
|
||||
|
||||
fn parseAndDumpSection(section: std.wasm.Section, data: []const u8, writer: anytype) !void {
|
||||
fn parseAndDumpSection(
|
||||
step: *Step,
|
||||
section: std.wasm.Section,
|
||||
data: []const u8,
|
||||
writer: anytype,
|
||||
) !void {
|
||||
var fbs = std.io.fixedBufferStream(data);
|
||||
const reader = fbs.reader();
|
||||
|
||||
|
|
@ -751,7 +749,7 @@ const WasmDumper = struct {
|
|||
=> {
|
||||
const entries = try std.leb.readULEB128(u32, reader);
|
||||
try writer.print("\nentries {d}\n", .{entries});
|
||||
try dumpSection(section, data[fbs.pos..], entries, writer);
|
||||
try dumpSection(step, section, data[fbs.pos..], entries, writer);
|
||||
},
|
||||
.custom => {
|
||||
const name_length = try std.leb.readULEB128(u32, reader);
|
||||
|
|
@ -760,7 +758,7 @@ const WasmDumper = struct {
|
|||
try writer.print("\nname {s}\n", .{name});
|
||||
|
||||
if (mem.eql(u8, name, "name")) {
|
||||
try parseDumpNames(reader, writer, data);
|
||||
try parseDumpNames(step, reader, writer, data);
|
||||
} else if (mem.eql(u8, name, "producers")) {
|
||||
try parseDumpProducers(reader, writer, data);
|
||||
} else if (mem.eql(u8, name, "target_features")) {
|
||||
|
|
@ -776,7 +774,7 @@ const WasmDumper = struct {
|
|||
}
|
||||
}
|
||||
|
||||
fn dumpSection(section: std.wasm.Section, data: []const u8, entries: u32, writer: anytype) !void {
|
||||
fn dumpSection(step: *Step, section: std.wasm.Section, data: []const u8, entries: u32, writer: anytype) !void {
|
||||
var fbs = std.io.fixedBufferStream(data);
|
||||
const reader = fbs.reader();
|
||||
|
||||
|
|
@ -786,19 +784,18 @@ const WasmDumper = struct {
|
|||
while (i < entries) : (i += 1) {
|
||||
const func_type = try reader.readByte();
|
||||
if (func_type != std.wasm.function_type) {
|
||||
std.debug.print("Expected function type, found byte '{d}'\n", .{func_type});
|
||||
return error.UnexpectedByte;
|
||||
return step.fail("expected function type, found byte '{d}'", .{func_type});
|
||||
}
|
||||
const params = try std.leb.readULEB128(u32, reader);
|
||||
try writer.print("params {d}\n", .{params});
|
||||
var index: u32 = 0;
|
||||
while (index < params) : (index += 1) {
|
||||
try parseDumpType(std.wasm.Valtype, reader, writer);
|
||||
try parseDumpType(step, std.wasm.Valtype, reader, writer);
|
||||
} else index = 0;
|
||||
const returns = try std.leb.readULEB128(u32, reader);
|
||||
try writer.print("returns {d}\n", .{returns});
|
||||
while (index < returns) : (index += 1) {
|
||||
try parseDumpType(std.wasm.Valtype, reader, writer);
|
||||
try parseDumpType(step, std.wasm.Valtype, reader, writer);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
@ -812,9 +809,8 @@ const WasmDumper = struct {
|
|||
const name = data[fbs.pos..][0..name_len];
|
||||
fbs.pos += name_len;
|
||||
|
||||
const kind = std.meta.intToEnum(std.wasm.ExternalKind, try reader.readByte()) catch |err| {
|
||||
std.debug.print("Invalid import kind\n", .{});
|
||||
return err;
|
||||
const kind = std.meta.intToEnum(std.wasm.ExternalKind, try reader.readByte()) catch {
|
||||
return step.fail("invalid import kind", .{});
|
||||
};
|
||||
|
||||
try writer.print(
|
||||
|
|
@ -831,11 +827,11 @@ const WasmDumper = struct {
|
|||
try parseDumpLimits(reader, writer);
|
||||
},
|
||||
.global => {
|
||||
try parseDumpType(std.wasm.Valtype, reader, writer);
|
||||
try parseDumpType(step, std.wasm.Valtype, reader, writer);
|
||||
try writer.print("mutable {}\n", .{0x01 == try std.leb.readULEB128(u32, reader)});
|
||||
},
|
||||
.table => {
|
||||
try parseDumpType(std.wasm.RefType, reader, writer);
|
||||
try parseDumpType(step, std.wasm.RefType, reader, writer);
|
||||
try parseDumpLimits(reader, writer);
|
||||
},
|
||||
}
|
||||
|
|
@ -850,7 +846,7 @@ const WasmDumper = struct {
|
|||
.table => {
|
||||
var i: u32 = 0;
|
||||
while (i < entries) : (i += 1) {
|
||||
try parseDumpType(std.wasm.RefType, reader, writer);
|
||||
try parseDumpType(step, std.wasm.RefType, reader, writer);
|
||||
try parseDumpLimits(reader, writer);
|
||||
}
|
||||
},
|
||||
|
|
@ -863,9 +859,9 @@ const WasmDumper = struct {
|
|||
.global => {
|
||||
var i: u32 = 0;
|
||||
while (i < entries) : (i += 1) {
|
||||
try parseDumpType(std.wasm.Valtype, reader, writer);
|
||||
try parseDumpType(step, std.wasm.Valtype, reader, writer);
|
||||
try writer.print("mutable {}\n", .{0x01 == try std.leb.readULEB128(u1, reader)});
|
||||
try parseDumpInit(reader, writer);
|
||||
try parseDumpInit(step, reader, writer);
|
||||
}
|
||||
},
|
||||
.@"export" => {
|
||||
|
|
@ -875,9 +871,8 @@ const WasmDumper = struct {
|
|||
const name = data[fbs.pos..][0..name_len];
|
||||
fbs.pos += name_len;
|
||||
const kind_byte = try std.leb.readULEB128(u8, reader);
|
||||
const kind = std.meta.intToEnum(std.wasm.ExternalKind, kind_byte) catch |err| {
|
||||
std.debug.print("invalid export kind value '{d}'\n", .{kind_byte});
|
||||
return err;
|
||||
const kind = std.meta.intToEnum(std.wasm.ExternalKind, kind_byte) catch {
|
||||
return step.fail("invalid export kind value '{d}'", .{kind_byte});
|
||||
};
|
||||
const index = try std.leb.readULEB128(u32, reader);
|
||||
try writer.print(
|
||||
|
|
@ -892,7 +887,7 @@ const WasmDumper = struct {
|
|||
var i: u32 = 0;
|
||||
while (i < entries) : (i += 1) {
|
||||
try writer.print("table index {d}\n", .{try std.leb.readULEB128(u32, reader)});
|
||||
try parseDumpInit(reader, writer);
|
||||
try parseDumpInit(step, reader, writer);
|
||||
|
||||
const function_indexes = try std.leb.readULEB128(u32, reader);
|
||||
var function_index: u32 = 0;
|
||||
|
|
@ -908,7 +903,7 @@ const WasmDumper = struct {
|
|||
while (i < entries) : (i += 1) {
|
||||
const index = try std.leb.readULEB128(u32, reader);
|
||||
try writer.print("memory index 0x{x}\n", .{index});
|
||||
try parseDumpInit(reader, writer);
|
||||
try parseDumpInit(step, reader, writer);
|
||||
const size = try std.leb.readULEB128(u32, reader);
|
||||
try writer.print("size {d}\n", .{size});
|
||||
try reader.skipBytes(size, .{}); // we do not care about the content of the segments
|
||||
|
|
@ -918,11 +913,10 @@ const WasmDumper = struct {
|
|||
}
|
||||
}
|
||||
|
||||
fn parseDumpType(comptime WasmType: type, reader: anytype, writer: anytype) !void {
|
||||
fn parseDumpType(step: *Step, comptime WasmType: type, reader: anytype, writer: anytype) !void {
|
||||
const type_byte = try reader.readByte();
|
||||
const valtype = std.meta.intToEnum(WasmType, type_byte) catch |err| {
|
||||
std.debug.print("Invalid wasm type value '{d}'\n", .{type_byte});
|
||||
return err;
|
||||
const valtype = std.meta.intToEnum(WasmType, type_byte) catch {
|
||||
return step.fail("Invalid wasm type value '{d}'", .{type_byte});
|
||||
};
|
||||
try writer.print("type {s}\n", .{@tagName(valtype)});
|
||||
}
|
||||
|
|
@ -937,11 +931,10 @@ const WasmDumper = struct {
|
|||
}
|
||||
}
|
||||
|
||||
fn parseDumpInit(reader: anytype, writer: anytype) !void {
|
||||
fn parseDumpInit(step: *Step, reader: anytype, writer: anytype) !void {
|
||||
const byte = try std.leb.readULEB128(u8, reader);
|
||||
const opcode = std.meta.intToEnum(std.wasm.Opcode, byte) catch |err| {
|
||||
std.debug.print("invalid wasm opcode '{d}'\n", .{byte});
|
||||
return err;
|
||||
const opcode = std.meta.intToEnum(std.wasm.Opcode, byte) catch {
|
||||
return step.fail("invalid wasm opcode '{d}'", .{byte});
|
||||
};
|
||||
switch (opcode) {
|
||||
.i32_const => try writer.print("i32.const {x}\n", .{try std.leb.readILEB128(i32, reader)}),
|
||||
|
|
@ -953,14 +946,13 @@ const WasmDumper = struct {
|
|||
}
|
||||
const end_opcode = try std.leb.readULEB128(u8, reader);
|
||||
if (end_opcode != std.wasm.opcode(.end)) {
|
||||
std.debug.print("expected 'end' opcode in init expression\n", .{});
|
||||
return error.MissingEndOpcode;
|
||||
return step.fail("expected 'end' opcode in init expression", .{});
|
||||
}
|
||||
}
|
||||
|
||||
fn parseDumpNames(reader: anytype, writer: anytype, data: []const u8) !void {
|
||||
fn parseDumpNames(step: *Step, reader: anytype, writer: anytype, data: []const u8) !void {
|
||||
while (reader.context.pos < data.len) {
|
||||
try parseDumpType(std.wasm.NameSubsection, reader, writer);
|
||||
try parseDumpType(step, std.wasm.NameSubsection, reader, writer);
|
||||
const size = try std.leb.readULEB128(u32, reader);
|
||||
const entries = try std.leb.readULEB128(u32, reader);
|
||||
try writer.print(
|
||||
|
|
|
|||
|
|
@ -538,8 +538,7 @@ pub fn run(cs: *CompileStep) *RunStep {
|
|||
}
|
||||
|
||||
pub fn checkObject(self: *CompileStep, obj_format: std.Target.ObjectFormat) *CheckObjectStep {
|
||||
const b = self.step.owner;
|
||||
return CheckObjectStep.create(b, self.getOutputSource(), obj_format);
|
||||
return CheckObjectStep.create(self.step.owner, self.getOutputSource(), obj_format);
|
||||
}
|
||||
|
||||
pub fn setLinkerScriptPath(self: *CompileStep, source: FileSource) void {
|
||||
|
|
|
|||
|
|
@ -192,13 +192,13 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
try output.appendSlice(c_generated_line);
|
||||
const src_path = file_source.getPath(b);
|
||||
const contents = try std.fs.cwd().readFileAlloc(gpa, src_path, self.max_bytes);
|
||||
try render_autoconf(contents, &output, self.values, src_path);
|
||||
try render_autoconf(step, contents, &output, self.values, src_path);
|
||||
},
|
||||
.cmake => |file_source| {
|
||||
try output.appendSlice(c_generated_line);
|
||||
const src_path = file_source.getPath(b);
|
||||
const contents = try std.fs.cwd().readFileAlloc(gpa, src_path, self.max_bytes);
|
||||
try render_cmake(contents, &output, self.values, src_path);
|
||||
try render_cmake(step, contents, &output, self.values, src_path);
|
||||
},
|
||||
.blank => {
|
||||
try output.appendSlice(c_generated_line);
|
||||
|
|
@ -234,8 +234,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
output_dir;
|
||||
|
||||
var dir = std.fs.cwd().makeOpenPath(sub_dir_path, .{}) catch |err| {
|
||||
std.debug.print("unable to make path {s}: {s}\n", .{ output_dir, @errorName(err) });
|
||||
return err;
|
||||
return step.fail("unable to make path '{s}': {s}", .{ output_dir, @errorName(err) });
|
||||
};
|
||||
defer dir.close();
|
||||
|
||||
|
|
@ -247,6 +246,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
}
|
||||
|
||||
fn render_autoconf(
|
||||
step: *Step,
|
||||
contents: []const u8,
|
||||
output: *std.ArrayList(u8),
|
||||
values: std.StringArrayHashMap(Value),
|
||||
|
|
@ -273,7 +273,7 @@ fn render_autoconf(
|
|||
}
|
||||
const name = it.rest();
|
||||
const kv = values_copy.fetchSwapRemove(name) orelse {
|
||||
std.debug.print("{s}:{d}: error: unspecified config header value: '{s}'\n", .{
|
||||
try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{
|
||||
src_path, line_index + 1, name,
|
||||
});
|
||||
any_errors = true;
|
||||
|
|
@ -283,15 +283,17 @@ fn render_autoconf(
|
|||
}
|
||||
|
||||
for (values_copy.keys()) |name| {
|
||||
std.debug.print("{s}: error: config header value unused: '{s}'\n", .{ src_path, name });
|
||||
try step.addError("{s}: error: config header value unused: '{s}'", .{ src_path, name });
|
||||
any_errors = true;
|
||||
}
|
||||
|
||||
if (any_errors) {
|
||||
return error.HeaderConfigFailed;
|
||||
return error.MakeFailed;
|
||||
}
|
||||
}
|
||||
|
||||
fn render_cmake(
|
||||
step: *Step,
|
||||
contents: []const u8,
|
||||
output: *std.ArrayList(u8),
|
||||
values: std.StringArrayHashMap(Value),
|
||||
|
|
@ -317,14 +319,14 @@ fn render_cmake(
|
|||
continue;
|
||||
}
|
||||
const name = it.next() orelse {
|
||||
std.debug.print("{s}:{d}: error: missing define name\n", .{
|
||||
try step.addError("{s}:{d}: error: missing define name", .{
|
||||
src_path, line_index + 1,
|
||||
});
|
||||
any_errors = true;
|
||||
continue;
|
||||
};
|
||||
const kv = values_copy.fetchSwapRemove(name) orelse {
|
||||
std.debug.print("{s}:{d}: error: unspecified config header value: '{s}'\n", .{
|
||||
try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{
|
||||
src_path, line_index + 1, name,
|
||||
});
|
||||
any_errors = true;
|
||||
|
|
@ -334,7 +336,8 @@ fn render_cmake(
|
|||
}
|
||||
|
||||
for (values_copy.keys()) |name| {
|
||||
std.debug.print("{s}: error: config header value unused: '{s}'\n", .{ src_path, name });
|
||||
try step.addError("{s}: error: config header value unused: '{s}'", .{ src_path, name });
|
||||
any_errors = true;
|
||||
}
|
||||
|
||||
if (any_errors) {
|
||||
|
|
|
|||
|
|
@ -12,6 +12,9 @@ artifact: *CompileStep,
|
|||
dest_dir: InstallDir,
|
||||
pdb_dir: ?InstallDir,
|
||||
h_dir: ?InstallDir,
|
||||
/// If non-null, adds additional path components relative to dest_dir, and
|
||||
/// overrides the basename of the CompileStep.
|
||||
dest_sub_path: ?[]const u8,
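A hedged usage sketch for the new field (it assumes the build script
obtains the install step via `b.addInstallArtifact`, and the sub-path is
a placeholder):

    const install = b.addInstallArtifact(exe);
    // Lands at <prefix>/bin/tools/renamed.exe instead of the artifact's
    // default basename directly under the destination directory.
    install.dest_sub_path = "tools/renamed.exe";
    b.getInstallStep().dependOn(&install.step);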
|
||||
|
||||
pub fn create(owner: *std.Build, artifact: *CompileStep) *InstallArtifactStep {
|
||||
if (artifact.install_step) |s| return s;
|
||||
|
|
@ -40,6 +43,7 @@ pub fn create(owner: *std.Build, artifact: *CompileStep) *InstallArtifactStep {
|
|||
}
|
||||
} else null,
|
||||
.h_dir = if (artifact.kind == .lib and artifact.emit_h) .header else null,
|
||||
.dest_sub_path = null,
|
||||
};
|
||||
self.step.dependOn(&artifact.step);
|
||||
artifact.install_step = self;
|
||||
|
|
@ -71,7 +75,9 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
const self = @fieldParentPtr(InstallArtifactStep, "step", step);
|
||||
const dest_builder = self.dest_builder;
|
||||
|
||||
const full_dest_path = dest_builder.getInstallPath(self.dest_dir, self.artifact.out_filename);
|
||||
const dest_sub_path = if (self.dest_sub_path) |sub_path| sub_path else self.artifact.out_filename;
|
||||
const full_dest_path = dest_builder.getInstallPath(self.dest_dir, dest_sub_path);
|
||||
|
||||
try src_builder.updateFile(
|
||||
self.artifact.getOutputSource().getPath(src_builder),
|
||||
full_dest_path,
|
||||
|
|
|
|||
|
|
@ -95,8 +95,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
const full_dest_path = try b.cache_root.join(b.allocator, &.{ "o", &digest, self.basename });
|
||||
const cache_path = "o" ++ fs.path.sep_str ++ digest;
|
||||
b.cache_root.handle.makePath(cache_path) catch |err| {
|
||||
std.debug.print("unable to make path {s}: {s}\n", .{ cache_path, @errorName(err) });
|
||||
return err;
|
||||
return step.fail("unable to make path {s}: {s}", .{ cache_path, @errorName(err) });
|
||||
};
|
||||
|
||||
var argv = std.ArrayList([]const u8).init(b.allocator);
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ const ArrayList = std.ArrayList;
|
|||
const EnvMap = process.EnvMap;
|
||||
const Allocator = mem.Allocator;
|
||||
const ExecError = std.Build.ExecError;
|
||||
const assert = std.debug.assert;
|
||||
|
||||
const RunStep = @This();
|
||||
|
||||
|
|
@ -54,6 +55,8 @@ rename_step_with_output_arg: bool = true,
|
|||
/// Command-line arguments such as -fqemu and -fwasmtime may affect whether a
|
||||
/// binary is detected as foreign, as well as system configuration such as
|
||||
/// Rosetta (macOS) and binfmt_misc (Linux).
|
||||
/// If this RunStep is considered to have side-effects, then this flag does
|
||||
/// nothing.
|
||||
skip_foreign_checks: bool = false,
|
||||
|
||||
/// If stderr or stdout exceeds this amount, the child process is killed and
|
||||
|
|
@ -79,7 +82,7 @@ pub const StdIo = union(enum) {
|
|||
/// conditions.
|
||||
/// Note that an explicit check for exit code 0 needs to be added to this
|
||||
/// list if such a check is desirable.
|
||||
check: []const Check,
|
||||
check: std.ArrayList(Check),
|
||||
|
||||
pub const Check = union(enum) {
|
||||
expect_stderr_exact: []const u8,
|
||||
|
|
@ -214,14 +217,20 @@ pub fn setEnvironmentVariable(self: *RunStep, key: []const u8, value: []const u8
|
|||
env_map.put(b.dupe(key), b.dupe(value)) catch @panic("unhandled error");
|
||||
}
|
||||
|
||||
/// Adds a check for exact stderr match. Does not add any other checks.
|
||||
pub fn expectStdErrEqual(self: *RunStep, bytes: []const u8) void {
|
||||
const new_check: StdIo.Check = .{ .expect_stderr_exact = self.step.owner.dupe(bytes) };
|
||||
self.addCheck(new_check);
|
||||
}
|
||||
|
||||
/// Adds a check for exact stdout match as well as a check for exit code 0, if
|
||||
/// there is not already an expected termination check.
|
||||
pub fn expectStdOutEqual(self: *RunStep, bytes: []const u8) void {
|
||||
const new_check: StdIo.Check = .{ .expect_stdout_exact = self.step.owner.dupe(bytes) };
|
||||
self.addCheck(new_check);
|
||||
if (!self.hasTermCheck()) {
|
||||
self.expectExitCode(0);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expectExitCode(self: *RunStep, code: u8) void {
|
||||
|
|
@ -229,19 +238,21 @@ pub fn expectExitCode(self: *RunStep, code: u8) void {
|
|||
self.addCheck(new_check);
|
||||
}
|
||||
|
||||
pub fn hasTermCheck(self: RunStep) bool {
|
||||
for (self.stdio.check.items) |check| switch (check) {
|
||||
.expect_term => return true,
|
||||
else => continue,
|
||||
};
|
||||
return false;
|
||||
}
|
||||
|
||||
pub fn addCheck(self: *RunStep, new_check: StdIo.Check) void {
|
||||
const arena = self.step.owner.allocator;
|
||||
switch (self.stdio) {
|
||||
.infer_from_args => {
|
||||
const list = arena.create([1]StdIo.Check) catch @panic("OOM");
|
||||
list.* = .{new_check};
|
||||
self.stdio = .{ .check = list };
|
||||
},
|
||||
.check => |checks| {
|
||||
const new_list = arena.alloc(StdIo.Check, checks.len + 1) catch @panic("OOM");
|
||||
std.mem.copy(StdIo.Check, new_list, checks);
|
||||
new_list[checks.len] = new_check;
|
||||
self.stdio = .{ .check = std.ArrayList(StdIo.Check).init(self.step.owner.allocator) };
|
||||
self.stdio.check.append(new_check) catch @panic("OOM");
|
||||
},
|
||||
.check => |*checks| checks.append(new_check) catch @panic("OOM"),
|
||||
else => @panic("illegal call to addCheck: conflicting helper method calls. Suggest to directly set stdio field of RunStep instead"),
|
||||
}
|
||||
}
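Because `expectStdOutEqual` now appends the implicit exit-code check, a
build script that wants full control can populate the check list directly
instead, e.g. (a hedged sketch; the type path assumes RunStep is reachable
as `std.Build.RunStep`):

    var checks = std.ArrayList(std.Build.RunStep.StdIo.Check).init(b.allocator);
    // Only assert on stdout; deliberately no expect_term check, so a nonzero
    // exit code does not fail the step.
    checks.append(.{ .expect_stdout_exact = "Hello!\n" }) catch @panic("OOM");
    run.stdio = .{ .check = checks };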
|
||||
|
|
@ -298,14 +309,15 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
_ = prog_node;
|
||||
|
||||
const b = step.owner;
|
||||
const arena = b.allocator;
|
||||
const self = @fieldParentPtr(RunStep, "step", step);
|
||||
const has_side_effects = self.hasSideEffects();
|
||||
|
||||
var argv_list = ArrayList([]const u8).init(b.allocator);
|
||||
var argv_list = ArrayList([]const u8).init(arena);
|
||||
var output_placeholders = ArrayList(struct {
|
||||
index: usize,
|
||||
output: Arg.Output,
|
||||
}).init(b.allocator);
|
||||
}).init(arena);
|
||||
|
||||
var man = b.cache.obtain();
|
||||
defer man.deinit();
|
||||
|
|
@ -357,7 +369,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
const digest = man.final();
|
||||
for (output_placeholders.items) |placeholder| {
|
||||
placeholder.output.generated_file.path = try b.cache_root.join(
|
||||
b.allocator,
|
||||
arena,
|
||||
&.{ "o", &digest, placeholder.output.basename },
|
||||
);
|
||||
}
|
||||
|
|
@ -367,30 +379,21 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
const digest = man.final();
|
||||
|
||||
for (output_placeholders.items) |placeholder| {
|
||||
const output_path = try b.cache_root.join(
|
||||
b.allocator,
|
||||
&.{ "o", &digest, placeholder.output.basename },
|
||||
);
|
||||
const output_dir = fs.path.dirname(output_path).?;
|
||||
fs.cwd().makePath(output_dir) catch |err| {
|
||||
std.debug.print("unable to make path {s}: {s}\n", .{ output_dir, @errorName(err) });
|
||||
return err;
|
||||
const output_components = .{ "o", &digest, placeholder.output.basename };
|
||||
const output_sub_path = try fs.path.join(arena, &output_components);
|
||||
const output_sub_dir_path = fs.path.dirname(output_sub_path).?;
|
||||
b.cache_root.handle.makePath(output_sub_dir_path) catch |err| {
|
||||
return step.fail("unable to make path '{}{s}': {s}", .{
|
||||
b.cache_root, output_sub_dir_path, @errorName(err),
|
||||
});
|
||||
};
|
||||
|
||||
const output_path = try b.cache_root.join(arena, &output_components);
|
||||
placeholder.output.generated_file.path = output_path;
|
||||
argv_list.items[placeholder.index] = output_path;
|
||||
}
|
||||
}
|
||||
|
||||
try runCommand(
|
||||
step,
|
||||
self.cwd,
|
||||
argv_list.items,
|
||||
self.env_map,
|
||||
self.stdio,
|
||||
has_side_effects,
|
||||
self.max_stdio_size,
|
||||
);
|
||||
try runCommand(self, argv_list.items, has_side_effects);
|
||||
|
||||
if (!has_side_effects) {
|
||||
try man.writeManifest();
|
||||
|
|
@ -442,49 +445,265 @@ fn termMatches(expected: ?std.ChildProcess.Term, actual: std.ChildProcess.Term)
|
|||
};
|
||||
}
|
||||
|
||||
fn runCommand(
|
||||
step: *Step,
|
||||
opt_cwd: ?[]const u8,
|
||||
argv: []const []const u8,
|
||||
env_map: ?*EnvMap,
|
||||
stdio: StdIo,
|
||||
has_side_effects: bool,
|
||||
max_stdio_size: usize,
|
||||
) !void {
|
||||
fn runCommand(self: *RunStep, argv: []const []const u8, has_side_effects: bool) !void {
|
||||
const step = &self.step;
|
||||
const b = step.owner;
|
||||
const arena = b.allocator;
|
||||
const cwd = if (opt_cwd) |cwd| b.pathFromRoot(cwd) else b.build_root.path;
|
||||
|
||||
try step.handleChildProcUnsupported(opt_cwd, argv);
|
||||
try Step.handleVerbose(step.owner, opt_cwd, argv);
|
||||
try step.handleChildProcUnsupported(self.cwd, argv);
|
||||
try Step.handleVerbose(step.owner, self.cwd, argv);
|
||||
|
||||
var stdout_bytes: ?[]const u8 = null;
|
||||
var stderr_bytes: ?[]const u8 = null;
|
||||
|
||||
const term = spawnChildAndCollect(self, argv, &stdout_bytes, &stderr_bytes, has_side_effects) catch |err| term: {
|
||||
if (err == error.InvalidExe) interpret: {
|
||||
// TODO: learn the target from the binary directly rather than from
|
||||
// relying on it being a CompileStep. This will make this logic
|
||||
// work even for the edge case that the binary was produced by a
|
||||
// third party.
|
||||
const exe = switch (self.argv.items[0]) {
|
||||
.artifact => |exe| exe,
|
||||
else => break :interpret,
|
||||
};
|
||||
if (exe.kind != .exe) break :interpret;
|
||||
|
||||
var interp_argv = std.ArrayList([]const u8).init(b.allocator);
|
||||
defer interp_argv.deinit();
|
||||
|
||||
const need_cross_glibc = exe.target.isGnuLibC() and exe.is_linking_libc;
|
||||
switch (b.host.getExternalExecutor(exe.target_info, .{
|
||||
.qemu_fixes_dl = need_cross_glibc and b.glibc_runtimes_dir != null,
|
||||
.link_libc = exe.is_linking_libc,
|
||||
})) {
|
||||
.native, .rosetta => {
|
||||
if (self.stdio == .check and self.skip_foreign_checks)
|
||||
return error.MakeSkipped;
|
||||
|
||||
break :interpret;
|
||||
},
|
||||
.wine => |bin_name| {
|
||||
if (b.enable_wine) {
|
||||
try interp_argv.append(bin_name);
|
||||
} else {
|
||||
return failForeign(self, "-fwine", argv[0], exe);
|
||||
}
|
||||
},
|
||||
.qemu => |bin_name| {
|
||||
if (b.enable_qemu) {
|
||||
const glibc_dir_arg = if (need_cross_glibc)
|
||||
b.glibc_runtimes_dir orelse return
|
||||
else
|
||||
null;
|
||||
|
||||
try interp_argv.append(bin_name);
|
||||
|
||||
if (glibc_dir_arg) |dir| {
|
||||
// TODO look into making this a call to `linuxTriple`. This
|
||||
// needs the directory to be called "i686" rather than
|
||||
// "x86" which is why we do it manually here.
|
||||
const fmt_str = "{s}" ++ fs.path.sep_str ++ "{s}-{s}-{s}";
|
||||
const cpu_arch = exe.target.getCpuArch();
|
||||
const os_tag = exe.target.getOsTag();
|
||||
const abi = exe.target.getAbi();
|
||||
const cpu_arch_name: []const u8 = if (cpu_arch == .x86)
|
||||
"i686"
|
||||
else
|
||||
@tagName(cpu_arch);
|
||||
const full_dir = try std.fmt.allocPrint(b.allocator, fmt_str, .{
|
||||
dir, cpu_arch_name, @tagName(os_tag), @tagName(abi),
|
||||
});
|
||||
|
||||
try interp_argv.append("-L");
|
||||
try interp_argv.append(full_dir);
|
||||
}
|
||||
} else {
|
||||
return failForeign(self, "-fqemu", argv[0], exe);
|
||||
}
|
||||
},
|
||||
.darling => |bin_name| {
|
||||
if (b.enable_darling) {
|
||||
try interp_argv.append(bin_name);
|
||||
} else {
|
||||
return failForeign(self, "-fdarling", argv[0], exe);
|
||||
}
|
||||
},
|
||||
.wasmtime => |bin_name| {
|
||||
if (b.enable_wasmtime) {
|
||||
try interp_argv.append(bin_name);
|
||||
try interp_argv.append("--dir=.");
|
||||
} else {
|
||||
return failForeign(self, "-fwasmtime", argv[0], exe);
|
||||
}
|
||||
},
|
||||
.bad_dl => |foreign_dl| {
|
||||
if (self.stdio == .check and self.skip_foreign_checks)
|
||||
return error.MakeSkipped;
|
||||
|
||||
const host_dl = b.host.dynamic_linker.get() orelse "(none)";
|
||||
|
||||
return step.fail(
|
||||
\\the host system is unable to execute binaries from the target
|
||||
\\ because the host dynamic linker is '{s}',
|
||||
\\ while the target dynamic linker is '{s}'.
|
||||
\\ consider setting the dynamic linker or enabling skip_foreign_checks in the Run step
|
||||
, .{ host_dl, foreign_dl });
|
||||
},
|
||||
.bad_os_or_cpu => {
|
||||
if (self.stdio == .check and self.skip_foreign_checks)
|
||||
return error.MakeSkipped;
|
||||
|
||||
const host_name = try b.host.target.zigTriple(b.allocator);
|
||||
const foreign_name = try exe.target.zigTriple(b.allocator);
|
||||
|
||||
return step.fail("the host system ({s}) is unable to execute binaries from the target ({s})", .{
|
||||
host_name, foreign_name,
|
||||
});
|
||||
},
|
||||
}
|
||||
|
||||
if (exe.target.isWindows()) {
|
||||
// On Windows we don't have rpaths so we have to add .dll search paths to PATH
|
||||
RunStep.addPathForDynLibsInternal(&self.step, b, exe);
|
||||
}
|
||||
|
||||
try interp_argv.append(argv[0]);
|
||||
|
||||
try Step.handleVerbose(step.owner, self.cwd, interp_argv.items);
|
||||
|
||||
assert(stdout_bytes == null);
|
||||
assert(stderr_bytes == null);
|
||||
break :term spawnChildAndCollect(self, interp_argv.items, &stdout_bytes, &stderr_bytes, has_side_effects) catch |inner_err| {
|
||||
return step.fail("unable to spawn {s}: {s}", .{
|
||||
interp_argv.items[0], @errorName(inner_err),
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
return step.fail("unable to spawn {s}: {s}", .{ argv[0], @errorName(err) });
|
||||
};
|
||||
|
||||
switch (self.stdio) {
|
||||
.check => |checks| for (checks.items) |check| switch (check) {
|
||||
.expect_stderr_exact => |expected_bytes| {
|
||||
if (!mem.eql(u8, expected_bytes, stderr_bytes.?)) {
|
||||
return step.fail(
|
||||
\\
|
||||
\\========= expected this stderr: =========
|
||||
\\{s}
|
||||
\\========= but found: ====================
|
||||
\\{s}
|
||||
\\========= from the following command: ===
|
||||
\\{s}
|
||||
, .{
|
||||
expected_bytes,
|
||||
stderr_bytes.?,
|
||||
try Step.allocPrintCmd(arena, self.cwd, argv),
|
||||
});
|
||||
}
|
||||
},
|
||||
.expect_stderr_match => |match| {
|
||||
if (mem.indexOf(u8, stderr_bytes.?, match) == null) {
|
||||
return step.fail(
|
||||
\\
|
||||
\\========= expected to find in stderr: =========
|
||||
\\{s}
|
||||
\\========= but stderr does not contain it: =====
|
||||
\\{s}
|
||||
\\========= from the following command: =========
|
||||
\\{s}
|
||||
, .{
|
||||
match,
|
||||
stderr_bytes.?,
|
||||
try Step.allocPrintCmd(arena, self.cwd, argv),
|
||||
});
|
||||
}
|
||||
},
|
||||
.expect_stdout_exact => |expected_bytes| {
|
||||
if (!mem.eql(u8, expected_bytes, stdout_bytes.?)) {
|
||||
return step.fail(
|
||||
\\
|
||||
\\========= expected this stdout: =========
|
||||
\\{s}
|
||||
\\========= but found: ====================
|
||||
\\{s}
|
||||
\\========= from the following command: ===
|
||||
\\{s}
|
||||
, .{
|
||||
expected_bytes,
|
||||
stdout_bytes.?,
|
||||
try Step.allocPrintCmd(arena, self.cwd, argv),
|
||||
});
|
||||
}
|
||||
},
|
||||
.expect_stdout_match => |match| {
|
||||
if (mem.indexOf(u8, stdout_bytes.?, match) == null) {
|
||||
return step.fail(
|
||||
\\
|
||||
\\========= expected to find in stdout: =========
|
||||
\\{s}
|
||||
\\========= but stdout does not contain it: =====
|
||||
\\{s}
|
||||
\\========= from the following command: =========
|
||||
\\{s}
|
||||
, .{
|
||||
match,
|
||||
stdout_bytes.?,
|
||||
try Step.allocPrintCmd(arena, self.cwd, argv),
|
||||
});
|
||||
}
|
||||
},
|
||||
.expect_term => |expected_term| {
|
||||
if (!termMatches(expected_term, term)) {
|
||||
return step.fail("the following command {} (expected {}):\n{s}", .{
|
||||
fmtTerm(term),
|
||||
fmtTerm(expected_term),
|
||||
try Step.allocPrintCmd(arena, self.cwd, argv),
|
||||
});
|
||||
}
|
||||
},
|
||||
},
|
||||
else => {
|
||||
try step.handleChildProcessTerm(term, self.cwd, argv);
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn spawnChildAndCollect(
|
||||
self: *RunStep,
|
||||
argv: []const []const u8,
|
||||
stdout_bytes: *?[]const u8,
|
||||
stderr_bytes: *?[]const u8,
|
||||
has_side_effects: bool,
|
||||
) !std.ChildProcess.Term {
|
||||
const b = self.step.owner;
|
||||
const arena = b.allocator;
|
||||
const cwd = if (self.cwd) |cwd| b.pathFromRoot(cwd) else b.build_root.path;
|
||||
|
||||
var child = std.ChildProcess.init(argv, arena);
|
||||
child.cwd = cwd;
|
||||
child.env_map = env_map orelse b.env_map;
|
||||
child.env_map = self.env_map orelse b.env_map;
|
||||
|
||||
child.stdin_behavior = switch (stdio) {
|
||||
child.stdin_behavior = switch (self.stdio) {
|
||||
.infer_from_args => if (has_side_effects) .Inherit else .Ignore,
|
||||
.inherit => .Inherit,
|
||||
.check => .Close,
|
||||
};
|
||||
child.stdout_behavior = switch (stdio) {
|
||||
child.stdout_behavior = switch (self.stdio) {
|
||||
.infer_from_args => if (has_side_effects) .Inherit else .Ignore,
|
||||
.inherit => .Inherit,
|
||||
.check => |checks| if (checksContainStdout(checks)) .Pipe else .Ignore,
|
||||
.check => |checks| if (checksContainStdout(checks.items)) .Pipe else .Ignore,
|
||||
};
|
||||
child.stderr_behavior = switch (stdio) {
|
||||
child.stderr_behavior = switch (self.stdio) {
|
||||
.infer_from_args => if (has_side_effects) .Inherit else .Pipe,
|
||||
.inherit => .Inherit,
|
||||
.check => .Pipe,
|
||||
};
|
||||
|
||||
child.spawn() catch |err| return step.fail("unable to spawn {s}: {s}", .{
|
||||
child.spawn() catch |err| return self.step.fail("unable to spawn {s}: {s}", .{
|
||||
argv[0], @errorName(err),
|
||||
});
|
||||
|
||||
var stdout_bytes: ?[]const u8 = null;
|
||||
var stderr_bytes: ?[]const u8 = null;
|
||||
|
||||
if (child.stdout) |stdout| {
|
||||
if (child.stderr) |stderr| {
|
||||
var poller = std.io.poll(arena, enum { stdout, stderr }, .{
|
||||
|
|
@ -494,115 +713,32 @@ fn runCommand(
|
|||
defer poller.deinit();
|
||||
|
||||
while (try poller.poll()) {
|
||||
if (poller.fifo(.stdout).count > max_stdio_size)
|
||||
if (poller.fifo(.stdout).count > self.max_stdio_size)
|
||||
return error.StdoutStreamTooLong;
|
||||
if (poller.fifo(.stderr).count > max_stdio_size)
|
||||
if (poller.fifo(.stderr).count > self.max_stdio_size)
|
||||
return error.StderrStreamTooLong;
|
||||
}
|
||||
|
||||
stdout_bytes = try poller.fifo(.stdout).toOwnedSlice();
|
||||
stderr_bytes = try poller.fifo(.stderr).toOwnedSlice();
|
||||
stdout_bytes.* = try poller.fifo(.stdout).toOwnedSlice();
|
||||
stderr_bytes.* = try poller.fifo(.stderr).toOwnedSlice();
|
||||
} else {
|
||||
stdout_bytes = try stdout.reader().readAllAlloc(arena, max_stdio_size);
|
||||
stdout_bytes.* = try stdout.reader().readAllAlloc(arena, self.max_stdio_size);
|
||||
}
|
||||
} else if (child.stderr) |stderr| {
|
||||
stderr_bytes = try stderr.reader().readAllAlloc(arena, max_stdio_size);
|
||||
stderr_bytes.* = try stderr.reader().readAllAlloc(arena, self.max_stdio_size);
|
||||
}
|
||||
|
||||
if (stderr_bytes) |stderr| if (stderr.len > 0) {
|
||||
const stderr_is_diagnostic = switch (stdio) {
|
||||
.check => |checks| !checksContainStderr(checks),
|
||||
if (stderr_bytes.*) |stderr| if (stderr.len > 0) {
|
||||
const stderr_is_diagnostic = switch (self.stdio) {
|
||||
.check => |checks| !checksContainStderr(checks.items),
|
||||
else => true,
|
||||
};
|
||||
if (stderr_is_diagnostic) {
|
||||
try step.result_error_msgs.append(arena, stderr);
|
||||
try self.step.result_error_msgs.append(arena, stderr);
|
||||
}
|
||||
};
|
||||
|
||||
const term = child.wait() catch |err| {
|
||||
return step.fail("unable to wait for {s}: {s}", .{ argv[0], @errorName(err) });
|
||||
};
|
||||
|
||||
switch (stdio) {
|
||||
.check => |checks| for (checks) |check| switch (check) {
|
||||
.expect_stderr_exact => |expected_bytes| {
|
||||
if (!mem.eql(u8, expected_bytes, stderr_bytes.?)) {
|
||||
return step.fail(
|
||||
\\========= expected this stderr: =========
|
||||
\\{s}
|
||||
\\========= but found: ====================
|
||||
\\{s}
|
||||
\\========= from the following command: ===
|
||||
\\{s}
|
||||
, .{
|
||||
expected_bytes,
|
||||
stderr_bytes.?,
|
||||
try Step.allocPrintCmd(arena, opt_cwd, argv),
|
||||
});
|
||||
}
|
||||
},
|
||||
.expect_stderr_match => |match| {
|
||||
if (mem.indexOf(u8, stderr_bytes.?, match) == null) {
|
||||
return step.fail(
|
||||
\\========= expected to find in stderr: =========
|
||||
\\{s}
|
||||
\\========= but stderr does not contain it: =====
|
||||
\\{s}
|
||||
\\========= from the following command: =========
|
||||
\\{s}
|
||||
, .{
|
||||
match,
|
||||
stderr_bytes.?,
|
||||
try Step.allocPrintCmd(arena, opt_cwd, argv),
|
||||
});
|
||||
}
|
||||
},
|
||||
.expect_stdout_exact => |expected_bytes| {
|
||||
if (!mem.eql(u8, expected_bytes, stdout_bytes.?)) {
|
||||
return step.fail(
|
||||
\\========= expected this stdout: =========
|
||||
\\{s}
|
||||
\\========= but found: ====================
|
||||
\\{s}
|
||||
\\========= from the following command: ===
|
||||
\\{s}
|
||||
, .{
|
||||
expected_bytes,
|
||||
stdout_bytes.?,
|
||||
try Step.allocPrintCmd(arena, opt_cwd, argv),
|
||||
});
|
||||
}
|
||||
},
|
||||
.expect_stdout_match => |match| {
|
||||
if (mem.indexOf(u8, stdout_bytes.?, match) == null) {
|
||||
return step.fail(
|
||||
\\========= expected to find in stdout: =========
|
||||
\\{s}
|
||||
\\========= but stdout does not contain it: =====
|
||||
\\{s}
|
||||
\\========= from the following command: =========
|
||||
\\{s}
|
||||
, .{
|
||||
match,
|
||||
stdout_bytes.?,
|
||||
try Step.allocPrintCmd(arena, opt_cwd, argv),
|
||||
});
|
||||
}
|
||||
},
|
||||
.expect_term => |expected_term| {
|
||||
if (!termMatches(expected_term, term)) {
|
||||
return step.fail("the following command {} (expected {}):\n{s}", .{
|
||||
fmtTerm(term),
|
||||
fmtTerm(expected_term),
|
||||
try Step.allocPrintCmd(arena, opt_cwd, argv),
|
||||
});
|
||||
}
|
||||
},
|
||||
},
|
||||
else => {
|
||||
try step.handleChildProcessTerm(term, opt_cwd, argv);
|
||||
},
|
||||
}
|
||||
return child.wait();
|
||||
}
|
||||
|
||||
fn addPathForDynLibs(self: *RunStep, artifact: *CompileStep) void {
|
||||
|
|
@ -624,3 +760,29 @@ pub fn addPathForDynLibsInternal(step: *Step, builder: *std.Build, artifact: *Co
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn failForeign(
|
||||
self: *RunStep,
|
||||
suggested_flag: []const u8,
|
||||
argv0: []const u8,
|
||||
exe: *CompileStep,
|
||||
) error{ MakeFailed, MakeSkipped, OutOfMemory } {
|
||||
switch (self.stdio) {
|
||||
.check => {
|
||||
if (self.skip_foreign_checks)
|
||||
return error.MakeSkipped;
|
||||
|
||||
const b = self.step.owner;
|
||||
const host_name = try b.host.target.zigTriple(b.allocator);
|
||||
const foreign_name = try exe.target.zigTriple(b.allocator);
|
||||
|
||||
return self.step.fail(
|
||||
\\unable to spawn foreign binary '{s}' ({s}) on host system ({s})
|
||||
\\ consider using {s} or enabling skip_foreign_checks in the Run step
|
||||
, .{ argv0, foreign_name, host_name, suggested_flag });
|
||||
},
|
||||
else => {
|
||||
return self.step.fail("unable to spawn foreign binary '{s}'", .{argv0});
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -26,6 +26,9 @@ pub const State = enum {
    dependency_failure,
    success,
    failure,
    /// This state indicates that the step did not complete, however, it also did not fail,
    /// and it is safe to continue executing its dependencies.
    skipped,
};

pub const Id = enum {

@@ -106,13 +109,15 @@ pub fn init(options: Options) Step {
/// If the Step's `make` function reports `error.MakeFailed`, it indicates they
/// have already reported the error. Otherwise, we add a simple error report
/// here.
pub fn make(s: *Step, prog_node: *std.Progress.Node) error{MakeFailed}!void {
    return s.makeFn(s, prog_node) catch |err| {
        if (err != error.MakeFailed) {
pub fn make(s: *Step, prog_node: *std.Progress.Node) error{ MakeFailed, MakeSkipped }!void {
    return s.makeFn(s, prog_node) catch |err| switch (err) {
        error.MakeFailed => return error.MakeFailed,
        error.MakeSkipped => return error.MakeSkipped,
        else => {
            const gpa = s.dependencies.allocator;
            s.result_error_msgs.append(gpa, @errorName(err)) catch @panic("OOM");
        }
        return error.MakeFailed;
            return error.MakeFailed;
        },
    };
}

@@ -192,10 +197,14 @@ pub fn evalChildProcess(s: *Step, argv: []const []const u8) !void {
}

pub fn fail(step: *Step, comptime fmt: []const u8, args: anytype) error{ OutOfMemory, MakeFailed } {
    try step.addError(fmt, args);
    return error.MakeFailed;
}

pub fn addError(step: *Step, comptime fmt: []const u8, args: anytype) error{OutOfMemory}!void {
    const arena = step.owner.allocator;
    const msg = try std.fmt.allocPrint(arena, fmt, args);
    try step.result_error_msgs.append(arena, msg);
    return error.MakeFailed;
}
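With `fail`, `addError`, and the new `error.MakeSkipped`, a custom step's
make function reports problems through the step itself instead of printing
to stderr. A minimal sketch of a hypothetical step implementation (the
input path and messages are placeholders, not part of this commit):

    const std = @import("std");
    const Step = std.Build.Step;

    fn make(step: *Step, prog_node: *std.Progress.Node) !void {
        _ = prog_node;
        const b = step.owner;
        const path = b.pathFromRoot("fixtures/expected.txt"); // hypothetical input

        const contents = std.fs.cwd().readFileAlloc(b.allocator, path, 1024 * 1024) catch |err| {
            // Recorded in step.result_error_msgs and shown in the build summary.
            return step.fail("unable to read '{s}': {s}", .{ path, @errorName(err) });
        };
        if (contents.len == 0) {
            // Marks the step as skipped: the build does not fail and dependants still run.
            return error.MakeSkipped;
        }
    }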
/// Assumes that argv contains `--listen=-` and that the process being spawned

@@ -398,5 +407,5 @@ fn failWithCacheError(s: *Step, man: *const std.Build.Cache.Manifest, err: anyer
    const i = man.failed_file_index orelse return err;
    const pp = man.files.items[i].prefixed_path orelse return err;
    const prefix = man.cache.prefixes()[pp.prefix].path orelse "";
    return s.fail("{s}: {s}/{s}\n", .{ @errorName(err), prefix, pp.sub_path });
    return s.fail("{s}: {s}/{s}", .{ @errorName(err), prefix, pp.sub_path });
}
@ -37,7 +37,7 @@ pub fn init(owner: *std.Build) WriteFileStep {
|
|||
return .{
|
||||
.step = Step.init(.{
|
||||
.id = .write_file,
|
||||
.name = "writefile",
|
||||
.name = "WriteFile",
|
||||
.owner = owner,
|
||||
.makeFn = make,
|
||||
}),
|
||||
|
|
@ -56,6 +56,8 @@ pub fn add(wf: *WriteFileStep, sub_path: []const u8, bytes: []const u8) void {
|
|||
.contents = .{ .bytes = b.dupe(bytes) },
|
||||
};
|
||||
wf.files.append(gpa, file) catch @panic("OOM");
|
||||
|
||||
wf.maybeUpdateName();
|
||||
}
|
||||
|
||||
/// Place the file into the generated directory within the local cache,
|
||||
|
|
@ -75,6 +77,8 @@ pub fn addCopyFile(wf: *WriteFileStep, source: std.Build.FileSource, sub_path: [
|
|||
.contents = .{ .copy = source },
|
||||
};
|
||||
wf.files.append(gpa, file) catch @panic("OOM");
|
||||
|
||||
wf.maybeUpdateName();
|
||||
}
|
||||
|
||||
/// A path relative to the package root.
|
||||
|
|
@ -101,6 +105,15 @@ pub fn getFileSource(wf: *WriteFileStep, sub_path: []const u8) ?std.Build.FileSo
|
|||
return null;
|
||||
}
|
||||
|
||||
fn maybeUpdateName(wf: *WriteFileStep) void {
|
||||
if (wf.files.items.len == 1) {
|
||||
// First time adding a file; update name.
|
||||
if (std.mem.eql(u8, wf.step.name, "WriteFile")) {
|
||||
wf.step.name = wf.step.owner.fmt("WriteFile {s}", .{wf.files.items[0].sub_path});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
|
|
@ -110,14 +123,39 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
// WriteFileStep - arguably it should be a different step. But anyway here
|
||||
// it is, it happens unconditionally and does not interact with the other
|
||||
// files here.
|
||||
var any_miss = false;
|
||||
for (wf.output_source_files.items) |output_source_file| {
|
||||
const basename = fs.path.basename(output_source_file.sub_path);
|
||||
if (fs.path.dirname(output_source_file.sub_path)) |dirname| {
|
||||
var dir = try b.build_root.handle.makeOpenPath(dirname, .{});
|
||||
defer dir.close();
|
||||
try writeFile(wf, dir, output_source_file.contents, basename);
|
||||
} else {
|
||||
try writeFile(wf, b.build_root.handle, output_source_file.contents, basename);
|
||||
b.build_root.handle.makePath(dirname) catch |err| {
|
||||
return step.fail("unable to make path '{}{s}': {s}", .{
|
||||
b.build_root, dirname, @errorName(err),
|
||||
});
|
||||
};
|
||||
}
|
||||
switch (output_source_file.contents) {
|
||||
.bytes => |bytes| {
|
||||
b.build_root.handle.writeFile(output_source_file.sub_path, bytes) catch |err| {
|
||||
return step.fail("unable to write file '{}{s}': {s}", .{
|
||||
b.build_root, output_source_file.sub_path, @errorName(err),
|
||||
});
|
||||
};
|
||||
any_miss = true;
|
||||
},
|
||||
.copy => |file_source| {
|
||||
const source_path = file_source.getPath(b);
|
||||
const prev_status = fs.Dir.updateFile(
|
||||
fs.cwd(),
|
||||
source_path,
|
||||
b.build_root.handle,
|
||||
output_source_file.sub_path,
|
||||
.{},
|
||||
) catch |err| {
|
||||
return step.fail("unable to update file from '{s}' to '{}{s}': {s}", .{
|
||||
source_path, b.build_root, output_source_file.sub_path, @errorName(err),
|
||||
});
|
||||
};
|
||||
any_miss = any_miss or prev_status == .stale;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@@ -164,19 +202,52 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
const cache_path = "o" ++ fs.path.sep_str ++ digest;

var cache_dir = b.cache_root.handle.makeOpenPath(cache_path, .{}) catch |err| {
std.debug.print("unable to make path {s}: {s}\n", .{ cache_path, @errorName(err) });
return err;
return step.fail("unable to make path '{}{s}': {s}", .{
b.cache_root, cache_path, @errorName(err),
});
};
defer cache_dir.close();

for (wf.files.items) |file| {
const basename = fs.path.basename(file.sub_path);
if (fs.path.dirname(file.sub_path)) |dirname| {
var dir = try b.cache_root.handle.makeOpenPath(dirname, .{});
defer dir.close();
try writeFile(wf, dir, file.contents, basename);
} else {
try writeFile(wf, cache_dir, file.contents, basename);
cache_dir.makePath(dirname) catch |err| {
return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{
b.cache_root, cache_path, fs.path.sep, dirname, @errorName(err),
});
};
}
switch (file.contents) {
.bytes => |bytes| {
cache_dir.writeFile(file.sub_path, bytes) catch |err| {
return step.fail("unable to write file '{}{s}{c}{s}': {s}", .{
b.cache_root, cache_path, fs.path.sep, file.sub_path, @errorName(err),
});
};
},
.copy => |file_source| {
const source_path = file_source.getPath(b);
const prev_status = fs.Dir.updateFile(
fs.cwd(),
source_path,
cache_dir,
file.sub_path,
.{},
) catch |err| {
return step.fail("unable to update file from '{s}' to '{}{s}{c}{s}': {s}", .{
source_path,
b.cache_root,
cache_path,
fs.path.sep,
file.sub_path,
@errorName(err),
});
};
// At this point we already will mark the step as a cache miss.
// But this is kind of a partial cache hit since individual
// file copies may be avoided. Oh well, this information is
// discarded.
_ = prev_status;
},
}

file.generated_file.path = try b.cache_root.join(
@@ -188,19 +259,6 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
try man.writeManifest();
}

fn writeFile(wf: *WriteFileStep, dir: fs.Dir, contents: Contents, basename: []const u8) !void {
const b = wf.step.owner;
// TODO after landing concurrency PR, improve error reporting here
switch (contents) {
.bytes => |bytes| return dir.writeFile(basename, bytes),
.copy => |file_source| {
const source_path = file_source.getPath(b);
const prev_status = try fs.Dir.updateFile(fs.cwd(), source_path, dir, basename, .{});
_ = prev_status; // TODO logging (affected by open PR regarding concurrency)
},
}
}

const std = @import("../std.zig");
const Step = std.Build.Step;
const fs = std.fs;
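The WriteFileStep hunks above establish the convention this commit applies everywhere: instead of printing to stderr and returning the raw error, a make() implementation attaches a formatted message to the step via step.fail and returns that. A minimal sketch of the before/after, using a hypothetical file write purely for illustration (the make() signature, step.owner, and the step.fail call shape are taken from the diff; the path is made up):

```zig
fn make(step: *std.Build.Step, prog_node: *std.Progress.Node) anyerror!void {
    _ = prog_node;
    const b = step.owner;

    // Old style (removed in this commit):
    //   std.debug.print("unable to write file {s}: {s}\n", .{ path, @errorName(err) });
    //   return err;
    //
    // New style: the failure is recorded on the step itself.
    b.build_root.handle.writeFile("example.txt", "hello\n") catch |err| {
        return step.fail("unable to write file '{}{s}': {s}", .{
            b.build_root, "example.txt", @errorName(err),
        });
    };
}
```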
@@ -185,7 +185,6 @@ pub const ChildProcess = struct {
}

/// Blocks until child process terminates and then cleans up all resources.
/// TODO: set the pid to undefined in this function.
pub fn wait(self: *ChildProcess) !Term {
const term = if (builtin.os.tag == .windows)
try self.waitWindows()
@@ -13,6 +13,8 @@ pub fn build(b: *std.Build) void {
.target = target,
});

const run = exe.runEmulatable();
const run = b.addRunArtifact(exe);
run.skip_foreign_checks = true;
run.expectStdOutEqual("");
test_step.dependOn(&run.step);
}
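The same migration repeats across the link test build scripts below: the removed EmulatableRunStep/runEmulatable API is replaced by a plain b.addRunArtifact(exe) with skip_foreign_checks set, which, as the flag name suggests, skips the output checks when the artifact cannot be run on the build host. A self-contained sketch of the new pattern (the executable name and root source file are placeholders, not from this commit):

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    const test_step = b.step("test", "Run the test");

    // Placeholder artifact; the name and root source file are illustrative only.
    const exe = b.addExecutable(.{
        .name = "demo",
        .root_source_file = .{ .path = "main.zig" },
        .target = b.standardTargetOptions(.{}),
        .optimize = b.standardOptimizeOption(.{}),
    });

    const run = b.addRunArtifact(exe);
    // If the binary is foreign to the host, skip the checks instead of
    // failing the build.
    run.skip_foreign_checks = true;
    run.expectStdOutEqual("Hello!\n");
    test_step.dependOn(&run.step);
}
```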
@@ -16,7 +16,8 @@ pub fn build(b: *std.Build) void {
exe.addCSourceFile("empty.c", &[0][]const u8{});
exe.linkLibC();

const run_cmd = std.Build.EmulatableRunStep.create(b, "run", exe);
const run_cmd = b.addRunArtifact(exe);
run_cmd.skip_foreign_checks = true;
run_cmd.expectStdOutEqual("Hello!\n");
test_step.dependOn(&run_cmd.step);
}
@@ -36,5 +36,6 @@ pub fn build(b: *std.Build) void {
check.checkNext("name @rpath/liba.dylib");

const run_cmd = check.runAndCompare();
run_cmd.expectStdOutEqual("");
test_step.dependOn(&run_cmd.step);
}
@@ -17,6 +17,8 @@ pub fn build(b: *std.Build) void {
// populate paths to the sysroot here.
exe.linkFramework("Foundation");

const run_cmd = std.Build.EmulatableRunStep.create(b, "run", exe);
const run_cmd = b.addRunArtifact(exe);
run_cmd.skip_foreign_checks = true;
run_cmd.expectStdOutEqual("");
test_step.dependOn(&run_cmd.step);
}
@@ -27,7 +27,8 @@ pub fn build(b: *std.Build) void {
const exe = createScenario(b, optimize, target);
exe.search_strategy = .paths_first;

const run = std.Build.EmulatableRunStep.create(b, "run", exe);
const run = b.addRunArtifact(exe);
run.skip_foreign_checks = true;
run.cwd = b.pathFromRoot(".");
run.expectStdOutEqual("Hello world");
test_step.dependOn(&run.step);
@@ -21,5 +21,6 @@ pub fn build(b: *std.Build) void {
check_exe.checkNext("stacksize 100000000");

const run = check_exe.runAndCompare();
run.expectStdOutEqual("");
test_step.dependOn(&run.step);
}
@@ -1,5 +1,4 @@
const std = @import("std");
const Builder = std.Build.Builder;
const CompileStep = std.Build.CompileStep;
const FileSource = std.Build.FileSource;
const Step = std.Build.Step;
@@ -38,13 +37,15 @@ fn testUuid(
// stay the same across builds.
{
const dylib = simpleDylib(b, optimize, target);
const install_step = installWithRename(dylib, "test1.dylib");
const install_step = b.addInstallArtifact(dylib);
install_step.dest_sub_path = "test1.dylib";
install_step.step.dependOn(&dylib.step);
}
{
const dylib = simpleDylib(b, optimize, target);
dylib.strip = true;
const install_step = installWithRename(dylib, "test2.dylib");
const install_step = b.addInstallArtifact(dylib);
install_step.dest_sub_path = "test2.dylib";
install_step.step.dependOn(&dylib.step);
}
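This hunk swaps the test's hand-rolled rename-on-install step for the stock InstallArtifactStep: b.addInstallArtifact plus the new dest_sub_path field picks the installed name. A small self-contained sketch under assumed names (the executable and the chosen sub-path are placeholders; only addInstallArtifact and dest_sub_path come from the diff):

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    // Placeholder artifact, used only to demonstrate the install rename.
    const exe = b.addExecutable(.{
        .name = "demo",
        .root_source_file = .{ .path = "main.zig" },
        .target = b.standardTargetOptions(.{}),
        .optimize = b.standardOptimizeOption(.{}),
    });

    const install = b.addInstallArtifact(exe);
    // Override the installed name, replacing the custom InstallWithRename
    // step that the next hunk deletes.
    install.dest_sub_path = "demo-renamed";
    b.getInstallStep().dependOn(&install.step);
}
```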
@@ -68,70 +69,23 @@ fn simpleDylib(
return dylib;
}

fn installWithRename(cs: *CompileStep, name: []const u8) *InstallWithRename {
const step = InstallWithRename.create(cs.builder, cs.getOutputSource(), name);
cs.builder.getInstallStep().dependOn(&step.step);
return step;
}

const InstallWithRename = struct {
pub const base_id = .custom;

step: Step,
builder: *Builder,
source: FileSource,
name: []const u8,

pub fn create(
builder: *Builder,
source: FileSource,
name: []const u8,
) *InstallWithRename {
const self = builder.allocator.create(InstallWithRename) catch @panic("OOM");
self.* = InstallWithRename{
.builder = builder,
.step = Step.init(builder.allocator, .{
.id = .custom,
.name = builder.fmt("install and rename: {s} -> {s}", .{
source.getDisplayName(), name,
}),
.makeFn = make,
}),
.source = source,
.name = builder.dupe(name),
};
return self;
}

fn make(step: *Step) anyerror!void {
const self = @fieldParentPtr(InstallWithRename, "step", step);
const source_path = self.source.getPath(self.builder);
const target_path = self.builder.getInstallPath(.lib, self.name);
self.builder.updateFile(source_path, target_path) catch |err| {
std.log.err("Unable to rename: {s} -> {s}", .{ source_path, target_path });
return err;
};
}
};

const CompareUuid = struct {
pub const base_id = .custom;

step: Step,
builder: *Builder,
lhs: []const u8,
rhs: []const u8,

pub fn create(builder: *Builder, lhs: []const u8, rhs: []const u8) *CompareUuid {
const self = builder.allocator.create(CompareUuid) catch @panic("OOM");
pub fn create(owner: *std.Build, lhs: []const u8, rhs: []const u8) *CompareUuid {
const self = owner.allocator.create(CompareUuid) catch @panic("OOM");
self.* = CompareUuid{
.builder = builder,
.step = Step.init(builder.allocator, .{
.id = .custom,
.name = builder.fmt("compare uuid: {s} and {s}", .{
.step = Step.init(.{
.id = base_id,
.name = owner.fmt("compare uuid: {s} and {s}", .{
lhs,
rhs,
}),
.owner = owner,
.makeFn = make,
}),
.lhs = lhs,
@@ -140,16 +94,18 @@ const CompareUuid = struct {
return self;
}

fn make(step: *Step) anyerror!void {
fn make(step: *Step, prog_node: *std.Progress.Node) anyerror!void {
_ = prog_node;
const b = step.owner;
const self = @fieldParentPtr(CompareUuid, "step", step);
const gpa = self.builder.allocator;
const gpa = b.allocator;

var lhs_uuid: [16]u8 = undefined;
const lhs_path = self.builder.getInstallPath(.lib, self.lhs);
const lhs_path = b.getInstallPath(.lib, self.lhs);
try parseUuid(gpa, lhs_path, &lhs_uuid);

var rhs_uuid: [16]u8 = undefined;
const rhs_path = self.builder.getInstallPath(.lib, self.rhs);
const rhs_path = b.getInstallPath(.lib, self.rhs);
try parseUuid(gpa, rhs_path, &rhs_uuid);

try std.testing.expectEqualStrings(&lhs_uuid, &rhs_uuid);
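The CompareUuid changes above illustrate the full migration recipe for custom steps: Step.init now takes a single options struct that includes .owner, and make receives a progress node alongside the step pointer. A condensed skeleton of a custom step written against the new API (the step name and body are hypothetical; the Step.init fields, make signature, step.owner, step.fail, and the @fieldParentPtr retrieval all appear in the diff above):

```zig
const std = @import("std");
const Step = std.Build.Step;

const MyStep = struct {
    pub const base_id = .custom;

    step: Step,
    message: []const u8,

    pub fn create(owner: *std.Build, message: []const u8) *MyStep {
        const self = owner.allocator.create(MyStep) catch @panic("OOM");
        self.* = .{
            .step = Step.init(.{
                .id = base_id,
                .name = owner.fmt("my step: {s}", .{message}),
                .owner = owner,
                .makeFn = make,
            }),
            .message = owner.dupe(message),
        };
        return self;
    }

    fn make(step: *Step, prog_node: *std.Progress.Node) anyerror!void {
        _ = prog_node;
        const self = @fieldParentPtr(MyStep, "step", step);
        // Failures are reported through the step, never printed to stderr.
        if (self.message.len == 0) {
            return step.fail("empty message for step '{s}'", .{step.name});
        }
    }
};
```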
test/link/wasm/extern/build.zig (3 changes, vendored)
@@ -11,7 +11,8 @@ pub fn build(b: *std.Build) void {
exe.use_llvm = false;
exe.use_lld = false;

const run = exe.runEmulatable();
const run = b.addRunArtifact(exe);
run.skip_foreign_checks = true;
run.expectStdOutEqual("Result: 30");

const test_step = b.step("test", "Run linker test");