Build: cleanup
* `doc/langref` formatting
* upgrade `.{ .path = "..." }` to `b.path("...")`
* avoid using arguments named `self`
* make `Build.Step.Id` usage more consistent
* add `Build.pathResolve`
* use `pathJoin` and `pathResolve` everywhere
* make sure `Build.LazyPath.getPath2` returns an absolute path
parent d582575aba
commit e3424332d3

59 changed files with 1306 additions and 1311 deletions
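The build.zig-facing changes follow two patterns that repeat throughout the hunks below: hard-coded `.{ .path = "..." }` literals become `b.path("...")` calls, and `*Build` methods stop naming their receiver `self`. As a rough illustration only (the file name `main.zig` is made up, not taken from this commit), a downstream build.zig migrates like this:

const std = @import("std");

pub fn build(b: *std.Build) void {
    const exe = b.addExecutable(.{
        .name = "example",
        // before this change: .root_source_file = .{ .path = "main.zig" },
        .root_source_file = b.path("main.zig"),
        .optimize = b.standardOptimizeOption(.{}),
    });
    b.installArtifact(exe);
}

`b.path` returns a `LazyPath` owned by the builder, so a relative file name is later resolved against that package's own build root rather than against whatever directory the build runner happens to run in.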
@@ -15,44 +15,44 @@ pub fn syscall1(number: usize, arg1: usize) usize {
// the below code, this is not used. A literal `%` can be
// obtained by escaping it with a double percent: `%%`.
// Often multiline string syntax comes in handy here.
\\syscall
// Next is the output. It is possible in the future Zig will
// support multiple outputs, depending on how
// https://github.com/ziglang/zig/issues/215 is resolved.
// It is allowed for there to be no outputs, in which case
// this colon would be directly followed by the colon for the inputs.
:
// This specifies the name to be used in `%[ret]` syntax in
// the above assembly string. This example does not use it,
// but the syntax is mandatory.
[ret]
// Next is the output constraint string. This feature is still
// considered unstable in Zig, and so LLVM/GCC documentation
// must be used to understand the semantics.
// http://releases.llvm.org/10.0.0/docs/LangRef.html#inline-asm-constraint-string
// https://gcc.gnu.org/onlinedocs/gcc/Extended-Asm.html
// In this example, the constraint string means "the result value of
// this inline assembly instruction is whatever is in $rax".
"={rax}"
// Next is either a value binding, or `->` and then a type. The
// type is the result type of the inline assembly expression.
// If it is a value binding, then `%[ret]` syntax would be used
// to refer to the register bound to the value.
(-> usize),
// Next is the list of inputs.
// The constraint for these inputs means, "when the assembly code is
// executed, $rax shall have the value of `number` and $rdi shall have
// the value of `arg1`". Any number of input parameters is allowed,
// including none.
: [number] "{rax}" (number),
[arg1] "{rdi}" (arg1),
// Next is the list of clobbers. These declare a set of registers whose
// values will not be preserved by the execution of this assembly code.
// These do not include output or input registers. The special clobber
// value of "memory" means that the assembly writes to arbitrary undeclared
// memory locations - not only the memory pointed to by a declared indirect
// output. In this example we list $rcx and $r11 because it is known the
// kernel syscall does not preserve these registers.
: "rcx", "r11"
);
}
@@ -4,7 +4,7 @@ pub fn build(b: *std.Build) void {
const optimize = b.standardOptimizeOption(.{});
const exe = b.addExecutable(.{
.name = "example",
- .root_source_file = .{ .path = "example.zig" },
+ .root_source_file = b.path("example.zig"),
.optimize = optimize,
});
b.default_step.dependOn(&exe.step);
@@ -3,13 +3,13 @@ const std = @import("std");
pub fn build(b: *std.Build) void {
const lib = b.addSharedLibrary(.{
.name = "mathtest",
- .root_source_file = .{ .path = "mathtest.zig" },
+ .root_source_file = b.path("mathtest.zig"),
.version = .{ .major = 1, .minor = 0, .patch = 0 },
});
const exe = b.addExecutable(.{
.name = "test",
});
- exe.addCSourceFile(.{ .file = .{ .path = "test.c" }, .flags = &.{"-std=c99"} });
+ exe.addCSourceFile(.{ .file = b.path("test.c"), .flags = &.{"-std=c99"} });
exe.linkLibrary(lib);
exe.linkSystemLibrary("c");
@@ -3,13 +3,13 @@ const std = @import("std");
pub fn build(b: *std.Build) void {
const obj = b.addObject(.{
.name = "base64",
- .root_source_file = .{ .path = "base64.zig" },
+ .root_source_file = b.path("base64.zig"),
});

const exe = b.addExecutable(.{
.name = "test",
});
- exe.addCSourceFile(.{ .file = .{ .path = "test.c" }, .flags = &.{"-std=c99",} });
+ exe.addCSourceFile(.{ .file = b.path("test.c"), .flags = &.{"-std=c99"} });
exe.addObject(obj);
exe.linkSystemLibrary("c");
b.installArtifact(exe);
@@ -1,11 +1,13 @@
- const Foo = struct{};
- fn doSomethingWithFoo(foo: *Foo) void { _ = foo; }
+ const Foo = struct {};
+ fn doSomethingWithFoo(foo: *Foo) void {
+ _ = foo;
+ }

fn doAThing(optional_foo: ?*Foo) void {
// do some stuff

if (optional_foo) |foo| {
doSomethingWithFoo(foo);
}

// do some stuff
@@ -2,7 +2,7 @@
/// multiline doc comment).
const Timestamp = struct {
/// The number of seconds since the epoch (this is also a doc comment).
seconds: i64, // signed so we can represent pre-1970 (not a doc comment)
/// The number of nanoseconds past the second (doc comment again).
nanos: u32,
@@ -1,4 +1,6 @@
const Foo = enum(c_int) { a, b, c };
- export fn entry(foo: Foo) void { _ = foo; }
+ export fn entry(foo: Foo) void {
+ _ = foo;
+ }

// obj

@@ -1,4 +1,6 @@
const Foo = enum { a, b, c };
- export fn entry(foo: Foo) void { _ = foo; }
+ export fn entry(foo: Foo) void {
+ _ = foo;
+ }

// obj=parameter of type 'enum_export_error.Foo' not allowed in function with calling convention 'C'
@@ -26,9 +26,9 @@ pub fn parseU64(buf: []const u8, radix: u8) !u64 {

fn charToDigit(c: u8) u8 {
return switch (c) {
- '0' ... '9' => c - '0',
- 'A' ... 'Z' => c - 'A' + 10,
- 'a' ... 'z' => c - 'a' + 10,
+ '0'...'9' => c - '0',
+ 'A'...'Z' => c - 'A' + 10,
+ 'a'...'z' => c - 'a' + 10,
else => maxInt(u8),
};
}
@@ -6,8 +6,8 @@ pub extern "c" fn @"error"() void;
pub extern "c" fn @"fstat$INODE64"(fd: c.fd_t, buf: *c.Stat) c_int;

const Color = enum {
red,
@"really red",
};
const color: Color = .@"really red";
@@ -4,7 +4,7 @@ const a_number: i32 = 1234;
const a_string = "foobar";

pub fn main() void {
- print("here is a string: '{s}' here is a number: {}\n", .{a_string, a_number});
+ print("here is a string: '{s}' here is a number: {}\n", .{ a_string, a_number });
}

// exe=succeed

@@ -5,7 +5,7 @@ const a_string = "foobar";
const fmt = "here is a string: '{s}' here is a number: {}\n";

pub fn main() void {
- print(fmt, .{a_string, a_number});
+ print(fmt, .{ a_string, a_number });
}

// exe=succeed
@@ -1,3 +1,3 @@
- const err = (error {FileNotFound}).FileNotFound;
+ const err = (error{FileNotFound}).FileNotFound;

// syntax
@@ -3,19 +3,19 @@ const mem = @import("std").mem; // will be used to compare bytes

pub fn main() void {
const bytes = "hello";
print("{}\n", .{@TypeOf(bytes)}); // *const [5:0]u8
print("{d}\n", .{bytes.len}); // 5
print("{c}\n", .{bytes[1]}); // 'e'
print("{d}\n", .{bytes[5]}); // 0
print("{}\n", .{'e' == '\x65'}); // true
print("{d}\n", .{'\u{1f4a9}'}); // 128169
print("{d}\n", .{'💯'}); // 128175
print("{u}\n", .{'⚡'});
print("{}\n", .{mem.eql(u8, "hello", "h\x65llo")}); // true
print("{}\n", .{mem.eql(u8, "💯", "\xf0\x9f\x92\xaf")}); // also true
const invalid_utf8 = "\xff\xfe"; // non-UTF-8 strings are possible with \xNN notation.
print("0x{x}\n", .{invalid_utf8[1]}); // indexing them returns individual bytes...
print("0x{x}\n", .{"💯"[1]}); // ...as does indexing part-way through non-ASCII characters
}

// exe=succeed
@@ -1,7 +1,7 @@
const expect = @import("std").testing.expect;

test "noinline function call" {
- try expect(@call(.auto, add, .{3, 9}) == 12);
+ try expect(@call(.auto, add, .{ 3, 9 }) == 12);
}

fn add(a: i32, b: i32) i32 {
@@ -1,12 +1,12 @@
const std = @import("std");

- const FileOpenError = error {
+ const FileOpenError = error{
AccessDenied,
OutOfMemory,
FileNotFound,
};

- const AllocationError = error {
+ const AllocationError = error{
OutOfMemory,
};

@@ -1,10 +1,10 @@
- const FileOpenError = error {
+ const FileOpenError = error{
AccessDenied,
OutOfMemory,
FileNotFound,
};

- const AllocationError = error {
+ const AllocationError = error{
OutOfMemory,
};
@@ -1,11 +1,11 @@
const std = @import("std");
const expect = std.testing.expect;

- const Tuple = struct{ u8, u8 };
+ const Tuple = struct { u8, u8 };
test "coercion from homogenous tuple to array" {
- const tuple: Tuple = .{5, 6};
+ const tuple: Tuple = .{ 5, 6 };
const array: [2]u8 = tuple;
_ = array;
}

// test
@@ -2,17 +2,23 @@ const expect = @import("std").testing.expect;

const CmdFn = struct {
name: []const u8,
- func: fn(i32) i32,
+ func: fn (i32) i32,
};

const cmd_fns = [_]CmdFn{
- CmdFn {.name = "one", .func = one},
- CmdFn {.name = "two", .func = two},
- CmdFn {.name = "three", .func = three},
+ CmdFn{ .name = "one", .func = one },
+ CmdFn{ .name = "two", .func = two },
+ CmdFn{ .name = "three", .func = three },
};
- fn one(value: i32) i32 { return value + 1; }
- fn two(value: i32) i32 { return value + 2; }
- fn three(value: i32) i32 { return value + 3; }
+ fn one(value: i32) i32 {
+ return value + 1;
+ }
+ fn two(value: i32) i32 {
+ return value + 2;
+ }
+ fn three(value: i32) i32 {
+ return value + 3;
+ }

fn performFn(comptime prefix_char: u8, start_value: i32) i32 {
var result: i32 = start_value;
@@ -1,9 +1,7 @@
const std = @import("std");
const Allocator = std.mem.Allocator;

- const Foo = struct {
- data: *u32
- };
+ const Foo = struct { data: *u32 };

fn getData() !u32 {
return 666;

@@ -1,9 +1,7 @@
const std = @import("std");
const Allocator = std.mem.Allocator;

- const Foo = struct {
- data: *u32
- };
+ const Foo = struct { data: *u32 };

fn getData() !u32 {
return 666;

@@ -19,7 +17,7 @@ fn genFoos(allocator: Allocator, num: usize) ![]Foo {
errdefer allocator.destroy(foo.data);

// The data for the first 3 foos will be leaked
- if(i >= 3) return error.TooManyFoos;
+ if (i >= 3) return error.TooManyFoos;

foo.data.* = try getData();
}
@@ -1,7 +1,7 @@
const expect = @import("std").testing.expect;

test "for basics" {
- const items = [_]i32 { 4, 5, 3, 4, 0 };
+ const items = [_]i32{ 4, 5, 3, 4, 0 };
var sum: i32 = 0;

// For loops iterate over slices and arrays.

@@ -31,7 +31,7 @@ test "for basics" {

// To iterate over consecutive integers, use the range syntax.
// Unbounded range is always a compile error.
- var sum3 : usize = 0;
+ var sum3: usize = 0;
for (0..5) |i| {
sum3 += i;
}
@@ -14,7 +14,9 @@ fn add(a: i8, b: i8) i8 {

// The export specifier makes a function externally visible in the generated
// object file, and makes it use the C ABI.
- export fn sub(a: i8, b: i8) i8 { return a - b; }
+ export fn sub(a: i8, b: i8) i8 {
+ return a - b;
+ }

// The extern specifier is used to declare a function that will be resolved
// at link time, when linking statically, or at runtime, when linking

@@ -39,13 +41,15 @@ fn _start() callconv(.Naked) noreturn {

// The inline calling convention forces a function to be inlined at all call sites.
// If the function cannot be inlined, it is a compile-time error.
- fn shiftLeftOne(a: u32) callconv(.Inline) u32 {
+ inline fn shiftLeftOne(a: u32) u32 {
return a << 1;
}

// The pub specifier allows the function to be visible when importing.
// Another file can use @import and call sub2
- pub fn sub2(a: i8, b: i8) i8 { return a - b; }
+ pub fn sub2(a: i8, b: i8) i8 {
+ return a - b;
+ }

// Function pointers are prefixed with `*const `.
const Call2Op = *const fn (a: i8, b: i8) i8;
@@ -12,7 +12,7 @@ pub fn add_explicit(comptime T: type, a: T, b: T) Error!T {
return ov[0];
}

- const Error = error {
+ const Error = error{
Overflow,
};
@@ -1,7 +1,7 @@
const expect = @import("std").testing.expect;

test "inline for loop" {
- const nums = [_]i32{2, 4, 6};
+ const nums = [_]i32{ 2, 4, 6 };
var sum: usize = 0;
inline for (nums) |i| {
const T = switch (i) {
@@ -15,7 +15,7 @@ fn getNum(u: U) u32 {
return @intFromFloat(num);
}
return num;
}
},
}
}
@@ -2,7 +2,7 @@ const std = @import("std");
const expect = std.testing.expect;

test "0-terminated sentinel array" {
- const array = [_:0]u8 {1, 2, 3, 4};
+ const array = [_:0]u8{ 1, 2, 3, 4 };

try expect(@TypeOf(array) == [4:0]u8);
try expect(array.len == 4);

@@ -11,7 +11,7 @@ test "0-terminated sentinel array" {

test "extra 0s in 0-terminated sentinel array" {
// The sentinel value may appear earlier, but does not influence the compile-time 'len'.
- const array = [_:0]u8 {1, 0, 0, 4};
+ const array = [_:0]u8{ 1, 0, 0, 4 };

try expect(@TypeOf(array) == [4:0]u8);
try expect(array.len == 4);
@@ -1,7 +1,7 @@
const std = @import("std");
const expect = std.testing.expect;

- const Point = struct {x: i32, y: i32};
+ const Point = struct { x: i32, y: i32 };

test "anonymous struct literal" {
const pt: Point = .{
@@ -13,15 +13,14 @@ const Point2 = packed struct {
y: f32,
};

// Declare an instance of a struct.
- const p = Point {
+ const p = Point{
.x = 0.12,
.y = 0.34,
};

// Maybe we're not ready to fill out some of the fields.
- var p2 = Point {
+ var p2 = Point{
.x = 0.12,
.y = undefined,
};

@@ -35,7 +34,7 @@ const Vec3 = struct {
z: f32,

pub fn init(x: f32, y: f32, z: f32) Vec3 {
- return Vec3 {
+ return Vec3{
.x = x,
.y = y,
.z = z,

@@ -69,7 +68,7 @@ test "struct namespaced variable" {
try expect(@sizeOf(Empty) == 0);

// you can still instantiate an empty struct
- const does_nothing = Empty {};
+ const does_nothing = Empty{};

_ = does_nothing;
}

@@ -81,7 +80,7 @@ fn setYBasedOnX(x: *f32, y: f32) void {
point.y = y;
}
test "field parent pointer" {
- var point = Point {
+ var point = Point{
.x = 0.1234,
.y = 0.5678,
};
@@ -100,8 +99,8 @@ fn LinkedList(comptime T: type) type {
};

first: ?*Node,
last: ?*Node,
len: usize,
};
}
@@ -12,8 +12,7 @@ test "switch on non-exhaustive enum" {
const number = Number.one;
const result = switch (number) {
.one => true,
- .two,
- .three => false,
+ .two, .three => false,
_ => false,
};
try expect(result);
@@ -5,10 +5,7 @@ test "try to pass a runtime type" {
foo(false);
}
fn foo(condition: bool) void {
- const result = max(
- if (condition) f32 else u64,
- 1234,
- 5678);
+ const result = max(if (condition) f32 else u64, 1234, 5678);
_ = result;
}
@@ -9,7 +9,10 @@ test "while loop continue expression" {
test "while loop continue expression, more complicated" {
var i: usize = 1;
var j: usize = 1;
- while (i * j < 2000) : ({ i *= 2; j *= 3; }) {
+ while (i * j < 2000) : ({
+ i *= 2;
+ j *= 3;
+ }) {
const my_ij = i * j;
try expect(my_ij < 2000);
}
@@ -39,7 +39,9 @@ pub fn main() void {
var number_or_error: anyerror!i32 = error.ArgNotFound;

print("\nerror union 1\ntype: {}\nvalue: {!}\n", .{
- @TypeOf(number_or_error), number_or_error, });
+ @TypeOf(number_or_error),
+ number_or_error,
+ });

number_or_error = 1234;
@@ -13,8 +13,7 @@ const Allocator = mem.Allocator;
const Target = std.Target;
const process = std.process;
const EnvMap = std.process.EnvMap;
- const fmt_lib = std.fmt;
- const File = std.fs.File;
+ const File = fs.File;
const Sha256 = std.crypto.hash.sha2.Sha256;
const Build = @This();
@@ -149,15 +148,14 @@ const InitializedDepKey = struct {
const InitializedDepContext = struct {
allocator: Allocator,

- pub fn hash(self: @This(), k: InitializedDepKey) u64 {
+ pub fn hash(ctx: @This(), k: InitializedDepKey) u64 {
var hasher = std.hash.Wyhash.init(0);
hasher.update(k.build_root_string);
- hashUserInputOptionsMap(self.allocator, k.user_input_options, &hasher);
+ hashUserInputOptionsMap(ctx.allocator, k.user_input_options, &hasher);
return hasher.final();
}

- pub fn eql(self: @This(), lhs: InitializedDepKey, rhs: InitializedDepKey) bool {
- _ = self;
+ pub fn eql(_: @This(), lhs: InitializedDepKey, rhs: InitializedDepKey) bool {
if (!std.mem.eql(u8, lhs.build_root_string, rhs.build_root_string))
return false;
@@ -229,7 +227,7 @@ const TypeId = enum {
};

const TopLevelStep = struct {
- pub const base_id = .top_level;
+ pub const base_id: Step.Id = .top_level;

step: Step,
description: []const u8,
@@ -251,8 +249,8 @@ pub fn create(
const initialized_deps = try arena.create(InitializedDepMap);
initialized_deps.* = InitializedDepMap.initContext(arena, .{ .allocator = arena });

- const self = try arena.create(Build);
- self.* = .{
+ const b = try arena.create(Build);
+ b.* = .{
.graph = graph,
.build_root = build_root,
.cache_root = cache_root,

@@ -280,17 +278,17 @@ pub fn create(
.installed_files = ArrayList(InstalledFile).init(arena),
.install_tls = .{
.step = Step.init(.{
- .id = .top_level,
+ .id = TopLevelStep.base_id,
.name = "install",
- .owner = self,
+ .owner = b,
}),
.description = "Copy build artifacts to prefix path",
},
.uninstall_tls = .{
.step = Step.init(.{
- .id = .top_level,
+ .id = TopLevelStep.base_id,
.name = "uninstall",
- .owner = self,
+ .owner = b,
.makeFn = makeUninstall,
}),
.description = "Remove build artifacts from prefix path",

@@ -306,10 +304,10 @@ pub fn create(
.available_deps = available_deps,
.release_mode = .off,
};
- try self.top_level_steps.put(arena, self.install_tls.step.name, &self.install_tls);
- try self.top_level_steps.put(arena, self.uninstall_tls.step.name, &self.uninstall_tls);
- self.default_step = &self.install_tls.step;
- return self;
+ try b.top_level_steps.put(arena, b.install_tls.step.name, &b.install_tls);
+ try b.top_level_steps.put(arena, b.uninstall_tls.step.name, &b.uninstall_tls);
+ b.default_step = &b.install_tls.step;
+ return b;
}

fn createChild(
@@ -340,7 +338,7 @@ fn createChildOnly(
.allocator = allocator,
.install_tls = .{
.step = Step.init(.{
- .id = .top_level,
+ .id = TopLevelStep.base_id,
.name = "install",
.owner = child,
}),

@@ -348,7 +346,7 @@ fn createChildOnly(
},
.uninstall_tls = .{
.step = Step.init(.{
- .id = .top_level,
+ .id = TopLevelStep.base_id,
.name = "uninstall",
.owner = child,
.makeFn = makeUninstall,
@@ -498,8 +496,8 @@ const OrderedUserValue = union(enum) {
}
};

- fn hash(self: OrderedUserValue, hasher: *std.hash.Wyhash) void {
- switch (self) {
+ fn hash(val: OrderedUserValue, hasher: *std.hash.Wyhash) void {
+ switch (val) {
.flag => {},
.scalar => |scalar| hasher.update(scalar),
// lists are already ordered

@@ -541,9 +539,9 @@ const OrderedUserInputOption = struct {
value: OrderedUserValue,
used: bool,

- fn hash(self: OrderedUserInputOption, hasher: *std.hash.Wyhash) void {
- hasher.update(self.name);
- self.value.hash(hasher);
+ fn hash(opt: OrderedUserInputOption, hasher: *std.hash.Wyhash) void {
+ hasher.update(opt.name);
+ opt.value.hash(hasher);
}

fn fromUnordered(allocator: Allocator, user_input_option: UserInputOption) OrderedUserInputOption {
@@ -593,38 +591,38 @@ fn determineAndApplyInstallPrefix(b: *Build) !void {
}

/// This function is intended to be called by lib/build_runner.zig, not a build.zig file.
- pub fn resolveInstallPrefix(self: *Build, install_prefix: ?[]const u8, dir_list: DirList) void {
- if (self.dest_dir) |dest_dir| {
- self.install_prefix = install_prefix orelse "/usr";
- self.install_path = self.pathJoin(&.{ dest_dir, self.install_prefix });
+ pub fn resolveInstallPrefix(b: *Build, install_prefix: ?[]const u8, dir_list: DirList) void {
+ if (b.dest_dir) |dest_dir| {
+ b.install_prefix = install_prefix orelse "/usr";
+ b.install_path = b.pathJoin(&.{ dest_dir, b.install_prefix });
} else {
- self.install_prefix = install_prefix orelse
- (self.build_root.join(self.allocator, &.{"zig-out"}) catch @panic("unhandled error"));
- self.install_path = self.install_prefix;
+ b.install_prefix = install_prefix orelse
+ (b.build_root.join(b.allocator, &.{"zig-out"}) catch @panic("unhandled error"));
+ b.install_path = b.install_prefix;
}

- var lib_list = [_][]const u8{ self.install_path, "lib" };
- var exe_list = [_][]const u8{ self.install_path, "bin" };
- var h_list = [_][]const u8{ self.install_path, "include" };
+ var lib_list = [_][]const u8{ b.install_path, "lib" };
+ var exe_list = [_][]const u8{ b.install_path, "bin" };
+ var h_list = [_][]const u8{ b.install_path, "include" };

if (dir_list.lib_dir) |dir| {
- if (fs.path.isAbsolute(dir)) lib_list[0] = self.dest_dir orelse "";
+ if (fs.path.isAbsolute(dir)) lib_list[0] = b.dest_dir orelse "";
lib_list[1] = dir;
}

if (dir_list.exe_dir) |dir| {
- if (fs.path.isAbsolute(dir)) exe_list[0] = self.dest_dir orelse "";
+ if (fs.path.isAbsolute(dir)) exe_list[0] = b.dest_dir orelse "";
exe_list[1] = dir;
}

if (dir_list.include_dir) |dir| {
- if (fs.path.isAbsolute(dir)) h_list[0] = self.dest_dir orelse "";
+ if (fs.path.isAbsolute(dir)) h_list[0] = b.dest_dir orelse "";
h_list[1] = dir;
}

- self.lib_dir = self.pathJoin(&lib_list);
- self.exe_dir = self.pathJoin(&exe_list);
- self.h_dir = self.pathJoin(&h_list);
+ b.lib_dir = b.pathJoin(&lib_list);
+ b.exe_dir = b.pathJoin(&exe_list);
+ b.h_dir = b.pathJoin(&h_list);
}

/// Create a set of key-value pairs that can be converted into a Zig source

@@ -632,8 +630,8 @@ pub fn resolveInstallPrefix(self: *Build, install_prefix: ?[]const u8, dir_list:
/// In other words, this provides a way to expose build.zig values to Zig
/// source code with `@import`.
/// Related: `Module.addOptions`.
- pub fn addOptions(self: *Build) *Step.Options {
- return Step.Options.create(self);
+ pub fn addOptions(b: *Build) *Step.Options {
+ return Step.Options.create(b);
}

pub const ExecutableOptions = struct {
@@ -959,9 +957,9 @@ pub fn createModule(b: *Build, options: Module.CreateOptions) *Module {
/// `addArgs`, and `addArtifactArg`.
/// Be careful using this function, as it introduces a system dependency.
/// To run an executable built with zig build, see `Step.Compile.run`.
- pub fn addSystemCommand(self: *Build, argv: []const []const u8) *Step.Run {
+ pub fn addSystemCommand(b: *Build, argv: []const []const u8) *Step.Run {
assert(argv.len >= 1);
- const run_step = Step.Run.create(self, self.fmt("run {s}", .{argv[0]}));
+ const run_step = Step.Run.create(b, b.fmt("run {s}", .{argv[0]}));
run_step.addArgs(argv);
return run_step;
}
@@ -1002,20 +1000,20 @@ pub fn addConfigHeader(
}

/// Allocator.dupe without the need to handle out of memory.
- pub fn dupe(self: *Build, bytes: []const u8) []u8 {
- return self.allocator.dupe(u8, bytes) catch @panic("OOM");
+ pub fn dupe(b: *Build, bytes: []const u8) []u8 {
+ return b.allocator.dupe(u8, bytes) catch @panic("OOM");
}

/// Duplicates an array of strings without the need to handle out of memory.
- pub fn dupeStrings(self: *Build, strings: []const []const u8) [][]u8 {
- const array = self.allocator.alloc([]u8, strings.len) catch @panic("OOM");
- for (array, strings) |*dest, source| dest.* = self.dupe(source);
+ pub fn dupeStrings(b: *Build, strings: []const []const u8) [][]u8 {
+ const array = b.allocator.alloc([]u8, strings.len) catch @panic("OOM");
+ for (array, strings) |*dest, source| dest.* = b.dupe(source);
return array;
}

/// Duplicates a path and converts all slashes to the OS's canonical path separator.
- pub fn dupePath(self: *Build, bytes: []const u8) []u8 {
- const the_copy = self.dupe(bytes);
+ pub fn dupePath(b: *Build, bytes: []const u8) []u8 {
+ const the_copy = b.dupe(bytes);
for (the_copy) |*byte| {
switch (byte.*) {
'/', '\\' => byte.* = fs.path.sep,

@@ -1025,8 +1023,8 @@ pub fn dupePath(self: *Build, bytes: []const u8) []u8 {
return the_copy;
}

- pub fn addWriteFile(self: *Build, file_path: []const u8, data: []const u8) *Step.WriteFile {
- const write_file_step = self.addWriteFiles();
+ pub fn addWriteFile(b: *Build, file_path: []const u8, data: []const u8) *Step.WriteFile {
+ const write_file_step = b.addWriteFiles();
_ = write_file_step.add(file_path, data);
return write_file_step;
}
@@ -1041,34 +1039,34 @@ pub fn addWriteFiles(b: *Build) *Step.WriteFile {
return Step.WriteFile.create(b);
}

- pub fn addRemoveDirTree(self: *Build, dir_path: []const u8) *Step.RemoveDir {
- return Step.RemoveDir.create(self, dir_path);
+ pub fn addRemoveDirTree(b: *Build, dir_path: []const u8) *Step.RemoveDir {
+ return Step.RemoveDir.create(b, dir_path);
}

pub fn addFmt(b: *Build, options: Step.Fmt.Options) *Step.Fmt {
return Step.Fmt.create(b, options);
}

- pub fn addTranslateC(self: *Build, options: Step.TranslateC.Options) *Step.TranslateC {
- return Step.TranslateC.create(self, options);
+ pub fn addTranslateC(b: *Build, options: Step.TranslateC.Options) *Step.TranslateC {
+ return Step.TranslateC.create(b, options);
}

- pub fn getInstallStep(self: *Build) *Step {
- return &self.install_tls.step;
+ pub fn getInstallStep(b: *Build) *Step {
+ return &b.install_tls.step;
}

- pub fn getUninstallStep(self: *Build) *Step {
- return &self.uninstall_tls.step;
+ pub fn getUninstallStep(b: *Build) *Step {
+ return &b.uninstall_tls.step;
}

fn makeUninstall(uninstall_step: *Step, prog_node: *std.Progress.Node) anyerror!void {
_ = prog_node;
const uninstall_tls: *TopLevelStep = @fieldParentPtr("step", uninstall_step);
- const self: *Build = @fieldParentPtr("uninstall_tls", uninstall_tls);
+ const b: *Build = @fieldParentPtr("uninstall_tls", uninstall_tls);

- for (self.installed_files.items) |installed_file| {
- const full_path = self.getInstallPath(installed_file.dir, installed_file.path);
- if (self.verbose) {
+ for (b.installed_files.items) |installed_file| {
+ const full_path = b.getInstallPath(installed_file.dir, installed_file.path);
+ if (b.verbose) {
log.info("rm {s}", .{full_path});
}
fs.cwd().deleteTree(full_path) catch {};
@@ -1082,13 +1080,13 @@ fn makeUninstall(uninstall_step: *Step, prog_node: *std.Progress.Node) anyerror!
/// When a project depends on a Zig package as a dependency, it programmatically sets
/// these options when calling the dependency's build.zig script as a function.
/// `null` is returned when an option is left to default.
- pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_raw: []const u8) ?T {
- const name = self.dupe(name_raw);
- const description = self.dupe(description_raw);
+ pub fn option(b: *Build, comptime T: type, name_raw: []const u8, description_raw: []const u8) ?T {
+ const name = b.dupe(name_raw);
+ const description = b.dupe(description_raw);
const type_id = comptime typeToEnum(T);
const enum_options = if (type_id == .@"enum") blk: {
const fields = comptime std.meta.fields(T);
- var options = ArrayList([]const u8).initCapacity(self.allocator, fields.len) catch @panic("OOM");
+ var options = ArrayList([]const u8).initCapacity(b.allocator, fields.len) catch @panic("OOM");

inline for (fields) |field| {
options.appendAssumeCapacity(field.name);
@@ -1102,12 +1100,12 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
.description = description,
.enum_options = enum_options,
};
- if ((self.available_options_map.fetchPut(name, available_option) catch @panic("OOM")) != null) {
+ if ((b.available_options_map.fetchPut(name, available_option) catch @panic("OOM")) != null) {
panic("Option '{s}' declared twice", .{name});
}
- self.available_options_list.append(available_option) catch @panic("OOM");
+ b.available_options_list.append(available_option) catch @panic("OOM");

- const option_ptr = self.user_input_options.getPtr(name) orelse return null;
+ const option_ptr = b.user_input_options.getPtr(name) orelse return null;
option_ptr.used = true;
switch (type_id) {
.bool => switch (option_ptr.value) {

@@ -1119,7 +1117,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
return false;
} else {
log.err("Expected -D{s} to be a boolean, but received '{s}'", .{ name, s });
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
}
},

@@ -1127,7 +1125,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
log.err("Expected -D{s} to be a boolean, but received a {s}.", .{
name, @tagName(option_ptr.value),
});
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
},
},

@@ -1136,19 +1134,19 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
log.err("Expected -D{s} to be an integer, but received a {s}.", .{
name, @tagName(option_ptr.value),
});
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
},
.scalar => |s| {
const n = std.fmt.parseInt(T, s, 10) catch |err| switch (err) {
error.Overflow => {
log.err("-D{s} value {s} cannot fit into type {s}.", .{ name, s, @typeName(T) });
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
},
else => {
log.err("Expected -D{s} to be an integer of type {s}.", .{ name, @typeName(T) });
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
},
};

@@ -1160,13 +1158,13 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
log.err("Expected -D{s} to be a float, but received a {s}.", .{
name, @tagName(option_ptr.value),
});
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
},
.scalar => |s| {
const n = std.fmt.parseFloat(T, s) catch {
log.err("Expected -D{s} to be a float of type {s}.", .{ name, @typeName(T) });
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
};
return n;

@@ -1177,7 +1175,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
log.err("Expected -D{s} to be an enum, but received a {s}.", .{
name, @tagName(option_ptr.value),
});
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
},
.scalar => |s| {

@@ -1185,7 +1183,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
return enum_lit;
} else {
log.err("Expected -D{s} to be of type {s}.", .{ name, @typeName(T) });
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
}
},

@@ -1195,7 +1193,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
log.err("Expected -D{s} to be a string, but received a {s}.", .{
name, @tagName(option_ptr.value),
});
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
},
.scalar => |s| return s,

@@ -1205,7 +1203,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
log.err("Expected -D{s} to be an enum, but received a {s}.", .{
name, @tagName(option_ptr.value),
});
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
},
.scalar => |s| {

@@ -1213,7 +1211,7 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
return build_id;
} else |err| {
log.err("unable to parse option '-D{s}': {s}", .{ name, @errorName(err) });
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
}
},

@@ -1223,28 +1221,28 @@ pub fn option(self: *Build, comptime T: type, name_raw: []const u8, description_
log.err("Expected -D{s} to be a list, but received a {s}.", .{
name, @tagName(option_ptr.value),
});
- self.markInvalidUserInput();
+ b.markInvalidUserInput();
return null;
},
.scalar => |s| {
- return self.allocator.dupe([]const u8, &[_][]const u8{s}) catch @panic("OOM");
+ return b.allocator.dupe([]const u8, &[_][]const u8{s}) catch @panic("OOM");
},
.list => |lst| return lst.items,
},
}
}

- pub fn step(self: *Build, name: []const u8, description: []const u8) *Step {
- const step_info = self.allocator.create(TopLevelStep) catch @panic("OOM");
+ pub fn step(b: *Build, name: []const u8, description: []const u8) *Step {
+ const step_info = b.allocator.create(TopLevelStep) catch @panic("OOM");
step_info.* = .{
.step = Step.init(.{
- .id = .top_level,
+ .id = TopLevelStep.base_id,
.name = name,
- .owner = self,
+ .owner = b,
}),
- .description = self.dupe(description),
+ .description = b.dupe(description),
};
- const gop = self.top_level_steps.getOrPut(self.allocator, name) catch @panic("OOM");
+ const gop = b.top_level_steps.getOrPut(b.allocator, name) catch @panic("OOM");
if (gop.found_existing) std.debug.panic("A top-level step with name \"{s}\" already exists", .{name});

gop.key_ptr.* = step_info.step.name;
@@ -1406,10 +1404,10 @@ pub fn standardTargetOptionsQueryOnly(b: *Build, args: StandardTargetOptionsArgs
return args.default_target;
}

- pub fn addUserInputOption(self: *Build, name_raw: []const u8, value_raw: []const u8) !bool {
- const name = self.dupe(name_raw);
- const value = self.dupe(value_raw);
- const gop = try self.user_input_options.getOrPut(name);
+ pub fn addUserInputOption(b: *Build, name_raw: []const u8, value_raw: []const u8) !bool {
+ const name = b.dupe(name_raw);
+ const value = b.dupe(value_raw);
+ const gop = try b.user_input_options.getOrPut(name);
if (!gop.found_existing) {
gop.value_ptr.* = UserInputOption{
.name = name,

@@ -1423,10 +1421,10 @@ pub fn addUserInputOption(self: *Build, name_raw: []const u8, value_raw: []const
switch (gop.value_ptr.value) {
.scalar => |s| {
// turn it into a list
- var list = ArrayList([]const u8).init(self.allocator);
+ var list = ArrayList([]const u8).init(b.allocator);
try list.append(s);
try list.append(value);
- try self.user_input_options.put(name, .{
+ try b.user_input_options.put(name, .{
.name = name,
.value = .{ .list = list },
.used = false,

@@ -1435,7 +1433,7 @@ pub fn addUserInputOption(self: *Build, name_raw: []const u8, value_raw: []const
.list => |*list| {
// append to the list
try list.append(value);
- try self.user_input_options.put(name, .{
+ try b.user_input_options.put(name, .{
.name = name,
.value = .{ .list = list.* },
.used = false,

@@ -1454,9 +1452,9 @@ pub fn addUserInputOption(self: *Build, name_raw: []const u8, value_raw: []const
return false;
}

- pub fn addUserInputFlag(self: *Build, name_raw: []const u8) !bool {
- const name = self.dupe(name_raw);
- const gop = try self.user_input_options.getOrPut(name);
+ pub fn addUserInputFlag(b: *Build, name_raw: []const u8) !bool {
+ const name = b.dupe(name_raw);
+ const gop = try b.user_input_options.getOrPut(name);
if (!gop.found_existing) {
gop.value_ptr.* = .{
.name = name,
@@ -1498,8 +1496,8 @@ fn typeToEnum(comptime T: type) TypeId {
};
}

- fn markInvalidUserInput(self: *Build) void {
- self.invalid_user_input = true;
+ fn markInvalidUserInput(b: *Build) void {
+ b.invalid_user_input = true;
}

pub fn validateUserInputDidItFail(b: *Build) bool {
@@ -1532,18 +1530,18 @@ fn printCmd(ally: Allocator, cwd: ?[]const u8, argv: []const []const u8) void {
/// This creates the install step and adds it to the dependencies of the
/// top-level install step, using all the default options.
/// See `addInstallArtifact` for a more flexible function.
- pub fn installArtifact(self: *Build, artifact: *Step.Compile) void {
- self.getInstallStep().dependOn(&self.addInstallArtifact(artifact, .{}).step);
+ pub fn installArtifact(b: *Build, artifact: *Step.Compile) void {
+ b.getInstallStep().dependOn(&b.addInstallArtifact(artifact, .{}).step);
}

/// This merely creates the step; it does not add it to the dependencies of the
/// top-level install step.
pub fn addInstallArtifact(
- self: *Build,
+ b: *Build,
artifact: *Step.Compile,
options: Step.InstallArtifact.Options,
) *Step.InstallArtifact {
- return Step.InstallArtifact.create(self, artifact, options);
+ return Step.InstallArtifact.create(b, artifact, options);
}

///`dest_rel_path` is relative to prefix path
@@ -1590,16 +1588,16 @@ pub fn addInstallHeaderFile(b: *Build, source: LazyPath, dest_rel_path: []const
}

pub fn addInstallFileWithDir(
- self: *Build,
+ b: *Build,
source: LazyPath,
install_dir: InstallDir,
dest_rel_path: []const u8,
) *Step.InstallFile {
- return Step.InstallFile.create(self, source, install_dir, dest_rel_path);
+ return Step.InstallFile.create(b, source, install_dir, dest_rel_path);
}

- pub fn addInstallDirectory(self: *Build, options: Step.InstallDir.Options) *Step.InstallDir {
- return Step.InstallDir.create(self, options);
+ pub fn addInstallDirectory(b: *Build, options: Step.InstallDir.Options) *Step.InstallDir {
+ return Step.InstallDir.create(b, options);
}

pub fn addCheckFile(
@@ -1611,16 +1609,16 @@ pub fn addCheckFile(
}

/// deprecated: https://github.com/ziglang/zig/issues/14943
- pub fn pushInstalledFile(self: *Build, dir: InstallDir, dest_rel_path: []const u8) void {
+ pub fn pushInstalledFile(b: *Build, dir: InstallDir, dest_rel_path: []const u8) void {
const file = InstalledFile{
.dir = dir,
.path = dest_rel_path,
};
- self.installed_files.append(file.dupe(self)) catch @panic("OOM");
+ b.installed_files.append(file.dupe(b)) catch @panic("OOM");
}

- pub fn truncateFile(self: *Build, dest_path: []const u8) !void {
- if (self.verbose) {
+ pub fn truncateFile(b: *Build, dest_path: []const u8) !void {
+ if (b.verbose) {
log.info("truncate {s}", .{dest_path});
}
const cwd = fs.cwd();
@@ -1652,50 +1650,54 @@ pub fn path(b: *Build, sub_path: []const u8) LazyPath {
/// This is low-level implementation details of the build system, not meant to
/// be called by users' build scripts. Even in the build system itself it is a
/// code smell to call this function.
- pub fn pathFromRoot(b: *Build, p: []const u8) []u8 {
- return fs.path.resolve(b.allocator, &.{ b.build_root.path orelse ".", p }) catch @panic("OOM");
+ pub fn pathFromRoot(b: *Build, sub_path: []const u8) []u8 {
+ return b.pathResolve(&.{ b.build_root.path orelse ".", sub_path });
}

- fn pathFromCwd(b: *Build, p: []const u8) []u8 {
+ fn pathFromCwd(b: *Build, sub_path: []const u8) []u8 {
const cwd = process.getCwdAlloc(b.allocator) catch @panic("OOM");
- return fs.path.resolve(b.allocator, &.{ cwd, p }) catch @panic("OOM");
+ return b.pathResolve(&.{ cwd, sub_path });
}

- pub fn pathJoin(self: *Build, paths: []const []const u8) []u8 {
- return fs.path.join(self.allocator, paths) catch @panic("OOM");
+ pub fn pathJoin(b: *Build, paths: []const []const u8) []u8 {
+ return fs.path.join(b.allocator, paths) catch @panic("OOM");
}

- pub fn fmt(self: *Build, comptime format: []const u8, args: anytype) []u8 {
- return fmt_lib.allocPrint(self.allocator, format, args) catch @panic("OOM");
+ pub fn pathResolve(b: *Build, paths: []const []const u8) []u8 {
+ return fs.path.resolve(b.allocator, paths) catch @panic("OOM");
}

- pub fn findProgram(self: *Build, names: []const []const u8, paths: []const []const u8) ![]const u8 {
+ pub fn fmt(b: *Build, comptime format: []const u8, args: anytype) []u8 {
+ return std.fmt.allocPrint(b.allocator, format, args) catch @panic("OOM");
+ }
+
+ pub fn findProgram(b: *Build, names: []const []const u8, paths: []const []const u8) ![]const u8 {
// TODO report error for ambiguous situations
- const exe_extension = self.host.result.exeFileExt();
- for (self.search_prefixes.items) |search_prefix| {
+ const exe_extension = b.host.result.exeFileExt();
+ for (b.search_prefixes.items) |search_prefix| {
for (names) |name| {
if (fs.path.isAbsolute(name)) {
return name;
}
- const full_path = self.pathJoin(&.{
+ const full_path = b.pathJoin(&.{
search_prefix,
"bin",
- self.fmt("{s}{s}", .{ name, exe_extension }),
+ b.fmt("{s}{s}", .{ name, exe_extension }),
});
- return fs.realpathAlloc(self.allocator, full_path) catch continue;
+ return fs.realpathAlloc(b.allocator, full_path) catch continue;
}
}
- if (self.graph.env_map.get("PATH")) |PATH| {
+ if (b.graph.env_map.get("PATH")) |PATH| {
for (names) |name| {
if (fs.path.isAbsolute(name)) {
return name;
}
var it = mem.tokenizeScalar(u8, PATH, fs.path.delimiter);
while (it.next()) |p| {
- const full_path = self.pathJoin(&.{
- p, self.fmt("{s}{s}", .{ name, exe_extension }),
+ const full_path = b.pathJoin(&.{
+ p, b.fmt("{s}{s}", .{ name, exe_extension }),
});
- return fs.realpathAlloc(self.allocator, full_path) catch continue;
+ return fs.realpathAlloc(b.allocator, full_path) catch continue;
}
}
}
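The new `pathResolve` is to `std.fs.path.resolve` what the existing `pathJoin` is to `std.fs.path.join`: the same call with the builder's allocator and a panic on OOM. A rough sketch of the practical difference, using made-up POSIX paths that are not from this commit:

// assuming `b: *std.Build` is in scope inside a build.zig function
const joined = b.pathJoin(&.{ "/usr/lib", "../bin" });      // "/usr/lib/../bin" - components are only concatenated
const resolved = b.pathResolve(&.{ "/usr/lib", "../bin" }); // "/usr/bin" - "." and ".." components are folded away

The surrounding hunks switch to `pathResolve` wherever the result is later treated as a canonical location, for example `pathFromRoot`, `pathFromCwd`, and `getInstallPath`.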
@@ -1704,17 +1706,17 @@ pub fn findProgram(self: *Build, names: []const []const u8, paths: []const []con
return name;
}
for (paths) |p| {
- const full_path = self.pathJoin(&.{
- p, self.fmt("{s}{s}", .{ name, exe_extension }),
+ const full_path = b.pathJoin(&.{
+ p, b.fmt("{s}{s}", .{ name, exe_extension }),
});
- return fs.realpathAlloc(self.allocator, full_path) catch continue;
+ return fs.realpathAlloc(b.allocator, full_path) catch continue;
}
}
return error.FileNotFound;
}

pub fn runAllowFail(
- self: *Build,
+ b: *Build,
argv: []const []const u8,
out_code: *u8,
stderr_behavior: std.ChildProcess.StdIo,
@@ -1725,18 +1727,18 @@ pub fn runAllowFail(
return error.ExecNotSupported;

const max_output_size = 400 * 1024;
- var child = std.ChildProcess.init(argv, self.allocator);
+ var child = std.ChildProcess.init(argv, b.allocator);
child.stdin_behavior = .Ignore;
child.stdout_behavior = .Pipe;
child.stderr_behavior = stderr_behavior;
- child.env_map = &self.graph.env_map;
+ child.env_map = &b.graph.env_map;

try child.spawn();

- const stdout = child.stdout.?.reader().readAllAlloc(self.allocator, max_output_size) catch {
+ const stdout = child.stdout.?.reader().readAllAlloc(b.allocator, max_output_size) catch {
return error.ReadFailure;
};
- errdefer self.allocator.free(stdout);
+ errdefer b.allocator.free(stdout);

const term = try child.wait();
switch (term) {
@@ -1779,19 +1781,16 @@ pub fn addSearchPrefix(b: *Build, search_prefix: []const u8) void {
b.search_prefixes.append(b.allocator, b.dupePath(search_prefix)) catch @panic("OOM");
}

- pub fn getInstallPath(self: *Build, dir: InstallDir, dest_rel_path: []const u8) []const u8 {
+ pub fn getInstallPath(b: *Build, dir: InstallDir, dest_rel_path: []const u8) []const u8 {
assert(!fs.path.isAbsolute(dest_rel_path)); // Install paths must be relative to the prefix
const base_dir = switch (dir) {
- .prefix => self.install_path,
- .bin => self.exe_dir,
- .lib => self.lib_dir,
- .header => self.h_dir,
- .custom => |p| self.pathJoin(&.{ self.install_path, p }),
+ .prefix => b.install_path,
+ .bin => b.exe_dir,
+ .lib => b.lib_dir,
+ .header => b.h_dir,
+ .custom => |p| b.pathJoin(&.{ b.install_path, p }),
};
- return fs.path.resolve(
- self.allocator,
- &[_][]const u8{ base_dir, dest_rel_path },
- ) catch @panic("OOM");
+ return b.pathResolve(&.{ base_dir, dest_rel_path });
}

pub const Dependency = struct {
@@ -2092,11 +2091,11 @@ pub const GeneratedFile = struct {
/// This value must be set in the `fn make()` of the `step` and must not be `null` afterwards.
path: ?[]const u8 = null,

- pub fn getPath(self: GeneratedFile) []const u8 {
- return self.path orelse std.debug.panic(
+ pub fn getPath(gen: GeneratedFile) []const u8 {
+ return gen.step.owner.pathFromRoot(gen.path orelse std.debug.panic(
"getPath() was called on a GeneratedFile that wasn't built yet. Is there a missing Step dependency on step '{s}'?",
- .{self.step.name},
- );
+ .{gen.step.name},
+ ));
}
};
@@ -2170,9 +2169,9 @@ pub const LazyPath = union(enum) {
},

/// Deprecated. Call `path` instead.
- pub fn relative(p: []const u8) LazyPath {
+ pub fn relative(sub_path: []const u8) LazyPath {
std.log.warn("deprecated. call std.Build.path instead", .{});
- return .{ .path = p };
+ return .{ .path = sub_path };
}

/// Returns a lazy path referring to the directory containing this path.

@@ -2182,8 +2181,8 @@ pub const LazyPath = union(enum) {
/// the dirname is not allowed to traverse outside of the build root.
/// Similarly, if the path is a generated file inside zig-cache,
/// the dirname is not allowed to traverse outside of zig-cache.
- pub fn dirname(self: LazyPath) LazyPath {
- return switch (self) {
+ pub fn dirname(lazy_path: LazyPath) LazyPath {
+ return switch (lazy_path) {
.generated => |gen| .{ .generated_dirname = .{ .generated = gen, .up = 0 } },
.generated_dirname => |gen| .{ .generated_dirname = .{ .generated = gen.generated, .up = gen.up + 1 } },
.src_path => |sp| .{ .src_path = .{
@@ -2193,20 +2192,20 @@ pub const LazyPath = union(enum) {
@panic("misconfigured build script");
},
} },
- .path => |p| .{
- .path = dirnameAllowEmpty(p) orelse {
+ .path => |sub_path| .{
+ .path = dirnameAllowEmpty(sub_path) orelse {
dumpBadDirnameHelp(null, null, "dirname() attempted to traverse outside the build root\n", .{}) catch {};
@panic("misconfigured build script");
},
},
- .cwd_relative => |p| .{
- .cwd_relative = dirnameAllowEmpty(p) orelse {
+ .cwd_relative => |rel_path| .{
+ .cwd_relative = dirnameAllowEmpty(rel_path) orelse {
// If we get null, it means one of two things:
- // - p was absolute, and is now root
- // - p was relative, and is now ""
+ // - rel_path was absolute, and is now root
+ // - rel_path was relative, and is now ""
// In either case, the build script tried to go too far
// and we should panic.
- if (fs.path.isAbsolute(p)) {
+ if (fs.path.isAbsolute(rel_path)) {
dumpBadDirnameHelp(null, null,
\\dirname() attempted to traverse outside the root.
\\No more directories left to go up.

@@ -2237,10 +2236,10 @@ pub const LazyPath = union(enum) {

/// Returns a string that can be shown to represent the file source.
/// Either returns the path or `"generated"`.
- pub fn getDisplayName(self: LazyPath) []const u8 {
- return switch (self) {
- .src_path => |sp| sp.sub_path,
- .path, .cwd_relative => |p| p,
+ pub fn getDisplayName(lazy_path: LazyPath) []const u8 {
+ return switch (lazy_path) {
+ .src_path => |src_path| src_path.sub_path,
+ .path, .cwd_relative => |sub_path| sub_path,
.generated => "generated",
.generated_dirname => "generated",
.dependency => "dependency",
@@ -2248,8 +2247,8 @@ pub const LazyPath = union(enum) {
}

/// Adds dependencies this file source implies to the given step.
- pub fn addStepDependencies(self: LazyPath, other_step: *Step) void {
- switch (self) {
+ pub fn addStepDependencies(lazy_path: LazyPath, other_step: *Step) void {
+ switch (lazy_path) {
.src_path, .path, .cwd_relative, .dependency => {},
.generated => |gen| other_step.dependOn(gen.step),
.generated_dirname => |gen| other_step.dependOn(gen.generated.step),

@@ -2258,8 +2257,8 @@ pub const LazyPath = union(enum) {

/// Returns an absolute path.
/// Intended to be used during the make phase only.
- pub fn getPath(self: LazyPath, src_builder: *Build) []const u8 {
- return getPath2(self, src_builder, null);
+ pub fn getPath(lazy_path: LazyPath, src_builder: *Build) []const u8 {
+ return getPath2(lazy_path, src_builder, null);
}

/// Returns an absolute path.
@ -2267,17 +2266,17 @@ pub const LazyPath = union(enum) {
|
|||
///
|
||||
/// `asking_step` is only used for debugging purposes; it's the step being
|
||||
/// run that is asking for the path.
|
||||
pub fn getPath2(self: LazyPath, src_builder: *Build, asking_step: ?*Step) []const u8 {
|
||||
switch (self) {
|
||||
pub fn getPath2(lazy_path: LazyPath, src_builder: *Build, asking_step: ?*Step) []const u8 {
|
||||
switch (lazy_path) {
|
||||
.path => |p| return src_builder.pathFromRoot(p),
|
||||
.src_path => |sp| return sp.owner.pathFromRoot(sp.sub_path),
|
||||
.cwd_relative => |p| return src_builder.pathFromCwd(p),
|
||||
.generated => |gen| return gen.path orelse {
|
||||
.generated => |gen| return gen.step.owner.pathFromRoot(gen.path orelse {
|
||||
std.debug.getStderrMutex().lock();
|
||||
const stderr = std.io.getStdErr();
|
||||
dumpBadGetPathHelp(gen.step, stderr, src_builder, asking_step) catch {};
|
||||
@panic("misconfigured build script");
|
||||
},
|
||||
}),
|
||||
.generated_dirname => |gen| {
|
||||
const cache_root_path = src_builder.cache_root.path orelse
|
||||
(src_builder.cache_root.join(src_builder.allocator, &.{"."}) catch @panic("OOM"));
|
||||
|
|
@ -2311,12 +2310,7 @@ pub const LazyPath = union(enum) {
|
|||
}
|
||||
return p;
|
||||
},
|
||||
.dependency => |dep| {
|
||||
return dep.dependency.builder.pathJoin(&[_][]const u8{
|
||||
dep.dependency.builder.build_root.path.?,
|
||||
dep.sub_path,
|
||||
});
|
||||
},
|
||||
.dependency => |dep| return dep.dependency.builder.pathFromRoot(dep.sub_path),
|
||||
}
|
||||
}
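Illustration (not part of the diff): a sketch of the `getPath2(b, step)` pattern that the hunks in this file switch to inside `make` implementations, so that path-resolution failures can name the asking step. `DumpStep` and its fields are invented for the example.

```zig
const std = @import("std");
const Step = std.Build.Step;

const DumpStep = struct {
    step: Step,
    source: std.Build.LazyPath,

    fn make(step: *Step, prog_node: *std.Progress.Node) anyerror!void {
        _ = prog_node;
        const b = step.owner;
        const dump: *DumpStep = @fieldParentPtr("step", step);
        // Prefer getPath2 over getPath inside make(): the extra argument lets
        // a misconfigured LazyPath report which step was asking for it.
        const path = dump.source.getPath2(b, step);
        std.log.info("resolved '{s}'", .{path});
    }
};
```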
|
||||
|
||||
|
|
@ -2324,8 +2318,8 @@ pub const LazyPath = union(enum) {
|
|||
///
|
||||
/// The `b` parameter is only used for its allocator. All *Build instances
|
||||
/// share the same allocator.
|
||||
pub fn dupe(self: LazyPath, b: *Build) LazyPath {
|
||||
return switch (self) {
|
||||
pub fn dupe(lazy_path: LazyPath, b: *Build) LazyPath {
|
||||
return switch (lazy_path) {
|
||||
.src_path => |sp| .{ .src_path = .{
|
||||
.owner = sp.owner,
|
||||
.sub_path = sp.owner.dupePath(sp.sub_path),
|
||||
|
|
@ -2425,11 +2419,11 @@ pub const InstallDir = union(enum) {
|
|||
custom: []const u8,
|
||||
|
||||
/// Duplicates the install directory including the path if set to custom.
|
||||
pub fn dupe(self: InstallDir, builder: *Build) InstallDir {
|
||||
if (self == .custom) {
|
||||
return .{ .custom = builder.dupe(self.custom) };
|
||||
pub fn dupe(dir: InstallDir, builder: *Build) InstallDir {
|
||||
if (dir == .custom) {
|
||||
return .{ .custom = builder.dupe(dir.custom) };
|
||||
} else {
|
||||
return self;
|
||||
return dir;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
@ -2439,10 +2433,10 @@ pub const InstalledFile = struct {
|
|||
path: []const u8,
|
||||
|
||||
/// Duplicates the installed file path and directory.
|
||||
pub fn dupe(self: InstalledFile, builder: *Build) InstalledFile {
|
||||
pub fn dupe(file: InstalledFile, builder: *Build) InstalledFile {
|
||||
return .{
|
||||
.dir = self.dir.dupe(builder),
|
||||
.path = builder.dupe(self.path),
|
||||
.dir = file.dir.dupe(builder),
|
||||
.path = builder.dupe(file.path),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
|
|
|||
|
|
@ -89,10 +89,10 @@ pub const CSourceFile = struct {
|
|||
file: LazyPath,
|
||||
flags: []const []const u8 = &.{},
|
||||
|
||||
pub fn dupe(self: CSourceFile, b: *std.Build) CSourceFile {
|
||||
pub fn dupe(file: CSourceFile, b: *std.Build) CSourceFile {
|
||||
return .{
|
||||
.file = self.file.dupe(b),
|
||||
.flags = b.dupeStrings(self.flags),
|
||||
.file = file.file.dupe(b),
|
||||
.flags = b.dupeStrings(file.flags),
|
||||
};
|
||||
}
|
||||
};
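Illustration (not part of the diff): how a `CSourceFile` typically enters the build graph; assumes `b` and `exe` from an enclosing `build` function, with an invented file name and flags.

```zig
exe.addCSourceFile(.{
    .file = b.path("src/glue.c"), // illustrative path
    .flags = &.{ "-std=c99", "-Wall" },
});
exe.linkLibC();
```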
|
||||
|
|
@ -115,12 +115,12 @@ pub const RcSourceFile = struct {
|
|||
/// as `/I <resolved path>`.
|
||||
include_paths: []const LazyPath = &.{},
|
||||
|
||||
pub fn dupe(self: RcSourceFile, b: *std.Build) RcSourceFile {
|
||||
const include_paths = b.allocator.alloc(LazyPath, self.include_paths.len) catch @panic("OOM");
|
||||
for (include_paths, self.include_paths) |*dest, lazy_path| dest.* = lazy_path.dupe(b);
|
||||
pub fn dupe(file: RcSourceFile, b: *std.Build) RcSourceFile {
|
||||
const include_paths = b.allocator.alloc(LazyPath, file.include_paths.len) catch @panic("OOM");
|
||||
for (include_paths, file.include_paths) |*dest, lazy_path| dest.* = lazy_path.dupe(b);
|
||||
return .{
|
||||
.file = self.file.dupe(b),
|
||||
.flags = b.dupeStrings(self.flags),
|
||||
.file = file.file.dupe(b),
|
||||
.flags = b.dupeStrings(file.flags),
|
||||
.include_paths = include_paths,
|
||||
};
|
||||
}
|
||||
|
|
@ -665,24 +665,19 @@ pub fn appendZigProcessFlags(
|
|||
for (m.include_dirs.items) |include_dir| {
|
||||
switch (include_dir) {
|
||||
.path => |include_path| {
|
||||
try zig_args.append("-I");
|
||||
try zig_args.append(include_path.getPath(b));
|
||||
try zig_args.appendSlice(&.{ "-I", include_path.getPath2(b, asking_step) });
|
||||
},
|
||||
.path_system => |include_path| {
|
||||
try zig_args.append("-isystem");
|
||||
try zig_args.append(include_path.getPath(b));
|
||||
try zig_args.appendSlice(&.{ "-isystem", include_path.getPath2(b, asking_step) });
|
||||
},
|
||||
.path_after => |include_path| {
|
||||
try zig_args.append("-idirafter");
|
||||
try zig_args.append(include_path.getPath(b));
|
||||
try zig_args.appendSlice(&.{ "-idirafter", include_path.getPath2(b, asking_step) });
|
||||
},
|
||||
.framework_path => |include_path| {
|
||||
try zig_args.append("-F");
|
||||
try zig_args.append(include_path.getPath2(b, asking_step));
|
||||
try zig_args.appendSlice(&.{ "-F", include_path.getPath2(b, asking_step) });
|
||||
},
|
||||
.framework_path_system => |include_path| {
|
||||
try zig_args.append("-iframework");
|
||||
try zig_args.append(include_path.getPath2(b, asking_step));
|
||||
try zig_args.appendSlice(&.{ "-iframework", include_path.getPath2(b, asking_step) });
|
||||
},
|
||||
.other_step => |other| {
|
||||
if (other.generated_h) |header| {
|
||||
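Illustration (not part of the diff): the user-facing calls that feed the include-dir kinds handled in the hunk above, together with the compiler flags they map to; assumes `b` and `exe` as before, with invented paths.

```zig
exe.addIncludePath(b.path("include"));           // emitted as -I <path>
exe.addSystemIncludePath(b.path("third_party")); // emitted as -isystem <path>
exe.addFrameworkPath(b.path("Frameworks"));      // emitted as -F <path> (Darwin)
```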
|
|
|
|||
|
|
@ -58,7 +58,7 @@ pub const TestResults = struct {
|
|||
}
|
||||
};
|
||||
|
||||
pub const MakeFn = *const fn (self: *Step, prog_node: *std.Progress.Node) anyerror!void;
|
||||
pub const MakeFn = *const fn (step: *Step, prog_node: *std.Progress.Node) anyerror!void;
|
||||
|
||||
pub const State = enum {
|
||||
precheck_unstarted,
|
||||
|
|
@ -201,8 +201,8 @@ pub fn make(s: *Step, prog_node: *std.Progress.Node) error{ MakeFailed, MakeSkip
|
|||
}
|
||||
}
|
||||
|
||||
pub fn dependOn(self: *Step, other: *Step) void {
|
||||
self.dependencies.append(other) catch @panic("OOM");
|
||||
pub fn dependOn(step: *Step, other: *Step) void {
|
||||
step.dependencies.append(other) catch @panic("OOM");
|
||||
}
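Illustration (not part of the diff): `dependOn` as seen from a `build.zig`, wiring an invented top-level step to the default install step.

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    const release_step = b.step("release", "Build everything for a release");
    release_step.dependOn(b.getInstallStep());
}
```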
|
||||
|
||||
pub fn getStackTrace(s: *Step) ?std.builtin.StackTrace {
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ expected_exact: ?[]const u8,
|
|||
source: std.Build.LazyPath,
|
||||
max_bytes: usize = 20 * 1024 * 1024,
|
||||
|
||||
pub const base_id = .check_file;
|
||||
pub const base_id: Step.Id = .check_file;
|
||||
|
||||
pub const Options = struct {
|
||||
expected_matches: []const []const u8 = &.{},
|
||||
|
|
@ -26,10 +26,10 @@ pub fn create(
|
|||
source: std.Build.LazyPath,
|
||||
options: Options,
|
||||
) *CheckFile {
|
||||
const self = owner.allocator.create(CheckFile) catch @panic("OOM");
|
||||
self.* = .{
|
||||
const check_file = owner.allocator.create(CheckFile) catch @panic("OOM");
|
||||
check_file.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = .check_file,
|
||||
.id = base_id,
|
||||
.name = "CheckFile",
|
||||
.owner = owner,
|
||||
.makeFn = make,
|
||||
|
|
@ -38,27 +38,27 @@ pub fn create(
|
|||
.expected_matches = owner.dupeStrings(options.expected_matches),
|
||||
.expected_exact = options.expected_exact,
|
||||
};
|
||||
self.source.addStepDependencies(&self.step);
|
||||
return self;
|
||||
check_file.source.addStepDependencies(&check_file.step);
|
||||
return check_file;
|
||||
}
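Illustration (not part of the diff): a typical way this step is constructed from a `build.zig`, via `std.Build.addCheckFile`; assumes `b` and a `test_step` exist, that `Step.WriteFile.add` returns the written file's `LazyPath` as in recent std.Build, and the file name and phrase are invented.

```zig
const generated = b.addWriteFiles().add("greeting.txt", "hello world\n");
const check = b.addCheckFile(generated, .{
    .expected_matches = &.{"hello"},
});
test_step.dependOn(&check.step);
```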
|
||||
|
||||
pub fn setName(self: *CheckFile, name: []const u8) void {
|
||||
self.step.name = name;
|
||||
pub fn setName(check_file: *CheckFile, name: []const u8) void {
|
||||
check_file.step.name = name;
|
||||
}
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const self: *CheckFile = @fieldParentPtr("step", step);
|
||||
const check_file: *CheckFile = @fieldParentPtr("step", step);
|
||||
|
||||
const src_path = self.source.getPath(b);
|
||||
const contents = fs.cwd().readFileAlloc(b.allocator, src_path, self.max_bytes) catch |err| {
|
||||
const src_path = check_file.source.getPath2(b, step);
|
||||
const contents = fs.cwd().readFileAlloc(b.allocator, src_path, check_file.max_bytes) catch |err| {
|
||||
return step.fail("unable to read '{s}': {s}", .{
|
||||
src_path, @errorName(err),
|
||||
});
|
||||
};
|
||||
|
||||
for (self.expected_matches) |expected_match| {
|
||||
for (check_file.expected_matches) |expected_match| {
|
||||
if (mem.indexOf(u8, contents, expected_match) == null) {
|
||||
return step.fail(
|
||||
\\
|
||||
|
|
@ -71,7 +71,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
}
|
||||
}
|
||||
|
||||
if (self.expected_exact) |expected_exact| {
|
||||
if (check_file.expected_exact) |expected_exact| {
|
||||
if (!mem.eql(u8, expected_exact, contents)) {
|
||||
return step.fail(
|
||||
\\
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ const CheckObject = @This();
|
|||
const Allocator = mem.Allocator;
|
||||
const Step = std.Build.Step;
|
||||
|
||||
pub const base_id = .check_object;
|
||||
pub const base_id: Step.Id = .check_object;
|
||||
|
||||
step: Step,
|
||||
source: std.Build.LazyPath,
|
||||
|
|
@ -26,10 +26,10 @@ pub fn create(
|
|||
obj_format: std.Target.ObjectFormat,
|
||||
) *CheckObject {
|
||||
const gpa = owner.allocator;
|
||||
const self = gpa.create(CheckObject) catch @panic("OOM");
|
||||
self.* = .{
|
||||
const check_object = gpa.create(CheckObject) catch @panic("OOM");
|
||||
check_object.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = .check_file,
|
||||
.id = base_id,
|
||||
.name = "CheckObject",
|
||||
.owner = owner,
|
||||
.makeFn = make,
|
||||
|
|
@ -38,8 +38,8 @@ pub fn create(
|
|||
.checks = std.ArrayList(Check).init(gpa),
|
||||
.obj_format = obj_format,
|
||||
};
|
||||
self.source.addStepDependencies(&self.step);
|
||||
return self;
|
||||
check_object.source.addStepDependencies(&check_object.step);
|
||||
return check_object;
|
||||
}
|
||||
|
||||
const SearchPhrase = struct {
|
||||
|
|
@ -268,36 +268,36 @@ const Check = struct {
|
|||
return check;
|
||||
}
|
||||
|
||||
fn extract(self: *Check, phrase: SearchPhrase) void {
|
||||
self.actions.append(.{
|
||||
fn extract(check: *Check, phrase: SearchPhrase) void {
|
||||
check.actions.append(.{
|
||||
.tag = .extract,
|
||||
.phrase = phrase,
|
||||
}) catch @panic("OOM");
|
||||
}
|
||||
|
||||
fn exact(self: *Check, phrase: SearchPhrase) void {
|
||||
self.actions.append(.{
|
||||
fn exact(check: *Check, phrase: SearchPhrase) void {
|
||||
check.actions.append(.{
|
||||
.tag = .exact,
|
||||
.phrase = phrase,
|
||||
}) catch @panic("OOM");
|
||||
}
|
||||
|
||||
fn contains(self: *Check, phrase: SearchPhrase) void {
|
||||
self.actions.append(.{
|
||||
fn contains(check: *Check, phrase: SearchPhrase) void {
|
||||
check.actions.append(.{
|
||||
.tag = .contains,
|
||||
.phrase = phrase,
|
||||
}) catch @panic("OOM");
|
||||
}
|
||||
|
||||
fn notPresent(self: *Check, phrase: SearchPhrase) void {
|
||||
self.actions.append(.{
|
||||
fn notPresent(check: *Check, phrase: SearchPhrase) void {
|
||||
check.actions.append(.{
|
||||
.tag = .not_present,
|
||||
.phrase = phrase,
|
||||
}) catch @panic("OOM");
|
||||
}
|
||||
|
||||
fn computeCmp(self: *Check, phrase: SearchPhrase, expected: ComputeCompareExpected) void {
|
||||
self.actions.append(.{
|
||||
fn computeCmp(check: *Check, phrase: SearchPhrase, expected: ComputeCompareExpected) void {
|
||||
check.actions.append(.{
|
||||
.tag = .compute_cmp,
|
||||
.phrase = phrase,
|
||||
.expected = expected,
|
||||
|
|
@ -328,246 +328,246 @@ const Check = struct {
|
|||
};
|
||||
|
||||
/// Creates a new empty sequence of actions.
|
||||
fn checkStart(self: *CheckObject, kind: Check.Kind) void {
|
||||
const new_check = Check.create(self.step.owner.allocator, kind);
|
||||
self.checks.append(new_check) catch @panic("OOM");
|
||||
fn checkStart(check_object: *CheckObject, kind: Check.Kind) void {
|
||||
const check = Check.create(check_object.step.owner.allocator, kind);
|
||||
check_object.checks.append(check) catch @panic("OOM");
|
||||
}
|
||||
|
||||
/// Adds an exact match phrase to the latest created Check.
|
||||
pub fn checkExact(self: *CheckObject, phrase: []const u8) void {
|
||||
self.checkExactInner(phrase, null);
|
||||
pub fn checkExact(check_object: *CheckObject, phrase: []const u8) void {
|
||||
check_object.checkExactInner(phrase, null);
|
||||
}
|
||||
|
||||
/// Like `checkExact()` but takes an additional argument `LazyPath` which will be
|
||||
/// resolved to a full search query in `make()`.
|
||||
pub fn checkExactPath(self: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
|
||||
self.checkExactInner(phrase, lazy_path);
|
||||
pub fn checkExactPath(check_object: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
|
||||
check_object.checkExactInner(phrase, lazy_path);
|
||||
}
|
||||
|
||||
fn checkExactInner(self: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
|
||||
assert(self.checks.items.len > 0);
|
||||
const last = &self.checks.items[self.checks.items.len - 1];
|
||||
last.exact(.{ .string = self.step.owner.dupe(phrase), .lazy_path = lazy_path });
|
||||
fn checkExactInner(check_object: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
|
||||
assert(check_object.checks.items.len > 0);
|
||||
const last = &check_object.checks.items[check_object.checks.items.len - 1];
|
||||
last.exact(.{ .string = check_object.step.owner.dupe(phrase), .lazy_path = lazy_path });
|
||||
}
|
||||
|
||||
/// Adds a fuzzy match phrase to the latest created Check.
|
||||
pub fn checkContains(self: *CheckObject, phrase: []const u8) void {
|
||||
self.checkContainsInner(phrase, null);
|
||||
pub fn checkContains(check_object: *CheckObject, phrase: []const u8) void {
|
||||
check_object.checkContainsInner(phrase, null);
|
||||
}
|
||||
|
||||
/// Like `checkContains()` but takes an additional argument `lazy_path` which will be
|
||||
/// resolved to a full search query in `make()`.
|
||||
pub fn checkContainsPath(
|
||||
self: *CheckObject,
|
||||
check_object: *CheckObject,
|
||||
phrase: []const u8,
|
||||
lazy_path: std.Build.LazyPath,
|
||||
) void {
|
||||
self.checkContainsInner(phrase, lazy_path);
|
||||
check_object.checkContainsInner(phrase, lazy_path);
|
||||
}
|
||||
|
||||
fn checkContainsInner(self: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
|
||||
assert(self.checks.items.len > 0);
|
||||
const last = &self.checks.items[self.checks.items.len - 1];
|
||||
last.contains(.{ .string = self.step.owner.dupe(phrase), .lazy_path = lazy_path });
|
||||
fn checkContainsInner(check_object: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
|
||||
assert(check_object.checks.items.len > 0);
|
||||
const last = &check_object.checks.items[check_object.checks.items.len - 1];
|
||||
last.contains(.{ .string = check_object.step.owner.dupe(phrase), .lazy_path = lazy_path });
|
||||
}
|
||||
|
||||
/// Adds an exact match phrase with variable extractor to the latest created Check.
|
||||
pub fn checkExtract(self: *CheckObject, phrase: []const u8) void {
|
||||
self.checkExtractInner(phrase, null);
|
||||
pub fn checkExtract(check_object: *CheckObject, phrase: []const u8) void {
|
||||
check_object.checkExtractInner(phrase, null);
|
||||
}
|
||||
|
||||
/// Like `checkExtract()` but takes an additional argument `LazyPath` which will be
|
||||
/// resolved to a full search query in `make()`.
|
||||
pub fn checkExtractLazyPath(self: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
|
||||
self.checkExtractInner(phrase, lazy_path);
|
||||
pub fn checkExtractLazyPath(check_object: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
|
||||
check_object.checkExtractInner(phrase, lazy_path);
|
||||
}
|
||||
|
||||
fn checkExtractInner(self: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
|
||||
assert(self.checks.items.len > 0);
|
||||
const last = &self.checks.items[self.checks.items.len - 1];
|
||||
last.extract(.{ .string = self.step.owner.dupe(phrase), .lazy_path = lazy_path });
|
||||
fn checkExtractInner(check_object: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
|
||||
assert(check_object.checks.items.len > 0);
|
||||
const last = &check_object.checks.items[check_object.checks.items.len - 1];
|
||||
last.extract(.{ .string = check_object.step.owner.dupe(phrase), .lazy_path = lazy_path });
|
||||
}
|
||||
|
||||
/// Adds another searched phrase to the latest created Check
|
||||
/// however ensures there is no matching phrase in the output.
|
||||
pub fn checkNotPresent(self: *CheckObject, phrase: []const u8) void {
|
||||
self.checkNotPresentInner(phrase, null);
|
||||
pub fn checkNotPresent(check_object: *CheckObject, phrase: []const u8) void {
|
||||
check_object.checkNotPresentInner(phrase, null);
|
||||
}
|
||||
|
||||
/// Like `checkExtract()` but takes an additional argument `LazyPath` which will be
|
||||
/// resolved to a full search query in `make()`.
|
||||
pub fn checkNotPresentLazyPath(self: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
|
||||
self.checkNotPresentInner(phrase, lazy_path);
|
||||
pub fn checkNotPresentLazyPath(check_object: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
|
||||
check_object.checkNotPresentInner(phrase, lazy_path);
|
||||
}
|
||||
|
||||
fn checkNotPresentInner(self: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
|
||||
assert(self.checks.items.len > 0);
|
||||
const last = &self.checks.items[self.checks.items.len - 1];
|
||||
last.notPresent(.{ .string = self.step.owner.dupe(phrase), .lazy_path = lazy_path });
|
||||
fn checkNotPresentInner(check_object: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
|
||||
assert(check_object.checks.items.len > 0);
|
||||
const last = &check_object.checks.items[check_object.checks.items.len - 1];
|
||||
last.notPresent(.{ .string = check_object.step.owner.dupe(phrase), .lazy_path = lazy_path });
|
||||
}
|
||||
|
||||
/// Creates a new check checking in the file headers (section, program headers, etc.).
|
||||
pub fn checkInHeaders(self: *CheckObject) void {
|
||||
self.checkStart(.headers);
|
||||
pub fn checkInHeaders(check_object: *CheckObject) void {
|
||||
check_object.checkStart(.headers);
|
||||
}
|
||||
|
||||
/// Creates a new check checking specifically symbol table parsed and dumped from the object
|
||||
/// file.
|
||||
pub fn checkInSymtab(self: *CheckObject) void {
|
||||
const label = switch (self.obj_format) {
|
||||
pub fn checkInSymtab(check_object: *CheckObject) void {
|
||||
const label = switch (check_object.obj_format) {
|
||||
.macho => MachODumper.symtab_label,
|
||||
.elf => ElfDumper.symtab_label,
|
||||
.wasm => WasmDumper.symtab_label,
|
||||
.coff => @panic("TODO symtab for coff"),
|
||||
else => @panic("TODO other file formats"),
|
||||
};
|
||||
self.checkStart(.symtab);
|
||||
self.checkExact(label);
|
||||
check_object.checkStart(.symtab);
|
||||
check_object.checkExact(label);
|
||||
}
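Illustration (not part of the diff): a hedged sketch of driving these checks from a `build.zig`; it assumes `const std = @import("std");`, `b`, an `exe` compile step, and a `test_step` from the enclosing build script, and it uses the explicit `create` signature shown above. The object format and symbol name are invented.

```zig
const CheckObject = std.Build.Step.CheckObject;

const check = CheckObject.create(b, exe.getEmittedBin(), .elf);
check.checkInSymtab();
check.checkContains("main"); // invented symbol
test_step.dependOn(&check.step);
```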
|
||||
|
||||
/// Creates a new check checking specifically dyld rebase opcodes contents parsed and dumped
|
||||
/// from the object file.
|
||||
/// This check is target-dependent and applicable to MachO only.
|
||||
pub fn checkInDyldRebase(self: *CheckObject) void {
|
||||
const label = switch (self.obj_format) {
|
||||
pub fn checkInDyldRebase(check_object: *CheckObject) void {
|
||||
const label = switch (check_object.obj_format) {
|
||||
.macho => MachODumper.dyld_rebase_label,
|
||||
else => @panic("Unsupported target platform"),
|
||||
};
|
||||
self.checkStart(.dyld_rebase);
|
||||
self.checkExact(label);
|
||||
check_object.checkStart(.dyld_rebase);
|
||||
check_object.checkExact(label);
|
||||
}
|
||||
|
||||
/// Creates a new check checking specifically dyld bind opcodes contents parsed and dumped
|
||||
/// from the object file.
|
||||
/// This check is target-dependent and applicable to MachO only.
|
||||
pub fn checkInDyldBind(self: *CheckObject) void {
|
||||
const label = switch (self.obj_format) {
|
||||
pub fn checkInDyldBind(check_object: *CheckObject) void {
|
||||
const label = switch (check_object.obj_format) {
|
||||
.macho => MachODumper.dyld_bind_label,
|
||||
else => @panic("Unsupported target platform"),
|
||||
};
|
||||
self.checkStart(.dyld_bind);
|
||||
self.checkExact(label);
|
||||
check_object.checkStart(.dyld_bind);
|
||||
check_object.checkExact(label);
|
||||
}
|
||||
|
||||
/// Creates a new check checking specifically dyld weak bind opcodes contents parsed and dumped
|
||||
/// from the object file.
|
||||
/// This check is target-dependent and applicable to MachO only.
|
||||
pub fn checkInDyldWeakBind(self: *CheckObject) void {
|
||||
const label = switch (self.obj_format) {
|
||||
pub fn checkInDyldWeakBind(check_object: *CheckObject) void {
|
||||
const label = switch (check_object.obj_format) {
|
||||
.macho => MachODumper.dyld_weak_bind_label,
|
||||
else => @panic("Unsupported target platform"),
|
||||
};
|
||||
self.checkStart(.dyld_weak_bind);
|
||||
self.checkExact(label);
|
||||
check_object.checkStart(.dyld_weak_bind);
|
||||
check_object.checkExact(label);
|
||||
}
|
||||
|
||||
/// Creates a new check checking specifically dyld lazy bind opcodes contents parsed and dumped
|
||||
/// from the object file.
|
||||
/// This check is target-dependent and applicable to MachO only.
|
||||
pub fn checkInDyldLazyBind(self: *CheckObject) void {
|
||||
const label = switch (self.obj_format) {
|
||||
pub fn checkInDyldLazyBind(check_object: *CheckObject) void {
|
||||
const label = switch (check_object.obj_format) {
|
||||
.macho => MachODumper.dyld_lazy_bind_label,
|
||||
else => @panic("Unsupported target platform"),
|
||||
};
|
||||
self.checkStart(.dyld_lazy_bind);
|
||||
self.checkExact(label);
|
||||
check_object.checkStart(.dyld_lazy_bind);
|
||||
check_object.checkExact(label);
|
||||
}
|
||||
|
||||
/// Creates a new check checking specifically exports info contents parsed and dumped
|
||||
/// from the object file.
|
||||
/// This check is target-dependent and applicable to MachO only.
|
||||
pub fn checkInExports(self: *CheckObject) void {
|
||||
const label = switch (self.obj_format) {
|
||||
pub fn checkInExports(check_object: *CheckObject) void {
|
||||
const label = switch (check_object.obj_format) {
|
||||
.macho => MachODumper.exports_label,
|
||||
else => @panic("Unsupported target platform"),
|
||||
};
|
||||
self.checkStart(.exports);
|
||||
self.checkExact(label);
|
||||
check_object.checkStart(.exports);
|
||||
check_object.checkExact(label);
|
||||
}
|
||||
|
||||
/// Creates a new check checking specifically indirect symbol table parsed and dumped
|
||||
/// from the object file.
|
||||
/// This check is target-dependent and applicable to MachO only.
|
||||
pub fn checkInIndirectSymtab(self: *CheckObject) void {
|
||||
const label = switch (self.obj_format) {
|
||||
pub fn checkInIndirectSymtab(check_object: *CheckObject) void {
|
||||
const label = switch (check_object.obj_format) {
|
||||
.macho => MachODumper.indirect_symtab_label,
|
||||
else => @panic("Unsupported target platform"),
|
||||
};
|
||||
self.checkStart(.indirect_symtab);
|
||||
self.checkExact(label);
|
||||
check_object.checkStart(.indirect_symtab);
|
||||
check_object.checkExact(label);
|
||||
}
|
||||
|
||||
/// Creates a new check checking specifically dynamic symbol table parsed and dumped from the object
|
||||
/// file.
|
||||
/// This check is target-dependent and applicable to ELF only.
|
||||
pub fn checkInDynamicSymtab(self: *CheckObject) void {
|
||||
const label = switch (self.obj_format) {
|
||||
pub fn checkInDynamicSymtab(check_object: *CheckObject) void {
|
||||
const label = switch (check_object.obj_format) {
|
||||
.elf => ElfDumper.dynamic_symtab_label,
|
||||
else => @panic("Unsupported target platform"),
|
||||
};
|
||||
self.checkStart(.dynamic_symtab);
|
||||
self.checkExact(label);
|
||||
check_object.checkStart(.dynamic_symtab);
|
||||
check_object.checkExact(label);
|
||||
}
|
||||
|
||||
/// Creates a new check checking specifically dynamic section parsed and dumped from the object
|
||||
/// file.
|
||||
/// This check is target-dependent and applicable to ELF only.
|
||||
pub fn checkInDynamicSection(self: *CheckObject) void {
|
||||
const label = switch (self.obj_format) {
|
||||
pub fn checkInDynamicSection(check_object: *CheckObject) void {
|
||||
const label = switch (check_object.obj_format) {
|
||||
.elf => ElfDumper.dynamic_section_label,
|
||||
else => @panic("Unsupported target platform"),
|
||||
};
|
||||
self.checkStart(.dynamic_section);
|
||||
self.checkExact(label);
|
||||
check_object.checkStart(.dynamic_section);
|
||||
check_object.checkExact(label);
|
||||
}
|
||||
|
||||
/// Creates a new check checking specifically symbol table parsed and dumped from the archive
|
||||
/// file.
|
||||
pub fn checkInArchiveSymtab(self: *CheckObject) void {
|
||||
const label = switch (self.obj_format) {
|
||||
pub fn checkInArchiveSymtab(check_object: *CheckObject) void {
|
||||
const label = switch (check_object.obj_format) {
|
||||
.elf => ElfDumper.archive_symtab_label,
|
||||
else => @panic("TODO other file formats"),
|
||||
};
|
||||
self.checkStart(.archive_symtab);
|
||||
self.checkExact(label);
|
||||
check_object.checkStart(.archive_symtab);
|
||||
check_object.checkExact(label);
|
||||
}
|
||||
|
||||
pub fn dumpSection(self: *CheckObject, name: [:0]const u8) void {
|
||||
const new_check = Check.dumpSection(self.step.owner.allocator, name);
|
||||
self.checks.append(new_check) catch @panic("OOM");
|
||||
pub fn dumpSection(check_object: *CheckObject, name: [:0]const u8) void {
|
||||
const check = Check.dumpSection(check_object.step.owner.allocator, name);
|
||||
check_object.checks.append(check) catch @panic("OOM");
|
||||
}
|
||||
|
||||
/// Creates a new standalone, singular check which allows running simple binary operations
|
||||
/// on the extracted variables. It will then compare the reduced program with the value of
|
||||
/// the expected variable.
|
||||
pub fn checkComputeCompare(
|
||||
self: *CheckObject,
|
||||
check_object: *CheckObject,
|
||||
program: []const u8,
|
||||
expected: ComputeCompareExpected,
|
||||
) void {
|
||||
var new_check = Check.create(self.step.owner.allocator, .compute_compare);
|
||||
new_check.computeCmp(.{ .string = self.step.owner.dupe(program) }, expected);
|
||||
self.checks.append(new_check) catch @panic("OOM");
|
||||
var check = Check.create(check_object.step.owner.allocator, .compute_compare);
|
||||
check.computeCmp(.{ .string = check_object.step.owner.dupe(program) }, expected);
|
||||
check_object.checks.append(check) catch @panic("OOM");
|
||||
}
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const gpa = b.allocator;
|
||||
const self: *CheckObject = @fieldParentPtr("step", step);
|
||||
const check_object: *CheckObject = @fieldParentPtr("step", step);
|
||||
|
||||
const src_path = self.source.getPath(b);
|
||||
const src_path = check_object.source.getPath2(b, step);
|
||||
const contents = fs.cwd().readFileAllocOptions(
|
||||
gpa,
|
||||
src_path,
|
||||
self.max_bytes,
|
||||
check_object.max_bytes,
|
||||
null,
|
||||
@alignOf(u64),
|
||||
null,
|
||||
) catch |err| return step.fail("unable to read '{s}': {s}", .{ src_path, @errorName(err) });
|
||||
|
||||
var vars = std.StringHashMap(u64).init(gpa);
|
||||
for (self.checks.items) |chk| {
|
||||
for (check_object.checks.items) |chk| {
|
||||
if (chk.kind == .compute_compare) {
|
||||
assert(chk.actions.items.len == 1);
|
||||
const act = chk.actions.items[0];
|
||||
|
|
@ -587,7 +587,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
continue;
|
||||
}
|
||||
|
||||
const output = switch (self.obj_format) {
|
||||
const output = switch (check_object.obj_format) {
|
||||
.macho => try MachODumper.parseAndDump(step, chk, contents),
|
||||
.elf => try ElfDumper.parseAndDump(step, chk, contents),
|
||||
.coff => return step.fail("TODO coff parser", .{}),
|
||||
|
|
@ -1597,8 +1597,8 @@ const MachODumper = struct {
|
|||
},
|
||||
},
|
||||
|
||||
inline fn rankByTag(self: Export) u3 {
|
||||
return switch (self.tag) {
|
||||
inline fn rankByTag(@"export": Export) u3 {
|
||||
return switch (@"export".tag) {
|
||||
.@"export" => 1,
|
||||
.reexport => 2,
|
||||
.stub_resolver => 3,
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -52,7 +52,7 @@ pub const Options = struct {
|
|||
};
|
||||
|
||||
pub fn create(owner: *std.Build, options: Options) *ConfigHeader {
|
||||
const self = owner.allocator.create(ConfigHeader) catch @panic("OOM");
|
||||
const config_header = owner.allocator.create(ConfigHeader) catch @panic("OOM");
|
||||
|
||||
var include_path: []const u8 = "config.h";
|
||||
|
||||
|
|
@ -81,7 +81,7 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeader {
|
|||
else
|
||||
owner.fmt("configure {s} header to {s}", .{ @tagName(options.style), include_path });
|
||||
|
||||
self.* = .{
|
||||
config_header.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = base_id,
|
||||
.name = name,
|
||||
|
|
@ -95,64 +95,64 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeader {
|
|||
.max_bytes = options.max_bytes,
|
||||
.include_path = include_path,
|
||||
.include_guard_override = options.include_guard_override,
|
||||
.output_file = .{ .step = &self.step },
|
||||
.output_file = .{ .step = &config_header.step },
|
||||
};
|
||||
|
||||
return self;
|
||||
return config_header;
|
||||
}
|
||||
|
||||
pub fn addValues(self: *ConfigHeader, values: anytype) void {
|
||||
return addValuesInner(self, values) catch @panic("OOM");
|
||||
pub fn addValues(config_header: *ConfigHeader, values: anytype) void {
|
||||
return addValuesInner(config_header, values) catch @panic("OOM");
|
||||
}
|
||||
|
||||
pub fn getOutput(self: *ConfigHeader) std.Build.LazyPath {
|
||||
return .{ .generated = &self.output_file };
|
||||
pub fn getOutput(config_header: *ConfigHeader) std.Build.LazyPath {
|
||||
return .{ .generated = &config_header.output_file };
|
||||
}
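Illustration (not part of the diff): typical `ConfigHeader` use from a `build.zig`; assumes `b` and `exe`, with invented macro names and values.

```zig
const config_header = b.addConfigHeader(.{
    .style = .blank,
    .include_path = "config.h",
}, .{
    .HAVE_THING = true,      // renders as: #define HAVE_THING 1 (illustrative)
    .VERSION_MAJOR = 1,
    .PACKAGE_NAME = "example",
});
exe.addConfigHeader(config_header);
```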
|
||||
|
||||
fn addValuesInner(self: *ConfigHeader, values: anytype) !void {
|
||||
fn addValuesInner(config_header: *ConfigHeader, values: anytype) !void {
|
||||
inline for (@typeInfo(@TypeOf(values)).Struct.fields) |field| {
|
||||
try putValue(self, field.name, field.type, @field(values, field.name));
|
||||
try putValue(config_header, field.name, field.type, @field(values, field.name));
|
||||
}
|
||||
}
|
||||
|
||||
fn putValue(self: *ConfigHeader, field_name: []const u8, comptime T: type, v: T) !void {
|
||||
fn putValue(config_header: *ConfigHeader, field_name: []const u8, comptime T: type, v: T) !void {
|
||||
switch (@typeInfo(T)) {
|
||||
.Null => {
|
||||
try self.values.put(field_name, .undef);
|
||||
try config_header.values.put(field_name, .undef);
|
||||
},
|
||||
.Void => {
|
||||
try self.values.put(field_name, .defined);
|
||||
try config_header.values.put(field_name, .defined);
|
||||
},
|
||||
.Bool => {
|
||||
try self.values.put(field_name, .{ .boolean = v });
|
||||
try config_header.values.put(field_name, .{ .boolean = v });
|
||||
},
|
||||
.Int => {
|
||||
try self.values.put(field_name, .{ .int = v });
|
||||
try config_header.values.put(field_name, .{ .int = v });
|
||||
},
|
||||
.ComptimeInt => {
|
||||
try self.values.put(field_name, .{ .int = v });
|
||||
try config_header.values.put(field_name, .{ .int = v });
|
||||
},
|
||||
.EnumLiteral => {
|
||||
try self.values.put(field_name, .{ .ident = @tagName(v) });
|
||||
try config_header.values.put(field_name, .{ .ident = @tagName(v) });
|
||||
},
|
||||
.Optional => {
|
||||
if (v) |x| {
|
||||
return putValue(self, field_name, @TypeOf(x), x);
|
||||
return putValue(config_header, field_name, @TypeOf(x), x);
|
||||
} else {
|
||||
try self.values.put(field_name, .undef);
|
||||
try config_header.values.put(field_name, .undef);
|
||||
}
|
||||
},
|
||||
.Pointer => |ptr| {
|
||||
switch (@typeInfo(ptr.child)) {
|
||||
.Array => |array| {
|
||||
if (ptr.size == .One and array.child == u8) {
|
||||
try self.values.put(field_name, .{ .string = v });
|
||||
try config_header.values.put(field_name, .{ .string = v });
|
||||
return;
|
||||
}
|
||||
},
|
||||
.Int => {
|
||||
if (ptr.size == .Slice and ptr.child == u8) {
|
||||
try self.values.put(field_name, .{ .string = v });
|
||||
try config_header.values.put(field_name, .{ .string = v });
|
||||
return;
|
||||
}
|
||||
},
|
||||
|
|
@ -168,7 +168,7 @@ fn putValue(self: *ConfigHeader, field_name: []const u8, comptime T: type, v: T)
|
|||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const self: *ConfigHeader = @fieldParentPtr("step", step);
|
||||
const config_header: *ConfigHeader = @fieldParentPtr("step", step);
|
||||
const gpa = b.allocator;
|
||||
const arena = b.allocator;
|
||||
|
||||
|
|
@ -179,8 +179,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
// random bytes when ConfigHeader implementation is modified in a
|
||||
// non-backwards-compatible way.
|
||||
man.hash.add(@as(u32, 0xdef08d23));
|
||||
man.hash.addBytes(self.include_path);
|
||||
man.hash.addOptionalBytes(self.include_guard_override);
|
||||
man.hash.addBytes(config_header.include_path);
|
||||
man.hash.addOptionalBytes(config_header.include_guard_override);
|
||||
|
||||
var output = std.ArrayList(u8).init(gpa);
|
||||
defer output.deinit();
|
||||
|
|
@ -189,34 +189,34 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
const c_generated_line = "/* " ++ header_text ++ " */\n";
|
||||
const asm_generated_line = "; " ++ header_text ++ "\n";
|
||||
|
||||
switch (self.style) {
|
||||
switch (config_header.style) {
|
||||
.autoconf => |file_source| {
|
||||
try output.appendSlice(c_generated_line);
|
||||
const src_path = file_source.getPath(b);
|
||||
const contents = std.fs.cwd().readFileAlloc(arena, src_path, self.max_bytes) catch |err| {
|
||||
const src_path = file_source.getPath2(b, step);
|
||||
const contents = std.fs.cwd().readFileAlloc(arena, src_path, config_header.max_bytes) catch |err| {
|
||||
return step.fail("unable to read autoconf input file '{s}': {s}", .{
|
||||
src_path, @errorName(err),
|
||||
});
|
||||
};
|
||||
try render_autoconf(step, contents, &output, self.values, src_path);
|
||||
try render_autoconf(step, contents, &output, config_header.values, src_path);
|
||||
},
|
||||
.cmake => |file_source| {
|
||||
try output.appendSlice(c_generated_line);
|
||||
const src_path = file_source.getPath(b);
|
||||
const contents = std.fs.cwd().readFileAlloc(arena, src_path, self.max_bytes) catch |err| {
|
||||
const src_path = file_source.getPath2(b, step);
|
||||
const contents = std.fs.cwd().readFileAlloc(arena, src_path, config_header.max_bytes) catch |err| {
|
||||
return step.fail("unable to read cmake input file '{s}': {s}", .{
|
||||
src_path, @errorName(err),
|
||||
});
|
||||
};
|
||||
try render_cmake(step, contents, &output, self.values, src_path);
|
||||
try render_cmake(step, contents, &output, config_header.values, src_path);
|
||||
},
|
||||
.blank => {
|
||||
try output.appendSlice(c_generated_line);
|
||||
try render_blank(&output, self.values, self.include_path, self.include_guard_override);
|
||||
try render_blank(&output, config_header.values, config_header.include_path, config_header.include_guard_override);
|
||||
},
|
||||
.nasm => {
|
||||
try output.appendSlice(asm_generated_line);
|
||||
try render_nasm(&output, self.values);
|
||||
try render_nasm(&output, config_header.values);
|
||||
},
|
||||
}
|
||||
|
||||
|
|
@ -224,8 +224,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
|
||||
if (try step.cacheHit(&man)) {
|
||||
const digest = man.final();
|
||||
self.output_file.path = try b.cache_root.join(arena, &.{
|
||||
"o", &digest, self.include_path,
|
||||
config_header.output_file.path = try b.cache_root.join(arena, &.{
|
||||
"o", &digest, config_header.include_path,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
|
@ -237,7 +237,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
// output_path is libavutil/avconfig.h
|
||||
// We want to open directory zig-cache/o/HASH/libavutil/
|
||||
// but keep output_dir as zig-cache/o/HASH for -I include
|
||||
const sub_path = try std.fs.path.join(arena, &.{ "o", &digest, self.include_path });
|
||||
const sub_path = b.pathJoin(&.{ "o", &digest, config_header.include_path });
|
||||
const sub_path_dirname = std.fs.path.dirname(sub_path).?;
|
||||
|
||||
b.cache_root.handle.makePath(sub_path_dirname) catch |err| {
|
||||
|
|
@ -252,7 +252,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
});
|
||||
};
|
||||
|
||||
self.output_file.path = try b.cache_root.join(arena, &.{sub_path});
|
||||
config_header.output_file.path = try b.cache_root.join(arena, &.{sub_path});
|
||||
try man.writeManifest();
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ paths: []const []const u8,
|
|||
exclude_paths: []const []const u8,
|
||||
check: bool,
|
||||
|
||||
pub const base_id = .fmt;
|
||||
pub const base_id: Step.Id = .fmt;
|
||||
|
||||
pub const Options = struct {
|
||||
paths: []const []const u8 = &.{},
|
||||
|
|
@ -20,9 +20,9 @@ pub const Options = struct {
|
|||
};
|
||||
|
||||
pub fn create(owner: *std.Build, options: Options) *Fmt {
|
||||
const self = owner.allocator.create(Fmt) catch @panic("OOM");
|
||||
const fmt = owner.allocator.create(Fmt) catch @panic("OOM");
|
||||
const name = if (options.check) "zig fmt --check" else "zig fmt";
|
||||
self.* = .{
|
||||
fmt.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = base_id,
|
||||
.name = name,
|
||||
|
|
@ -33,7 +33,7 @@ pub fn create(owner: *std.Build, options: Options) *Fmt {
|
|||
.exclude_paths = owner.dupeStrings(options.exclude_paths),
|
||||
.check = options.check,
|
||||
};
|
||||
return self;
|
||||
return fmt;
|
||||
}
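Illustration (not part of the diff): wiring this step up as a formatting check; the step name and paths are invented.

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    const fmt_step = b.step("fmt", "Check source formatting");
    const fmt = b.addFmt(.{
        .paths = &.{ "build.zig", "src" },
        .check = true, // fail instead of rewriting files
    });
    fmt_step.dependOn(&fmt.step);
}
```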
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
|
|
@ -47,23 +47,23 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
|
||||
const b = step.owner;
|
||||
const arena = b.allocator;
|
||||
const self: *Fmt = @fieldParentPtr("step", step);
|
||||
const fmt: *Fmt = @fieldParentPtr("step", step);
|
||||
|
||||
var argv: std.ArrayListUnmanaged([]const u8) = .{};
|
||||
try argv.ensureUnusedCapacity(arena, 2 + 1 + self.paths.len + 2 * self.exclude_paths.len);
|
||||
try argv.ensureUnusedCapacity(arena, 2 + 1 + fmt.paths.len + 2 * fmt.exclude_paths.len);
|
||||
|
||||
argv.appendAssumeCapacity(b.graph.zig_exe);
|
||||
argv.appendAssumeCapacity("fmt");
|
||||
|
||||
if (self.check) {
|
||||
if (fmt.check) {
|
||||
argv.appendAssumeCapacity("--check");
|
||||
}
|
||||
|
||||
for (self.paths) |p| {
|
||||
for (fmt.paths) |p| {
|
||||
argv.appendAssumeCapacity(b.pathFromRoot(p));
|
||||
}
|
||||
|
||||
for (self.exclude_paths) |p| {
|
||||
for (fmt.exclude_paths) |p| {
|
||||
argv.appendAssumeCapacity("--exclude");
|
||||
argv.appendAssumeCapacity(b.pathFromRoot(p));
|
||||
}
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ const DylibSymlinkInfo = struct {
|
|||
name_only_filename: []const u8,
|
||||
};
|
||||
|
||||
pub const base_id = .install_artifact;
|
||||
pub const base_id: Step.Id = .install_artifact;
|
||||
|
||||
pub const Options = struct {
|
||||
/// Which installation directory to put the main output file into.
|
||||
|
|
@ -52,7 +52,7 @@ pub const Options = struct {
|
|||
};
|
||||
|
||||
pub fn create(owner: *std.Build, artifact: *Step.Compile, options: Options) *InstallArtifact {
|
||||
const self = owner.allocator.create(InstallArtifact) catch @panic("OOM");
|
||||
const install_artifact = owner.allocator.create(InstallArtifact) catch @panic("OOM");
|
||||
const dest_dir: ?InstallDir = switch (options.dest_dir) {
|
||||
.disabled => null,
|
||||
.default => switch (artifact.kind) {
|
||||
|
|
@ -62,7 +62,7 @@ pub fn create(owner: *std.Build, artifact: *Step.Compile, options: Options) *Ins
|
|||
},
|
||||
.override => |o| o,
|
||||
};
|
||||
self.* = .{
|
||||
install_artifact.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = base_id,
|
||||
.name = owner.fmt("install {s}", .{artifact.name}),
|
||||
|
|
@ -104,28 +104,28 @@ pub fn create(owner: *std.Build, artifact: *Step.Compile, options: Options) *Ins
|
|||
.artifact = artifact,
|
||||
};
|
||||
|
||||
self.step.dependOn(&artifact.step);
|
||||
install_artifact.step.dependOn(&artifact.step);
|
||||
|
||||
if (self.dest_dir != null) self.emitted_bin = artifact.getEmittedBin();
|
||||
if (self.pdb_dir != null) self.emitted_pdb = artifact.getEmittedPdb();
|
||||
if (install_artifact.dest_dir != null) install_artifact.emitted_bin = artifact.getEmittedBin();
|
||||
if (install_artifact.pdb_dir != null) install_artifact.emitted_pdb = artifact.getEmittedPdb();
|
||||
// https://github.com/ziglang/zig/issues/9698
|
||||
//if (self.h_dir != null) self.emitted_h = artifact.getEmittedH();
|
||||
if (self.implib_dir != null) self.emitted_implib = artifact.getEmittedImplib();
|
||||
//if (install_artifact.h_dir != null) install_artifact.emitted_h = artifact.getEmittedH();
|
||||
if (install_artifact.implib_dir != null) install_artifact.emitted_implib = artifact.getEmittedImplib();
|
||||
|
||||
return self;
|
||||
return install_artifact;
|
||||
}
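Illustration (not part of the diff): installing an artifact into a non-default directory via the options handled above; assumes `b` and `exe`, and the directory name is invented.

```zig
const install = b.addInstallArtifact(exe, .{
    .dest_dir = .{ .override = .{ .custom = "tools" } },
});
b.getInstallStep().dependOn(&install.step);
```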
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const self: *InstallArtifact = @fieldParentPtr("step", step);
|
||||
const install_artifact: *InstallArtifact = @fieldParentPtr("step", step);
|
||||
const b = step.owner;
|
||||
const cwd = fs.cwd();
|
||||
|
||||
var all_cached = true;
|
||||
|
||||
if (self.dest_dir) |dest_dir| {
|
||||
const full_dest_path = b.getInstallPath(dest_dir, self.dest_sub_path);
|
||||
const full_src_path = self.emitted_bin.?.getPath2(b, step);
|
||||
if (install_artifact.dest_dir) |dest_dir| {
|
||||
const full_dest_path = b.getInstallPath(dest_dir, install_artifact.dest_sub_path);
|
||||
const full_src_path = install_artifact.emitted_bin.?.getPath2(b, step);
|
||||
const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_dest_path, .{}) catch |err| {
|
||||
return step.fail("unable to update file from '{s}' to '{s}': {s}", .{
|
||||
full_src_path, full_dest_path, @errorName(err),
|
||||
|
|
@ -133,15 +133,15 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
};
|
||||
all_cached = all_cached and p == .fresh;
|
||||
|
||||
if (self.dylib_symlinks) |dls| {
|
||||
if (install_artifact.dylib_symlinks) |dls| {
|
||||
try Step.Compile.doAtomicSymLinks(step, full_dest_path, dls.major_only_filename, dls.name_only_filename);
|
||||
}
|
||||
|
||||
self.artifact.installed_path = full_dest_path;
|
||||
install_artifact.artifact.installed_path = full_dest_path;
|
||||
}
|
||||
|
||||
if (self.implib_dir) |implib_dir| {
|
||||
const full_src_path = self.emitted_implib.?.getPath2(b, step);
|
||||
if (install_artifact.implib_dir) |implib_dir| {
|
||||
const full_src_path = install_artifact.emitted_implib.?.getPath2(b, step);
|
||||
const full_implib_path = b.getInstallPath(implib_dir, fs.path.basename(full_src_path));
|
||||
const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_implib_path, .{}) catch |err| {
|
||||
return step.fail("unable to update file from '{s}' to '{s}': {s}", .{
|
||||
|
|
@ -151,8 +151,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
all_cached = all_cached and p == .fresh;
|
||||
}
|
||||
|
||||
if (self.pdb_dir) |pdb_dir| {
|
||||
const full_src_path = self.emitted_pdb.?.getPath2(b, step);
|
||||
if (install_artifact.pdb_dir) |pdb_dir| {
|
||||
const full_src_path = install_artifact.emitted_pdb.?.getPath2(b, step);
|
||||
const full_pdb_path = b.getInstallPath(pdb_dir, fs.path.basename(full_src_path));
|
||||
const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_pdb_path, .{}) catch |err| {
|
||||
return step.fail("unable to update file from '{s}' to '{s}': {s}", .{
|
||||
|
|
@ -162,8 +162,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
all_cached = all_cached and p == .fresh;
|
||||
}
|
||||
|
||||
if (self.h_dir) |h_dir| {
|
||||
if (self.emitted_h) |emitted_h| {
|
||||
if (install_artifact.h_dir) |h_dir| {
|
||||
if (install_artifact.emitted_h) |emitted_h| {
|
||||
const full_src_path = emitted_h.getPath2(b, step);
|
||||
const full_h_path = b.getInstallPath(h_dir, fs.path.basename(full_src_path));
|
||||
const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_h_path, .{}) catch |err| {
|
||||
|
|
@ -174,7 +174,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
all_cached = all_cached and p == .fresh;
|
||||
}
|
||||
|
||||
for (self.artifact.installed_headers.items) |installation| switch (installation) {
|
||||
for (install_artifact.artifact.installed_headers.items) |installation| switch (installation) {
|
||||
.file => |file| {
|
||||
const full_src_path = file.source.getPath2(b, step);
|
||||
const full_h_path = b.getInstallPath(h_dir, file.dest_rel_path);
|
||||
|
|
|
|||
|
|
@ -3,17 +3,16 @@ const mem = std.mem;
|
|||
const fs = std.fs;
|
||||
const Step = std.Build.Step;
|
||||
const LazyPath = std.Build.LazyPath;
|
||||
const InstallDir = std.Build.InstallDir;
|
||||
const InstallDirStep = @This();
|
||||
const InstallDir = @This();
|
||||
|
||||
step: Step,
|
||||
options: Options,
|
||||
|
||||
pub const base_id = .install_dir;
|
||||
pub const base_id: Step.Id = .install_dir;
|
||||
|
||||
pub const Options = struct {
|
||||
source_dir: LazyPath,
|
||||
install_dir: InstallDir,
|
||||
install_dir: std.Build.InstallDir,
|
||||
install_subdir: []const u8,
|
||||
/// File paths which end in any of these suffixes will be excluded
|
||||
/// from being installed.
|
||||
|
|
@ -29,41 +28,41 @@ pub const Options = struct {
|
|||
/// `@import("test.zig")` would be a compile error.
|
||||
blank_extensions: []const []const u8 = &.{},
|
||||
|
||||
fn dupe(self: Options, b: *std.Build) Options {
|
||||
fn dupe(opts: Options, b: *std.Build) Options {
|
||||
return .{
|
||||
.source_dir = self.source_dir.dupe(b),
|
||||
.install_dir = self.install_dir.dupe(b),
|
||||
.install_subdir = b.dupe(self.install_subdir),
|
||||
.exclude_extensions = b.dupeStrings(self.exclude_extensions),
|
||||
.include_extensions = if (self.include_extensions) |incs| b.dupeStrings(incs) else null,
|
||||
.blank_extensions = b.dupeStrings(self.blank_extensions),
|
||||
.source_dir = opts.source_dir.dupe(b),
|
||||
.install_dir = opts.install_dir.dupe(b),
|
||||
.install_subdir = b.dupe(opts.install_subdir),
|
||||
.exclude_extensions = b.dupeStrings(opts.exclude_extensions),
|
||||
.include_extensions = if (opts.include_extensions) |incs| b.dupeStrings(incs) else null,
|
||||
.blank_extensions = b.dupeStrings(opts.blank_extensions),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub fn create(owner: *std.Build, options: Options) *InstallDirStep {
|
||||
pub fn create(owner: *std.Build, options: Options) *InstallDir {
|
||||
owner.pushInstalledFile(options.install_dir, options.install_subdir);
|
||||
const self = owner.allocator.create(InstallDirStep) catch @panic("OOM");
|
||||
self.* = .{
|
||||
const install_dir = owner.allocator.create(InstallDir) catch @panic("OOM");
|
||||
install_dir.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = .install_dir,
|
||||
.id = base_id,
|
||||
.name = owner.fmt("install {s}/", .{options.source_dir.getDisplayName()}),
|
||||
.owner = owner,
|
||||
.makeFn = make,
|
||||
}),
|
||||
.options = options.dupe(owner),
|
||||
};
|
||||
options.source_dir.addStepDependencies(&self.step);
|
||||
return self;
|
||||
options.source_dir.addStepDependencies(&install_dir.step);
|
||||
return install_dir;
|
||||
}
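Illustration (not part of the diff): the usual entry point for this step from a `build.zig`; assumes `b`, with invented paths.

```zig
b.installDirectory(.{
    .source_dir = b.path("docs"),
    .install_dir = .prefix,
    .install_subdir = "share/doc/example",
});
```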
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const self: *InstallDirStep = @fieldParentPtr("step", step);
|
||||
const install_dir: *InstallDir = @fieldParentPtr("step", step);
|
||||
const arena = b.allocator;
|
||||
const dest_prefix = b.getInstallPath(self.options.install_dir, self.options.install_subdir);
|
||||
const src_dir_path = self.options.source_dir.getPath2(b, step);
|
||||
const dest_prefix = b.getInstallPath(install_dir.options.install_dir, install_dir.options.install_subdir);
|
||||
const src_dir_path = install_dir.options.source_dir.getPath2(b, step);
|
||||
var src_dir = b.build_root.handle.openDir(src_dir_path, .{ .iterate = true }) catch |err| {
|
||||
return step.fail("unable to open source directory '{}{s}': {s}", .{
|
||||
b.build_root, src_dir_path, @errorName(err),
|
||||
|
|
@ -73,12 +72,12 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
var it = try src_dir.walk(arena);
|
||||
var all_cached = true;
|
||||
next_entry: while (try it.next()) |entry| {
|
||||
for (self.options.exclude_extensions) |ext| {
|
||||
for (install_dir.options.exclude_extensions) |ext| {
|
||||
if (mem.endsWith(u8, entry.path, ext)) {
|
||||
continue :next_entry;
|
||||
}
|
||||
}
|
||||
if (self.options.include_extensions) |incs| {
|
||||
if (install_dir.options.include_extensions) |incs| {
|
||||
var found = false;
|
||||
for (incs) |inc| {
|
||||
if (mem.endsWith(u8, entry.path, inc)) {
|
||||
|
|
@ -90,14 +89,14 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
}
|
||||
|
||||
// relative to src build root
|
||||
const src_sub_path = try fs.path.join(arena, &.{ src_dir_path, entry.path });
|
||||
const dest_path = try fs.path.join(arena, &.{ dest_prefix, entry.path });
|
||||
const src_sub_path = b.pathJoin(&.{ src_dir_path, entry.path });
|
||||
const dest_path = b.pathJoin(&.{ dest_prefix, entry.path });
|
||||
const cwd = fs.cwd();
|
||||
|
||||
switch (entry.kind) {
|
||||
.directory => try cwd.makePath(dest_path),
|
||||
.file => {
|
||||
for (self.options.blank_extensions) |ext| {
|
||||
for (install_dir.options.blank_extensions) |ext| {
|
||||
if (mem.endsWith(u8, entry.path, ext)) {
|
||||
try b.truncateFile(dest_path);
|
||||
continue :next_entry;
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ const InstallDir = std.Build.InstallDir;
|
|||
const InstallFile = @This();
|
||||
const assert = std.debug.assert;
|
||||
|
||||
pub const base_id = .install_file;
|
||||
pub const base_id: Step.Id = .install_file;
|
||||
|
||||
step: Step,
|
||||
source: LazyPath,
|
||||
|
|
@ -20,8 +20,8 @@ pub fn create(
|
|||
) *InstallFile {
|
||||
assert(dest_rel_path.len != 0);
|
||||
owner.pushInstalledFile(dir, dest_rel_path);
|
||||
const self = owner.allocator.create(InstallFile) catch @panic("OOM");
|
||||
self.* = .{
|
||||
const install_file = owner.allocator.create(InstallFile) catch @panic("OOM");
|
||||
install_file.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = base_id,
|
||||
.name = owner.fmt("install {s} to {s}", .{ source.getDisplayName(), dest_rel_path }),
|
||||
|
|
@ -32,16 +32,16 @@ pub fn create(
|
|||
.dir = dir.dupe(owner),
|
||||
.dest_rel_path = owner.dupePath(dest_rel_path),
|
||||
};
|
||||
source.addStepDependencies(&self.step);
|
||||
return self;
|
||||
source.addStepDependencies(&install_file.step);
|
||||
return install_file;
|
||||
}
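Illustration (not part of the diff): a single-file install; assumes `b`, and the source and destination paths are invented.

```zig
const install_conf = b.addInstallFile(b.path("assets/app.conf"), "etc/app.conf");
b.getInstallStep().dependOn(&install_conf.step);
```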
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const self: *InstallFile = @fieldParentPtr("step", step);
|
||||
const full_src_path = self.source.getPath2(b, step);
|
||||
const full_dest_path = b.getInstallPath(self.dir, self.dest_rel_path);
|
||||
const install_file: *InstallFile = @fieldParentPtr("step", step);
|
||||
const full_src_path = install_file.source.getPath2(b, step);
|
||||
const full_dest_path = b.getInstallPath(install_file.dir, install_file.dest_rel_path);
|
||||
const cwd = std.fs.cwd();
|
||||
const prev = std.fs.Dir.updateFile(cwd, full_src_path, cwd, full_dest_path, .{}) catch |err| {
|
||||
return step.fail("unable to update file from '{s}' to '{s}': {s}", .{
|
||||
|
|
|
|||
|
|
@ -58,8 +58,8 @@ pub fn create(
|
|||
input_file: std.Build.LazyPath,
|
||||
options: Options,
|
||||
) *ObjCopy {
|
||||
const self = owner.allocator.create(ObjCopy) catch @panic("OOM");
|
||||
self.* = ObjCopy{
|
||||
const objcopy = owner.allocator.create(ObjCopy) catch @panic("OOM");
|
||||
objcopy.* = ObjCopy{
|
||||
.step = Step.init(.{
|
||||
.id = base_id,
|
||||
.name = owner.fmt("objcopy {s}", .{input_file.getDisplayName()}),
|
||||
|
|
@ -68,31 +68,31 @@ pub fn create(
|
|||
}),
|
||||
.input_file = input_file,
|
||||
.basename = options.basename orelse input_file.getDisplayName(),
|
||||
.output_file = std.Build.GeneratedFile{ .step = &self.step },
|
||||
.output_file_debug = if (options.strip != .none and options.extract_to_separate_file) std.Build.GeneratedFile{ .step = &self.step } else null,
|
||||
.output_file = std.Build.GeneratedFile{ .step = &objcopy.step },
|
||||
.output_file_debug = if (options.strip != .none and options.extract_to_separate_file) std.Build.GeneratedFile{ .step = &objcopy.step } else null,
|
||||
.format = options.format,
|
||||
.only_sections = options.only_sections,
|
||||
.pad_to = options.pad_to,
|
||||
.strip = options.strip,
|
||||
.compress_debug = options.compress_debug,
|
||||
};
|
||||
input_file.addStepDependencies(&self.step);
|
||||
return self;
|
||||
input_file.addStepDependencies(&objcopy.step);
|
||||
return objcopy;
|
||||
}
|
||||
|
||||
/// deprecated: use getOutput
|
||||
pub const getOutputSource = getOutput;
|
||||
|
||||
pub fn getOutput(self: *const ObjCopy) std.Build.LazyPath {
|
||||
return .{ .generated = &self.output_file };
|
||||
pub fn getOutput(objcopy: *const ObjCopy) std.Build.LazyPath {
|
||||
return .{ .generated = &objcopy.output_file };
|
||||
}
|
||||
pub fn getOutputSeparatedDebug(self: *const ObjCopy) ?std.Build.LazyPath {
|
||||
return if (self.output_file_debug) |*file| .{ .generated = file } else null;
|
||||
pub fn getOutputSeparatedDebug(objcopy: *const ObjCopy) ?std.Build.LazyPath {
|
||||
return if (objcopy.output_file_debug) |*file| .{ .generated = file } else null;
|
||||
}
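Illustration (not part of the diff): a common firmware-style use of ObjCopy, converting an ELF artifact to a raw binary; assumes `b` and an `exe` compile step, with invented names.

```zig
const objcopy = b.addObjCopy(exe.getEmittedBin(), .{
    .format = .bin, // raw binary output
});
const install_bin = b.addInstallBinFile(objcopy.getOutput(), "firmware.bin");
b.getInstallStep().dependOn(&install_bin.step);
```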
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
const b = step.owner;
|
||||
const self: *ObjCopy = @fieldParentPtr("step", step);
|
||||
const objcopy: *ObjCopy = @fieldParentPtr("step", step);
|
||||
|
||||
var man = b.graph.cache.obtain();
|
||||
defer man.deinit();
|
||||
|
|
@ -101,24 +101,24 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
// bytes when ObjCopy implementation is modified incompatibly.
|
||||
man.hash.add(@as(u32, 0xe18b7baf));
|
||||
|
||||
const full_src_path = self.input_file.getPath(b);
|
||||
const full_src_path = objcopy.input_file.getPath2(b, step);
|
||||
_ = try man.addFile(full_src_path, null);
|
||||
man.hash.addOptionalListOfBytes(self.only_sections);
|
||||
man.hash.addOptional(self.pad_to);
|
||||
man.hash.addOptional(self.format);
|
||||
man.hash.add(self.compress_debug);
|
||||
man.hash.add(self.strip);
|
||||
man.hash.add(self.output_file_debug != null);
|
||||
man.hash.addOptionalListOfBytes(objcopy.only_sections);
|
||||
man.hash.addOptional(objcopy.pad_to);
|
||||
man.hash.addOptional(objcopy.format);
|
||||
man.hash.add(objcopy.compress_debug);
|
||||
man.hash.add(objcopy.strip);
|
||||
man.hash.add(objcopy.output_file_debug != null);
|
||||
|
||||
if (try step.cacheHit(&man)) {
|
||||
// Cache hit, skip subprocess execution.
|
||||
const digest = man.final();
|
||||
self.output_file.path = try b.cache_root.join(b.allocator, &.{
|
||||
"o", &digest, self.basename,
|
||||
objcopy.output_file.path = try b.cache_root.join(b.allocator, &.{
|
||||
"o", &digest, objcopy.basename,
|
||||
});
|
||||
if (self.output_file_debug) |*file| {
|
||||
if (objcopy.output_file_debug) |*file| {
|
||||
file.path = try b.cache_root.join(b.allocator, &.{
|
||||
"o", &digest, b.fmt("{s}.debug", .{self.basename}),
|
||||
"o", &digest, b.fmt("{s}.debug", .{objcopy.basename}),
|
||||
});
|
||||
}
|
||||
return;
|
||||
|
|
@ -126,8 +126,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
|
||||
const digest = man.final();
|
||||
const cache_path = "o" ++ fs.path.sep_str ++ digest;
|
||||
const full_dest_path = try b.cache_root.join(b.allocator, &.{ cache_path, self.basename });
|
||||
const full_dest_path_debug = try b.cache_root.join(b.allocator, &.{ cache_path, b.fmt("{s}.debug", .{self.basename}) });
|
||||
const full_dest_path = try b.cache_root.join(b.allocator, &.{ cache_path, objcopy.basename });
|
||||
const full_dest_path_debug = try b.cache_root.join(b.allocator, &.{ cache_path, b.fmt("{s}.debug", .{objcopy.basename}) });
|
||||
b.cache_root.handle.makePath(cache_path) catch |err| {
|
||||
return step.fail("unable to make path {s}: {s}", .{ cache_path, @errorName(err) });
|
||||
};
|
||||
|
|
@ -135,28 +135,28 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
var argv = std.ArrayList([]const u8).init(b.allocator);
|
||||
try argv.appendSlice(&.{ b.graph.zig_exe, "objcopy" });
|
||||
|
||||
if (self.only_sections) |only_sections| {
|
||||
if (objcopy.only_sections) |only_sections| {
|
||||
for (only_sections) |only_section| {
|
||||
try argv.appendSlice(&.{ "-j", only_section });
|
||||
}
|
||||
}
|
||||
switch (self.strip) {
|
||||
switch (objcopy.strip) {
|
||||
.none => {},
|
||||
.debug => try argv.appendSlice(&.{"--strip-debug"}),
|
||||
.debug_and_symbols => try argv.appendSlice(&.{"--strip-all"}),
|
||||
}
|
||||
if (self.pad_to) |pad_to| {
|
||||
if (objcopy.pad_to) |pad_to| {
|
||||
try argv.appendSlice(&.{ "--pad-to", b.fmt("{d}", .{pad_to}) });
|
||||
}
|
||||
if (self.format) |format| switch (format) {
|
||||
if (objcopy.format) |format| switch (format) {
|
||||
.bin => try argv.appendSlice(&.{ "-O", "binary" }),
|
||||
.hex => try argv.appendSlice(&.{ "-O", "hex" }),
|
||||
.elf => try argv.appendSlice(&.{ "-O", "elf" }),
|
||||
};
|
||||
if (self.compress_debug) {
|
||||
if (objcopy.compress_debug) {
|
||||
try argv.appendSlice(&.{"--compress-debug-sections"});
|
||||
}
|
||||
if (self.output_file_debug != null) {
|
||||
if (objcopy.output_file_debug != null) {
|
||||
try argv.appendSlice(&.{b.fmt("--extract-to={s}", .{full_dest_path_debug})});
|
||||
}
|
||||
|
||||
|
|
@ -165,7 +165,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
try argv.append("--listen=-");
|
||||
_ = try step.evalZigProcess(argv.items, prog_node);
|
||||
|
||||
self.output_file.path = full_dest_path;
|
||||
if (self.output_file_debug) |*file| file.path = full_dest_path_debug;
|
||||
objcopy.output_file.path = full_dest_path;
|
||||
if (objcopy.output_file_debug) |*file| file.path = full_dest_path_debug;
|
||||
try man.writeManifest();
|
||||
}
|
||||
|
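A minimal build.zig sketch of how this ObjCopy step is typically driven (this example is not part of the commit; `b.addObjCopy`, `addInstallBinFile`, and the option values here are assumptions based on the surrounding code):

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        const exe = b.addExecutable(.{
            .name = "firmware",
            .root_source_file = b.path("src/main.zig"),
            .target = b.standardTargetOptions(.{}),
            .optimize = b.standardOptimizeOption(.{}),
        });

        // Convert the linked ELF into a raw binary image and install it.
        const objcopy = b.addObjCopy(exe.getEmittedBin(), .{ .format = .bin });
        const install_bin = b.addInstallBinFile(objcopy.getOutput(), "firmware.bin");
        b.getInstallStep().dependOn(&install_bin.step);
    }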

@@ -7,7 +7,7 @@ const LazyPath = std.Build.LazyPath;

const Options = @This();

pub const base_id = .options;
pub const base_id: Step.Id = .options;

step: Step,
generated_file: GeneratedFile,

@@ -17,8 +17,8 @@ args: std.ArrayList(Arg),
encountered_types: std.StringHashMap(void),

pub fn create(owner: *std.Build) *Options {
    const self = owner.allocator.create(Options) catch @panic("OOM");
    self.* = .{
    const options = owner.allocator.create(Options) catch @panic("OOM");
    options.* = .{
        .step = Step.init(.{
            .id = base_id,
            .name = "options",

@@ -30,21 +30,21 @@ pub fn create(owner: *std.Build) *Options {
        .args = std.ArrayList(Arg).init(owner.allocator),
        .encountered_types = std.StringHashMap(void).init(owner.allocator),
    };
    self.generated_file = .{ .step = &self.step };
    options.generated_file = .{ .step = &options.step };

    return self;
    return options;
}

pub fn addOption(self: *Options, comptime T: type, name: []const u8, value: T) void {
    return addOptionFallible(self, T, name, value) catch @panic("unhandled error");
pub fn addOption(options: *Options, comptime T: type, name: []const u8, value: T) void {
    return addOptionFallible(options, T, name, value) catch @panic("unhandled error");
}

fn addOptionFallible(self: *Options, comptime T: type, name: []const u8, value: T) !void {
    const out = self.contents.writer();
    try printType(self, out, T, value, 0, name);
fn addOptionFallible(options: *Options, comptime T: type, name: []const u8, value: T) !void {
    const out = options.contents.writer();
    try printType(options, out, T, value, 0, name);
}

fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u8, name: ?[]const u8) !void {
fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent: u8, name: ?[]const u8) !void {
    switch (T) {
        []const []const u8 => {
            if (name) |payload| {
|
|
@ -159,7 +159,7 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
|||
try out.print("{s} {{\n", .{@typeName(T)});
|
||||
for (value) |item| {
|
||||
try out.writeByteNTimes(' ', indent + 4);
|
||||
try printType(self, out, @TypeOf(item), item, indent + 4, null);
|
||||
try printType(options, out, @TypeOf(item), item, indent + 4, null);
|
||||
}
|
||||
try out.writeByteNTimes(' ', indent);
|
||||
try out.writeAll("}");
|
||||
|
|
@ -183,7 +183,7 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
|||
try out.print("&[_]{s} {{\n", .{@typeName(p.child)});
|
||||
for (value) |item| {
|
||||
try out.writeByteNTimes(' ', indent + 4);
|
||||
try printType(self, out, @TypeOf(item), item, indent + 4, null);
|
||||
try printType(options, out, @TypeOf(item), item, indent + 4, null);
|
||||
}
|
||||
try out.writeByteNTimes(' ', indent);
|
||||
try out.writeAll("}");
|
||||
|
|
@ -201,10 +201,10 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
|||
}
|
||||
|
||||
if (value) |inner| {
|
||||
try printType(self, out, @TypeOf(inner), inner, indent + 4, null);
|
||||
try printType(options, out, @TypeOf(inner), inner, indent + 4, null);
|
||||
// Pop the '\n' and ',' chars
|
||||
_ = self.contents.pop();
|
||||
_ = self.contents.pop();
|
||||
_ = options.contents.pop();
|
||||
_ = options.contents.pop();
|
||||
} else {
|
||||
try out.writeAll("null");
|
||||
}
|
||||
|
|
@ -231,7 +231,7 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
|||
return;
|
||||
},
|
||||
.Enum => |info| {
|
||||
try printEnum(self, out, T, info, indent);
|
||||
try printEnum(options, out, T, info, indent);
|
||||
|
||||
if (name) |some| {
|
||||
try out.print("pub const {}: {} = .{p_};\n", .{
|
||||
|
|
@ -243,14 +243,14 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
|||
return;
|
||||
},
|
||||
.Struct => |info| {
|
||||
try printStruct(self, out, T, info, indent);
|
||||
try printStruct(options, out, T, info, indent);
|
||||
|
||||
if (name) |some| {
|
||||
try out.print("pub const {}: {} = ", .{
|
||||
std.zig.fmtId(some),
|
||||
std.zig.fmtId(@typeName(T)),
|
||||
});
|
||||
try printStructValue(self, out, info, value, indent);
|
||||
try printStructValue(options, out, info, value, indent);
|
||||
}
|
||||
return;
|
||||
},
|
||||
|
|
@ -258,20 +258,20 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
|||
}
|
||||
}
|
||||
|
||||
fn printUserDefinedType(self: *Options, out: anytype, comptime T: type, indent: u8) !void {
|
||||
fn printUserDefinedType(options: *Options, out: anytype, comptime T: type, indent: u8) !void {
|
||||
switch (@typeInfo(T)) {
|
||||
.Enum => |info| {
|
||||
return try printEnum(self, out, T, info, indent);
|
||||
return try printEnum(options, out, T, info, indent);
|
||||
},
|
||||
.Struct => |info| {
|
||||
return try printStruct(self, out, T, info, indent);
|
||||
return try printStruct(options, out, T, info, indent);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
fn printEnum(self: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Enum, indent: u8) !void {
|
||||
const gop = try self.encountered_types.getOrPut(@typeName(T));
|
||||
fn printEnum(options: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Enum, indent: u8) !void {
|
||||
const gop = try options.encountered_types.getOrPut(@typeName(T));
|
||||
if (gop.found_existing) return;
|
||||
|
||||
try out.writeByteNTimes(' ', indent);
|
||||
|
|
@ -291,8 +291,8 @@ fn printEnum(self: *Options, out: anytype, comptime T: type, comptime val: std.b
|
|||
try out.writeAll("};\n");
|
||||
}
|
||||
|
||||
fn printStruct(self: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Struct, indent: u8) !void {
|
||||
const gop = try self.encountered_types.getOrPut(@typeName(T));
|
||||
fn printStruct(options: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Struct, indent: u8) !void {
|
||||
const gop = try options.encountered_types.getOrPut(@typeName(T));
|
||||
if (gop.found_existing) return;
|
||||
|
||||
try out.writeByteNTimes(' ', indent);
|
||||
|
|
@ -325,9 +325,9 @@ fn printStruct(self: *Options, out: anytype, comptime T: type, comptime val: std
|
|||
switch (@typeInfo(@TypeOf(default_value))) {
|
||||
.Enum => try out.print(".{s},\n", .{@tagName(default_value)}),
|
||||
.Struct => |info| {
|
||||
try printStructValue(self, out, info, default_value, indent + 4);
|
||||
try printStructValue(options, out, info, default_value, indent + 4);
|
||||
},
|
||||
else => try printType(self, out, @TypeOf(default_value), default_value, indent, null),
|
||||
else => try printType(options, out, @TypeOf(default_value), default_value, indent, null),
|
||||
}
|
||||
} else {
|
||||
try out.writeAll(",\n");
|
||||
|
|
@ -340,17 +340,17 @@ fn printStruct(self: *Options, out: anytype, comptime T: type, comptime val: std
|
|||
try out.writeAll("};\n");
|
||||
|
||||
inline for (val.fields) |field| {
|
||||
try printUserDefinedType(self, out, field.type, 0);
|
||||
try printUserDefinedType(options, out, field.type, 0);
|
||||
}
|
||||
}
|
||||
|
||||
fn printStructValue(self: *Options, out: anytype, comptime struct_val: std.builtin.Type.Struct, val: anytype, indent: u8) !void {
|
||||
fn printStructValue(options: *Options, out: anytype, comptime struct_val: std.builtin.Type.Struct, val: anytype, indent: u8) !void {
|
||||
try out.writeAll(".{\n");
|
||||
|
||||
if (struct_val.is_tuple) {
|
||||
inline for (struct_val.fields) |field| {
|
||||
try out.writeByteNTimes(' ', indent);
|
||||
try printType(self, out, @TypeOf(@field(val, field.name)), @field(val, field.name), indent, null);
|
||||
try printType(options, out, @TypeOf(@field(val, field.name)), @field(val, field.name), indent, null);
|
||||
}
|
||||
} else {
|
||||
inline for (struct_val.fields) |field| {
|
||||
|
|
@ -361,9 +361,9 @@ fn printStructValue(self: *Options, out: anytype, comptime struct_val: std.built
|
|||
switch (@typeInfo(@TypeOf(field_name))) {
|
||||
.Enum => try out.print(".{s},\n", .{@tagName(field_name)}),
|
||||
.Struct => |struct_info| {
|
||||
try printStructValue(self, out, struct_info, field_name, indent + 4);
|
||||
try printStructValue(options, out, struct_info, field_name, indent + 4);
|
||||
},
|
||||
else => try printType(self, out, @TypeOf(field_name), field_name, indent, null),
|
||||
else => try printType(options, out, @TypeOf(field_name), field_name, indent, null),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -379,25 +379,25 @@ fn printStructValue(self: *Options, out: anytype, comptime struct_val: std.built
/// The value is the path in the cache dir.
/// Adds a dependency automatically.
pub fn addOptionPath(
    self: *Options,
    options: *Options,
    name: []const u8,
    path: LazyPath,
) void {
    self.args.append(.{
        .name = self.step.owner.dupe(name),
        .path = path.dupe(self.step.owner),
    options.args.append(.{
        .name = options.step.owner.dupe(name),
        .path = path.dupe(options.step.owner),
    }) catch @panic("OOM");
    path.addStepDependencies(&self.step);
    path.addStepDependencies(&options.step);
}

/// Deprecated: use `addOptionPath(options, name, artifact.getEmittedBin())` instead.
pub fn addOptionArtifact(self: *Options, name: []const u8, artifact: *Step.Compile) void {
    return addOptionPath(self, name, artifact.getEmittedBin());
pub fn addOptionArtifact(options: *Options, name: []const u8, artifact: *Step.Compile) void {
    return addOptionPath(options, name, artifact.getEmittedBin());
}

pub fn createModule(self: *Options) *std.Build.Module {
    return self.step.owner.createModule(.{
        .root_source_file = self.getOutput(),
pub fn createModule(options: *Options) *std.Build.Module {
    return options.step.owner.createModule(.{
        .root_source_file = options.getOutput(),
    });
}

@@ -406,8 +406,8 @@ pub const getSource = getOutput;

/// Returns the main artifact of this Build Step which is a Zig source file
/// generated from the key-value pairs of the Options.
pub fn getOutput(self: *Options) LazyPath {
    return .{ .generated = &self.generated_file };
pub fn getOutput(options: *Options) LazyPath {
    return .{ .generated = &options.generated_file };
}

fn make(step: *Step, prog_node: *std.Progress.Node) !void {

@@ -415,13 +415,13 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
    _ = prog_node;

    const b = step.owner;
    const self: *Options = @fieldParentPtr("step", step);
    const options: *Options = @fieldParentPtr("step", step);

    for (self.args.items) |item| {
        self.addOption(
    for (options.args.items) |item| {
        options.addOption(
            []const u8,
            item.name,
            item.path.getPath(b),
            item.path.getPath2(b, step),
        );
    }

@@ -432,10 +432,10 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
    // Random bytes to make unique. Refresh this with new random bytes when
    // implementation is modified in a non-backwards-compatible way.
    hash.add(@as(u32, 0xad95e922));
    hash.addBytes(self.contents.items);
    hash.addBytes(options.contents.items);
    const sub_path = "c" ++ fs.path.sep_str ++ hash.final() ++ fs.path.sep_str ++ basename;

    self.generated_file.path = try b.cache_root.join(b.allocator, &.{sub_path});
    options.generated_file.path = try b.cache_root.join(b.allocator, &.{sub_path});

    // Optimize for the hot path. Stat the file, and if it already exists,
    // cache hit.

@@ -464,7 +464,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
        });
    };

    b.cache_root.handle.writeFile(.{ .sub_path = tmp_sub_path, .data = self.contents.items }) catch |err| {
    b.cache_root.handle.writeFile(.{ .sub_path = tmp_sub_path, .data = options.contents.items }) catch |err| {
        return step.fail("unable to write options to '{}{s}': {s}", .{
            b.cache_root, tmp_sub_path, @errorName(err),
        });
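For orientation (not taken from this commit), a typical build.zig consumer of the Options step looks roughly like this; the `root_module.addOptions` wiring is an assumption based on the API documented above:

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        const exe = b.addExecutable(.{
            .name = "app",
            .root_source_file = b.path("src/main.zig"),
            .target = b.standardTargetOptions(.{}),
            .optimize = b.standardOptimizeOption(.{}),
        });

        // Collect key-value pairs; they are rendered into a generated Zig file.
        const opts = b.addOptions();
        opts.addOption(bool, "enable_tracing", false);
        opts.addOptionPath("asset_dir", b.path("assets"));

        // Expose the generated file to the program as @import("build_options").
        exe.root_module.addOptions("build_options", opts);
        b.installArtifact(exe);
    }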

@@ -3,23 +3,23 @@ const fs = std.fs;
const Step = std.Build.Step;
const RemoveDir = @This();

pub const base_id = .remove_dir;
pub const base_id: Step.Id = .remove_dir;

step: Step,
dir_path: []const u8,

pub fn create(owner: *std.Build, dir_path: []const u8) *RemoveDir {
    const self = owner.allocator.create(RemoveDir) catch @panic("OOM");
    self.* = .{
    const remove_dir = owner.allocator.create(RemoveDir) catch @panic("OOM");
    remove_dir.* = .{
        .step = Step.init(.{
            .id = .remove_dir,
            .id = base_id,
            .name = owner.fmt("RemoveDir {s}", .{dir_path}),
            .owner = owner,
            .makeFn = make,
        }),
        .dir_path = owner.dupePath(dir_path),
    };
    return self;
    return remove_dir;
}

fn make(step: *Step, prog_node: *std.Progress.Node) !void {

@@ -28,16 +28,16 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
    _ = prog_node;

    const b = step.owner;
    const self: *RemoveDir = @fieldParentPtr("step", step);
    const remove_dir: *RemoveDir = @fieldParentPtr("step", step);

    b.build_root.handle.deleteTree(self.dir_path) catch |err| {
    b.build_root.handle.deleteTree(remove_dir.dir_path) catch |err| {
        if (b.build_root.path) |base| {
            return step.fail("unable to recursively delete path '{s}/{s}': {s}", .{
                base, self.dir_path, @errorName(err),
                base, remove_dir.dir_path, @errorName(err),
            });
        } else {
            return step.fail("unable to recursively delete path '{s}': {s}", .{
                self.dir_path, @errorName(err),
                remove_dir.dir_path, @errorName(err),
            });
        }
    };
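As a point of reference (not part of this diff), the step above is usually created from a build script; a sketch of a "clean" step using the `create` signature shown here:

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        // Recursively delete the generated docs directory, relative to the build root.
        const remove_docs = std.Build.Step.RemoveDir.create(b, "docs");
        const clean_step = b.step("clean", "Delete generated documentation");
        clean_step.dependOn(&remove_docs.step);
    }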

@@ -140,8 +140,8 @@ pub const Output = struct {
};

pub fn create(owner: *std.Build, name: []const u8) *Run {
    const self = owner.allocator.create(Run) catch @panic("OOM");
    self.* = .{
    const run = owner.allocator.create(Run) catch @panic("OOM");
    run.* = .{
        .step = Step.init(.{
            .id = base_id,
            .name = name,

@@ -164,24 +164,24 @@ pub fn create(owner: *std.Build, name: []const u8) *Run {
        .dep_output_file = null,
        .has_side_effects = false,
    };
    return self;
    return run;
}

pub fn setName(self: *Run, name: []const u8) void {
    self.step.name = name;
    self.rename_step_with_output_arg = false;
pub fn setName(run: *Run, name: []const u8) void {
    run.step.name = name;
    run.rename_step_with_output_arg = false;
}

pub fn enableTestRunnerMode(self: *Run) void {
    self.stdio = .zig_test;
    self.addArgs(&.{"--listen=-"});
pub fn enableTestRunnerMode(run: *Run) void {
    run.stdio = .zig_test;
    run.addArgs(&.{"--listen=-"});
}

pub fn addArtifactArg(self: *Run, artifact: *Step.Compile) void {
    const b = self.step.owner;
pub fn addArtifactArg(run: *Run, artifact: *Step.Compile) void {
    const b = run.step.owner;
    const bin_file = artifact.getEmittedBin();
    bin_file.addStepDependencies(&self.step);
    self.argv.append(b.allocator, Arg{ .artifact = artifact }) catch @panic("OOM");
    bin_file.addStepDependencies(&run.step);
    run.argv.append(b.allocator, Arg{ .artifact = artifact }) catch @panic("OOM");
}

/// Provides a file path as a command line argument to the command being run.

@@ -192,8 +192,8 @@ pub fn addArtifactArg(self: *Run, artifact: *Step.Compile) void {
/// Related:
/// * `addPrefixedOutputFileArg` - same thing but prepends a string to the argument
/// * `addFileArg` - for input files given to the child process
pub fn addOutputFileArg(self: *Run, basename: []const u8) std.Build.LazyPath {
    return self.addPrefixedOutputFileArg("", basename);
pub fn addOutputFileArg(run: *Run, basename: []const u8) std.Build.LazyPath {
    return run.addPrefixedOutputFileArg("", basename);
}
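A short build.zig sketch of the Run API documented above (illustrative only; the executable setup around it is an assumption):

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        const exe = b.addExecutable(.{
            .name = "tool",
            .root_source_file = b.path("src/main.zig"),
            .target = b.standardTargetOptions(.{}),
            .optimize = b.standardOptimizeOption(.{}),
        });

        const run = b.addRunArtifact(exe);
        run.addArg("--emit");
        // The tool writes this file; the returned LazyPath can feed other steps.
        const generated = run.addOutputFileArg("table.bin");
        const install = b.addInstallFile(generated, "table.bin");
        b.getInstallStep().dependOn(&install.step);
    }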
|
||||
|
||||
/// Provides a file path as a command line argument to the command being run.
|
||||
|
|
@ -212,23 +212,23 @@ pub fn addOutputFileArg(self: *Run, basename: []const u8) std.Build.LazyPath {
|
|||
/// * `addOutputFileArg` - same thing but without the prefix
|
||||
/// * `addFileArg` - for input files given to the child process
|
||||
pub fn addPrefixedOutputFileArg(
|
||||
self: *Run,
|
||||
run: *Run,
|
||||
prefix: []const u8,
|
||||
basename: []const u8,
|
||||
) std.Build.LazyPath {
|
||||
const b = self.step.owner;
|
||||
const b = run.step.owner;
|
||||
if (basename.len == 0) @panic("basename must not be empty");
|
||||
|
||||
const output = b.allocator.create(Output) catch @panic("OOM");
|
||||
output.* = .{
|
||||
.prefix = b.dupe(prefix),
|
||||
.basename = b.dupe(basename),
|
||||
.generated_file = .{ .step = &self.step },
|
||||
.generated_file = .{ .step = &run.step },
|
||||
};
|
||||
self.argv.append(b.allocator, .{ .output = output }) catch @panic("OOM");
|
||||
run.argv.append(b.allocator, .{ .output = output }) catch @panic("OOM");
|
||||
|
||||
if (self.rename_step_with_output_arg) {
|
||||
self.setName(b.fmt("{s} ({s})", .{ self.step.name, basename }));
|
||||
if (run.rename_step_with_output_arg) {
|
||||
run.setName(b.fmt("{s} ({s})", .{ run.step.name, basename }));
|
||||
}
|
||||
|
||||
return .{ .generated = &output.generated_file };
|
||||
|
|
@ -243,8 +243,8 @@ pub fn addPrefixedOutputFileArg(
|
|||
/// Related:
|
||||
/// * `addPrefixedFileArg` - same thing but prepends a string to the argument
|
||||
/// * `addOutputFileArg` - for files generated by the child process
|
||||
pub fn addFileArg(self: *Run, lp: std.Build.LazyPath) void {
|
||||
self.addPrefixedFileArg("", lp);
|
||||
pub fn addFileArg(run: *Run, lp: std.Build.LazyPath) void {
|
||||
run.addPrefixedFileArg("", lp);
|
||||
}
|
||||
|
||||
/// Appends an input file to the command line arguments prepended with a string.
|
||||
|
|
@ -259,100 +259,98 @@ pub fn addFileArg(self: *Run, lp: std.Build.LazyPath) void {
|
|||
/// Related:
|
||||
/// * `addFileArg` - same thing but without the prefix
|
||||
/// * `addOutputFileArg` - for files generated by the child process
|
||||
pub fn addPrefixedFileArg(self: *Run, prefix: []const u8, lp: std.Build.LazyPath) void {
|
||||
const b = self.step.owner;
|
||||
pub fn addPrefixedFileArg(run: *Run, prefix: []const u8, lp: std.Build.LazyPath) void {
|
||||
const b = run.step.owner;
|
||||
|
||||
const prefixed_file_source: PrefixedLazyPath = .{
|
||||
.prefix = b.dupe(prefix),
|
||||
.lazy_path = lp.dupe(b),
|
||||
};
|
||||
self.argv.append(b.allocator, .{ .lazy_path = prefixed_file_source }) catch @panic("OOM");
|
||||
lp.addStepDependencies(&self.step);
|
||||
run.argv.append(b.allocator, .{ .lazy_path = prefixed_file_source }) catch @panic("OOM");
|
||||
lp.addStepDependencies(&run.step);
|
||||
}
|
||||
|
||||
/// deprecated: use `addDirectoryArg`
|
||||
pub const addDirectorySourceArg = addDirectoryArg;
|
||||
|
||||
pub fn addDirectoryArg(self: *Run, directory_source: std.Build.LazyPath) void {
|
||||
self.addPrefixedDirectoryArg("", directory_source);
|
||||
pub fn addDirectoryArg(run: *Run, directory_source: std.Build.LazyPath) void {
|
||||
run.addPrefixedDirectoryArg("", directory_source);
|
||||
}
|
||||
|
||||
// deprecated: use `addPrefixedDirectoryArg`
|
||||
pub const addPrefixedDirectorySourceArg = addPrefixedDirectoryArg;
|
||||
|
||||
pub fn addPrefixedDirectoryArg(self: *Run, prefix: []const u8, directory_source: std.Build.LazyPath) void {
|
||||
const b = self.step.owner;
|
||||
pub fn addPrefixedDirectoryArg(run: *Run, prefix: []const u8, directory_source: std.Build.LazyPath) void {
|
||||
const b = run.step.owner;
|
||||
|
||||
const prefixed_directory_source: PrefixedLazyPath = .{
|
||||
.prefix = b.dupe(prefix),
|
||||
.lazy_path = directory_source.dupe(b),
|
||||
};
|
||||
self.argv.append(b.allocator, .{ .directory_source = prefixed_directory_source }) catch @panic("OOM");
|
||||
directory_source.addStepDependencies(&self.step);
|
||||
run.argv.append(b.allocator, .{ .directory_source = prefixed_directory_source }) catch @panic("OOM");
|
||||
directory_source.addStepDependencies(&run.step);
|
||||
}
|
||||
|
||||
/// Add a path argument to a dep file (.d) for the child process to write its
|
||||
/// discovered additional dependencies.
|
||||
/// Only one dep file argument is allowed by instance.
|
||||
pub fn addDepFileOutputArg(self: *Run, basename: []const u8) std.Build.LazyPath {
|
||||
return self.addPrefixedDepFileOutputArg("", basename);
|
||||
pub fn addDepFileOutputArg(run: *Run, basename: []const u8) std.Build.LazyPath {
|
||||
return run.addPrefixedDepFileOutputArg("", basename);
|
||||
}
|
||||
|
||||
/// Add a prefixed path argument to a dep file (.d) for the child process to
|
||||
/// write its discovered additional dependencies.
|
||||
/// Only one dep file argument is allowed by instance.
|
||||
pub fn addPrefixedDepFileOutputArg(self: *Run, prefix: []const u8, basename: []const u8) std.Build.LazyPath {
|
||||
const b = self.step.owner;
|
||||
assert(self.dep_output_file == null);
|
||||
pub fn addPrefixedDepFileOutputArg(run: *Run, prefix: []const u8, basename: []const u8) std.Build.LazyPath {
|
||||
const b = run.step.owner;
|
||||
assert(run.dep_output_file == null);
|
||||
|
||||
const dep_file = b.allocator.create(Output) catch @panic("OOM");
|
||||
dep_file.* = .{
|
||||
.prefix = b.dupe(prefix),
|
||||
.basename = b.dupe(basename),
|
||||
.generated_file = .{ .step = &self.step },
|
||||
.generated_file = .{ .step = &run.step },
|
||||
};
|
||||
|
||||
self.dep_output_file = dep_file;
|
||||
run.dep_output_file = dep_file;
|
||||
|
||||
self.argv.append(b.allocator, .{ .output = dep_file }) catch @panic("OOM");
|
||||
run.argv.append(b.allocator, .{ .output = dep_file }) catch @panic("OOM");
|
||||
|
||||
return .{ .generated = &dep_file.generated_file };
|
||||
}
|
||||
|
||||
pub fn addArg(self: *Run, arg: []const u8) void {
|
||||
const b = self.step.owner;
|
||||
self.argv.append(b.allocator, .{ .bytes = self.step.owner.dupe(arg) }) catch @panic("OOM");
|
||||
pub fn addArg(run: *Run, arg: []const u8) void {
|
||||
const b = run.step.owner;
|
||||
run.argv.append(b.allocator, .{ .bytes = b.dupe(arg) }) catch @panic("OOM");
|
||||
}
|
||||
|
||||
pub fn addArgs(self: *Run, args: []const []const u8) void {
|
||||
for (args) |arg| {
|
||||
self.addArg(arg);
|
||||
}
|
||||
pub fn addArgs(run: *Run, args: []const []const u8) void {
|
||||
for (args) |arg| run.addArg(arg);
|
||||
}
|
||||
|
||||
pub fn setStdIn(self: *Run, stdin: StdIn) void {
|
||||
pub fn setStdIn(run: *Run, stdin: StdIn) void {
|
||||
switch (stdin) {
|
||||
.lazy_path => |lazy_path| lazy_path.addStepDependencies(&self.step),
|
||||
.lazy_path => |lazy_path| lazy_path.addStepDependencies(&run.step),
|
||||
.bytes, .none => {},
|
||||
}
|
||||
self.stdin = stdin;
|
||||
run.stdin = stdin;
|
||||
}
|
||||
|
||||
pub fn setCwd(self: *Run, cwd: Build.LazyPath) void {
|
||||
cwd.addStepDependencies(&self.step);
|
||||
self.cwd = cwd;
|
||||
pub fn setCwd(run: *Run, cwd: Build.LazyPath) void {
|
||||
cwd.addStepDependencies(&run.step);
|
||||
run.cwd = cwd.dupe(run.step.owner);
|
||||
}
|
||||
|
||||
pub fn clearEnvironment(self: *Run) void {
|
||||
const b = self.step.owner;
|
||||
pub fn clearEnvironment(run: *Run) void {
|
||||
const b = run.step.owner;
|
||||
const new_env_map = b.allocator.create(EnvMap) catch @panic("OOM");
|
||||
new_env_map.* = EnvMap.init(b.allocator);
|
||||
self.env_map = new_env_map;
|
||||
run.env_map = new_env_map;
|
||||
}
|
||||
|
||||
pub fn addPathDir(self: *Run, search_path: []const u8) void {
|
||||
const b = self.step.owner;
|
||||
const env_map = getEnvMapInternal(self);
|
||||
pub fn addPathDir(run: *Run, search_path: []const u8) void {
|
||||
const b = run.step.owner;
|
||||
const env_map = getEnvMapInternal(run);
|
||||
|
||||
const key = "PATH";
|
||||
const prev_path = env_map.get(key);
|
||||
|
|
@ -365,99 +363,99 @@ pub fn addPathDir(self: *Run, search_path: []const u8) void {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn getEnvMap(self: *Run) *EnvMap {
|
||||
return getEnvMapInternal(self);
|
||||
pub fn getEnvMap(run: *Run) *EnvMap {
|
||||
return getEnvMapInternal(run);
|
||||
}
|
||||
|
||||
fn getEnvMapInternal(self: *Run) *EnvMap {
|
||||
const arena = self.step.owner.allocator;
|
||||
return self.env_map orelse {
|
||||
fn getEnvMapInternal(run: *Run) *EnvMap {
|
||||
const arena = run.step.owner.allocator;
|
||||
return run.env_map orelse {
|
||||
const env_map = arena.create(EnvMap) catch @panic("OOM");
|
||||
env_map.* = process.getEnvMap(arena) catch @panic("unhandled error");
|
||||
self.env_map = env_map;
|
||||
run.env_map = env_map;
|
||||
return env_map;
|
||||
};
|
||||
}
|
||||
|
||||
pub fn setEnvironmentVariable(self: *Run, key: []const u8, value: []const u8) void {
|
||||
const b = self.step.owner;
|
||||
const env_map = self.getEnvMap();
|
||||
pub fn setEnvironmentVariable(run: *Run, key: []const u8, value: []const u8) void {
|
||||
const b = run.step.owner;
|
||||
const env_map = run.getEnvMap();
|
||||
env_map.put(b.dupe(key), b.dupe(value)) catch @panic("unhandled error");
|
||||
}
|
||||
|
||||
pub fn removeEnvironmentVariable(self: *Run, key: []const u8) void {
|
||||
self.getEnvMap().remove(key);
|
||||
pub fn removeEnvironmentVariable(run: *Run, key: []const u8) void {
|
||||
run.getEnvMap().remove(key);
|
||||
}
|
||||
|
||||
/// Adds a check for exact stderr match. Does not add any other checks.
|
||||
pub fn expectStdErrEqual(self: *Run, bytes: []const u8) void {
|
||||
const new_check: StdIo.Check = .{ .expect_stderr_exact = self.step.owner.dupe(bytes) };
|
||||
self.addCheck(new_check);
|
||||
pub fn expectStdErrEqual(run: *Run, bytes: []const u8) void {
|
||||
const new_check: StdIo.Check = .{ .expect_stderr_exact = run.step.owner.dupe(bytes) };
|
||||
run.addCheck(new_check);
|
||||
}
|
||||
|
||||
/// Adds a check for exact stdout match as well as a check for exit code 0, if
|
||||
/// there is not already an expected termination check.
|
||||
pub fn expectStdOutEqual(self: *Run, bytes: []const u8) void {
|
||||
const new_check: StdIo.Check = .{ .expect_stdout_exact = self.step.owner.dupe(bytes) };
|
||||
self.addCheck(new_check);
|
||||
if (!self.hasTermCheck()) {
|
||||
self.expectExitCode(0);
|
||||
pub fn expectStdOutEqual(run: *Run, bytes: []const u8) void {
|
||||
const new_check: StdIo.Check = .{ .expect_stdout_exact = run.step.owner.dupe(bytes) };
|
||||
run.addCheck(new_check);
|
||||
if (!run.hasTermCheck()) {
|
||||
run.expectExitCode(0);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expectExitCode(self: *Run, code: u8) void {
|
||||
pub fn expectExitCode(run: *Run, code: u8) void {
|
||||
const new_check: StdIo.Check = .{ .expect_term = .{ .Exited = code } };
|
||||
self.addCheck(new_check);
|
||||
run.addCheck(new_check);
|
||||
}
|
||||
|
||||
pub fn hasTermCheck(self: Run) bool {
|
||||
for (self.stdio.check.items) |check| switch (check) {
|
||||
pub fn hasTermCheck(run: Run) bool {
|
||||
for (run.stdio.check.items) |check| switch (check) {
|
||||
.expect_term => return true,
|
||||
else => continue,
|
||||
};
|
||||
return false;
|
||||
}
|
||||
|
||||
pub fn addCheck(self: *Run, new_check: StdIo.Check) void {
|
||||
const b = self.step.owner;
|
||||
pub fn addCheck(run: *Run, new_check: StdIo.Check) void {
|
||||
const b = run.step.owner;
|
||||
|
||||
switch (self.stdio) {
|
||||
switch (run.stdio) {
|
||||
.infer_from_args => {
|
||||
self.stdio = .{ .check = .{} };
|
||||
self.stdio.check.append(b.allocator, new_check) catch @panic("OOM");
|
||||
run.stdio = .{ .check = .{} };
|
||||
run.stdio.check.append(b.allocator, new_check) catch @panic("OOM");
|
||||
},
|
||||
.check => |*checks| checks.append(b.allocator, new_check) catch @panic("OOM"),
|
||||
else => @panic("illegal call to addCheck: conflicting helper method calls. Suggest to directly set stdio field of Run instead"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn captureStdErr(self: *Run) std.Build.LazyPath {
|
||||
assert(self.stdio != .inherit);
|
||||
pub fn captureStdErr(run: *Run) std.Build.LazyPath {
|
||||
assert(run.stdio != .inherit);
|
||||
|
||||
if (self.captured_stderr) |output| return .{ .generated = &output.generated_file };
|
||||
if (run.captured_stderr) |output| return .{ .generated = &output.generated_file };
|
||||
|
||||
const output = self.step.owner.allocator.create(Output) catch @panic("OOM");
|
||||
const output = run.step.owner.allocator.create(Output) catch @panic("OOM");
|
||||
output.* = .{
|
||||
.prefix = "",
|
||||
.basename = "stderr",
|
||||
.generated_file = .{ .step = &self.step },
|
||||
.generated_file = .{ .step = &run.step },
|
||||
};
|
||||
self.captured_stderr = output;
|
||||
run.captured_stderr = output;
|
||||
return .{ .generated = &output.generated_file };
|
||||
}
|
||||
|
||||
pub fn captureStdOut(self: *Run) std.Build.LazyPath {
|
||||
assert(self.stdio != .inherit);
|
||||
pub fn captureStdOut(run: *Run) std.Build.LazyPath {
|
||||
assert(run.stdio != .inherit);
|
||||
|
||||
if (self.captured_stdout) |output| return .{ .generated = &output.generated_file };
|
||||
if (run.captured_stdout) |output| return .{ .generated = &output.generated_file };
|
||||
|
||||
const output = self.step.owner.allocator.create(Output) catch @panic("OOM");
|
||||
const output = run.step.owner.allocator.create(Output) catch @panic("OOM");
|
||||
output.* = .{
|
||||
.prefix = "",
|
||||
.basename = "stdout",
|
||||
.generated_file = .{ .step = &self.step },
|
||||
.generated_file = .{ .step = &run.step },
|
||||
};
|
||||
self.captured_stdout = output;
|
||||
run.captured_stdout = output;
|
||||
return .{ .generated = &output.generated_file };
|
||||
}
|
||||
|
||||
|
|
@ -472,20 +470,20 @@ pub fn addFileInput(self: *Run, file_input: std.Build.LazyPath) void {
|
|||
}
|
||||
|
||||
/// Returns whether the Run step has side effects *other than* updating the output arguments.
|
||||
fn hasSideEffects(self: Run) bool {
|
||||
if (self.has_side_effects) return true;
|
||||
return switch (self.stdio) {
|
||||
.infer_from_args => !self.hasAnyOutputArgs(),
|
||||
fn hasSideEffects(run: Run) bool {
|
||||
if (run.has_side_effects) return true;
|
||||
return switch (run.stdio) {
|
||||
.infer_from_args => !run.hasAnyOutputArgs(),
|
||||
.inherit => true,
|
||||
.check => false,
|
||||
.zig_test => false,
|
||||
};
|
||||
}
|
||||
|
||||
fn hasAnyOutputArgs(self: Run) bool {
|
||||
if (self.captured_stdout != null) return true;
|
||||
if (self.captured_stderr != null) return true;
|
||||
for (self.argv.items) |arg| switch (arg) {
|
||||
fn hasAnyOutputArgs(run: Run) bool {
|
||||
if (run.captured_stdout != null) return true;
|
||||
if (run.captured_stderr != null) return true;
|
||||
for (run.argv.items) |arg| switch (arg) {
|
||||
.output => return true,
|
||||
else => continue,
|
||||
};
|
||||
|
|
@ -527,8 +525,8 @@ const IndexedOutput = struct {
|
|||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
const b = step.owner;
|
||||
const arena = b.allocator;
|
||||
const self: *Run = @fieldParentPtr("step", step);
|
||||
const has_side_effects = self.hasSideEffects();
|
||||
const run: *Run = @fieldParentPtr("step", step);
|
||||
const has_side_effects = run.hasSideEffects();
|
||||
|
||||
var argv_list = std.ArrayList([]const u8).init(arena);
|
||||
var output_placeholders = std.ArrayList(IndexedOutput).init(arena);
|
||||
|
|
@ -536,20 +534,20 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
var man = b.graph.cache.obtain();
|
||||
defer man.deinit();
|
||||
|
||||
for (self.argv.items) |arg| {
|
||||
for (run.argv.items) |arg| {
|
||||
switch (arg) {
|
||||
.bytes => |bytes| {
|
||||
try argv_list.append(bytes);
|
||||
man.hash.addBytes(bytes);
|
||||
},
|
||||
.lazy_path => |file| {
|
||||
const file_path = file.lazy_path.getPath(b);
|
||||
const file_path = file.lazy_path.getPath2(b, step);
|
||||
try argv_list.append(b.fmt("{s}{s}", .{ file.prefix, file_path }));
|
||||
man.hash.addBytes(file.prefix);
|
||||
_ = try man.addFile(file_path, null);
|
||||
},
|
||||
.directory_source => |file| {
|
||||
const file_path = file.lazy_path.getPath(b);
|
||||
const file_path = file.lazy_path.getPath2(b, step);
|
||||
try argv_list.append(b.fmt("{s}{s}", .{ file.prefix, file_path }));
|
||||
man.hash.addBytes(file.prefix);
|
||||
man.hash.addBytes(file_path);
|
||||
|
|
@ -557,7 +555,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
.artifact => |artifact| {
|
||||
if (artifact.rootModuleTarget().os.tag == .windows) {
|
||||
// On Windows we don't have rpaths so we have to add .dll search paths to PATH
|
||||
self.addPathForDynLibs(artifact);
|
||||
run.addPathForDynLibs(artifact);
|
||||
}
|
||||
const file_path = artifact.installed_path orelse artifact.generated_bin.?.path.?; // the path is guaranteed to be set
|
||||
|
||||
|
|
@ -580,36 +578,36 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
}
|
||||
}
|
||||
|
||||
switch (self.stdin) {
|
||||
switch (run.stdin) {
|
||||
.bytes => |bytes| {
|
||||
man.hash.addBytes(bytes);
|
||||
},
|
||||
.lazy_path => |lazy_path| {
|
||||
const file_path = lazy_path.getPath(b);
|
||||
const file_path = lazy_path.getPath2(b, step);
|
||||
_ = try man.addFile(file_path, null);
|
||||
},
|
||||
.none => {},
|
||||
}
|
||||
|
||||
if (self.captured_stdout) |output| {
|
||||
if (run.captured_stdout) |output| {
|
||||
man.hash.addBytes(output.basename);
|
||||
}
|
||||
|
||||
if (self.captured_stderr) |output| {
|
||||
if (run.captured_stderr) |output| {
|
||||
man.hash.addBytes(output.basename);
|
||||
}
|
||||
|
||||
hashStdIo(&man.hash, self.stdio);
|
||||
hashStdIo(&man.hash, run.stdio);
|
||||
|
||||
if (has_side_effects) {
|
||||
try runCommand(self, argv_list.items, has_side_effects, null, prog_node);
|
||||
try runCommand(run, argv_list.items, has_side_effects, null, prog_node);
|
||||
return;
|
||||
}
|
||||
|
||||
for (self.extra_file_dependencies) |file_path| {
|
||||
for (run.extra_file_dependencies) |file_path| {
|
||||
_ = try man.addFile(b.pathFromRoot(file_path), null);
|
||||
}
|
||||
for (self.file_inputs.items) |lazy_path| {
|
||||
for (run.file_inputs.items) |lazy_path| {
|
||||
_ = try man.addFile(lazy_path.getPath2(b, step), null);
|
||||
}
|
||||
|
||||
|
|
@ -620,8 +618,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
try populateGeneratedPaths(
|
||||
arena,
|
||||
output_placeholders.items,
|
||||
self.captured_stdout,
|
||||
self.captured_stderr,
|
||||
run.captured_stdout,
|
||||
run.captured_stderr,
|
||||
b.cache_root,
|
||||
&digest,
|
||||
);
|
||||
|
|
@ -635,7 +633,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
|
||||
for (output_placeholders.items) |placeholder| {
|
||||
const output_components = .{ tmp_dir_path, placeholder.output.basename };
|
||||
const output_sub_path = try fs.path.join(arena, &output_components);
|
||||
const output_sub_path = b.pathJoin(&output_components);
|
||||
const output_sub_dir_path = fs.path.dirname(output_sub_path).?;
|
||||
b.cache_root.handle.makePath(output_sub_dir_path) catch |err| {
|
||||
return step.fail("unable to make path '{}{s}': {s}", .{
|
||||
|
|
@ -651,15 +649,15 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
argv_list.items[placeholder.index] = cli_arg;
|
||||
}
|
||||
|
||||
try runCommand(self, argv_list.items, has_side_effects, tmp_dir_path, prog_node);
|
||||
try runCommand(run, argv_list.items, has_side_effects, tmp_dir_path, prog_node);
|
||||
|
||||
if (self.dep_output_file) |dep_output_file|
|
||||
if (run.dep_output_file) |dep_output_file|
|
||||
try man.addDepFilePost(std.fs.cwd(), dep_output_file.generated_file.getPath());
|
||||
|
||||
const digest = man.final();
|
||||
|
||||
const any_output = output_placeholders.items.len > 0 or
|
||||
self.captured_stdout != null or self.captured_stderr != null;
|
||||
run.captured_stdout != null or run.captured_stderr != null;
|
||||
|
||||
// Rename into place
|
||||
if (any_output) {
|
||||
|
|
@ -696,8 +694,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
|||
try populateGeneratedPaths(
|
||||
arena,
|
||||
output_placeholders.items,
|
||||
self.captured_stdout,
|
||||
self.captured_stderr,
|
||||
run.captured_stdout,
|
||||
run.captured_stderr,
|
||||
b.cache_root,
|
||||
&digest,
|
||||
);
|
||||
|
|
@ -776,30 +774,30 @@ fn termMatches(expected: ?std.process.Child.Term, actual: std.process.Child.Term
|
|||
}
|
||||
|
||||
fn runCommand(
|
||||
self: *Run,
|
||||
run: *Run,
|
||||
argv: []const []const u8,
|
||||
has_side_effects: bool,
|
||||
tmp_dir_path: ?[]const u8,
|
||||
prog_node: *std.Progress.Node,
|
||||
) !void {
|
||||
const step = &self.step;
|
||||
const step = &run.step;
|
||||
const b = step.owner;
|
||||
const arena = b.allocator;
|
||||
|
||||
const cwd: ?[]const u8 = if (self.cwd) |lazy_cwd| lazy_cwd.getPath(b) else null;
|
||||
const cwd: ?[]const u8 = if (run.cwd) |lazy_cwd| lazy_cwd.getPath2(b, step) else null;
|
||||
|
||||
try step.handleChildProcUnsupported(cwd, argv);
|
||||
try Step.handleVerbose2(step.owner, cwd, self.env_map, argv);
|
||||
try Step.handleVerbose2(step.owner, cwd, run.env_map, argv);
|
||||
|
||||
const allow_skip = switch (self.stdio) {
|
||||
.check, .zig_test => self.skip_foreign_checks,
|
||||
const allow_skip = switch (run.stdio) {
|
||||
.check, .zig_test => run.skip_foreign_checks,
|
||||
else => false,
|
||||
};
|
||||
|
||||
var interp_argv = std.ArrayList([]const u8).init(b.allocator);
|
||||
defer interp_argv.deinit();
|
||||
|
||||
const result = spawnChildAndCollect(self, argv, has_side_effects, prog_node) catch |err| term: {
|
||||
const result = spawnChildAndCollect(run, argv, has_side_effects, prog_node) catch |err| term: {
|
||||
// InvalidExe: cpu arch mismatch
|
||||
// FileNotFound: can happen with a wrong dynamic linker path
|
||||
if (err == error.InvalidExe or err == error.FileNotFound) interpret: {
|
||||
|
|
@ -807,7 +805,7 @@ fn runCommand(
|
|||
// relying on it being a Compile step. This will make this logic
|
||||
// work even for the edge case that the binary was produced by a
|
||||
// third party.
|
||||
const exe = switch (self.argv.items[0]) {
|
||||
const exe = switch (run.argv.items[0]) {
|
||||
.artifact => |exe| exe,
|
||||
else => break :interpret,
|
||||
};
|
||||
|
|
@ -832,14 +830,14 @@ fn runCommand(
|
|||
try interp_argv.append(bin_name);
|
||||
try interp_argv.appendSlice(argv);
|
||||
} else {
|
||||
return failForeign(self, "-fwine", argv[0], exe);
|
||||
return failForeign(run, "-fwine", argv[0], exe);
|
||||
}
|
||||
},
|
||||
.qemu => |bin_name| {
|
||||
if (b.enable_qemu) {
|
||||
const glibc_dir_arg = if (need_cross_glibc)
|
||||
b.glibc_runtimes_dir orelse
|
||||
return failForeign(self, "--glibc-runtimes", argv[0], exe)
|
||||
return failForeign(run, "--glibc-runtimes", argv[0], exe)
|
||||
else
|
||||
null;
|
||||
|
||||
|
|
@ -867,7 +865,7 @@ fn runCommand(
|
|||
|
||||
try interp_argv.appendSlice(argv);
|
||||
} else {
|
||||
return failForeign(self, "-fqemu", argv[0], exe);
|
||||
return failForeign(run, "-fqemu", argv[0], exe);
|
||||
}
|
||||
},
|
||||
.darling => |bin_name| {
|
||||
|
|
@ -875,7 +873,7 @@ fn runCommand(
|
|||
try interp_argv.append(bin_name);
|
||||
try interp_argv.appendSlice(argv);
|
||||
} else {
|
||||
return failForeign(self, "-fdarling", argv[0], exe);
|
||||
return failForeign(run, "-fdarling", argv[0], exe);
|
||||
}
|
||||
},
|
||||
.wasmtime => |bin_name| {
|
||||
|
|
@ -886,7 +884,7 @@ fn runCommand(
|
|||
try interp_argv.append("--");
|
||||
try interp_argv.appendSlice(argv[1..]);
|
||||
} else {
|
||||
return failForeign(self, "-fwasmtime", argv[0], exe);
|
||||
return failForeign(run, "-fwasmtime", argv[0], exe);
|
||||
}
|
||||
},
|
||||
.bad_dl => |foreign_dl| {
|
||||
|
|
@ -915,13 +913,13 @@ fn runCommand(
|
|||
|
||||
if (exe.rootModuleTarget().os.tag == .windows) {
|
||||
// On Windows we don't have rpaths so we have to add .dll search paths to PATH
|
||||
self.addPathForDynLibs(exe);
|
||||
run.addPathForDynLibs(exe);
|
||||
}
|
||||
|
||||
try Step.handleVerbose2(step.owner, cwd, self.env_map, interp_argv.items);
|
||||
try Step.handleVerbose2(step.owner, cwd, run.env_map, interp_argv.items);
|
||||
|
||||
break :term spawnChildAndCollect(self, interp_argv.items, has_side_effects, prog_node) catch |e| {
|
||||
if (!self.failing_to_execute_foreign_is_an_error) return error.MakeSkipped;
|
||||
break :term spawnChildAndCollect(run, interp_argv.items, has_side_effects, prog_node) catch |e| {
|
||||
if (!run.failing_to_execute_foreign_is_an_error) return error.MakeSkipped;
|
||||
|
||||
return step.fail("unable to spawn interpreter {s}: {s}", .{
|
||||
interp_argv.items[0], @errorName(e),
|
||||
|
|
@ -943,11 +941,11 @@ fn runCommand(
|
|||
};
|
||||
for ([_]Stream{
|
||||
.{
|
||||
.captured = self.captured_stdout,
|
||||
.captured = run.captured_stdout,
|
||||
.bytes = result.stdio.stdout,
|
||||
},
|
||||
.{
|
||||
.captured = self.captured_stderr,
|
||||
.captured = run.captured_stderr,
|
||||
.bytes = result.stdio.stderr,
|
||||
},
|
||||
}) |stream| {
|
||||
|
|
@ -956,7 +954,7 @@ fn runCommand(
|
|||
const output_path = try b.cache_root.join(arena, &output_components);
|
||||
output.generated_file.path = output_path;
|
||||
|
||||
const sub_path = try fs.path.join(arena, &output_components);
|
||||
const sub_path = b.pathJoin(&output_components);
|
||||
const sub_path_dirname = fs.path.dirname(sub_path).?;
|
||||
b.cache_root.handle.makePath(sub_path_dirname) catch |err| {
|
||||
return step.fail("unable to make path '{}{s}': {s}", .{
|
||||
|
|
@ -973,7 +971,7 @@ fn runCommand(
|
|||
|
||||
const final_argv = if (interp_argv.items.len == 0) argv else interp_argv.items;
|
||||
|
||||
switch (self.stdio) {
|
||||
switch (run.stdio) {
|
||||
.check => |checks| for (checks.items) |check| switch (check) {
|
||||
.expect_stderr_exact => |expected_bytes| {
|
||||
if (!mem.eql(u8, expected_bytes, result.stdio.stderr.?)) {
|
||||
|
|
@ -1094,56 +1092,56 @@ const ChildProcResult = struct {
|
|||
};
|
||||
|
||||
fn spawnChildAndCollect(
|
||||
self: *Run,
|
||||
run: *Run,
|
||||
argv: []const []const u8,
|
||||
has_side_effects: bool,
|
||||
prog_node: *std.Progress.Node,
|
||||
) !ChildProcResult {
|
||||
const b = self.step.owner;
|
||||
const b = run.step.owner;
|
||||
const arena = b.allocator;
|
||||
|
||||
var child = std.process.Child.init(argv, arena);
|
||||
if (self.cwd) |lazy_cwd| {
|
||||
child.cwd = lazy_cwd.getPath(b);
|
||||
if (run.cwd) |lazy_cwd| {
|
||||
child.cwd = lazy_cwd.getPath2(b, &run.step);
|
||||
} else {
|
||||
child.cwd = b.build_root.path;
|
||||
child.cwd_dir = b.build_root.handle;
|
||||
}
|
||||
child.env_map = self.env_map orelse &b.graph.env_map;
|
||||
child.env_map = run.env_map orelse &b.graph.env_map;
|
||||
child.request_resource_usage_statistics = true;
|
||||
|
||||
child.stdin_behavior = switch (self.stdio) {
|
||||
child.stdin_behavior = switch (run.stdio) {
|
||||
.infer_from_args => if (has_side_effects) .Inherit else .Ignore,
|
||||
.inherit => .Inherit,
|
||||
.check => .Ignore,
|
||||
.zig_test => .Pipe,
|
||||
};
|
||||
child.stdout_behavior = switch (self.stdio) {
|
||||
child.stdout_behavior = switch (run.stdio) {
|
||||
.infer_from_args => if (has_side_effects) .Inherit else .Ignore,
|
||||
.inherit => .Inherit,
|
||||
.check => |checks| if (checksContainStdout(checks.items)) .Pipe else .Ignore,
|
||||
.zig_test => .Pipe,
|
||||
};
|
||||
child.stderr_behavior = switch (self.stdio) {
|
||||
child.stderr_behavior = switch (run.stdio) {
|
||||
.infer_from_args => if (has_side_effects) .Inherit else .Pipe,
|
||||
.inherit => .Inherit,
|
||||
.check => .Pipe,
|
||||
.zig_test => .Pipe,
|
||||
};
|
||||
if (self.captured_stdout != null) child.stdout_behavior = .Pipe;
|
||||
if (self.captured_stderr != null) child.stderr_behavior = .Pipe;
|
||||
if (self.stdin != .none) {
|
||||
assert(self.stdio != .inherit);
|
||||
if (run.captured_stdout != null) child.stdout_behavior = .Pipe;
|
||||
if (run.captured_stderr != null) child.stderr_behavior = .Pipe;
|
||||
if (run.stdin != .none) {
|
||||
assert(run.stdio != .inherit);
|
||||
child.stdin_behavior = .Pipe;
|
||||
}
|
||||
|
||||
try child.spawn();
|
||||
var timer = try std.time.Timer.start();
|
||||
|
||||
const result = if (self.stdio == .zig_test)
|
||||
evalZigTest(self, &child, prog_node)
|
||||
const result = if (run.stdio == .zig_test)
|
||||
evalZigTest(run, &child, prog_node)
|
||||
else
|
||||
evalGeneric(self, &child);
|
||||
evalGeneric(run, &child);
|
||||
|
||||
const term = try child.wait();
|
||||
const elapsed_ns = timer.read();
|
||||
|
|
@ -1164,12 +1162,12 @@ const StdIoResult = struct {
|
|||
};
|
||||
|
||||
fn evalZigTest(
|
||||
self: *Run,
|
||||
run: *Run,
|
||||
child: *std.process.Child,
|
||||
prog_node: *std.Progress.Node,
|
||||
) !StdIoResult {
|
||||
const gpa = self.step.owner.allocator;
|
||||
const arena = self.step.owner.allocator;
|
||||
const gpa = run.step.owner.allocator;
|
||||
const arena = run.step.owner.allocator;
|
||||
|
||||
var poller = std.io.poll(gpa, enum { stdout, stderr }, .{
|
||||
.stdout = child.stdout.?,
|
||||
|
|
@ -1208,7 +1206,7 @@ fn evalZigTest(
|
|||
switch (header.tag) {
|
||||
.zig_version => {
|
||||
if (!std.mem.eql(u8, builtin.zig_version_string, body)) {
|
||||
return self.step.fail(
|
||||
return run.step.fail(
|
||||
"zig version mismatch build runner vs compiler: '{s}' vs '{s}'",
|
||||
.{ builtin.zig_version_string, body },
|
||||
);
|
||||
|
|
@ -1266,9 +1264,9 @@ fn evalZigTest(
|
|||
else
|
||||
unreachable;
|
||||
if (msg.len > 0) {
|
||||
try self.step.addError("'{s}' {s}: {s}", .{ name, label, msg });
|
||||
try run.step.addError("'{s}' {s}: {s}", .{ name, label, msg });
|
||||
} else {
|
||||
try self.step.addError("'{s}' {s}", .{ name, label });
|
||||
try run.step.addError("'{s}' {s}", .{ name, label });
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1282,7 +1280,7 @@ fn evalZigTest(
|
|||
|
||||
if (stderr.readableLength() > 0) {
|
||||
const msg = std.mem.trim(u8, try stderr.toOwnedSlice(), "\n");
|
||||
if (msg.len > 0) self.step.result_stderr = msg;
|
||||
if (msg.len > 0) run.step.result_stderr = msg;
|
||||
}
|
||||
|
||||
// Send EOF to stdin.
|
||||
|
|
@ -1350,25 +1348,26 @@ fn sendRunTestMessage(file: std.fs.File, index: u32) !void {
|
|||
try file.writeAll(full_msg);
|
||||
}
|
||||
|
||||
fn evalGeneric(self: *Run, child: *std.process.Child) !StdIoResult {
|
||||
const arena = self.step.owner.allocator;
|
||||
fn evalGeneric(run: *Run, child: *std.process.Child) !StdIoResult {
|
||||
const b = run.step.owner;
|
||||
const arena = b.allocator;
|
||||
|
||||
switch (self.stdin) {
|
||||
switch (run.stdin) {
|
||||
.bytes => |bytes| {
|
||||
child.stdin.?.writeAll(bytes) catch |err| {
|
||||
return self.step.fail("unable to write stdin: {s}", .{@errorName(err)});
|
||||
return run.step.fail("unable to write stdin: {s}", .{@errorName(err)});
|
||||
};
|
||||
child.stdin.?.close();
|
||||
child.stdin = null;
|
||||
},
|
||||
.lazy_path => |lazy_path| {
|
||||
const path = lazy_path.getPath(self.step.owner);
|
||||
const file = self.step.owner.build_root.handle.openFile(path, .{}) catch |err| {
|
||||
return self.step.fail("unable to open stdin file: {s}", .{@errorName(err)});
|
||||
const path = lazy_path.getPath2(b, &run.step);
|
||||
const file = b.build_root.handle.openFile(path, .{}) catch |err| {
|
||||
return run.step.fail("unable to open stdin file: {s}", .{@errorName(err)});
|
||||
};
|
||||
defer file.close();
|
||||
child.stdin.?.writeFileAll(file, .{}) catch |err| {
|
||||
return self.step.fail("unable to write file to stdin: {s}", .{@errorName(err)});
|
||||
return run.step.fail("unable to write file to stdin: {s}", .{@errorName(err)});
|
||||
};
|
||||
child.stdin.?.close();
|
||||
child.stdin = null;
|
||||
|
|
@ -1388,29 +1387,29 @@ fn evalGeneric(self: *Run, child: *std.process.Child) !StdIoResult {
|
|||
defer poller.deinit();
|
||||
|
||||
while (try poller.poll()) {
|
||||
if (poller.fifo(.stdout).count > self.max_stdio_size)
|
||||
if (poller.fifo(.stdout).count > run.max_stdio_size)
|
||||
return error.StdoutStreamTooLong;
|
||||
if (poller.fifo(.stderr).count > self.max_stdio_size)
|
||||
if (poller.fifo(.stderr).count > run.max_stdio_size)
|
||||
return error.StderrStreamTooLong;
|
||||
}
|
||||
|
||||
stdout_bytes = try poller.fifo(.stdout).toOwnedSlice();
|
||||
stderr_bytes = try poller.fifo(.stderr).toOwnedSlice();
|
||||
} else {
|
||||
stdout_bytes = try stdout.reader().readAllAlloc(arena, self.max_stdio_size);
|
||||
stdout_bytes = try stdout.reader().readAllAlloc(arena, run.max_stdio_size);
|
||||
}
|
||||
} else if (child.stderr) |stderr| {
|
||||
stderr_bytes = try stderr.reader().readAllAlloc(arena, self.max_stdio_size);
|
||||
stderr_bytes = try stderr.reader().readAllAlloc(arena, run.max_stdio_size);
|
||||
}
|
||||
|
||||
if (stderr_bytes) |bytes| if (bytes.len > 0) {
|
||||
// Treat stderr as an error message.
|
||||
const stderr_is_diagnostic = self.captured_stderr == null and switch (self.stdio) {
|
||||
const stderr_is_diagnostic = run.captured_stderr == null and switch (run.stdio) {
|
||||
.check => |checks| !checksContainStderr(checks.items),
|
||||
else => true,
|
||||
};
|
||||
if (stderr_is_diagnostic) {
|
||||
self.step.result_stderr = bytes;
|
||||
run.step.result_stderr = bytes;
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -1422,8 +1421,8 @@ fn evalGeneric(self: *Run, child: *std.process.Child) !StdIoResult {
|
|||
};
|
||||
}
|
||||
|
||||
fn addPathForDynLibs(self: *Run, artifact: *Step.Compile) void {
|
||||
const b = self.step.owner;
|
||||
fn addPathForDynLibs(run: *Run, artifact: *Step.Compile) void {
|
||||
const b = run.step.owner;
|
||||
var it = artifact.root_module.iterateDependencies(artifact, true);
|
||||
while (it.next()) |item| {
|
||||
const other = item.compile.?;
|
||||
|
|
@ -1431,34 +1430,34 @@ fn addPathForDynLibs(self: *Run, artifact: *Step.Compile) void {
|
|||
if (item.module.resolved_target.?.result.os.tag == .windows and
|
||||
other.isDynamicLibrary())
|
||||
{
|
||||
addPathDir(self, fs.path.dirname(other.getEmittedBin().getPath(b)).?);
|
||||
addPathDir(run, fs.path.dirname(other.getEmittedBin().getPath2(b, &run.step)).?);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn failForeign(
|
||||
self: *Run,
|
||||
run: *Run,
|
||||
suggested_flag: []const u8,
|
||||
argv0: []const u8,
|
||||
exe: *Step.Compile,
|
||||
) error{ MakeFailed, MakeSkipped, OutOfMemory } {
|
||||
switch (self.stdio) {
|
||||
switch (run.stdio) {
|
||||
.check, .zig_test => {
|
||||
if (self.skip_foreign_checks)
|
||||
if (run.skip_foreign_checks)
|
||||
return error.MakeSkipped;
|
||||
|
||||
const b = self.step.owner;
|
||||
const b = run.step.owner;
|
||||
const host_name = try b.host.result.zigTriple(b.allocator);
|
||||
const foreign_name = try exe.rootModuleTarget().zigTriple(b.allocator);
|
||||
|
||||
return self.step.fail(
|
||||
return run.step.fail(
|
||||
\\unable to spawn foreign binary '{s}' ({s}) on host system ({s})
|
||||
\\ consider using {s} or enabling skip_foreign_checks in the Run step
|
||||
, .{ argv0, foreign_name, host_name, suggested_flag });
|
||||
},
|
||||
else => {
|
||||
return self.step.fail("unable to spawn foreign binary '{s}'", .{argv0});
|
||||
return run.step.fail("unable to spawn foreign binary '{s}'", .{argv0});
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -5,7 +5,7 @@ const mem = std.mem;

 const TranslateC = @This();

-pub const base_id = .translate_c;
+pub const base_id: Step.Id = .translate_c;

 step: Step,
 source: std.Build.LazyPath,
@@ -27,11 +27,11 @@ pub const Options = struct {
 };

 pub fn create(owner: *std.Build, options: Options) *TranslateC {
-const self = owner.allocator.create(TranslateC) catch @panic("OOM");
+const translate_c = owner.allocator.create(TranslateC) catch @panic("OOM");
 const source = options.root_source_file.dupe(owner);
-self.* = TranslateC{
+translate_c.* = TranslateC{
 .step = Step.init(.{
-.id = .translate_c,
+.id = base_id,
 .name = "translate-c",
 .owner = owner,
 .makeFn = make,
@@ -42,12 +42,12 @@ pub fn create(owner: *std.Build, options: Options) *TranslateC {
 .out_basename = undefined,
 .target = options.target,
 .optimize = options.optimize,
-.output_file = std.Build.GeneratedFile{ .step = &self.step },
+.output_file = std.Build.GeneratedFile{ .step = &translate_c.step },
 .link_libc = options.link_libc,
 .use_clang = options.use_clang,
 };
-source.addStepDependencies(&self.step);
-return self;
+source.addStepDependencies(&translate_c.step);
+return translate_c;
 }

 pub const AddExecutableOptions = struct {
@@ -58,18 +58,18 @@ pub const AddExecutableOptions = struct {
 linkage: ?std.builtin.LinkMode = null,
 };

-pub fn getOutput(self: *TranslateC) std.Build.LazyPath {
-return .{ .generated = &self.output_file };
+pub fn getOutput(translate_c: *TranslateC) std.Build.LazyPath {
+return .{ .generated = &translate_c.output_file };
 }

 /// Creates a step to build an executable from the translated source.
-pub fn addExecutable(self: *TranslateC, options: AddExecutableOptions) *Step.Compile {
-return self.step.owner.addExecutable(.{
-.root_source_file = self.getOutput(),
+pub fn addExecutable(translate_c: *TranslateC, options: AddExecutableOptions) *Step.Compile {
+return translate_c.step.owner.addExecutable(.{
+.root_source_file = translate_c.getOutput(),
 .name = options.name orelse "translated_c",
 .version = options.version,
-.target = options.target orelse self.target,
-.optimize = options.optimize orelse self.optimize,
+.target = options.target orelse translate_c.target,
+.optimize = options.optimize orelse translate_c.optimize,
 .linkage = options.linkage,
 });
 }
@@ -77,90 +77,87 @@ pub fn addExecutable(self: *TranslateC, options: AddExecutableOptions) *Step.Com
 /// Creates a module from the translated source and adds it to the package's
 /// module set making it available to other packages which depend on this one.
 /// `createModule` can be used instead to create a private module.
-pub fn addModule(self: *TranslateC, name: []const u8) *std.Build.Module {
-return self.step.owner.addModule(name, .{
-.root_source_file = self.getOutput(),
+pub fn addModule(translate_c: *TranslateC, name: []const u8) *std.Build.Module {
+return translate_c.step.owner.addModule(name, .{
+.root_source_file = translate_c.getOutput(),
 });
 }

 /// Creates a private module from the translated source to be used by the
 /// current package, but not exposed to other packages depending on this one.
 /// `addModule` can be used instead to create a public module.
-pub fn createModule(self: *TranslateC) *std.Build.Module {
-return self.step.owner.createModule(.{
-.root_source_file = self.getOutput(),
+pub fn createModule(translate_c: *TranslateC) *std.Build.Module {
+return translate_c.step.owner.createModule(.{
+.root_source_file = translate_c.getOutput(),
 });
 }

-pub fn addIncludeDir(self: *TranslateC, include_dir: []const u8) void {
-self.include_dirs.append(self.step.owner.dupePath(include_dir)) catch @panic("OOM");
+pub fn addIncludeDir(translate_c: *TranslateC, include_dir: []const u8) void {
+translate_c.include_dirs.append(translate_c.step.owner.dupePath(include_dir)) catch @panic("OOM");
 }

-pub fn addCheckFile(self: *TranslateC, expected_matches: []const []const u8) *Step.CheckFile {
+pub fn addCheckFile(translate_c: *TranslateC, expected_matches: []const []const u8) *Step.CheckFile {
 return Step.CheckFile.create(
-self.step.owner,
-self.getOutput(),
+translate_c.step.owner,
+translate_c.getOutput(),
 .{ .expected_matches = expected_matches },
 );
 }

 /// If the value is omitted, it is set to 1.
 /// `name` and `value` need not live longer than the function call.
-pub fn defineCMacro(self: *TranslateC, name: []const u8, value: ?[]const u8) void {
-const macro = std.Build.constructCMacro(self.step.owner.allocator, name, value);
-self.c_macros.append(macro) catch @panic("OOM");
+pub fn defineCMacro(translate_c: *TranslateC, name: []const u8, value: ?[]const u8) void {
+const macro = std.Build.constructCMacro(translate_c.step.owner.allocator, name, value);
+translate_c.c_macros.append(macro) catch @panic("OOM");
 }

 /// name_and_value looks like [name]=[value]. If the value is omitted, it is set to 1.
-pub fn defineCMacroRaw(self: *TranslateC, name_and_value: []const u8) void {
-self.c_macros.append(self.step.owner.dupe(name_and_value)) catch @panic("OOM");
+pub fn defineCMacroRaw(translate_c: *TranslateC, name_and_value: []const u8) void {
+translate_c.c_macros.append(translate_c.step.owner.dupe(name_and_value)) catch @panic("OOM");
 }

 fn make(step: *Step, prog_node: *std.Progress.Node) !void {
 const b = step.owner;
-const self: *TranslateC = @fieldParentPtr("step", step);
+const translate_c: *TranslateC = @fieldParentPtr("step", step);

 var argv_list = std.ArrayList([]const u8).init(b.allocator);
 try argv_list.append(b.graph.zig_exe);
 try argv_list.append("translate-c");
-if (self.link_libc) {
+if (translate_c.link_libc) {
 try argv_list.append("-lc");
 }
-if (!self.use_clang) {
+if (!translate_c.use_clang) {
 try argv_list.append("-fno-clang");
 }

 try argv_list.append("--listen=-");

-if (!self.target.query.isNative()) {
+if (!translate_c.target.query.isNative()) {
 try argv_list.append("-target");
-try argv_list.append(try self.target.query.zigTriple(b.allocator));
+try argv_list.append(try translate_c.target.query.zigTriple(b.allocator));
 }

-switch (self.optimize) {
+switch (translate_c.optimize) {
 .Debug => {}, // Skip since it's the default.
-else => try argv_list.append(b.fmt("-O{s}", .{@tagName(self.optimize)})),
+else => try argv_list.append(b.fmt("-O{s}", .{@tagName(translate_c.optimize)})),
 }

-for (self.include_dirs.items) |include_dir| {
+for (translate_c.include_dirs.items) |include_dir| {
 try argv_list.append("-I");
 try argv_list.append(include_dir);
 }

-for (self.c_macros.items) |c_macro| {
+for (translate_c.c_macros.items) |c_macro| {
 try argv_list.append("-D");
 try argv_list.append(c_macro);
 }

-try argv_list.append(self.source.getPath(b));
+try argv_list.append(translate_c.source.getPath2(b, step));

 const output_path = try step.evalZigProcess(argv_list.items, prog_node);

-self.out_basename = fs.path.basename(output_path.?);
+translate_c.out_basename = fs.path.basename(output_path.?);
 const output_dir = fs.path.dirname(output_path.?).?;

-self.output_file.path = try fs.path.join(
-b.allocator,
-&[_][]const u8{ output_dir, self.out_basename },
-);
+translate_c.output_file.path = b.pathJoin(&.{ output_dir, translate_c.out_basename });
 }
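As a usage reference for the renamed TranslateC step above, here is a hypothetical build.zig fragment; the file names and module name are invented, and it relies only on functions visible in this diff (`create`, `createModule`) plus the standard `addExecutable`/`addImport` API.

const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Translate a C header into a Zig source file at build time.
    const translate_c = std.Build.Step.TranslateC.create(b, .{
        .root_source_file = b.path("src/c_api.h"),
        .target = target,
        .optimize = optimize,
        .link_libc = true,
        .use_clang = true,
    });

    const exe = b.addExecutable(.{
        .name = "app",
        .root_source_file = b.path("src/main.zig"),
        .target = target,
        .optimize = optimize,
    });
    // Import the translated output as a private module.
    exe.root_module.addImport("c_api", translate_c.createModule());
    b.installArtifact(exe);
}

Note also the `fs.path.join(b.allocator, ...)` call at the end of `make` collapsing into `b.pathJoin(&.{ ... })`: the builder helper joins with the builder's own allocator, so call sites no longer thread the allocator through by hand.
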
@@ -23,15 +23,15 @@ directories: std.ArrayListUnmanaged(*Directory),
 output_source_files: std.ArrayListUnmanaged(OutputSourceFile),
 generated_directory: std.Build.GeneratedFile,

-pub const base_id = .write_file;
+pub const base_id: Step.Id = .write_file;

 pub const File = struct {
 generated_file: std.Build.GeneratedFile,
 sub_path: []const u8,
 contents: Contents,

-pub fn getPath(self: *File) std.Build.LazyPath {
-return .{ .generated = &self.generated_file };
+pub fn getPath(file: *File) std.Build.LazyPath {
+return .{ .generated = &file.generated_file };
 }
 };
@@ -49,16 +49,16 @@ pub const Directory = struct {
 /// `exclude_extensions` takes precedence over `include_extensions`.
 include_extensions: ?[]const []const u8 = null,

-pub fn dupe(self: Options, b: *std.Build) Options {
+pub fn dupe(opts: Options, b: *std.Build) Options {
 return .{
-.exclude_extensions = b.dupeStrings(self.exclude_extensions),
-.include_extensions = if (self.include_extensions) |incs| b.dupeStrings(incs) else null,
+.exclude_extensions = b.dupeStrings(opts.exclude_extensions),
+.include_extensions = if (opts.include_extensions) |incs| b.dupeStrings(incs) else null,
 };
 }
 };

-pub fn getPath(self: *Directory) std.Build.LazyPath {
-return .{ .generated = &self.generated_dir };
+pub fn getPath(dir: *Directory) std.Build.LazyPath {
+return .{ .generated = &dir.generated_dir };
 }
 };
@@ -73,10 +73,10 @@ pub const Contents = union(enum) {
 };

 pub fn create(owner: *std.Build) *WriteFile {
-const wf = owner.allocator.create(WriteFile) catch @panic("OOM");
-wf.* = .{
+const write_file = owner.allocator.create(WriteFile) catch @panic("OOM");
+write_file.* = .{
 .step = Step.init(.{
-.id = .write_file,
+.id = base_id,
 .name = "WriteFile",
 .owner = owner,
 .makeFn = make,
@@ -84,22 +84,22 @@ pub fn create(owner: *std.Build) *WriteFile {
 .files = .{},
 .directories = .{},
 .output_source_files = .{},
-.generated_directory = .{ .step = &wf.step },
+.generated_directory = .{ .step = &write_file.step },
 };
-return wf;
+return write_file;
 }

-pub fn add(wf: *WriteFile, sub_path: []const u8, bytes: []const u8) std.Build.LazyPath {
-const b = wf.step.owner;
+pub fn add(write_file: *WriteFile, sub_path: []const u8, bytes: []const u8) std.Build.LazyPath {
+const b = write_file.step.owner;
 const gpa = b.allocator;
 const file = gpa.create(File) catch @panic("OOM");
 file.* = .{
-.generated_file = .{ .step = &wf.step },
+.generated_file = .{ .step = &write_file.step },
 .sub_path = b.dupePath(sub_path),
 .contents = .{ .bytes = b.dupe(bytes) },
 };
-wf.files.append(gpa, file) catch @panic("OOM");
-wf.maybeUpdateName();
+write_file.files.append(gpa, file) catch @panic("OOM");
+write_file.maybeUpdateName();
 return file.getPath();
 }
@@ -110,19 +110,19 @@ pub fn add(wf: *WriteFile, sub_path: []const u8, bytes: []const u8) std.Build.La
 /// include sub-directories, in which case this step will ensure the
 /// required sub-path exists.
 /// This is the option expected to be used most commonly with `addCopyFile`.
-pub fn addCopyFile(wf: *WriteFile, source: std.Build.LazyPath, sub_path: []const u8) std.Build.LazyPath {
-const b = wf.step.owner;
+pub fn addCopyFile(write_file: *WriteFile, source: std.Build.LazyPath, sub_path: []const u8) std.Build.LazyPath {
+const b = write_file.step.owner;
 const gpa = b.allocator;
 const file = gpa.create(File) catch @panic("OOM");
 file.* = .{
-.generated_file = .{ .step = &wf.step },
+.generated_file = .{ .step = &write_file.step },
 .sub_path = b.dupePath(sub_path),
 .contents = .{ .copy = source },
 };
-wf.files.append(gpa, file) catch @panic("OOM");
+write_file.files.append(gpa, file) catch @panic("OOM");

-wf.maybeUpdateName();
-source.addStepDependencies(&wf.step);
+write_file.maybeUpdateName();
+source.addStepDependencies(&write_file.step);
 return file.getPath();
 }
@@ -130,24 +130,24 @@ pub fn addCopyFile(wf: *WriteFile, source: std.Build.LazyPath, sub_path: []const
 /// relative to this step's generated directory.
 /// The returned value is a lazy path to the generated subdirectory.
 pub fn addCopyDirectory(
-wf: *WriteFile,
+write_file: *WriteFile,
 source: std.Build.LazyPath,
 sub_path: []const u8,
 options: Directory.Options,
 ) std.Build.LazyPath {
-const b = wf.step.owner;
+const b = write_file.step.owner;
 const gpa = b.allocator;
 const dir = gpa.create(Directory) catch @panic("OOM");
 dir.* = .{
 .source = source.dupe(b),
 .sub_path = b.dupePath(sub_path),
 .options = options.dupe(b),
-.generated_dir = .{ .step = &wf.step },
+.generated_dir = .{ .step = &write_file.step },
 };
-wf.directories.append(gpa, dir) catch @panic("OOM");
+write_file.directories.append(gpa, dir) catch @panic("OOM");

-wf.maybeUpdateName();
-source.addStepDependencies(&wf.step);
+write_file.maybeUpdateName();
+source.addStepDependencies(&write_file.step);
 return dir.getPath();
 }
@@ -156,13 +156,13 @@ pub fn addCopyDirectory(
 /// used as part of the normal build process, but as a utility occasionally
 /// run by a developer with intent to modify source files and then commit
 /// those changes to version control.
-pub fn addCopyFileToSource(wf: *WriteFile, source: std.Build.LazyPath, sub_path: []const u8) void {
-const b = wf.step.owner;
-wf.output_source_files.append(b.allocator, .{
+pub fn addCopyFileToSource(write_file: *WriteFile, source: std.Build.LazyPath, sub_path: []const u8) void {
+const b = write_file.step.owner;
+write_file.output_source_files.append(b.allocator, .{
 .contents = .{ .copy = source },
 .sub_path = sub_path,
 }) catch @panic("OOM");
-source.addStepDependencies(&wf.step);
+source.addStepDependencies(&write_file.step);
 }

 /// A path relative to the package root.
@@ -170,9 +170,9 @@ pub fn addCopyFileToSource(wf: *WriteFile, source: std.Build.LazyPath, sub_path:
 /// used as part of the normal build process, but as a utility occasionally
 /// run by a developer with intent to modify source files and then commit
 /// those changes to version control.
-pub fn addBytesToSource(wf: *WriteFile, bytes: []const u8, sub_path: []const u8) void {
-const b = wf.step.owner;
-wf.output_source_files.append(b.allocator, .{
+pub fn addBytesToSource(write_file: *WriteFile, bytes: []const u8, sub_path: []const u8) void {
+const b = write_file.step.owner;
+write_file.output_source_files.append(b.allocator, .{
 .contents = .{ .bytes = bytes },
 .sub_path = sub_path,
 }) catch @panic("OOM");
@@ -180,20 +180,20 @@ pub fn addBytesToSource(wf: *WriteFile, bytes: []const u8, sub_path: []const u8)

 /// Returns a `LazyPath` representing the base directory that contains all the
 /// files from this `WriteFile`.
-pub fn getDirectory(wf: *WriteFile) std.Build.LazyPath {
-return .{ .generated = &wf.generated_directory };
+pub fn getDirectory(write_file: *WriteFile) std.Build.LazyPath {
+return .{ .generated = &write_file.generated_directory };
 }

-fn maybeUpdateName(wf: *WriteFile) void {
-if (wf.files.items.len == 1 and wf.directories.items.len == 0) {
+fn maybeUpdateName(write_file: *WriteFile) void {
+if (write_file.files.items.len == 1 and write_file.directories.items.len == 0) {
 // First time adding a file; update name.
-if (std.mem.eql(u8, wf.step.name, "WriteFile")) {
-wf.step.name = wf.step.owner.fmt("WriteFile {s}", .{wf.files.items[0].sub_path});
+if (std.mem.eql(u8, write_file.step.name, "WriteFile")) {
+write_file.step.name = write_file.step.owner.fmt("WriteFile {s}", .{write_file.files.items[0].sub_path});
 }
-} else if (wf.directories.items.len == 1 and wf.files.items.len == 0) {
+} else if (write_file.directories.items.len == 1 and write_file.files.items.len == 0) {
 // First time adding a directory; update name.
-if (std.mem.eql(u8, wf.step.name, "WriteFile")) {
-wf.step.name = wf.step.owner.fmt("WriteFile {s}", .{wf.directories.items[0].sub_path});
+if (std.mem.eql(u8, write_file.step.name, "WriteFile")) {
+write_file.step.name = write_file.step.owner.fmt("WriteFile {s}", .{write_file.directories.items[0].sub_path});
 }
 }
 }
@@ -201,14 +201,14 @@ fn maybeUpdateName(wf: *WriteFile) void {
 fn make(step: *Step, prog_node: *std.Progress.Node) !void {
 _ = prog_node;
 const b = step.owner;
-const wf: *WriteFile = @fieldParentPtr("step", step);
+const write_file: *WriteFile = @fieldParentPtr("step", step);

 // Writing to source files is kind of an extra capability of this
 // WriteFile - arguably it should be a different step. But anyway here
 // it is, it happens unconditionally and does not interact with the other
 // files here.
 var any_miss = false;
-for (wf.output_source_files.items) |output_source_file| {
+for (write_file.output_source_files.items) |output_source_file| {
 if (fs.path.dirname(output_source_file.sub_path)) |dirname| {
 b.build_root.handle.makePath(dirname) catch |err| {
 return step.fail("unable to make path '{}{s}': {s}", .{
@@ -226,7 +226,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
 any_miss = true;
 },
 .copy => |file_source| {
-const source_path = file_source.getPath(b);
+const source_path = file_source.getPath2(b, step);
 const prev_status = fs.Dir.updateFile(
 fs.cwd(),
 source_path,
@@ -258,18 +258,18 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
 // in a non-backwards-compatible way.
 man.hash.add(@as(u32, 0xd767ee59));

-for (wf.files.items) |file| {
+for (write_file.files.items) |file| {
 man.hash.addBytes(file.sub_path);
 switch (file.contents) {
 .bytes => |bytes| {
 man.hash.addBytes(bytes);
 },
 .copy => |file_source| {
-_ = try man.addFile(file_source.getPath(b), null);
+_ = try man.addFile(file_source.getPath2(b, step), null);
 },
 }
 }
-for (wf.directories.items) |dir| {
+for (write_file.directories.items) |dir| {
 man.hash.addBytes(dir.source.getPath2(b, step));
 man.hash.addBytes(dir.sub_path);
 for (dir.options.exclude_extensions) |ext| man.hash.addBytes(ext);
@@ -278,19 +278,19 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {

 if (try step.cacheHit(&man)) {
 const digest = man.final();
-for (wf.files.items) |file| {
+for (write_file.files.items) |file| {
 file.generated_file.path = try b.cache_root.join(b.allocator, &.{
 "o", &digest, file.sub_path,
 });
 }
-wf.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest });
+write_file.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest });
 return;
 }

 const digest = man.final();
 const cache_path = "o" ++ fs.path.sep_str ++ digest;

-wf.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest });
+write_file.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest });

 var cache_dir = b.cache_root.handle.makeOpenPath(cache_path, .{}) catch |err| {
 return step.fail("unable to make path '{}{s}': {s}", .{
@@ -301,7 +301,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {

 const cwd = fs.cwd();

-for (wf.files.items) |file| {
+for (write_file.files.items) |file| {
 if (fs.path.dirname(file.sub_path)) |dirname| {
 cache_dir.makePath(dirname) catch |err| {
 return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{
@@ -318,7 +318,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
 };
 },
 .copy => |file_source| {
-const source_path = file_source.getPath(b);
+const source_path = file_source.getPath2(b, step);
 const prev_status = fs.Dir.updateFile(
 cwd,
 source_path,
@@ -347,7 +347,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
 cache_path, file.sub_path,
 });
 }
-for (wf.directories.items) |dir| {
+for (write_file.directories.items) |dir| {
 const full_src_dir_path = dir.source.getPath2(b, step);
 const dest_dirname = dir.sub_path;
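A hypothetical build.zig fragment showing the WriteFile entry points touched above (`add`, `addCopyFile`, `getDirectory`); the file names and install locations are invented for illustration.

const std = @import("std");

pub fn build(b: *std.Build) void {
    const wf = b.addWriteFiles();

    // `add` materializes bytes as a file in the step's generated directory
    // and returns a LazyPath to it.
    _ = wf.add("config.h", "#define ENABLE_FOO 1\n");

    // `addCopyFile` copies an existing source file into the same generated tree.
    _ = wf.addCopyFile(b.path("assets/data.bin"), "data/data.bin");

    // `getDirectory` exposes the whole generated tree as one LazyPath.
    const install = b.addInstallDirectory(.{
        .source_dir = wf.getDirectory(),
        .install_dir = .prefix,
        .install_subdir = "generated",
    });
    b.getInstallStep().dependOn(&install.step);
}
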
@@ -18,7 +18,7 @@ pub fn build(b: *std.Build) void {

 const exe = b.addExecutable(.{
 .name = "main",
-.root_source_file = .{ .path = "main.zig" },
+.root_source_file = b.path("main.zig"),
 .optimize = optimize,
 .target = target,
 });
@@ -28,7 +28,7 @@ pub fn build(b: *std.Build) void {
 .optimize = optimize,
 .target = target,
 });
-lib.addCSourceFile(.{ .file = .{ .path = "shared_lib.c" }, .flags = &.{"-gdwarf"} });
+lib.addCSourceFile(.{ .file = b.path("shared_lib.c"), .flags = &.{"-gdwarf"} });
 lib.linkLibC();
 exe.linkLibrary(lib);

@@ -5,7 +5,7 @@ pub fn build(b: *std.Build) void {
 b.default_step = test_step;

 const main = b.addTest(.{
-.root_source_file = .{ .path = "main.zig" },
+.root_source_file = b.path("main.zig"),
 .optimize = b.standardOptimizeOption(.{}),
 });
 // TODO: actually check these two artifacts for correctness

@@ -8,7 +8,7 @@ pub fn build(b: *std.Build) void {

 const obj = b.addObject(.{
 .name = "main",
-.root_source_file = .{ .path = "main.zig" },
+.root_source_file = b.path("main.zig"),
 .optimize = optimize,
 .target = b.host,
 });

@@ -5,15 +5,15 @@ pub fn build(b: *std.Build) void {
 b.default_step = test_step;

 const test1 = b.addTest(.{
-.root_source_file = .{ .path = "test_root/empty.zig" },
+.root_source_file = b.path("test_root/empty.zig"),
 .test_runner = "src/main.zig",
 });
 const test2 = b.addTest(.{
-.root_source_file = .{ .path = "src/empty.zig" },
+.root_source_file = b.path("src/empty.zig"),
 .test_runner = "src/main.zig",
 });
 const test3 = b.addTest(.{
-.root_source_file = .{ .path = "empty.zig" },
+.root_source_file = b.path("empty.zig"),
 .test_runner = "src/main.zig",
 });

@@ -16,7 +16,7 @@ pub fn build(b: *std.Build) void {
 const optimize: std.builtin.OptimizeMode = .Debug;
 const obj = b.addObject(.{
 .name = "issue_5825",
-.root_source_file = .{ .path = "main.zig" },
+.root_source_file = b.path("main.zig"),
 .optimize = optimize,
 .target = target,
 });

@@ -2,7 +2,7 @@ const std = @import("std");

 pub fn build(b: *std.Build) void {
 const main = b.addTest(.{
-.root_source_file = .{ .path = "src/main.zig" },
+.root_source_file = b.path("src/main.zig"),
 .target = b.host,
 .optimize = .Debug,
 });

@@ -29,7 +29,7 @@ pub fn build(b: *std.build.Builder) !void {
 options.addOption(bool, "keep_sigpipe", keep_sigpipe);
 const exe = b.addExecutable(.{
 .name = "breakpipe",
-.root_source_file = .{ .path = "breakpipe.zig" },
+.root_source_file = b.path("breakpipe.zig"),
 });
 exe.addOptions("build_options", options);
 const run = b.addRunArtifact(exe);

@@ -11,7 +11,7 @@ pub fn build(b: *std.Build) !void {

 const lib_gnu = b.addStaticLibrary(.{
 .name = "toargv-gnu",
-.root_source_file = .{ .path = "lib.zig" },
+.root_source_file = b.path("lib.zig"),
 .target = b.resolveTargetQuery(.{
 .abi = .gnu,
 }),
@@ -25,7 +25,7 @@ pub fn build(b: *std.Build) !void {
 .optimize = optimize,
 });
 verify_gnu.addCSourceFile(.{
-.file = .{ .path = "verify.c" },
+.file = b.path("verify.c"),
 .flags = &.{ "-DUNICODE", "-D_UNICODE" },
 });
 verify_gnu.mingw_unicode_entry_point = true;
@@ -34,7 +34,7 @@ pub fn build(b: *std.Build) !void {

 const fuzz = b.addExecutable(.{
 .name = "fuzz",
-.root_source_file = .{ .path = "fuzz.zig" },
+.root_source_file = b.path("fuzz.zig"),
 .target = b.host,
 .optimize = optimize,
 });
@@ -69,7 +69,7 @@ pub fn build(b: *std.Build) !void {
 if (has_msvc) {
 const lib_msvc = b.addStaticLibrary(.{
 .name = "toargv-msvc",
-.root_source_file = .{ .path = "lib.zig" },
+.root_source_file = b.path("lib.zig"),
 .target = b.resolveTargetQuery(.{
 .abi = .msvc,
 }),
@@ -83,7 +83,7 @@ pub fn build(b: *std.Build) !void {
 .optimize = optimize,
 });
 verify_msvc.addCSourceFile(.{
-.file = .{ .path = "verify.c" },
+.file = b.path("verify.c"),
 .flags = &.{ "-DUNICODE", "-D_UNICODE" },
 });
 verify_msvc.linkLibrary(lib_msvc);

@@ -12,14 +12,14 @@ pub fn build(b: *std.Build) void {

 const hello = b.addExecutable(.{
 .name = "hello",
-.root_source_file = .{ .path = "hello.zig" },
+.root_source_file = b.path("hello.zig"),
 .optimize = optimize,
 .target = target,
 });

 const main = b.addExecutable(.{
 .name = "main",
-.root_source_file = .{ .path = "main.zig" },
+.root_source_file = b.path("main.zig"),
 .optimize = optimize,
 .target = target,
 });
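The build-script hunks above are mechanical migrations from the old `.{ .path = "..." }` literal to `b.path("...")`. For reference, a hypothetical stand-alone build.zig in the new style (names, paths, and flags are invented):

const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const exe = b.addExecutable(.{
        .name = "demo",
        .root_source_file = b.path("src/main.zig"), // was: .{ .path = "src/main.zig" }
        .target = target,
        .optimize = optimize,
    });
    exe.addCSourceFile(.{
        .file = b.path("src/extra.c"), // was: .{ .path = "src/extra.c" }
        .flags = &.{"-std=c99"},
    });
    b.installArtifact(exe);
}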