Mirror of https://codeberg.org/ziglang/zig.git, synced 2025-12-06 13:54:21 +00:00
use build.zig.zon instead of build.zig.ini for the manifest file
* improve error message when build manifest file is missing
* update std.zig.Ast to support ZON
* Compilation.AllErrors.Message: make the notes field a const slice
* move build manifest parsing logic into src/Manifest.zig and add more checks, and make the checks integrate into the standard error reporting code so that reported errors look sexy

closes #14290
Parent: 873bb29c98
Commit: 81c27c74bc
8 changed files with 665 additions and 224 deletions
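For context, a minimal manifest in the new format, taken from the test added in src/Manifest.zig below (the name, version, URL, and hash are example values):

    .{
        .name = "foo",
        .version = "3.2.1",
        .dependencies = .{
            .bar = .{
                .url = "https://example.com/baz.tar.gz",
                .hash = "1220f1b680b6065fcfc94fe777f22e73bcb7e2767e5f4d99d4255fe76ded69c7a35f",
            },
        },
    }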
@@ -1496,8 +1496,8 @@ pub fn dependency(b: *Build, name: []const u8, args: anytype) *Dependency {
         }
     }
 
-    const full_path = b.pathFromRoot("build.zig.ini");
-    std.debug.print("no dependency named '{s}' in '{s}'\n", .{ name, full_path });
+    const full_path = b.pathFromRoot("build.zig.zon");
+    std.debug.print("no dependency named '{s}' in '{s}'. All packages used in build.zig must be declared in this file.\n", .{ name, full_path });
     std.process.exit(1);
 }
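A sketch of the caller's side this error message refers to, assuming a `bar` entry exists in build.zig.zon as in the example above (`bar` is a placeholder name):

    // In build.zig; exits with the message above if "bar" is not
    // declared in build.zig.zon.
    const bar_dep = b.dependency("bar", .{});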
@@ -1145,7 +1145,8 @@ pub fn ArrayHashMapUnmanaged(
         }
 
         /// Create a copy of the hash map which can be modified separately.
-        /// The copy uses the same context and allocator as this instance.
+        /// The copy uses the same context as this instance, but is allocated
+        /// with the provided allocator.
         pub fn clone(self: Self, allocator: Allocator) !Self {
             if (@sizeOf(ByIndexContext) != 0)
                 @compileError("Cannot infer context " ++ @typeName(Context) ++ ", call cloneContext instead.");
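A minimal sketch of what the reworded doc comment implies, assuming `map` is an unmanaged array hash map populated through one allocator and `arena` is a different allocator (src/Manifest.zig below relies on exactly this to move its dependency map into an arena):

    var copy = try map.clone(arena);
    // `copy` shares no memory with `map`; its storage came from `arena`,
    // so it must also be freed through `arena`.
    defer copy.deinit(arena);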
@@ -1,4 +1,8 @@
 //! Abstract Syntax Tree for Zig source code.
+//! For Zig syntax, the root node is at nodes[0] and contains the list of
+//! sub-nodes.
+//! For Zon syntax, the root node is at nodes[0] and contains lhs as the node
+//! index of the main expression.
 
 /// Reference to externally-owned data.
 source: [:0]const u8,
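A minimal sketch of consuming the new ZON convention, mirroring what src/Manifest.zig's parse function does below (`gpa` and `source` are assumed to be an allocator and a null-terminated source slice):

    var ast = try std.zig.Ast.parse(gpa, source, .zon);
    defer ast.deinit(gpa);
    // For ZON, nodes[0] is the root and its lhs is the main expression:
    const main_expr_node = ast.nodes.items(.data)[0].lhs;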
@@ -181,17 +181,26 @@ pub fn parseRoot(p: *Parse) !void {
 /// TODO: set a flag in Parse struct, and honor that flag
 /// by emitting compilation errors when non-zon nodes are encountered.
 pub fn parseZon(p: *Parse) !void {
-    const node_index = p.parseExpr() catch |err| switch (err) {
+    // We must use index 0 so that 0 can be used as null elsewhere.
+    p.nodes.appendAssumeCapacity(.{
+        .tag = .root,
+        .main_token = 0,
+        .data = undefined,
+    });
+    const node_index = p.expectExpr() catch |err| switch (err) {
         error.ParseError => {
             assert(p.errors.items.len > 0);
             return;
         },
         else => |e| return e,
     };
-    assert(node_index == 0);
     if (p.token_tags[p.tok_i] != .eof) {
         try p.warnExpected(.eof);
     }
+    p.nodes.items(.data)[0] = .{
+        .lhs = node_index,
+        .rhs = undefined,
+    };
 }
 
 /// ContainerMembers <- ContainerDeclarations (ContainerField COMMA)* (ContainerField / ContainerDeclarations)
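The reserved slot matters because elsewhere in the Ast API a node index of 0 doubles as a null/none value: appending the root first guarantees that no real sub-expression can ever receive index 0, and the root's lhs is patched afterwards to point at the main expression once its index is known.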
@@ -385,7 +385,7 @@ pub const AllErrors = struct {
         count: u32 = 1,
         /// Does not include the trailing newline.
         source_line: ?[]const u8,
-        notes: []Message = &.{},
+        notes: []const Message = &.{},
         reference_trace: []Message = &.{},
 
         /// Splits the error message up into lines to properly indent them
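The const qualifier matters for the new code in src/Package.zig below: it builds notes as `const notes: [1]Compilation.AllErrors.Message = ...` and passes `&notes`, a pointer to a const array, which coerces to `[]const Message` but would be rejected by the old `[]Message` field type.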
src/Manifest.zig (new file, 499 lines)

pub const basename = "build.zig.zon";
pub const Hash = std.crypto.hash.sha2.Sha256;

pub const Dependency = struct {
    url: []const u8,
    url_tok: Ast.TokenIndex,
    hash: ?[]const u8,
    hash_tok: Ast.TokenIndex,
};

pub const ErrorMessage = struct {
    msg: []const u8,
    tok: Ast.TokenIndex,
    off: u32,
};

pub const MultihashFunction = enum(u16) {
    identity = 0x00,
    sha1 = 0x11,
    @"sha2-256" = 0x12,
    @"sha2-512" = 0x13,
    @"sha3-512" = 0x14,
    @"sha3-384" = 0x15,
    @"sha3-256" = 0x16,
    @"sha3-224" = 0x17,
    @"sha2-384" = 0x20,
    @"sha2-256-trunc254-padded" = 0x1012,
    @"sha2-224" = 0x1013,
    @"sha2-512-224" = 0x1014,
    @"sha2-512-256" = 0x1015,
    @"blake2b-256" = 0xb220,
    _,
};

pub const multihash_function: MultihashFunction = switch (Hash) {
    std.crypto.hash.sha2.Sha256 => .@"sha2-256",
    else => @compileError("unreachable"),
};
comptime {
    // We avoid unnecessary uleb128 code in hexDigest by asserting here the
    // values are small enough to be contained in the one-byte encoding.
    assert(@enumToInt(multihash_function) < 127);
    assert(Hash.digest_length < 127);
}
pub const multihash_len = 1 + 1 + Hash.digest_length;

name: []const u8,
version: std.SemanticVersion,
dependencies: std.StringArrayHashMapUnmanaged(Dependency),

errors: []ErrorMessage,
arena_state: std.heap.ArenaAllocator.State,

pub const Error = Allocator.Error;

pub fn parse(gpa: Allocator, ast: std.zig.Ast) Error!Manifest {
    const node_tags = ast.nodes.items(.tag);
    const node_datas = ast.nodes.items(.data);
    assert(node_tags[0] == .root);
    const main_node_index = node_datas[0].lhs;

    var arena_instance = std.heap.ArenaAllocator.init(gpa);
    errdefer arena_instance.deinit();

    var p: Parse = .{
        .gpa = gpa,
        .ast = ast,
        .arena = arena_instance.allocator(),
        .errors = .{},

        .name = undefined,
        .version = undefined,
        .dependencies = .{},
        .buf = .{},
    };
    defer p.buf.deinit(gpa);
    defer p.errors.deinit(gpa);
    defer p.dependencies.deinit(gpa);

    p.parseRoot(main_node_index) catch |err| switch (err) {
        error.ParseFailure => assert(p.errors.items.len > 0),
        else => |e| return e,
    };

    return .{
        .name = p.name,
        .version = p.version,
        .dependencies = try p.dependencies.clone(p.arena),
        .errors = try p.arena.dupe(ErrorMessage, p.errors.items),
        .arena_state = arena_instance.state,
    };
}

pub fn deinit(man: *Manifest, gpa: Allocator) void {
    man.arena_state.promote(gpa).deinit();
    man.* = undefined;
}

const hex_charset = "0123456789abcdef";

pub fn hex64(x: u64) [16]u8 {
    var result: [16]u8 = undefined;
    var i: usize = 0;
    while (i < 8) : (i += 1) {
        const byte = @truncate(u8, x >> @intCast(u6, 8 * i));
        result[i * 2 + 0] = hex_charset[byte >> 4];
        result[i * 2 + 1] = hex_charset[byte & 15];
    }
    return result;
}

test hex64 {
    const s = "[" ++ hex64(0x12345678_abcdef00) ++ "]";
    try std.testing.expectEqualStrings("[00efcdab78563412]", s);
}

pub fn hexDigest(digest: [Hash.digest_length]u8) [multihash_len * 2]u8 {
    var result: [multihash_len * 2]u8 = undefined;

    result[0] = hex_charset[@enumToInt(multihash_function) >> 4];
    result[1] = hex_charset[@enumToInt(multihash_function) & 15];

    result[2] = hex_charset[Hash.digest_length >> 4];
    result[3] = hex_charset[Hash.digest_length & 15];

    for (digest) |byte, i| {
        result[4 + i * 2] = hex_charset[byte >> 4];
        result[5 + i * 2] = hex_charset[byte & 15];
    }
    return result;
}

const Parse = struct {
    gpa: Allocator,
    ast: std.zig.Ast,
    arena: Allocator,
    buf: std.ArrayListUnmanaged(u8),
    errors: std.ArrayListUnmanaged(ErrorMessage),

    name: []const u8,
    version: std.SemanticVersion,
    dependencies: std.StringArrayHashMapUnmanaged(Dependency),

    const InnerError = error{ ParseFailure, OutOfMemory };

    fn parseRoot(p: *Parse, node: Ast.Node.Index) !void {
        const ast = p.ast;
        const main_tokens = ast.nodes.items(.main_token);
        const main_token = main_tokens[node];

        var buf: [2]Ast.Node.Index = undefined;
        const struct_init = ast.fullStructInit(&buf, node) orelse {
            return fail(p, main_token, "expected top level expression to be a struct", .{});
        };

        var have_name = false;
        var have_version = false;

        for (struct_init.ast.fields) |field_init| {
            const name_token = ast.firstToken(field_init) - 2;
            const field_name = try identifierTokenString(p, name_token);
            // We could get fancy with reflection and comptime logic here but doing
            // things manually provides an opportunity to do any additional verification
            // that is desirable on a per-field basis.
            if (mem.eql(u8, field_name, "dependencies")) {
                try parseDependencies(p, field_init);
            } else if (mem.eql(u8, field_name, "name")) {
                p.name = try parseString(p, field_init);
                have_name = true;
            } else if (mem.eql(u8, field_name, "version")) {
                const version_text = try parseString(p, field_init);
                p.version = std.SemanticVersion.parse(version_text) catch |err| v: {
                    try appendError(p, main_tokens[field_init], "unable to parse semantic version: {s}", .{@errorName(err)});
                    break :v undefined;
                };
                have_version = true;
            } else {
                // Ignore unknown fields so that we can add fields in future zig
                // versions without breaking older zig versions.
            }
        }

        if (!have_name) {
            try appendError(p, main_token, "missing top-level 'name' field", .{});
        }

        if (!have_version) {
            try appendError(p, main_token, "missing top-level 'version' field", .{});
        }
    }

    fn parseDependencies(p: *Parse, node: Ast.Node.Index) !void {
        const ast = p.ast;
        const main_tokens = ast.nodes.items(.main_token);

        var buf: [2]Ast.Node.Index = undefined;
        const struct_init = ast.fullStructInit(&buf, node) orelse {
            const tok = main_tokens[node];
            return fail(p, tok, "expected dependencies expression to be a struct", .{});
        };

        for (struct_init.ast.fields) |field_init| {
            const name_token = ast.firstToken(field_init) - 2;
            const dep_name = try identifierTokenString(p, name_token);
            const dep = try parseDependency(p, field_init);
            try p.dependencies.put(p.gpa, dep_name, dep);
        }
    }

    fn parseDependency(p: *Parse, node: Ast.Node.Index) !Dependency {
        const ast = p.ast;
        const main_tokens = ast.nodes.items(.main_token);

        var buf: [2]Ast.Node.Index = undefined;
        const struct_init = ast.fullStructInit(&buf, node) orelse {
            const tok = main_tokens[node];
            return fail(p, tok, "expected dependency expression to be a struct", .{});
        };

        var dep: Dependency = .{
            .url = undefined,
            .url_tok = undefined,
            .hash = null,
            .hash_tok = undefined,
        };
        var have_url = false;

        for (struct_init.ast.fields) |field_init| {
            const name_token = ast.firstToken(field_init) - 2;
            const field_name = try identifierTokenString(p, name_token);
            // We could get fancy with reflection and comptime logic here but doing
            // things manually provides an opportunity to do any additional verification
            // that is desirable on a per-field basis.
            if (mem.eql(u8, field_name, "url")) {
                dep.url = parseString(p, field_init) catch |err| switch (err) {
                    error.ParseFailure => continue,
                    else => |e| return e,
                };
                dep.url_tok = main_tokens[field_init];
                have_url = true;
            } else if (mem.eql(u8, field_name, "hash")) {
                dep.hash = parseHash(p, field_init) catch |err| switch (err) {
                    error.ParseFailure => continue,
                    else => |e| return e,
                };
                dep.hash_tok = main_tokens[field_init];
            } else {
                // Ignore unknown fields so that we can add fields in future zig
                // versions without breaking older zig versions.
            }
        }

        if (!have_url) {
            try appendError(p, main_tokens[node], "dependency is missing 'url' field", .{});
        }

        return dep;
    }

    fn parseString(p: *Parse, node: Ast.Node.Index) ![]const u8 {
        const ast = p.ast;
        const node_tags = ast.nodes.items(.tag);
        const main_tokens = ast.nodes.items(.main_token);
        if (node_tags[node] != .string_literal) {
            return fail(p, main_tokens[node], "expected string literal", .{});
        }
        const str_lit_token = main_tokens[node];
        const token_bytes = ast.tokenSlice(str_lit_token);
        p.buf.clearRetainingCapacity();
        try parseStrLit(p, str_lit_token, &p.buf, token_bytes, 0);
        const duped = try p.arena.dupe(u8, p.buf.items);
        return duped;
    }

    fn parseHash(p: *Parse, node: Ast.Node.Index) ![]const u8 {
        const ast = p.ast;
        const main_tokens = ast.nodes.items(.main_token);
        const tok = main_tokens[node];
        const h = try parseString(p, node);

        if (h.len >= 2) {
            const their_multihash_func = std.fmt.parseInt(u8, h[0..2], 16) catch |err| {
                return fail(p, tok, "invalid multihash value: unable to parse hash function: {s}", .{
                    @errorName(err),
                });
            };
            if (@intToEnum(MultihashFunction, their_multihash_func) != multihash_function) {
                return fail(p, tok, "unsupported hash function: only sha2-256 is supported", .{});
            }
        }

        const hex_multihash_len = 2 * Manifest.multihash_len;
        if (h.len != hex_multihash_len) {
            return fail(p, tok, "wrong hash size. expected: {d}, found: {d}", .{
                hex_multihash_len, h.len,
            });
        }

        return h;
    }

    /// TODO: try to DRY this with AstGen.identifierTokenString
    fn identifierTokenString(p: *Parse, token: Ast.TokenIndex) InnerError![]const u8 {
        const ast = p.ast;
        const token_tags = ast.tokens.items(.tag);
        assert(token_tags[token] == .identifier);
        const ident_name = ast.tokenSlice(token);
        if (!mem.startsWith(u8, ident_name, "@")) {
            return ident_name;
        }
        p.buf.clearRetainingCapacity();
        try parseStrLit(p, token, &p.buf, ident_name, 1);
        const duped = try p.arena.dupe(u8, p.buf.items);
        return duped;
    }

    /// TODO: try to DRY this with AstGen.parseStrLit
    fn parseStrLit(
        p: *Parse,
        token: Ast.TokenIndex,
        buf: *std.ArrayListUnmanaged(u8),
        bytes: []const u8,
        offset: u32,
    ) InnerError!void {
        const raw_string = bytes[offset..];
        var buf_managed = buf.toManaged(p.gpa);
        const result = std.zig.string_literal.parseWrite(buf_managed.writer(), raw_string);
        buf.* = buf_managed.moveToUnmanaged();
        switch (try result) {
            .success => {},
            .failure => |err| try p.appendStrLitError(err, token, bytes, offset),
        }
    }

    /// TODO: try to DRY this with AstGen.failWithStrLitError
    fn appendStrLitError(
        p: *Parse,
        err: std.zig.string_literal.Error,
        token: Ast.TokenIndex,
        bytes: []const u8,
        offset: u32,
    ) Allocator.Error!void {
        const raw_string = bytes[offset..];
        switch (err) {
            .invalid_escape_character => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @intCast(u32, bad_index),
                    "invalid escape character: '{c}'",
                    .{raw_string[bad_index]},
                );
            },
            .expected_hex_digit => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @intCast(u32, bad_index),
                    "expected hex digit, found '{c}'",
                    .{raw_string[bad_index]},
                );
            },
            .empty_unicode_escape_sequence => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @intCast(u32, bad_index),
                    "empty unicode escape sequence",
                    .{},
                );
            },
            .expected_hex_digit_or_rbrace => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @intCast(u32, bad_index),
                    "expected hex digit or '}}', found '{c}'",
                    .{raw_string[bad_index]},
                );
            },
            .invalid_unicode_codepoint => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @intCast(u32, bad_index),
                    "unicode escape does not correspond to a valid codepoint",
                    .{},
                );
            },
            .expected_lbrace => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @intCast(u32, bad_index),
                    "expected '{{', found '{c}",
                    .{raw_string[bad_index]},
                );
            },
            .expected_rbrace => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @intCast(u32, bad_index),
                    "expected '}}', found '{c}",
                    .{raw_string[bad_index]},
                );
            },
            .expected_single_quote => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @intCast(u32, bad_index),
                    "expected single quote ('), found '{c}",
                    .{raw_string[bad_index]},
                );
            },
            .invalid_character => |bad_index| {
                try p.appendErrorOff(
                    token,
                    offset + @intCast(u32, bad_index),
                    "invalid byte in string or character literal: '{c}'",
                    .{raw_string[bad_index]},
                );
            },
        }
    }

    fn fail(
        p: *Parse,
        tok: Ast.TokenIndex,
        comptime fmt: []const u8,
        args: anytype,
    ) InnerError {
        try appendError(p, tok, fmt, args);
        return error.ParseFailure;
    }

    fn appendError(p: *Parse, tok: Ast.TokenIndex, comptime fmt: []const u8, args: anytype) !void {
        return appendErrorOff(p, tok, 0, fmt, args);
    }

    fn appendErrorOff(
        p: *Parse,
        tok: Ast.TokenIndex,
        byte_offset: u32,
        comptime fmt: []const u8,
        args: anytype,
    ) Allocator.Error!void {
        try p.errors.append(p.gpa, .{
            .msg = try std.fmt.allocPrint(p.arena, fmt, args),
            .tok = tok,
            .off = byte_offset,
        });
    }
};

const Manifest = @This();
const std = @import("std");
const mem = std.mem;
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const Ast = std.zig.Ast;
const testing = std.testing;

test "basic" {
    const gpa = testing.allocator;

    const example =
        \\.{
        \\    .name = "foo",
        \\    .version = "3.2.1",
        \\    .dependencies = .{
        \\        .bar = .{
        \\            .url = "https://example.com/baz.tar.gz",
        \\            .hash = "1220f1b680b6065fcfc94fe777f22e73bcb7e2767e5f4d99d4255fe76ded69c7a35f",
        \\        },
        \\    },
        \\}
    ;

    var ast = try std.zig.Ast.parse(gpa, example, .zon);
    defer ast.deinit(gpa);

    try testing.expect(ast.errors.len == 0);

    var manifest = try Manifest.parse(gpa, ast);
    defer manifest.deinit(gpa);

    try testing.expectEqualStrings("foo", manifest.name);

    try testing.expectEqual(@as(std.SemanticVersion, .{
        .major = 3,
        .minor = 2,
        .patch = 1,
    }), manifest.version);

    try testing.expect(manifest.dependencies.count() == 1);
    try testing.expectEqualStrings("bar", manifest.dependencies.keys()[0]);
    try testing.expectEqualStrings(
        "https://example.com/baz.tar.gz",
        manifest.dependencies.values()[0].url,
    );
    try testing.expectEqualStrings(
        "1220f1b680b6065fcfc94fe777f22e73bcb7e2767e5f4d99d4255fe76ded69c7a35f",
        manifest.dependencies.values()[0].hash orelse return error.TestFailed,
    );
}
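As a worked example of this multihash framing: the hash string in the test above begins with "1220", where 0x12 is the multihash identifier for sha2-256 and 0x20 (32) is the digest length in bytes. The remaining 64 hex digits are the SHA-256 digest itself, giving multihash_len * 2 = (1 + 1 + 32) * 2 = 68 hex characters in total, which is exactly the length parseHash checks.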
src/Package.zig (351 lines changed)

@@ -6,8 +6,8 @@ const fs = std.fs;
 const mem = std.mem;
 const Allocator = mem.Allocator;
 const assert = std.debug.assert;
-const Hash = std.crypto.hash.sha2.Sha256;
 const log = std.log.scoped(.package);
+const main = @import("main.zig");
+
 const Compilation = @import("Compilation.zig");
 const Module = @import("Module.zig");

@@ -15,6 +15,7 @@ const ThreadPool = @import("ThreadPool.zig");
 const WaitGroup = @import("WaitGroup.zig");
 const Cache = @import("Cache.zig");
 const build_options = @import("build_options");
+const Manifest = @import("Manifest.zig");
 
 pub const Table = std.StringHashMapUnmanaged(*Package);

@@ -141,10 +142,10 @@ pub fn addAndAdopt(parent: *Package, gpa: Allocator, child: *Package) !void {
 }
 
 pub const build_zig_basename = "build.zig";
-pub const ini_basename = build_zig_basename ++ ".ini";
 
 pub fn fetchAndAddDependencies(
     pkg: *Package,
+    arena: Allocator,
     thread_pool: *ThreadPool,
     http_client: *std.http.Client,
     directory: Compilation.Directory,

@@ -153,89 +154,77 @@ pub fn fetchAndAddDependencies(
     dependencies_source: *std.ArrayList(u8),
     build_roots_source: *std.ArrayList(u8),
     name_prefix: []const u8,
+    color: main.Color,
 ) !void {
     const max_bytes = 10 * 1024 * 1024;
     const gpa = thread_pool.allocator;
-    const build_zig_ini = directory.handle.readFileAlloc(gpa, ini_basename, max_bytes) catch |err| switch (err) {
+    const build_zig_zon_bytes = directory.handle.readFileAllocOptions(
+        arena,
+        Manifest.basename,
+        max_bytes,
+        null,
+        1,
+        0,
+    ) catch |err| switch (err) {
         error.FileNotFound => {
             // Handle the same as no dependencies.
             return;
         },
         else => |e| return e,
     };
-    defer gpa.free(build_zig_ini);
 
-    const ini: std.Ini = .{ .bytes = build_zig_ini };
+    var ast = try std.zig.Ast.parse(gpa, build_zig_zon_bytes, .zon);
+    defer ast.deinit(gpa);
+
+    if (ast.errors.len > 0) {
+        const file_path = try directory.join(arena, &.{Manifest.basename});
+        try main.printErrsMsgToStdErr(gpa, arena, ast, file_path, color);
+        return error.PackageFetchFailed;
+    }
+
+    var manifest = try Manifest.parse(gpa, ast);
+    defer manifest.deinit(gpa);
+
+    if (manifest.errors.len > 0) {
+        const ttyconf: std.debug.TTY.Config = switch (color) {
+            .auto => std.debug.detectTTYConfig(std.io.getStdErr()),
+            .on => .escape_codes,
+            .off => .no_color,
+        };
+        const file_path = try directory.join(arena, &.{Manifest.basename});
+        for (manifest.errors) |msg| {
+            Report.renderErrorMessage(ast, file_path, ttyconf, msg, &.{});
+        }
+        return error.PackageFetchFailed;
+    }
+
+    const report: Report = .{
+        .ast = &ast,
+        .directory = directory,
+        .color = color,
+        .arena = arena,
+    };
+
     var any_error = false;
-    var it = ini.iterateSection("\n[dependency]\n");
-    while (it.next()) |dep| {
-        var line_it = mem.split(u8, dep, "\n");
-        var opt_name: ?[]const u8 = null;
-        var opt_url: ?[]const u8 = null;
-        var expected_hash: ?[]const u8 = null;
-        while (line_it.next()) |kv| {
-            const eq_pos = mem.indexOfScalar(u8, kv, '=') orelse continue;
-            const key = kv[0..eq_pos];
-            const value = kv[eq_pos + 1 ..];
-            if (mem.eql(u8, key, "name")) {
-                opt_name = value;
-            } else if (mem.eql(u8, key, "url")) {
-                opt_url = value;
-            } else if (mem.eql(u8, key, "hash")) {
-                expected_hash = value;
-            } else {
-                const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(key.ptr) - @ptrToInt(ini.bytes.ptr));
-                std.log.warn("{s}/{s}:{d}:{d} unrecognized key: '{s}'", .{
-                    directory.path orelse ".",
-                    "build.zig.ini",
-                    loc.line,
-                    loc.column,
-                    key,
-                });
-            }
-        }
-
-        const name = opt_name orelse {
-            const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(dep.ptr) - @ptrToInt(ini.bytes.ptr));
-            std.log.err("{s}/{s}:{d}:{d} missing key: 'name'", .{
-                directory.path orelse ".",
-                "build.zig.ini",
-                loc.line,
-                loc.column,
-            });
-            any_error = true;
-            continue;
-        };
-
-        const url = opt_url orelse {
-            const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(dep.ptr) - @ptrToInt(ini.bytes.ptr));
-            std.log.err("{s}/{s}:{d}:{d} missing key: 'name'", .{
-                directory.path orelse ".",
-                "build.zig.ini",
-                loc.line,
-                loc.column,
-            });
-            any_error = true;
-            continue;
-        };
-
-        const sub_prefix = try std.fmt.allocPrint(gpa, "{s}{s}.", .{ name_prefix, name });
-        defer gpa.free(sub_prefix);
+    const deps_list = manifest.dependencies.values();
+    for (manifest.dependencies.keys()) |name, i| {
+        const dep = deps_list[i];
+        const sub_prefix = try std.fmt.allocPrint(arena, "{s}{s}.", .{ name_prefix, name });
         const fqn = sub_prefix[0 .. sub_prefix.len - 1];
 
         const sub_pkg = try fetchAndUnpack(
             thread_pool,
             http_client,
             global_cache_directory,
-            url,
-            expected_hash,
-            ini,
-            directory,
+            dep,
+            report,
             build_roots_source,
             fqn,
         );
 
         try pkg.fetchAndAddDependencies(
+            arena,
             thread_pool,
             http_client,
             sub_pkg.root_src_directory,

@@ -244,6 +233,7 @@ pub fn fetchAndAddDependencies(
             dependencies_source,
             build_roots_source,
             sub_prefix,
+            color,
         );
 
         try addAndAdopt(pkg, gpa, sub_pkg);

@@ -253,7 +243,7 @@ pub fn fetchAndAddDependencies(
         });
     }
 
-    if (any_error) return error.InvalidBuildZigIniFile;
+    if (any_error) return error.InvalidBuildManifestFile;
 }
 
 pub fn createFilePkg(

@@ -264,7 +254,7 @@ pub fn createFilePkg(
     contents: []const u8,
 ) !*Package {
     const rand_int = std.crypto.random.int(u64);
-    const tmp_dir_sub_path = "tmp" ++ fs.path.sep_str ++ hex64(rand_int);
+    const tmp_dir_sub_path = "tmp" ++ fs.path.sep_str ++ Manifest.hex64(rand_int);
     {
         var tmp_dir = try cache_directory.handle.makeOpenPath(tmp_dir_sub_path, .{});
         defer tmp_dir.close();

@@ -282,14 +272,73 @@ pub fn createFilePkg(
     return createWithDir(gpa, name, cache_directory, o_dir_sub_path, basename);
 }
 
+const Report = struct {
+    ast: *const std.zig.Ast,
+    directory: Compilation.Directory,
+    color: main.Color,
+    arena: Allocator,
+
+    fn fail(
+        report: Report,
+        tok: std.zig.Ast.TokenIndex,
+        comptime fmt_string: []const u8,
+        fmt_args: anytype,
+    ) error{ PackageFetchFailed, OutOfMemory } {
+        return failWithNotes(report, &.{}, tok, fmt_string, fmt_args);
+    }
+
+    fn failWithNotes(
+        report: Report,
+        notes: []const Compilation.AllErrors.Message,
+        tok: std.zig.Ast.TokenIndex,
+        comptime fmt_string: []const u8,
+        fmt_args: anytype,
+    ) error{ PackageFetchFailed, OutOfMemory } {
+        const ttyconf: std.debug.TTY.Config = switch (report.color) {
+            .auto => std.debug.detectTTYConfig(std.io.getStdErr()),
+            .on => .escape_codes,
+            .off => .no_color,
+        };
+        const file_path = try report.directory.join(report.arena, &.{Manifest.basename});
+        renderErrorMessage(report.ast.*, file_path, ttyconf, .{
+            .tok = tok,
+            .off = 0,
+            .msg = try std.fmt.allocPrint(report.arena, fmt_string, fmt_args),
+        }, notes);
+        return error.PackageFetchFailed;
+    }
+
+    fn renderErrorMessage(
+        ast: std.zig.Ast,
+        file_path: []const u8,
+        ttyconf: std.debug.TTY.Config,
+        msg: Manifest.ErrorMessage,
+        notes: []const Compilation.AllErrors.Message,
+    ) void {
+        const token_starts = ast.tokens.items(.start);
+        const start_loc = ast.tokenLocation(0, msg.tok);
+        Compilation.AllErrors.Message.renderToStdErr(.{ .src = .{
+            .msg = msg.msg,
+            .src_path = file_path,
+            .line = @intCast(u32, start_loc.line),
+            .column = @intCast(u32, start_loc.column),
+            .span = .{
+                .start = token_starts[msg.tok],
+                .end = @intCast(u32, token_starts[msg.tok] + ast.tokenSlice(msg.tok).len),
+                .main = token_starts[msg.tok] + msg.off,
+            },
+            .source_line = ast.source[start_loc.line_start..start_loc.line_end],
+            .notes = notes,
+        } }, ttyconf);
+    }
+};
+
 fn fetchAndUnpack(
     thread_pool: *ThreadPool,
     http_client: *std.http.Client,
     global_cache_directory: Compilation.Directory,
-    url: []const u8,
-    expected_hash: ?[]const u8,
-    ini: std.Ini,
-    comp_directory: Compilation.Directory,
+    dep: Manifest.Dependency,
+    report: Report,
     build_roots_source: *std.ArrayList(u8),
     fqn: []const u8,
 ) !*Package {

@@ -298,37 +347,8 @@ fn fetchAndUnpack(
 
     // Check if the expected_hash is already present in the global package
     // cache, and thereby avoid both fetching and unpacking.
-    if (expected_hash) |h| cached: {
-        const hex_multihash_len = 2 * multihash_len;
-        if (h.len >= 2) {
-            const their_multihash_func = std.fmt.parseInt(u8, h[0..2], 16) catch |err| {
-                return reportError(
-                    ini,
-                    comp_directory,
-                    h.ptr,
-                    "invalid multihash value: unable to parse hash function: {s}",
-                    .{@errorName(err)},
-                );
-            };
-            if (@intToEnum(MultihashFunction, their_multihash_func) != multihash_function) {
-                return reportError(
-                    ini,
-                    comp_directory,
-                    h.ptr,
-                    "unsupported hash function: only sha2-256 is supported",
-                    .{},
-                );
-            }
-        }
-        if (h.len != hex_multihash_len) {
-            return reportError(
-                ini,
-                comp_directory,
-                h.ptr,
-                "wrong hash size. expected: {d}, found: {d}",
-                .{ hex_multihash_len, h.len },
-            );
-        }
+    if (dep.hash) |h| cached: {
+        const hex_multihash_len = 2 * Manifest.multihash_len;
         const hex_digest = h[0..hex_multihash_len];
         const pkg_dir_sub_path = "p" ++ s ++ hex_digest;
         var pkg_dir = global_cache_directory.handle.openDir(pkg_dir_sub_path, .{}) catch |err| switch (err) {

@@ -366,10 +386,10 @@ fn fetchAndUnpack(
         return ptr;
     }
 
-    const uri = try std.Uri.parse(url);
+    const uri = try std.Uri.parse(dep.url);
 
     const rand_int = std.crypto.random.int(u64);
-    const tmp_dir_sub_path = "tmp" ++ s ++ hex64(rand_int);
+    const tmp_dir_sub_path = "tmp" ++ s ++ Manifest.hex64(rand_int);
 
     const actual_hash = a: {
         var tmp_directory: Compilation.Directory = d: {

@@ -398,13 +418,9 @@ fn fetchAndUnpack(
             // by default, so the same logic applies for buffering the reader as for gzip.
             try unpackTarball(gpa, &req, tmp_directory.handle, std.compress.xz);
         } else {
-            return reportError(
-                ini,
-                comp_directory,
-                uri.path.ptr,
-                "unknown file extension for path '{s}'",
-                .{uri.path},
-            );
+            return report.fail(dep.url_tok, "unknown file extension for path '{s}'", .{
+                uri.path,
+            });
         }
 
         // TODO: delete files not included in the package prior to computing the package hash.

@@ -415,28 +431,21 @@ fn fetchAndUnpack(
         break :a try computePackageHash(thread_pool, .{ .dir = tmp_directory.handle });
     };
 
-    const pkg_dir_sub_path = "p" ++ s ++ hexDigest(actual_hash);
+    const pkg_dir_sub_path = "p" ++ s ++ Manifest.hexDigest(actual_hash);
     try renameTmpIntoCache(global_cache_directory.handle, tmp_dir_sub_path, pkg_dir_sub_path);
 
-    const actual_hex = hexDigest(actual_hash);
-    if (expected_hash) |h| {
+    const actual_hex = Manifest.hexDigest(actual_hash);
+    if (dep.hash) |h| {
         if (!mem.eql(u8, h, &actual_hex)) {
-            return reportError(
-                ini,
-                comp_directory,
-                h.ptr,
-                "hash mismatch: expected: {s}, found: {s}",
-                .{ h, actual_hex },
-            );
+            return report.fail(dep.hash_tok, "hash mismatch: expected: {s}, found: {s}", .{
+                h, actual_hex,
+            });
         }
     } else {
-        return reportError(
-            ini,
-            comp_directory,
-            url.ptr,
-            "url field is missing corresponding hash field: hash={s}",
-            .{&actual_hex},
-        );
+        const notes: [1]Compilation.AllErrors.Message = .{.{ .plain = .{
+            .msg = try std.fmt.allocPrint(report.arena, "expected .hash = \"{s}\",", .{&actual_hex}),
+        } }};
+        return report.failWithNotes(&notes, dep.url_tok, "url field is missing corresponding hash field", .{});
     }
 
     const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path});

@@ -471,29 +480,9 @@ fn unpackTarball(
     });
 }
 
-fn reportError(
-    ini: std.Ini,
-    comp_directory: Compilation.Directory,
-    src_ptr: [*]const u8,
-    comptime fmt_string: []const u8,
-    fmt_args: anytype,
-) error{PackageFetchFailed} {
-    const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(src_ptr) - @ptrToInt(ini.bytes.ptr));
-    if (comp_directory.path) |p| {
-        std.debug.print("{s}{c}{s}:{d}:{d}: error: " ++ fmt_string ++ "\n", .{
-            p, fs.path.sep, ini_basename, loc.line + 1, loc.column + 1,
-        } ++ fmt_args);
-    } else {
-        std.debug.print("{s}:{d}:{d}: error: " ++ fmt_string ++ "\n", .{
-            ini_basename, loc.line + 1, loc.column + 1,
-        } ++ fmt_args);
-    }
-    return error.PackageFetchFailed;
-}
-
 const HashedFile = struct {
     path: []const u8,
-    hash: [Hash.digest_length]u8,
+    hash: [Manifest.Hash.digest_length]u8,
     failure: Error!void,
 
     const Error = fs.File.OpenError || fs.File.ReadError || fs.File.StatError;

@@ -507,7 +496,7 @@ const HashedFile = struct {
 fn computePackageHash(
     thread_pool: *ThreadPool,
     pkg_dir: fs.IterableDir,
-) ![Hash.digest_length]u8 {
+) ![Manifest.Hash.digest_length]u8 {
     const gpa = thread_pool.allocator;
 
     // We'll use an arena allocator for the path name strings since they all

@@ -550,7 +539,7 @@ fn computePackageHash(
 
     std.sort.sort(*HashedFile, all_files.items, {}, HashedFile.lessThan);
 
-    var hasher = Hash.init(.{});
+    var hasher = Manifest.Hash.init(.{});
     var any_failures = false;
     for (all_files.items) |hashed_file| {
         hashed_file.failure catch |err| {

@@ -571,7 +560,7 @@ fn workerHashFile(dir: fs.Dir, hashed_file: *HashedFile, wg: *WaitGroup) void {
 fn hashFileFallible(dir: fs.Dir, hashed_file: *HashedFile) HashedFile.Error!void {
     var buf: [8000]u8 = undefined;
     var file = try dir.openFile(hashed_file.path, .{});
-    var hasher = Hash.init(.{});
+    var hasher = Manifest.Hash.init(.{});
     hasher.update(hashed_file.path);
     hasher.update(&.{ 0, @boolToInt(try isExecutable(file)) });
     while (true) {

@@ -595,52 +584,6 @@ fn isExecutable(file: fs.File) !bool {
     }
 }
 
-const hex_charset = "0123456789abcdef";
-
-fn hex64(x: u64) [16]u8 {
-    var result: [16]u8 = undefined;
-    var i: usize = 0;
-    while (i < 8) : (i += 1) {
-        const byte = @truncate(u8, x >> @intCast(u6, 8 * i));
-        result[i * 2 + 0] = hex_charset[byte >> 4];
-        result[i * 2 + 1] = hex_charset[byte & 15];
-    }
-    return result;
-}
-
-test hex64 {
-    const s = "[" ++ hex64(0x12345678_abcdef00) ++ "]";
-    try std.testing.expectEqualStrings("[00efcdab78563412]", s);
-}
-
-const multihash_function: MultihashFunction = switch (Hash) {
-    std.crypto.hash.sha2.Sha256 => .@"sha2-256",
-    else => @compileError("unreachable"),
-};
-comptime {
-    // We avoid unnecessary uleb128 code in hexDigest by asserting here the
-    // values are small enough to be contained in the one-byte encoding.
-    assert(@enumToInt(multihash_function) < 127);
-    assert(Hash.digest_length < 127);
-}
-const multihash_len = 1 + 1 + Hash.digest_length;
-
-fn hexDigest(digest: [Hash.digest_length]u8) [multihash_len * 2]u8 {
-    var result: [multihash_len * 2]u8 = undefined;
-
-    result[0] = hex_charset[@enumToInt(multihash_function) >> 4];
-    result[1] = hex_charset[@enumToInt(multihash_function) & 15];
-
-    result[2] = hex_charset[Hash.digest_length >> 4];
-    result[3] = hex_charset[Hash.digest_length & 15];
-
-    for (digest) |byte, i| {
-        result[4 + i * 2] = hex_charset[byte >> 4];
-        result[5 + i * 2] = hex_charset[byte & 15];
-    }
-    return result;
-}
-
 fn renameTmpIntoCache(
     cache_dir: fs.Dir,
     tmp_dir_sub_path: []const u8,

@@ -669,21 +612,3 @@ fn renameTmpIntoCache(
         break;
     }
 }
-
-const MultihashFunction = enum(u16) {
-    identity = 0x00,
-    sha1 = 0x11,
-    @"sha2-256" = 0x12,
-    @"sha2-512" = 0x13,
-    @"sha3-512" = 0x14,
-    @"sha3-384" = 0x15,
-    @"sha3-256" = 0x16,
-    @"sha3-224" = 0x17,
-    @"sha2-384" = 0x20,
-    @"sha2-256-trunc254-padded" = 0x1012,
-    @"sha2-224" = 0x1013,
-    @"sha2-512-224" = 0x1014,
-    @"sha2-512-256" = 0x1015,
-    @"blake2b-256" = 0xb220,
-    _,
-};
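The net effect of the Package.zig changes: a manifest diagnostic now carries an Ast token index rather than a raw pointer into an ini buffer, so Report can route it through Compilation.AllErrors.Message.renderToStdErr with a file path, line and column, the offending source line, and optional notes, the same pipeline used for ordinary compile errors.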
src/main.zig (17 lines changed)

@@ -3915,6 +3915,7 @@ pub const usage_build =
 ;
 
 pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
+    var color: Color = .auto;
     var prominent_compile_errors: bool = false;
 
     // We want to release all the locks before executing the child process, so we make a nice

@@ -4117,6 +4118,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
             // Here we borrow main package's table and will replace it with a fresh
             // one after this process completes.
             main_pkg.fetchAndAddDependencies(
+                arena,
                 &thread_pool,
                 &http_client,
                 build_directory,

@@ -4125,6 +4127,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
                 &dependencies_source,
                 &build_roots_source,
                 "",
+                color,
             ) catch |err| switch (err) {
                 error.PackageFetchFailed => process.exit(1),
                 else => |e| return e,

@@ -4366,7 +4369,7 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
         };
         defer tree.deinit(gpa);
 
-        try printErrsMsgToStdErr(gpa, arena, tree.errors, tree, "<stdin>", color);
+        try printErrsMsgToStdErr(gpa, arena, tree, "<stdin>", color);
         var has_ast_error = false;
         if (check_ast_flag) {
             const Module = @import("Module.zig");

@@ -4569,7 +4572,7 @@ fn fmtPathFile(
     var tree = try Ast.parse(fmt.gpa, source_code, .zig);
     defer tree.deinit(fmt.gpa);
 
-    try printErrsMsgToStdErr(fmt.gpa, fmt.arena, tree.errors, tree, file_path, fmt.color);
+    try printErrsMsgToStdErr(fmt.gpa, fmt.arena, tree, file_path, fmt.color);
     if (tree.errors.len != 0) {
         fmt.any_error = true;
         return;

@@ -4649,14 +4652,14 @@ fn fmtPathFile(
     }
 }
 
-fn printErrsMsgToStdErr(
+pub fn printErrsMsgToStdErr(
     gpa: mem.Allocator,
     arena: mem.Allocator,
-    parse_errors: []const Ast.Error,
     tree: Ast,
     path: []const u8,
     color: Color,
 ) !void {
+    const parse_errors: []const Ast.Error = tree.errors;
     var i: usize = 0;
     while (i < parse_errors.len) : (i += 1) {
         const parse_error = parse_errors[i];

@@ -5316,7 +5319,7 @@ pub fn cmdAstCheck(
     file.tree_loaded = true;
     defer file.tree.deinit(gpa);
 
-    try printErrsMsgToStdErr(gpa, arena, file.tree.errors, file.tree, file.sub_file_path, color);
+    try printErrsMsgToStdErr(gpa, arena, file.tree, file.sub_file_path, color);
     if (file.tree.errors.len != 0) {
         process.exit(1);
     }

@@ -5442,7 +5445,7 @@ pub fn cmdChangelist(
     file.tree_loaded = true;
     defer file.tree.deinit(gpa);
 
-    try printErrsMsgToStdErr(gpa, arena, file.tree.errors, file.tree, old_source_file, .auto);
+    try printErrsMsgToStdErr(gpa, arena, file.tree, old_source_file, .auto);
     if (file.tree.errors.len != 0) {
         process.exit(1);
     }

@@ -5479,7 +5482,7 @@ pub fn cmdChangelist(
     var new_tree = try Ast.parse(gpa, new_source, .zig);
     defer new_tree.deinit(gpa);
 
-    try printErrsMsgToStdErr(gpa, arena, new_tree.errors, new_tree, new_source_file, .auto);
+    try printErrsMsgToStdErr(gpa, arena, new_tree, new_source_file, .auto);
     if (new_tree.errors.len != 0) {
         process.exit(1);
     }