Merge pull request #22602 from mlugg/incr-embedfile
incremental: handle `@embedFile`
Commit 3767b08039
19 changed files with 468 additions and 209 deletions
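For context, `@embedFile` is the builtin whose results this PR teaches the incremental compiler to track: it loads a file at compile time and yields its bytes as a comptime-known null-terminated array (a `*const [len:0]u8`). A minimal program of the shape used by the new `test/incremental/change_embed_file` case added below; it assumes a `string.txt` sits next to `main.zig`:

const std = @import("std");

// Loaded at compile time. Previously an incremental update could not handle a
// changed string.txt (the old code path ended in
// @panic("TODO: handle embed file incremental update")); the hunks below add
// that support.
const string = @embedFile("string.txt");

pub fn main() !void {
    try std.io.getStdOut().writeAll(string);
}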
@@ -18,7 +18,6 @@ const debug = std.debug;
 const assert = debug.assert;
 const testing = std.testing;
 const mem = std.mem;
-const fmt = std.fmt;
 const ascii = std.ascii;
 const Allocator = mem.Allocator;
 const math = std.math;
@@ -147,6 +146,36 @@ pub fn joinZ(allocator: Allocator, paths: []const []const u8) ![:0]u8 {
     return out[0 .. out.len - 1 :0];
 }
 
+pub fn fmtJoin(paths: []const []const u8) std.fmt.Formatter(formatJoin) {
+    return .{ .data = paths };
+}
+
+fn formatJoin(paths: []const []const u8, comptime fmt: []const u8, options: std.fmt.FormatOptions, w: anytype) !void {
+    _ = fmt;
+    _ = options;
+
+    const first_path_idx = for (paths, 0..) |p, idx| {
+        if (p.len != 0) break idx;
+    } else return;
+
+    try w.writeAll(paths[first_path_idx]); // first component
+    var prev_path = paths[first_path_idx];
+    for (paths[first_path_idx + 1 ..]) |this_path| {
+        if (this_path.len == 0) continue; // skip empty components
+        const prev_sep = isSep(prev_path[prev_path.len - 1]);
+        const this_sep = isSep(this_path[0]);
+        if (!prev_sep and !this_sep) {
+            try w.writeByte(sep);
+        }
+        if (prev_sep and this_sep) {
+            try w.writeAll(this_path[1..]); // skip redundant separator
+        } else {
+            try w.writeAll(this_path);
+        }
+        prev_path = this_path;
+    }
+}
+
 fn testJoinMaybeZUefi(paths: []const []const u8, expected: []const u8, zero: bool) !void {
     const uefiIsSep = struct {
         fn isSep(byte: u8) bool {
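The two hunks above appear to be from `lib/std/fs/path.zig` (an assumption based on the surrounding `joinZ`/`isSep` context): they drop the unused `fmt` import and add a `fmtJoin` formatter that joins path components at format time without allocating an intermediate string; it is used further down by the new `formatDependee` case in `src/Zcu.zig`. A minimal usage sketch, assuming a POSIX target where `sep` is '/':

const std = @import("std");

pub fn main() !void {
    // Prints "a/b/c": empty components are skipped, and a separator is only
    // inserted when neither neighboring component already supplies one,
    // mirroring the formatJoin logic above.
    std.debug.print("{}\n", .{std.fs.path.fmtJoin(&.{ "a", "b", "c" })});
}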
@@ -154,10 +154,6 @@ win32_resource_work_queue: if (dev.env.supports(.win32_resource)) std.fifo.Linea
 /// since the last compilation, as well as scan for `@import` and queue up
 /// additional jobs corresponding to those new files.
 astgen_work_queue: std.fifo.LinearFifo(Zcu.File.Index, .Dynamic),
-/// These jobs are to inspect the file system stat() and if the embedded file has changed
-/// on disk, mark the corresponding Decl outdated and queue up an `analyze_decl`
-/// task for it.
-embed_file_work_queue: std.fifo.LinearFifo(*Zcu.EmbedFile, .Dynamic),
 
 /// The ErrorMsg memory is owned by the `CObject`, using Compilation's general purpose allocator.
 /// This data is accessed by multiple threads and is protected by `mutex`.
@@ -1465,7 +1461,6 @@ pub fn create(gpa: Allocator, arena: Allocator, options: CreateOptions) !*Compil
         .c_object_work_queue = std.fifo.LinearFifo(*CObject, .Dynamic).init(gpa),
         .win32_resource_work_queue = if (dev.env.supports(.win32_resource)) std.fifo.LinearFifo(*Win32Resource, .Dynamic).init(gpa) else .{},
         .astgen_work_queue = std.fifo.LinearFifo(Zcu.File.Index, .Dynamic).init(gpa),
-        .embed_file_work_queue = std.fifo.LinearFifo(*Zcu.EmbedFile, .Dynamic).init(gpa),
         .c_source_files = options.c_source_files,
         .rc_source_files = options.rc_source_files,
         .cache_parent = cache,
@@ -1920,7 +1915,6 @@ pub fn destroy(comp: *Compilation) void {
     comp.c_object_work_queue.deinit();
     comp.win32_resource_work_queue.deinit();
     comp.astgen_work_queue.deinit();
-    comp.embed_file_work_queue.deinit();
 
     comp.windows_libs.deinit(gpa);
 
@@ -2235,11 +2229,6 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void {
            }
        }

-        // Put a work item in for checking if any files used with `@embedFile` changed.
-        try comp.embed_file_work_queue.ensureUnusedCapacity(zcu.embed_table.count());
-        for (zcu.embed_table.values()) |embed_file| {
-            comp.embed_file_work_queue.writeItemAssumeCapacity(embed_file);
-        }
        if (comp.file_system_inputs) |fsi| {
            const ip = &zcu.intern_pool;
            for (zcu.embed_table.values()) |embed_file| {
@@ -3223,9 +3212,6 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
                try addZirErrorMessages(&bundle, file);
            }
        }
-        for (zcu.failed_embed_files.values()) |error_msg| {
-            try addModuleErrorMsg(zcu, &bundle, error_msg.*);
-        }
        var sorted_failed_analysis: std.AutoArrayHashMapUnmanaged(InternPool.AnalUnit, *Zcu.ErrorMsg).DataList.Slice = s: {
            const SortOrder = struct {
                zcu: *Zcu,
@@ -3804,9 +3790,10 @@ fn performAllTheWorkInner(
            }
        }

-        while (comp.embed_file_work_queue.readItem()) |embed_file| {
-            comp.thread_pool.spawnWg(&astgen_wait_group, workerCheckEmbedFile, .{
-                comp, embed_file,
+        for (0.., zcu.embed_table.values()) |ef_index_usize, ef| {
+            const ef_index: Zcu.EmbedFile.Index = @enumFromInt(ef_index_usize);
+            comp.thread_pool.spawnWgId(&astgen_wait_group, workerCheckEmbedFile, .{
+                comp, ef_index, ef,
            });
        }
    }
@@ -4369,33 +4356,33 @@ fn workerUpdateBuiltinZigFile(
     };
 }
 
-fn workerCheckEmbedFile(comp: *Compilation, embed_file: *Zcu.EmbedFile) void {
-    comp.detectEmbedFileUpdate(embed_file) catch |err| {
-        comp.reportRetryableEmbedFileError(embed_file, err) catch |oom| switch (oom) {
-            // Swallowing this error is OK because it's implied to be OOM when
-            // there is a missing `failed_embed_files` error message.
-            error.OutOfMemory => {},
-        };
-        return;
+fn workerCheckEmbedFile(tid: usize, comp: *Compilation, ef_index: Zcu.EmbedFile.Index, ef: *Zcu.EmbedFile) void {
+    comp.detectEmbedFileUpdate(@enumFromInt(tid), ef_index, ef) catch |err| switch (err) {
+        error.OutOfMemory => {
+            comp.mutex.lock();
+            defer comp.mutex.unlock();
+            comp.setAllocFailure();
+        },
     };
 }
 
-fn detectEmbedFileUpdate(comp: *Compilation, embed_file: *Zcu.EmbedFile) !void {
+fn detectEmbedFileUpdate(comp: *Compilation, tid: Zcu.PerThread.Id, ef_index: Zcu.EmbedFile.Index, ef: *Zcu.EmbedFile) !void {
     const zcu = comp.zcu.?;
-    const ip = &zcu.intern_pool;
-    var file = try embed_file.owner.root.openFile(embed_file.sub_file_path.toSlice(ip), .{});
-    defer file.close();
+    const pt: Zcu.PerThread = .activate(zcu, tid);
+    defer pt.deactivate();
 
-    const stat = try file.stat();
+    const old_val = ef.val;
+    const old_err = ef.err;
 
-    const unchanged_metadata =
-        stat.size == embed_file.stat.size and
-        stat.mtime == embed_file.stat.mtime and
-        stat.inode == embed_file.stat.inode;
+    try pt.updateEmbedFile(ef, null);
 
-    if (unchanged_metadata) return;
+    if (ef.val != .none and ef.val == old_val) return; // success, value unchanged
+    if (ef.val == .none and old_val == .none and ef.err == old_err) return; // failure, error unchanged
 
-    @panic("TODO: handle embed file incremental update");
+    comp.mutex.lock();
+    defer comp.mutex.unlock();
+
+    try zcu.markDependeeOutdated(.not_marked_po, .{ .embed_file = ef_index });
 }
 
 pub fn obtainCObjectCacheManifest(
@@ -4797,30 +4784,6 @@ fn reportRetryableWin32ResourceError(
     }
 }
 
-fn reportRetryableEmbedFileError(
-    comp: *Compilation,
-    embed_file: *Zcu.EmbedFile,
-    err: anyerror,
-) error{OutOfMemory}!void {
-    const zcu = comp.zcu.?;
-    const gpa = zcu.gpa;
-    const src_loc = embed_file.src_loc;
-    const ip = &zcu.intern_pool;
-    const err_msg = try Zcu.ErrorMsg.create(gpa, src_loc, "unable to load '{}/{s}': {s}", .{
-        embed_file.owner.root,
-        embed_file.sub_file_path.toSlice(ip),
-        @errorName(err),
-    });
-
-    errdefer err_msg.destroy(gpa);
-
-    {
-        comp.mutex.lock();
-        defer comp.mutex.unlock();
-        try zcu.failed_embed_files.putNoClobber(gpa, embed_file, err_msg);
-    }
-}
-
 fn updateCObject(comp: *Compilation, c_object: *CObject, c_obj_prog_node: std.Progress.Node) !void {
     if (comp.config.c_frontend == .aro) {
         return comp.failCObj(c_object, "aro does not support compiling C objects yet", .{});
@@ -42,6 +42,10 @@ nav_ty_deps: std.AutoArrayHashMapUnmanaged(Nav.Index, DepEntry.Index),
 /// * a container type requiring resolution (invalidated when the type must be recreated at a new index)
 /// Value is index into `dep_entries` of the first dependency on this interned value.
 interned_deps: std.AutoArrayHashMapUnmanaged(Index, DepEntry.Index),
+/// Dependencies on an embedded file.
+/// Introduced by `@embedFile`; invalidated when the file changes.
+/// Value is index into `dep_entries` of the first dependency on this `Zcu.EmbedFile`.
+embed_file_deps: std.AutoArrayHashMapUnmanaged(Zcu.EmbedFile.Index, DepEntry.Index),
 /// Dependencies on the full set of names in a ZIR namespace.
 /// Key refers to a `struct_decl`, `union_decl`, etc.
 /// Value is index into `dep_entries` of the first dependency on this namespace.
@@ -90,6 +94,7 @@ pub const empty: InternPool = .{
     .nav_val_deps = .empty,
     .nav_ty_deps = .empty,
     .interned_deps = .empty,
+    .embed_file_deps = .empty,
     .namespace_deps = .empty,
     .namespace_name_deps = .empty,
     .memoized_state_main_deps = .none,
@@ -824,6 +829,7 @@ pub const Dependee = union(enum) {
     nav_val: Nav.Index,
     nav_ty: Nav.Index,
     interned: Index,
+    embed_file: Zcu.EmbedFile.Index,
     namespace: TrackedInst.Index,
     namespace_name: NamespaceNameKey,
     memoized_state: MemoizedStateStage,
@@ -875,6 +881,7 @@ pub fn dependencyIterator(ip: *const InternPool, dependee: Dependee) DependencyI
         .nav_val => |x| ip.nav_val_deps.get(x),
         .nav_ty => |x| ip.nav_ty_deps.get(x),
         .interned => |x| ip.interned_deps.get(x),
+        .embed_file => |x| ip.embed_file_deps.get(x),
         .namespace => |x| ip.namespace_deps.get(x),
         .namespace_name => |x| ip.namespace_name_deps.get(x),
         .memoized_state => |stage| switch (stage) {
@@ -945,6 +952,7 @@ pub fn addDependency(ip: *InternPool, gpa: Allocator, depender: AnalUnit, depend
         .nav_val => ip.nav_val_deps,
         .nav_ty => ip.nav_ty_deps,
         .interned => ip.interned_deps,
+        .embed_file => ip.embed_file_deps,
         .namespace => ip.namespace_deps,
         .namespace_name => ip.namespace_name_deps,
         .memoized_state => comptime unreachable,
@@ -6682,6 +6690,7 @@ pub fn deinit(ip: *InternPool, gpa: Allocator) void {
     ip.nav_val_deps.deinit(gpa);
     ip.nav_ty_deps.deinit(gpa);
     ip.interned_deps.deinit(gpa);
+    ip.embed_file_deps.deinit(gpa);
     ip.namespace_deps.deinit(gpa);
     ip.namespace_name_deps.deinit(gpa);
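The hunks above (apparently `src/InternPool.zig`) register `embed_file` as a new kind of dependee: `embed_file_deps` maps a `Zcu.EmbedFile.Index` to the index in `dep_entries` of the first dependency on that file, just like the existing `interned_deps` and `namespace_deps` maps. A toy, self-contained illustration of that layout, where the map value is the head of a linked list threaded through a flat entry array (illustration only, not the actual InternPool code):

const std = @import("std");

const DepEntry = struct {
    depender: u32, // stand-in for an AnalUnit
    next: ?usize, // index of the next entry for the same dependee, if any
};

pub fn main() void {
    // Two analysis units (7 and 9) depend on embedded file 0; the dependee map
    // would store 1, the index of the most recently added entry.
    const dep_entries = [_]DepEntry{
        .{ .depender = 7, .next = null },
        .{ .depender = 9, .next = 0 },
    };
    const first_dep_on_embed_file_0: ?usize = 1;

    // Marking the embedded file outdated means walking this list and queueing
    // each depender for re-analysis.
    var it = first_dep_on_embed_file_0;
    while (it) |idx| : (it = dep_entries[idx].next) {
        std.debug.print("outdated: analysis unit {d}\n", .{dep_entries[idx].depender});
    }
}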
src/Sema.zig (20 lines changed)
@@ -13964,6 +13964,8 @@ fn zirEmbedFile(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
     defer tracy.end();
 
     const pt = sema.pt;
+    const zcu = pt.zcu;
+
     const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].un_node;
     const operand_src = block.builtinCallArgSrc(inst_data.src_node, 0);
     const name = try sema.resolveConstString(block, operand_src, inst_data.operand, .{ .simple = .operand_embedFile });
@@ -13972,18 +13974,24 @@ fn zirEmbedFile(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
         return sema.fail(block, operand_src, "file path name cannot be empty", .{});
     }
 
-    const val = pt.embedFile(block.getFileScope(pt.zcu), name, operand_src) catch |err| switch (err) {
+    const ef_idx = pt.embedFile(block.getFileScope(zcu), name) catch |err| switch (err) {
         error.ImportOutsideModulePath => {
             return sema.fail(block, operand_src, "embed of file outside package path: '{s}'", .{name});
         },
-        else => {
-            // TODO: these errors are file system errors; make sure an update() will
-            // retry this and not cache the file system error, which may be transient.
-            return sema.fail(block, operand_src, "unable to open '{s}': {s}", .{ name, @errorName(err) });
+        error.CurrentWorkingDirectoryUnlinked => {
+            // TODO: this should be some kind of retryable failure, in case the cwd is put back
+            return sema.fail(block, operand_src, "unable to resolve '{s}': working directory has been unlinked", .{name});
         },
+        error.OutOfMemory => |e| return e,
     };
+    try sema.declareDependency(.{ .embed_file = ef_idx });
 
-    return Air.internedToRef(val);
+    const result = ef_idx.get(zcu);
+    if (result.val == .none) {
+        return sema.fail(block, operand_src, "unable to open '{s}': {s}", .{ name, @errorName(result.err.?) });
+    }
+
+    return Air.internedToRef(result.val);
 }
 
 fn zirRetErrValueCode(sema: *Sema, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
src/Zcu.zig (33 lines changed)
@@ -143,8 +143,6 @@ compile_log_sources: std.AutoArrayHashMapUnmanaged(AnalUnit, extern struct {
 /// Using a map here for consistency with the other fields here.
 /// The ErrorMsg memory is owned by the `File`, using Module's general purpose allocator.
 failed_files: std.AutoArrayHashMapUnmanaged(*File, ?*ErrorMsg) = .empty,
-/// The ErrorMsg memory is owned by the `EmbedFile`, using Module's general purpose allocator.
-failed_embed_files: std.AutoArrayHashMapUnmanaged(*EmbedFile, *ErrorMsg) = .empty,
 failed_exports: std.AutoArrayHashMapUnmanaged(Export.Index, *ErrorMsg) = .empty,
 /// If analysis failed due to a cimport error, the corresponding Clang errors
 /// are stored here.
@@ -898,13 +896,23 @@ pub const File = struct {
 };
 
 pub const EmbedFile = struct {
-    /// Relative to the owning module's root directory.
-    sub_file_path: InternPool.NullTerminatedString,
     /// Module that this file is a part of, managed externally.
     owner: *Package.Module,
-    stat: Cache.File.Stat,
+    /// Relative to the owning module's root directory.
+    sub_file_path: InternPool.NullTerminatedString,
+
+    /// `.none` means the file was not loaded, so `stat` is undefined.
     val: InternPool.Index,
-    src_loc: LazySrcLoc,
+    /// If this is `null` and `val` is `.none`, the file has never been loaded.
+    err: ?(std.fs.File.OpenError || std.fs.File.StatError || std.fs.File.ReadError || error{UnexpectedEof}),
+    stat: Cache.File.Stat,
+
+    pub const Index = enum(u32) {
+        _,
+        pub fn get(idx: Index, zcu: *const Zcu) *EmbedFile {
+            return zcu.embed_table.values()[@intFromEnum(idx)];
+        }
+    };
 };
 
 /// This struct holds data necessary to construct API-facing `AllErrors.Message`.
@@ -2464,11 +2472,6 @@ pub fn deinit(zcu: *Zcu) void {
     }
     zcu.failed_files.deinit(gpa);
 
-    for (zcu.failed_embed_files.values()) |msg| {
-        msg.destroy(gpa);
-    }
-    zcu.failed_embed_files.deinit(gpa);
-
     for (zcu.failed_exports.values()) |value| {
         value.destroy(gpa);
     }
@@ -3887,6 +3890,14 @@ fn formatDependee(data: struct { dependee: InternPool.Dependee, zcu: *Zcu }, com
            .func => |f| return writer.print("ies('{}')", .{ip.getNav(f.owner_nav).fqn.fmt(ip)}),
            else => unreachable,
        },
+        .embed_file => |ef_idx| {
+            const ef = ef_idx.get(zcu);
+            return writer.print("embed_file('{s}')", .{std.fs.path.fmtJoin(&.{
+                ef.owner.root.root_dir.path orelse "",
+                ef.owner.root.sub_path,
+                ef.sub_file_path.toSlice(ip),
+            })});
+        },
        .namespace => |ti| {
            const info = ti.resolveFull(ip) orelse {
                return writer.writeAll("namespace(<lost>)");
@@ -2117,32 +2117,32 @@ pub fn embedFile(
     pt: Zcu.PerThread,
     cur_file: *Zcu.File,
     import_string: []const u8,
-    src_loc: Zcu.LazySrcLoc,
-) !InternPool.Index {
+) error{
+    OutOfMemory,
+    ImportOutsideModulePath,
+    CurrentWorkingDirectoryUnlinked,
+}!Zcu.EmbedFile.Index {
     const zcu = pt.zcu;
     const gpa = zcu.gpa;
 
-    if (cur_file.mod.deps.get(import_string)) |pkg| {
+    if (cur_file.mod.deps.get(import_string)) |mod| {
         const resolved_path = try std.fs.path.resolve(gpa, &.{
-            pkg.root.root_dir.path orelse ".",
-            pkg.root.sub_path,
-            pkg.root_src_path,
+            mod.root.root_dir.path orelse ".",
+            mod.root.sub_path,
+            mod.root_src_path,
         });
-        var keep_resolved_path = false;
-        defer if (!keep_resolved_path) gpa.free(resolved_path);
+        errdefer gpa.free(resolved_path);
 
         const gop = try zcu.embed_table.getOrPut(gpa, resolved_path);
-        errdefer {
-            assert(std.mem.eql(u8, zcu.embed_table.pop().key, resolved_path));
-            keep_resolved_path = false;
+        errdefer assert(std.mem.eql(u8, zcu.embed_table.pop().key, resolved_path));
+
+        if (gop.found_existing) {
+            gpa.free(resolved_path); // we're not using this key
+            return @enumFromInt(gop.index);
         }
-        if (gop.found_existing) return gop.value_ptr.*.val;
-        keep_resolved_path = true;
 
-        const sub_file_path = try gpa.dupe(u8, pkg.root_src_path);
-        errdefer gpa.free(sub_file_path);
-
-        return pt.newEmbedFile(pkg, sub_file_path, resolved_path, gop.value_ptr, src_loc);
+        gop.value_ptr.* = try pt.newEmbedFile(mod, mod.root_src_path, resolved_path);
+        return @enumFromInt(gop.index);
     }
 
     // The resolved path is used as the key in the table, to detect if a file
@@ -2154,17 +2154,15 @@ pub fn embedFile(
         "..",
         import_string,
     });
-    var keep_resolved_path = false;
-    defer if (!keep_resolved_path) gpa.free(resolved_path);
+    errdefer gpa.free(resolved_path);
 
     const gop = try zcu.embed_table.getOrPut(gpa, resolved_path);
-    errdefer {
-        assert(std.mem.eql(u8, zcu.embed_table.pop().key, resolved_path));
-        keep_resolved_path = false;
+    errdefer assert(std.mem.eql(u8, zcu.embed_table.pop().key, resolved_path));
+
+    if (gop.found_existing) {
+        gpa.free(resolved_path); // we're not using this key
+        return @enumFromInt(gop.index);
     }
-    if (gop.found_existing) return gop.value_ptr.*.val;
-    keep_resolved_path = true;
 
     const resolved_root_path = try std.fs.path.resolve(gpa, &.{
         cur_file.mod.root.root_dir.path orelse ".",
@@ -2172,101 +2170,156 @@ pub fn embedFile(
     });
     defer gpa.free(resolved_root_path);
 
-    const sub_file_path = p: {
-        const relative = try std.fs.path.relative(gpa, resolved_root_path, resolved_path);
-        errdefer gpa.free(relative);
-
-        if (!isUpDir(relative) and !std.fs.path.isAbsolute(relative)) {
-            break :p relative;
-        }
-        return error.ImportOutsideModulePath;
+    const sub_file_path = std.fs.path.relative(gpa, resolved_root_path, resolved_path) catch |err| switch (err) {
+        error.Unexpected => unreachable,
+        else => |e| return e,
     };
     defer gpa.free(sub_file_path);
 
-    return pt.newEmbedFile(cur_file.mod, sub_file_path, resolved_path, gop.value_ptr, src_loc);
+    if (isUpDir(sub_file_path) or std.fs.path.isAbsolute(sub_file_path)) {
+        return error.ImportOutsideModulePath;
+    }
+
+    gop.value_ptr.* = try pt.newEmbedFile(cur_file.mod, sub_file_path, resolved_path);
+    return @enumFromInt(gop.index);
 }
 
-/// https://github.com/ziglang/zig/issues/14307
-fn newEmbedFile(
+pub fn updateEmbedFile(
     pt: Zcu.PerThread,
-    pkg: *Module,
-    sub_file_path: []const u8,
-    resolved_path: []const u8,
-    result: **Zcu.EmbedFile,
-    src_loc: Zcu.LazySrcLoc,
-) !InternPool.Index {
+    ef: *Zcu.EmbedFile,
+    /// If not `null`, the interned file data is stored here, if it was loaded.
+    /// `newEmbedFile` uses this to add the file to the `whole` cache manifest.
+    ip_str_out: ?*?InternPool.String,
+) Allocator.Error!void {
+    pt.updateEmbedFileInner(ef, ip_str_out) catch |err| switch (err) {
+        error.OutOfMemory => |e| return e,
+        else => |e| {
+            ef.val = .none;
+            ef.err = e;
+            ef.stat = undefined;
+        },
+    };
+}
+
+fn updateEmbedFileInner(
+    pt: Zcu.PerThread,
+    ef: *Zcu.EmbedFile,
+    ip_str_out: ?*?InternPool.String,
+) !void {
+    const tid = pt.tid;
     const zcu = pt.zcu;
     const gpa = zcu.gpa;
     const ip = &zcu.intern_pool;
 
-    const new_file = try gpa.create(Zcu.EmbedFile);
-    errdefer gpa.destroy(new_file);
-
-    var file = try pkg.root.openFile(sub_file_path, .{});
+    var file = try ef.owner.root.openFile(ef.sub_file_path.toSlice(ip), .{});
     defer file.close();
 
-    const actual_stat = try file.stat();
-    const stat: Cache.File.Stat = .{
-        .size = actual_stat.size,
-        .inode = actual_stat.inode,
-        .mtime = actual_stat.mtime,
-    };
-    const size = std.math.cast(usize, actual_stat.size) orelse return error.Overflow;
+    const stat: Cache.File.Stat = .fromFs(try file.stat());
 
-    const strings = ip.getLocal(pt.tid).getMutableStrings(gpa);
-    const bytes = try strings.addManyAsSlice(try std.math.add(usize, size, 1));
-    const actual_read = try file.readAll(bytes[0][0..size]);
-    if (actual_read != size) return error.UnexpectedEndOfFile;
-    bytes[0][size] = 0;
-
-    const comp = zcu.comp;
-    switch (comp.cache_use) {
-        .whole => |whole| if (whole.cache_manifest) |man| {
-            const copied_resolved_path = try gpa.dupe(u8, resolved_path);
-            errdefer gpa.free(copied_resolved_path);
-            whole.cache_manifest_mutex.lock();
-            defer whole.cache_manifest_mutex.unlock();
-            try man.addFilePostContents(copied_resolved_path, bytes[0][0..size], stat);
-        },
-        .incremental => {},
+    if (ef.val != .none) {
+        const old_stat = ef.stat;
+        const unchanged_metadata =
+            stat.size == old_stat.size and
+            stat.mtime == old_stat.mtime and
+            stat.inode == old_stat.inode;
+        if (unchanged_metadata) return;
     }
 
-    const array_ty = try pt.intern(.{ .array_type = .{
+    const size = std.math.cast(usize, stat.size) orelse return error.FileTooBig;
+    const size_plus_one = std.math.add(usize, size, 1) catch return error.FileTooBig;
+
+    // The loaded bytes of the file, including a sentinel 0 byte.
+    const ip_str: InternPool.String = str: {
+        const strings = ip.getLocal(tid).getMutableStrings(gpa);
+        const old_len = strings.mutate.len;
+        errdefer strings.shrinkRetainingCapacity(old_len);
+        const bytes = (try strings.addManyAsSlice(size_plus_one))[0];
+        const actual_read = try file.readAll(bytes[0..size]);
+        if (actual_read != size) return error.UnexpectedEof;
+        bytes[size] = 0;
+        break :str try ip.getOrPutTrailingString(gpa, tid, @intCast(bytes.len), .maybe_embedded_nulls);
+    };
+    if (ip_str_out) |p| p.* = ip_str;
+
+    const array_ty = try pt.arrayType(.{
         .len = size,
         .sentinel = .zero_u8,
         .child = .u8_type,
-    } });
-    const array_val = try pt.intern(.{ .aggregate = .{
-        .ty = array_ty,
-        .storage = .{ .bytes = try ip.getOrPutTrailingString(gpa, pt.tid, @intCast(bytes[0].len), .maybe_embedded_nulls) },
-    } });
+    });
+    const ptr_ty = try pt.singleConstPtrType(array_ty);
 
-    const ptr_ty = (try pt.ptrType(.{
-        .child = array_ty,
-        .flags = .{
-            .alignment = .none,
-            .is_const = true,
-            .address_space = .generic,
-        },
-    })).toIntern();
+    const array_val = try pt.intern(.{ .aggregate = .{
+        .ty = array_ty.toIntern(),
+        .storage = .{ .bytes = ip_str },
+    } });
     const ptr_val = try pt.intern(.{ .ptr = .{
-        .ty = ptr_ty,
+        .ty = ptr_ty.toIntern(),
         .base_addr = .{ .uav = .{
             .val = array_val,
-            .orig_ty = ptr_ty,
+            .orig_ty = ptr_ty.toIntern(),
         } },
         .byte_offset = 0,
     } });
 
-    result.* = new_file;
-    new_file.* = .{
-        .sub_file_path = try ip.getOrPutString(gpa, pt.tid, sub_file_path, .no_embedded_nulls),
-        .owner = pkg,
-        .stat = stat,
-        .val = ptr_val,
-        .src_loc = src_loc,
-    };
-    return ptr_val;
+    ef.val = ptr_val;
+    ef.err = null;
+    ef.stat = stat;
+}
+
+fn newEmbedFile(
+    pt: Zcu.PerThread,
+    mod: *Module,
+    /// The path of the file to embed relative to the root of `mod`.
+    sub_file_path: []const u8,
+    /// The resolved path of the file to embed.
+    resolved_path: []const u8,
+) !*Zcu.EmbedFile {
+    const zcu = pt.zcu;
+    const comp = zcu.comp;
+    const gpa = zcu.gpa;
+    const ip = &zcu.intern_pool;
+
+    if (comp.file_system_inputs) |fsi|
+        try comp.appendFileSystemInput(fsi, mod.root, sub_file_path);
+
+    const new_file = try gpa.create(Zcu.EmbedFile);
+    errdefer gpa.destroy(new_file);
+
+    new_file.* = .{
+        .owner = mod,
+        .sub_file_path = try ip.getOrPutString(gpa, pt.tid, sub_file_path, .no_embedded_nulls),
+        .val = .none,
+        .err = null,
+        .stat = undefined,
+    };
+
+    var opt_ip_str: ?InternPool.String = null;
+    try pt.updateEmbedFile(new_file, &opt_ip_str);
+
+    // Add the file contents to the `whole` cache manifest if necessary.
+    cache: {
+        const whole = switch (zcu.comp.cache_use) {
+            .whole => |whole| whole,
+            .incremental => break :cache,
+        };
+        const man = whole.cache_manifest orelse break :cache;
+        const ip_str = opt_ip_str orelse break :cache;
+
+        const copied_resolved_path = try gpa.dupe(u8, resolved_path);
+        errdefer gpa.free(copied_resolved_path);
+
+        const array_len = Value.fromInterned(new_file.val).typeOf(zcu).childType(zcu).arrayLen(zcu);
+
+        whole.cache_manifest_mutex.lock();
+        defer whole.cache_manifest_mutex.unlock();
+
+        man.addFilePostContents(copied_resolved_path, ip_str.toSlice(array_len, ip), new_file.stat) catch |err| switch (err) {
+            error.Unexpected => unreachable,
+            else => |e| return e,
+        };
+    }
+
+    return new_file;
 }
 
 pub fn scanNamespace(
@@ -39,7 +39,7 @@ pub fn main() !void {
 }
 const foo = "good morning\n";
 const bar = "good evening\n";
-#expect_error=ignored
+#expect_error=main.zig:3:37: error: use of undeclared identifier 'qux'
 
 #update=add missing declaration
 #file=main.zig
@@ -39,7 +39,8 @@ pub fn main() !void {
 }
 const foo = "good morning\n";
 const bar = "good evening\n";
-#expect_error=ignored
+#expect_error=main.zig:3:44: error: root source file struct 'main' has no member named 'qux'
+#expect_error=main.zig:1:1: note: struct declared here
 
 #update=add missing declaration
 #file=main.zig
test/incremental/change_embed_file (new file, 46 lines)
@@ -0,0 +1,46 @@
+#target=x86_64-linux-selfhosted
+#target=x86_64-linux-cbe
+#target=x86_64-windows-cbe
+#target=wasm32-wasi-selfhosted
+#update=initial version
+#file=main.zig
+const std = @import("std");
+const string = @embedFile("string.txt");
+pub fn main() !void {
+    try std.io.getStdOut().writeAll(string);
+}
+#file=string.txt
+Hello, World!
+#expect_stdout="Hello, World!\n"
+
+#update=change file contents
+#file=string.txt
+Hello again, World!
+#expect_stdout="Hello again, World!\n"
+
+#update=delete file
+#rm_file=string.txt
+#expect_error=main.zig:2:27: error: unable to open 'string.txt': FileNotFound
+
+#update=remove reference to file
+#file=main.zig
+const std = @import("std");
+const string = @embedFile("string.txt");
+pub fn main() !void {
+    try std.io.getStdOut().writeAll("a hardcoded string\n");
+}
+#expect_stdout="a hardcoded string\n"
+
+#update=re-introduce reference to file
+#file=main.zig
+const std = @import("std");
+const string = @embedFile("string.txt");
+pub fn main() !void {
+    try std.io.getStdOut().writeAll(string);
+}
+#expect_error=main.zig:2:27: error: unable to open 'string.txt': FileNotFound
+
+#update=recreate file
+#file=string.txt
+We're back, World!
+#expect_stdout="We're back, World!\n"
@@ -39,7 +39,7 @@ comptime {
     std.debug.assert(@TypeOf(@intFromEnum(Foo.e)) == Tag);
 }
 const std = @import("std");
-#expect_error=ignored
+#expect_error=main.zig:7:5: error: enumeration value '4' too large for type 'u2'
 #update=increase tag size
 #file=main.zig
 const Tag = u3;
@@ -4,19 +4,22 @@
 #target=wasm32-wasi-selfhosted
 #update=initial version with compile error
 #file=main.zig
+pub fn main() void {}
 comptime {
     @compileError("this is an error");
 }
 comptime {
     @compileLog("this is a log");
 }
-#expect_error=ignored
+#expect_error=main.zig:3:5: error: this is an error
 
 #update=remove the compile error
 #file=main.zig
+pub fn main() void {}
 comptime {
     //@compileError("this is an error");
 }
 comptime {
     @compileLog("this is a log");
 }
-#expect_error=ignored
+#expect_error=main.zig:6:5: error: found compile log statement
@@ -26,7 +26,10 @@ comptime {
     const slice = array[3..2];
     _ = slice;
 }
-#expect_error=ignored
+#expect_error=main.zig:5:32: error: end index 6 out of bounds for slice of length 4 +1 (sentinel)
+#expect_error=main.zig:10:28: error: end index 6 out of bounds for array of length 4 +1 (sentinel)
+#expect_error=main.zig:15:28: error: end index 5 out of bounds for array of length 4
+#expect_error=main.zig:20:25: error: start index 3 is larger than end index 2
 
 #update=delete and modify comptime decls
 #file=main.zig
@@ -38,4 +41,4 @@ comptime {
     const y = x[0..runtime_len];
     _ = y;
 }
-#expect_error=ignored
+#expect_error=main.zig:6:16: error: slice of null pointer
@@ -11,7 +11,7 @@ pub fn main() !void {
 pub fn hello() !void {
     try std.io.getStdOut().writeAll("Hello, World!\n");
 }
-#expect_error=ignored
+#expect_error=foo.zig:2:9: error: use of undeclared identifier 'std'
 #update=fix the error
 #file=foo.zig
 const std = @import("std");
@@ -25,7 +25,7 @@ const std = @import("std");
 pub fn hello() !void {
     try std.io.getStdOut().writeAll(hello_str);
 }
-#expect_error=ignored
+#expect_error=foo.zig:3:37: error: use of undeclared identifier 'hello_str'
 #update=fix the new error
 #file=foo.zig
 const std = @import("std");
@@ -24,7 +24,26 @@ export fn f6() void { @compileError("f6"); }
 export fn f7() void { @compileError("f7"); }
 export fn f8() void { @compileError("f8"); }
 export fn f9() void { @compileError("f9"); }
-#expect_error=ignored
+#expect_error=main.zig:2:12: error: c0
+#expect_error=main.zig:3:12: error: c1
+#expect_error=main.zig:4:12: error: c2
+#expect_error=main.zig:5:12: error: c3
+#expect_error=main.zig:6:12: error: c4
+#expect_error=main.zig:7:12: error: c5
+#expect_error=main.zig:8:12: error: c6
+#expect_error=main.zig:9:12: error: c7
+#expect_error=main.zig:10:12: error: c8
+#expect_error=main.zig:11:12: error: c9
+#expect_error=main.zig:12:23: error: f0
+#expect_error=main.zig:13:23: error: f1
+#expect_error=main.zig:14:23: error: f2
+#expect_error=main.zig:15:23: error: f3
+#expect_error=main.zig:16:23: error: f4
+#expect_error=main.zig:17:23: error: f5
+#expect_error=main.zig:18:23: error: f6
+#expect_error=main.zig:19:23: error: f7
+#expect_error=main.zig:20:23: error: f8
+#expect_error=main.zig:21:23: error: f9
 #update=fix all the errors
 #file=main.zig
 pub fn main() !void {}
@@ -23,4 +23,5 @@ pub fn main() !void {
     try std.io.getStdOut().writer().print("{}\n", .{@intFromEnum(MyEnum.foo)});
 }
 const std = @import("std");
-#expect_error=ignored
+#expect_error=main.zig:6:73: error: enum 'main.MyEnum' has no member named 'foo'
+#expect_error=main.zig:1:16: note: enum declared here
@@ -15,7 +15,8 @@ pub fn main() void {
     const u: U = .{ .a = 123 };
     _ = u;
 }
-#expect_error=ignored
+#expect_error=main.zig:6:5: error: no field named 'd' in enum 'main.E'
+#expect_error=main.zig:1:11: note: enum declared here
 #update=remove invalid backing enum
 #file=main.zig
 const U = union {
@@ -11,7 +11,7 @@ pub fn main() !void {}
 #update=introduce parse error
 #file=main.zig
 pub fn main() !void {
-#expect_error=ignored
+#expect_error=main.zig:2:1: error: expected statement, found 'EOF'
 
 #update=fix parse error
 #file=main.zig
@@ -18,7 +18,7 @@ pub fn main() !void {
     try std.io.getStdOut().writeAll(a);
 }
 const a = @compileError("bad a");
-#expect_error=ignored
+#expect_error=main.zig:5:11: error: bad a
 
 #update=remove error reference
 #file=main.zig
@@ -340,19 +340,63 @@ const Eval = struct {
     }
 
     fn checkErrorOutcome(eval: *Eval, update: Case.Update, error_bundle: std.zig.ErrorBundle) !void {
-        switch (update.outcome) {
+        const expected_errors = switch (update.outcome) {
             .unknown => return,
-            .compile_errors => |expected_errors| {
-                for (expected_errors) |expected_error| {
-                    _ = expected_error;
-                    @panic("TODO check if the expected error matches the compile errors");
-                }
-            },
+            .compile_errors => |expected_errors| expected_errors,
             .stdout, .exit_code => {
                 const color: std.zig.Color = .auto;
                 error_bundle.renderToStdErr(color.renderOptions());
                 eval.fatal("update '{s}': unexpected compile errors", .{update.name});
             },
+        };
+
+        var expected_idx: usize = 0;
+
+        for (error_bundle.getMessages()) |err_idx| {
+            if (expected_idx == expected_errors.len) {
+                const color: std.zig.Color = .auto;
+                error_bundle.renderToStdErr(color.renderOptions());
+                eval.fatal("update '{s}': more errors than expected", .{update.name});
+            }
+            eval.checkOneError(update, error_bundle, expected_errors[expected_idx], false, err_idx);
+            expected_idx += 1;
+
+            for (error_bundle.getNotes(err_idx)) |note_idx| {
+                if (expected_idx == expected_errors.len) {
+                    const color: std.zig.Color = .auto;
+                    error_bundle.renderToStdErr(color.renderOptions());
+                    eval.fatal("update '{s}': more error notes than expected", .{update.name});
+                }
+                eval.checkOneError(update, error_bundle, expected_errors[expected_idx], true, note_idx);
+                expected_idx += 1;
+            }
+        }
+    }
+
+    fn checkOneError(
+        eval: *Eval,
+        update: Case.Update,
+        eb: std.zig.ErrorBundle,
+        expected: Case.ExpectedError,
+        is_note: bool,
+        err_idx: std.zig.ErrorBundle.MessageIndex,
+    ) void {
+        const err = eb.getErrorMessage(err_idx);
+        if (err.src_loc == .none) @panic("TODO error message with no source location");
+        if (err.count != 1) @panic("TODO error message with count>1");
+        const msg = eb.nullTerminatedString(err.msg);
+        const src = eb.getSourceLocation(err.src_loc);
+        const filename = eb.nullTerminatedString(src.src_path);
+
+        if (expected.is_note != is_note or
+            !std.mem.eql(u8, expected.filename, filename) or
+            expected.line != src.line + 1 or
+            expected.column != src.column + 1 or
+            !std.mem.eql(u8, expected.msg, msg))
+        {
+            const color: std.zig.Color = .auto;
+            eb.renderToStdErr(color.renderOptions());
+            eval.fatal("update '{s}': compile error did not match expected error", .{update.name});
         }
     }
 
@@ -595,11 +639,11 @@ const Case = struct {
     };
 
     const ExpectedError = struct {
-        file_name: ?[]const u8 = null,
-        line: ?u32 = null,
-        column: ?u32 = null,
-        msg_exact: ?[]const u8 = null,
-        msg_substring: ?[]const u8 = null,
+        is_note: bool,
+        filename: []const u8,
+        line: u32,
+        column: u32,
+        msg: []const u8,
     };
 
     fn parse(arena: Allocator, bytes: []const u8) !Case {
@@ -608,6 +652,7 @@ const Case = struct {
         var targets: std.ArrayListUnmanaged(Target) = .empty;
         var updates: std.ArrayListUnmanaged(Update) = .empty;
         var changes: std.ArrayListUnmanaged(FullContents) = .empty;
+        var deletes: std.ArrayListUnmanaged([]const u8) = .empty;
         var it = std.mem.splitScalar(u8, bytes, '\n');
         var line_n: usize = 1;
         var root_source_file: ?[]const u8 = null;
@@ -647,33 +692,42 @@ const Case = struct {
                if (updates.items.len > 0) {
                    const last_update = &updates.items[updates.items.len - 1];
                    last_update.changes = try changes.toOwnedSlice(arena);
+                    last_update.deletes = try deletes.toOwnedSlice(arena);
                }
                try updates.append(arena, .{
                    .name = val,
                    .outcome = .unknown,
                });
            } else if (std.mem.eql(u8, key, "file")) {
-                if (updates.items.len == 0) fatal("line {d}: expect directive before update", .{line_n});
+                if (updates.items.len == 0) fatal("line {d}: file directive before update", .{line_n});

                if (root_source_file == null)
                    root_source_file = val;

-                const start_index = it.index.?;
-                const src = while (true) : (line_n += 1) {
-                    const old = it;
-                    const next_line = it.next() orelse fatal("line {d}: unexpected EOF", .{line_n});
-                    if (std.mem.startsWith(u8, next_line, "#")) {
-                        const end_index = old.index.?;
-                        const src = bytes[start_index..end_index];
-                        it = old;
-                        break src;
-                    }
-                };
+                // Because Windows is so excellent, we need to convert CRLF to LF, so
+                // can't just slice into the input here. How delightful!
+                var src: std.ArrayListUnmanaged(u8) = .empty;
+                while (true) {
+                    const next_line_raw = it.peek() orelse fatal("line {d}: unexpected EOF", .{line_n});
+                    const next_line = std.mem.trimRight(u8, next_line_raw, "\r");
+                    if (std.mem.startsWith(u8, next_line, "#")) break;
+                    _ = it.next();
+                    line_n += 1;
+
+                    try src.ensureUnusedCapacity(arena, next_line.len + 1);
+                    src.appendSliceAssumeCapacity(next_line);
+                    src.appendAssumeCapacity('\n');
+                }

                try changes.append(arena, .{
                    .name = val,
-                    .bytes = src,
+                    .bytes = src.items,
                });
+            } else if (std.mem.eql(u8, key, "rm_file")) {
+                if (updates.items.len == 0) fatal("line {d}: rm_file directive before update", .{line_n});
+                try deletes.append(arena, val);
            } else if (std.mem.eql(u8, key, "expect_stdout")) {
                if (updates.items.len == 0) fatal("line {d}: expect directive before update", .{line_n});
                const last_update = &updates.items[updates.items.len - 1];
@@ -687,7 +741,24 @@ const Case = struct {
                if (updates.items.len == 0) fatal("line {d}: expect directive before update", .{line_n});
                const last_update = &updates.items[updates.items.len - 1];
                if (last_update.outcome != .unknown) fatal("line {d}: conflicting expect directive", .{line_n});
-                last_update.outcome = .{ .compile_errors = &.{} };
+
+                var errors: std.ArrayListUnmanaged(ExpectedError) = .empty;
+                try errors.append(arena, parseExpectedError(val, line_n));
+                while (true) {
+                    const next_line = it.peek() orelse break;
+                    if (!std.mem.startsWith(u8, next_line, "#")) break;
+                    var new_line_it = std.mem.splitScalar(u8, next_line, '=');
+                    const new_key = new_line_it.first()[1..];
+                    const new_val = std.mem.trimRight(u8, new_line_it.rest(), "\r");
+                    if (new_val.len == 0) break;
+                    if (!std.mem.eql(u8, new_key, "expect_error")) break;
+
+                    _ = it.next();
+                    line_n += 1;
+                    try errors.append(arena, parseExpectedError(new_val, line_n));
+                }
+
+                last_update.outcome = .{ .compile_errors = errors.items };
            } else {
                fatal("line {d}: unrecognized key '{s}'", .{ line_n, key });
            }
@@ -701,6 +772,7 @@ const Case = struct {
        if (changes.items.len > 0) {
            const last_update = &updates.items[updates.items.len - 1];
            last_update.changes = changes.items; // arena so no need for toOwnedSlice
+            last_update.deletes = deletes.items;
        }

        return .{
@@ -736,3 +808,43 @@ fn waitChild(child: *std.process.Child, eval: *Eval) void {
         .Signal, .Stopped, .Unknown => eval.fatal("compiler terminated unexpectedly", .{}),
     }
 }
+
+fn parseExpectedError(str: []const u8, l: usize) Case.ExpectedError {
+    // #expect_error=foo.zig:1:2: error: the error message
+    // #expect_error=foo.zig:1:2: note: and a note
+
+    const fatal = std.process.fatal;
+
+    var it = std.mem.splitScalar(u8, str, ':');
+    const filename = it.first();
+    const line_str = it.next() orelse fatal("line {d}: incomplete error specification", .{l});
+    const column_str = it.next() orelse fatal("line {d}: incomplete error specification", .{l});
+    const error_or_note_str = std.mem.trim(
+        u8,
+        it.next() orelse fatal("line {d}: incomplete error specification", .{l}),
+        " ",
+    );
+    const message = std.mem.trim(u8, it.rest(), " ");
+    if (filename.len == 0) fatal("line {d}: empty filename", .{l});
+    if (message.len == 0) fatal("line {d}: empty error message", .{l});
+    const is_note = if (std.mem.eql(u8, error_or_note_str, "error"))
+        false
+    else if (std.mem.eql(u8, error_or_note_str, "note"))
+        true
+    else
+        fatal("line {d}: expeted 'error' or 'note', found '{s}'", .{ l, error_or_note_str });
+
+    const line = std.fmt.parseInt(u32, line_str, 10) catch
+        fatal("line {d}: invalid line number '{s}'", .{ l, line_str });
+
+    const column = std.fmt.parseInt(u32, column_str, 10) catch
+        fatal("line {d}: invalid column number '{s}'", .{ l, column_str });
+
+    return .{
+        .is_note = is_note,
+        .filename = filename,
+        .line = line,
+        .column = column,
+        .msg = message,
+    };
+}
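The new `parseExpectedError` above makes `#expect_error=` directives strict: each one names a file, a 1-based line and column, whether it is an error or a note, and the exact message, and `checkErrorOutcome` requires them in the same order the compiler reports messages and their notes. A small self-contained sketch of how one directive value splits into those fields, using the same `splitScalar`/`trim` steps (error handling omitted):

const std = @import("std");

pub fn main() void {
    // The value of one directive from test/incremental/change_embed_file.
    const spec = "main.zig:2:27: error: unable to open 'string.txt': FileNotFound";

    var it = std.mem.splitScalar(u8, spec, ':');
    const filename = it.first();
    const line = it.next().?;
    const column = it.next().?;
    const kind = std.mem.trim(u8, it.next().?, " ");
    const message = std.mem.trim(u8, it.rest(), " ");

    std.debug.print("{s} | line {s}, column {s} | {s} | {s}\n", .{
        filename, line, column, kind, message,
    });
    // -> main.zig | line 2, column 27 | error | unable to open 'string.txt': FileNotFound
}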