CLI: finish updating module API usage
Finish the work started in 4c4fb839972f66f55aa44fc0aca5f80b0608c731; the compiler now compiles again. Wire up the dependency tree fetching code in the CLI for `zig build`. Everything is hooked up except that `createDependenciesModule` is not yet implemented.
parent 1c0d6f9c00
commit f708c5fafc
18 changed files with 320 additions and 284 deletions
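For context, the `only_core_functionality` path in src/main.zig below writes a stub dependencies.zig and exposes it to the build runner as `@import("@dependencies")`; the non-stub path is meant to obtain the same module from the not-yet-implemented `createDependenciesModule`. A minimal sketch of that generated file and of how a consumer might read it; the `hasAnyRootDeps` helper is purely illustrative and not part of this commit:

// Contents of the stub dependencies.zig written by the only_core_functionality
// branch (verbatim from the diff below):
//
//     pub const packages = struct {};
//     pub const root_deps: []const struct { []const u8, []const u8 } = &.{};
//
// Illustrative consumer (hypothetical helper, not part of this commit): the
// build runner imports the generated module by its special name.
const deps = @import("@dependencies");

fn hasAnyRootDeps() bool {
    // Each root_deps entry pairs a dependency name from build.zig.zon with a
    // package hash string; the stub above yields an empty slice.
    return deps.root_deps.len != 0;
}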
@@ -46,7 +46,7 @@ pub fn getCwdAlloc(allocator: Allocator) ![]u8 {
 }
 }
-test "getCwdAlloc" {
+test getCwdAlloc {
 if (builtin.os.tag == .wasi) return error.SkipZigTest;
 const cwd = try getCwdAlloc(testing.allocator);
@@ -6,7 +6,7 @@ const Autodoc = @This();
 const Compilation = @import("Compilation.zig");
 const CompilationModule = @import("Module.zig");
 const File = CompilationModule.File;
-const Module = @import("Package.zig");
+const Module = @import("Package.zig").Module;
 const Tokenizer = std.zig.Tokenizer;
 const InternPool = @import("InternPool.zig");
 const Zir = @import("Zir.zig");

@@ -98,9 +98,8 @@ pub fn generate(cm: *CompilationModule, output_dir: std.fs.Dir) !void {
 }
 fn generateZirData(self: *Autodoc, output_dir: std.fs.Dir) !void {
-const root_src_dir = self.comp_module.main_pkg.root_src_directory;
-const root_src_path = self.comp_module.main_pkg.root_src_path;
-const joined_src_path = try root_src_dir.join(self.arena, &.{root_src_path});
+const root_src_path = self.comp_module.main_mod.root_src_path;
+const joined_src_path = try self.comp_module.main_mod.root.joinString(self.arena, root_src_path);
 defer self.arena.free(joined_src_path);
 const abs_root_src_path = try std.fs.path.resolve(self.arena, &.{ ".", joined_src_path });

@@ -295,20 +294,20 @@ fn generateZirData(self: *Autodoc, output_dir: std.fs.Dir) !void {
 }
 const rootName = blk: {
-const rootName = std.fs.path.basename(self.comp_module.main_pkg.root_src_path);
+const rootName = std.fs.path.basename(self.comp_module.main_mod.root_src_path);
 break :blk rootName[0 .. rootName.len - 4];
 };
 const main_type_index = self.types.items.len;
 {
-try self.modules.put(self.arena, self.comp_module.main_pkg, .{
+try self.modules.put(self.arena, self.comp_module.main_mod, .{
 .name = rootName,
 .main = main_type_index,
 .table = .{},
 });
 try self.modules.entries.items(.value)[0].table.put(
 self.arena,
-self.comp_module.main_pkg,
+self.comp_module.main_mod,
 .{
 .name = rootName,
 .value = 0,

@@ -412,7 +411,7 @@ fn generateZirData(self: *Autodoc, output_dir: std.fs.Dir) !void {
 while (files_iterator.next()) |entry| {
 const sub_file_path = entry.key_ptr.*.sub_file_path;
-const file_module = entry.key_ptr.*.pkg;
+const file_module = entry.key_ptr.*.mod;
 const module_name = (self.modules.get(file_module) orelse continue).name;
 const file_path = std.fs.path.dirname(sub_file_path) orelse "";

@@ -986,12 +985,12 @@ fn walkInstruction(
 // importFile cannot error out since all files
 // are already loaded at this point
-if (file.pkg.table.get(path)) |other_module| {
+if (file.mod.deps.get(path)) |other_module| {
 const result = try self.modules.getOrPut(self.arena, other_module);
 // Immediately add this module to the import table of our
 // current module, regardless of wether it's new or not.
-if (self.modules.getPtr(file.pkg)) |current_module| {
+if (self.modules.getPtr(file.mod)) |current_module| {
 // TODO: apparently, in the stdlib a file gets analyzed before
 // its module gets added. I guess we're importing a file
 // that belongs to another module through its file path?

@@ -1025,12 +1024,12 @@ fn walkInstruction(
 // TODO: Add this module as a dependency to the current module
 // TODO: this seems something that could be done in bulk
 // at the beginning or the end, or something.
-const root_src_dir = other_module.root_src_directory;
-const root_src_path = other_module.root_src_path;
-const joined_src_path = try root_src_dir.join(self.arena, &.{root_src_path});
-defer self.arena.free(joined_src_path);
-const abs_root_src_path = try std.fs.path.resolve(self.arena, &.{ ".", joined_src_path });
+const abs_root_src_path = try std.fs.path.resolve(self.arena, &.{
+".",
+other_module.root.root_dir.path orelse ".",
+other_module.root.sub_path,
+other_module.root_src_path,
+});
 defer self.arena.free(abs_root_src_path);
 const new_file = self.comp_module.import_table.get(abs_root_src_path).?;

@@ -5683,7 +5682,7 @@ fn writeFileTableToJson(
 while (it.next()) |entry| {
 try jsw.beginArray();
 try jsw.write(entry.key_ptr.*.sub_file_path);
-try jsw.write(mods.getIndex(entry.key_ptr.*.pkg) orelse 0);
+try jsw.write(mods.getIndex(entry.key_ptr.*.mod) orelse 0);
 try jsw.endArray();
 }
 try jsw.endArray();

@@ -5840,7 +5839,7 @@ fn addGuide(self: *Autodoc, file: *File, guide_path: []const u8, section: *Secti
 file.sub_file_path, "..", guide_path,
 });
-var guide_file = try file.pkg.root_src_directory.handle.openFile(resolved_path, .{});
+var guide_file = try file.mod.root.openFile(resolved_path, .{});
 defer guide_file.close();
 const guide = guide_file.reader().readAllAlloc(self.arena, 1 * 1024 * 1024) catch |err| switch (err) {
@@ -772,12 +772,14 @@ fn addModuleTableToCacheHash(
 switch (hash_type) {
 .path_bytes => {
 hash.addBytes(mod.value.root_src_path);
-hash.addOptionalBytes(mod.value.root_src_directory.path);
+hash.addOptionalBytes(mod.value.root.root_dir.path);
+hash.addBytes(mod.value.root.sub_path);
 },
 .files => |man| {
-const pkg_zig_file = try mod.value.root_src_directory.join(allocator, &[_][]const u8{
+const pkg_zig_file = try mod.value.root.joinString(
+allocator,
 mod.value.root_src_path,
-});
+);
 _ = try man.addFile(pkg_zig_file, null);
 },
 }

@@ -1310,53 +1312,50 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 const std_mod = if (main_mod_is_std)
 main_mod
 else
-try Package.createWithDir(
-gpa,
-options.zig_lib_directory,
-"std",
-"std.zig",
-);
-errdefer if (!main_mod_is_std) std_mod.destroy(gpa);
+try Package.Module.create(arena, .{
+.root = .{
+.root_dir = options.zig_lib_directory,
+.sub_path = "std",
+},
+.root_src_path = "std.zig",
+});
 const root_mod = if (options.is_test) root_mod: {
-const test_pkg = if (options.test_runner_path) |test_runner| test_pkg: {
-const test_dir = std.fs.path.dirname(test_runner);
-const basename = std.fs.path.basename(test_runner);
-const pkg = try Package.create(gpa, test_dir, basename);
+const test_mod = if (options.test_runner_path) |test_runner| test_mod: {
+const pkg = try Package.Module.create(arena, .{
+.root = .{
+.root_dir = Directory.cwd(),
+.sub_path = std.fs.path.dirname(test_runner) orelse "",
+},
+.root_src_path = std.fs.path.basename(test_runner),
+});
 // copy module table from main_mod to root_mod
-pkg.deps = try main_mod.deps.clone(gpa);
-break :test_pkg pkg;
-} else try Package.createWithDir(
-gpa,
-options.zig_lib_directory,
-null,
-"test_runner.zig",
-);
-errdefer test_pkg.destroy(gpa);
+pkg.deps = try main_mod.deps.clone(arena);
+break :test_mod pkg;
+} else try Package.Module.create(arena, .{
+.root = .{
+.root_dir = options.zig_lib_directory,
+},
+.root_src_path = "test_runner.zig",
+});
-break :root_mod test_pkg;
+break :root_mod test_mod;
 } else main_mod;
-errdefer if (options.is_test) root_mod.destroy(gpa);
 const compiler_rt_mod = if (include_compiler_rt and options.output_mode == .Obj) compiler_rt_mod: {
-break :compiler_rt_mod try Package.createWithDir(
-gpa,
-options.zig_lib_directory,
-null,
-"compiler_rt.zig",
-);
+break :compiler_rt_mod try Package.Module.create(arena, .{
+.root = .{
+.root_dir = options.zig_lib_directory,
+},
+.root_src_path = "compiler_rt.zig",
+});
 } else null;
-errdefer if (compiler_rt_mod) |p| p.destroy(gpa);
-try main_mod.add(gpa, "builtin", builtin_mod);
-try main_mod.add(gpa, "root", root_mod);
-try main_mod.add(gpa, "std", std_mod);
-if (compiler_rt_mod) |p| {
-try main_mod.add(gpa, "compiler_rt", p);
-}
+try main_mod.deps.put(gpa, "builtin", builtin_mod);
+try main_mod.deps.put(gpa, "root", root_mod);
+try main_mod.deps.put(gpa, "std", std_mod);
+if (compiler_rt_mod) |m|
+try main_mod.deps.put(gpa, "compiler_rt", m);
 // Pre-open the directory handles for cached ZIR code so that it does not need
 // to redundantly happen for each AstGen operation.
@@ -2004,7 +2003,7 @@ fn restorePrevZigCacheArtifactDirectory(comp: *Compilation, directory: *Director
 // on the handle of zig_cache_artifact_directory.
 if (comp.bin_file.options.module) |module| {
 const builtin_mod = module.main_mod.deps.get("builtin").?;
-module.zig_cache_artifact_directory = builtin_mod.root_src_directory;
+module.zig_cache_artifact_directory = builtin_mod.root.root_dir;
 }
 }

@@ -2418,9 +2417,7 @@ fn addNonIncrementalStuffToCacheManifest(comp: *Compilation, man: *Cache.Manifes
 comptime assert(link_hash_implementation_version == 10);
 if (comp.bin_file.options.module) |mod| {
-const main_zig_file = try mod.main_mod.root_src_directory.join(arena, &[_][]const u8{
-mod.main_mod.root_src_path,
-});
+const main_zig_file = try mod.main_mod.root.joinString(arena, mod.main_mod.root_src_path);
 _ = try man.addFile(main_zig_file, null);
 {
 var seen_table = std.AutoHashMap(*Package.Module, void).init(arena);

@@ -2614,23 +2611,23 @@ fn reportMultiModuleErrors(mod: *Module) !void {
 errdefer for (notes[0..i]) |*n| n.deinit(mod.gpa);
 note.* = switch (ref) {
 .import => |loc| blk: {
-const name = try loc.file_scope.pkg.getName(mod.gpa, mod.*);
-defer mod.gpa.free(name);
+//const name = try loc.file_scope.mod.getName(mod.gpa, mod.*);
+//defer mod.gpa.free(name);
 break :blk try Module.ErrorMsg.init(
 mod.gpa,
 loc,
-"imported from module {s}",
-.{name},
+"imported from module {}",
+.{loc.file_scope.mod.root},
 );
 },
 .root => |pkg| blk: {
-const name = try pkg.getName(mod.gpa, mod.*);
-defer mod.gpa.free(name);
+//const name = try pkg.getName(mod.gpa, mod.*);
+//defer mod.gpa.free(name);
 break :blk try Module.ErrorMsg.init(
 mod.gpa,
 .{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
-"root of module {s}",
-.{name},
+"root of module {}",
+.{pkg.root},
 );
 },
 };

@@ -4212,17 +4209,9 @@ fn reportRetryableAstGenError(
 },
 };
-const err_msg = if (file.pkg.root_src_directory.path) |dir_path|
-try Module.ErrorMsg.create(
-gpa,
-src_loc,
-"unable to load '{s}" ++ std.fs.path.sep_str ++ "{s}': {s}",
-.{ dir_path, file.sub_file_path, @errorName(err) },
-)
-else
-try Module.ErrorMsg.create(gpa, src_loc, "unable to load '{s}': {s}", .{
-file.sub_file_path, @errorName(err),
-});
+const err_msg = try Module.ErrorMsg.create(gpa, src_loc, "unable to load '{}{s}': {s}", .{
+file.mod.root, file.sub_file_path, @errorName(err),
+});
 errdefer err_msg.destroy(gpa);
 {

@@ -4242,17 +4231,10 @@ fn reportRetryableEmbedFileError(
 const src_loc: Module.SrcLoc = mod.declPtr(embed_file.owner_decl).srcLoc(mod);
-const err_msg = if (embed_file.pkg.root_src_directory.path) |dir_path|
-try Module.ErrorMsg.create(
-gpa,
-src_loc,
-"unable to load '{s}" ++ std.fs.path.sep_str ++ "{s}': {s}",
-.{ dir_path, embed_file.sub_file_path, @errorName(err) },
-)
-else
-try Module.ErrorMsg.create(gpa, src_loc, "unable to load '{s}': {s}", .{
-embed_file.sub_file_path, @errorName(err),
-});
+const err_msg = try Module.ErrorMsg.create(gpa, src_loc, "unable to load '{}{s}': {s}", .{
+embed_file.mod.root, embed_file.sub_file_path, @errorName(err),
+});
 errdefer err_msg.destroy(gpa);
 {

@@ -6375,13 +6357,12 @@ fn buildOutputFromZig(
 const tracy_trace = trace(@src());
 defer tracy_trace.end();
-std.debug.assert(output_mode != .Exe);
+assert(output_mode != .Exe);
-var main_mod: Package = .{
-.root_src_directory = comp.zig_lib_directory,
+var main_mod: Package.Module = .{
+.root = .{ .root_dir = comp.zig_lib_directory },
 .root_src_path = src_basename,
 };
-defer main_mod.deinitTable(comp.gpa);
 const root_name = src_basename[0 .. src_basename.len - std.fs.path.extension(src_basename).len];
 const target = comp.getTarget();
 const bin_basename = try std.zig.binNameAlloc(comp.gpa, .{
src/Module.zig | 164

@@ -998,8 +998,8 @@ pub const File = struct {
 pub const Reference = union(enum) {
 /// The file is imported directly (i.e. not as a package) with @import.
 import: SrcLoc,
-/// The file is the root of a package.
-root: *Package,
+/// The file is the root of a module.
+root: *Package.Module,
 };
 pub fn unload(file: *File, gpa: Allocator) void {

@@ -1174,10 +1174,10 @@ pub const File = struct {
 }
 const pkg = switch (ref) {
-.import => |loc| loc.file_scope.pkg,
+.import => |loc| loc.file_scope.mod,
 .root => |pkg| pkg,
 };
-if (pkg != file.pkg) file.multi_pkg = true;
+if (pkg != file.mod) file.multi_pkg = true;
 }
 /// Mark this file and every file referenced by it as multi_pkg and report an

@@ -1219,7 +1219,7 @@ pub const EmbedFile = struct {
 bytes: [:0]const u8,
 stat: Cache.File.Stat,
 /// Package that this file is a part of, managed externally.
-pkg: *Package,
+mod: *Package.Module,
 /// The Decl that was created from the `@embedFile` to own this resource.
 /// This is how zig knows what other Decl objects to invalidate if the file
 /// changes on disk.

@@ -2535,28 +2535,6 @@ pub fn deinit(mod: *Module) void {
 }
 mod.deletion_set.deinit(gpa);
-// The callsite of `Compilation.create` owns the `main_mod`, however
-// Module owns the builtin and std packages that it adds.
-if (mod.main_mod.table.fetchRemove("builtin")) |kv| {
-gpa.free(kv.key);
-kv.value.destroy(gpa);
-}
-if (mod.main_mod.table.fetchRemove("std")) |kv| {
-gpa.free(kv.key);
-// It's possible for main_mod to be std when running 'zig test'! In this case, we must not
-// destroy it, since it would lead to a double-free.
-if (kv.value != mod.main_mod) {
-kv.value.destroy(gpa);
-}
-}
-if (mod.main_mod.table.fetchRemove("root")) |kv| {
-gpa.free(kv.key);
-}
-if (mod.root_mod != mod.main_mod) {
-mod.root_mod.destroy(gpa);
-}
 mod.compile_log_text.deinit(gpa);
 mod.zig_cache_artifact_directory.handle.close();

@@ -2703,18 +2681,19 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
 const gpa = mod.gpa;
 // In any case we need to examine the stat of the file to determine the course of action.
-var source_file = try file.pkg.root_src_directory.handle.openFile(file.sub_file_path, .{});
+var source_file = try file.mod.root.openFile(file.sub_file_path, .{});
 defer source_file.close();
 const stat = try source_file.stat();
-const want_local_cache = file.pkg == mod.main_mod;
+const want_local_cache = file.mod == mod.main_mod;
 const digest = hash: {
 var path_hash: Cache.HashHelper = .{};
 path_hash.addBytes(build_options.version);
 path_hash.add(builtin.zig_backend);
 if (!want_local_cache) {
-path_hash.addOptionalBytes(file.pkg.root_src_directory.path);
+path_hash.addOptionalBytes(file.mod.root.root_dir.path);
+path_hash.addBytes(file.mod.root.sub_path);
 }
 path_hash.addBytes(file.sub_file_path);
 break :hash path_hash.final();

@@ -2939,10 +2918,8 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
 },
 };
 cache_file.writevAll(&iovecs) catch |err| {
-const pkg_path = file.pkg.root_src_directory.path orelse ".";
-const cache_path = cache_directory.path orelse ".";
-log.warn("unable to write cached ZIR code for {s}/{s} to {s}/{s}: {s}", .{
-pkg_path, file.sub_file_path, cache_path, &digest, @errorName(err),
+log.warn("unable to write cached ZIR code for {}{s} to {}{s}: {s}", .{
+file.mod.root, file.sub_file_path, cache_directory, &digest, @errorName(err),
 });
 };

@@ -3147,34 +3124,24 @@ pub fn populateBuiltinFile(mod: *Module) !void {
 defer tracy.end();
 const comp = mod.comp;
-const pkg_and_file = blk: {
+const builtin_mod, const file = blk: {
 comp.mutex.lock();
 defer comp.mutex.unlock();
-const builtin_mod = mod.main_mod.table.get("builtin").?;
+const builtin_mod = mod.main_mod.deps.get("builtin").?;
 const result = try mod.importPkg(builtin_mod);
-break :blk .{
-.file = result.file,
-.pkg = builtin_mod,
-};
+break :blk .{ builtin_mod, result.file };
 };
-const file = pkg_and_file.file;
-const builtin_mod = pkg_and_file.pkg;
 const gpa = mod.gpa;
 file.source = try comp.generateBuiltinZigSource(gpa);
 file.source_loaded = true;
-if (builtin_mod.root_src_directory.handle.statFile(builtin_mod.root_src_path)) |stat| {
+if (builtin_mod.root.statFile(builtin_mod.root_src_path)) |stat| {
 if (stat.size != file.source.len) {
-const full_path = try builtin_mod.root_src_directory.join(gpa, &.{
-builtin_mod.root_src_path,
-});
-defer gpa.free(full_path);
 log.warn(
-"the cached file '{s}' had the wrong size. Expected {d}, found {d}. " ++
+"the cached file '{}{s}' had the wrong size. Expected {d}, found {d}. " ++
 "Overwriting with correct file contents now",
-.{ full_path, file.source.len, stat.size },
+.{ builtin_mod.root, builtin_mod.root_src_path, file.source.len, stat.size },
 );
 try writeBuiltinFile(file, builtin_mod);

@@ -3206,7 +3173,7 @@ pub fn populateBuiltinFile(mod: *Module) !void {
 }
 fn writeBuiltinFile(file: *File, builtin_mod: *Package.Module) !void {
-var af = try builtin_mod.root_src_directory.handle.atomicFile(builtin_mod.root_src_path, .{});
+var af = try builtin_mod.root.atomicFile(builtin_mod.root_src_path, .{});
 defer af.deinit();
 try af.file.writeAll(file.source);
 try af.finish();

@@ -3602,7 +3569,8 @@ pub fn updateEmbedFile(mod: *Module, embed_file: *EmbedFile) SemaError!void {
 }
 }
-pub fn semaPkg(mod: *Module, pkg: *Package) !void {
+/// https://github.com/ziglang/zig/issues/14307
+pub fn semaPkg(mod: *Module, pkg: *Package.Module) !void {
 const file = (try mod.importPkg(pkg)).file;
 return mod.semaFile(file);
 }
@@ -3704,13 +3672,11 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
 return error.AnalysisFail;
 };
-const resolved_path = std.fs.path.resolve(
-gpa,
-if (file.pkg.root_src_directory.path) |pkg_path|
-&[_][]const u8{ pkg_path, file.sub_file_path }
-else
-&[_][]const u8{file.sub_file_path},
-) catch |err| {
+const resolved_path = std.fs.path.resolve(gpa, &.{
+file.mod.root.root_dir.path orelse ".",
+file.mod.root.sub_path,
+file.sub_file_path,
+}) catch |err| {
 try reportRetryableFileError(mod, file, "unable to resolve path: {s}", .{@errorName(err)});
 return error.AnalysisFail;
 };

@@ -3741,8 +3707,8 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
 // TODO: figure out how this works under incremental changes to builtin.zig!
 const builtin_type_target_index: InternPool.Index = blk: {
-const std_mod = mod.main_mod.table.get("std").?;
-if (decl.getFileScope(mod).pkg != std_mod) break :blk .none;
+const std_mod = mod.main_mod.deps.get("std").?;
+if (decl.getFileScope(mod).mod != std_mod) break :blk .none;
 // We're in the std module.
 const std_file = (try mod.importPkg(std_mod)).file;
 const std_decl = mod.declPtr(std_file.root_decl.unwrap().?);

@@ -4035,14 +4001,17 @@ pub const ImportFileResult = struct {
 is_pkg: bool,
 };
-pub fn importPkg(mod: *Module, pkg: *Package) !ImportFileResult {
+/// https://github.com/ziglang/zig/issues/14307
+pub fn importPkg(mod: *Module, pkg: *Package.Module) !ImportFileResult {
 const gpa = mod.gpa;
 // The resolved path is used as the key in the import table, to detect if
 // an import refers to the same as another, despite different relative paths
 // or differently mapped package names.
-const resolved_path = try std.fs.path.resolve(gpa, &[_][]const u8{
-pkg.root_src_directory.path orelse ".", pkg.root_src_path,
+const resolved_path = try std.fs.path.resolve(gpa, &.{
+pkg.root.root_dir.path orelse ".",
+pkg.root.sub_path,
+pkg.root_src_path,
 });
 var keep_resolved_path = false;
 defer if (!keep_resolved_path) gpa.free(resolved_path);

@@ -4076,7 +4045,7 @@ pub fn importPkg(mod: *Module, pkg: *Package) !ImportFileResult {
 .tree = undefined,
 .zir = undefined,
 .status = .never_loaded,
-.pkg = pkg,
+.mod = pkg,
 .root_decl = .none,
 };
 try new_file.addReference(mod.*, .{ .root = pkg });

@@ -4093,15 +4062,15 @@ pub fn importFile(
 import_string: []const u8,
 ) !ImportFileResult {
 if (std.mem.eql(u8, import_string, "std")) {
-return mod.importPkg(mod.main_mod.table.get("std").?);
+return mod.importPkg(mod.main_mod.deps.get("std").?);
 }
 if (std.mem.eql(u8, import_string, "builtin")) {
-return mod.importPkg(mod.main_mod.table.get("builtin").?);
+return mod.importPkg(mod.main_mod.deps.get("builtin").?);
 }
 if (std.mem.eql(u8, import_string, "root")) {
 return mod.importPkg(mod.root_mod);
 }
-if (cur_file.pkg.table.get(import_string)) |pkg| {
+if (cur_file.mod.deps.get(import_string)) |pkg| {
 return mod.importPkg(pkg);
 }
 if (!mem.endsWith(u8, import_string, ".zig")) {

@@ -4112,10 +4081,14 @@ pub fn importFile(
 // The resolved path is used as the key in the import table, to detect if
 // an import refers to the same as another, despite different relative paths
 // or differently mapped package names.
-const cur_pkg_dir_path = cur_file.pkg.root_src_directory.path orelse ".";
-const resolved_path = try std.fs.path.resolve(gpa, &[_][]const u8{
-cur_pkg_dir_path, cur_file.sub_file_path, "..", import_string,
+const resolved_path = try std.fs.path.resolve(gpa, &.{
+cur_file.mod.root.root_dir.path orelse ".",
+cur_file.mod.root.sub_path,
+cur_file.sub_file_path,
+"..",
+import_string,
 });
 var keep_resolved_path = false;
 defer if (!keep_resolved_path) gpa.free(resolved_path);

@@ -4130,7 +4103,10 @@ pub fn importFile(
 const new_file = try gpa.create(File);
 errdefer gpa.destroy(new_file);
-const resolved_root_path = try std.fs.path.resolve(gpa, &[_][]const u8{cur_pkg_dir_path});
+const resolved_root_path = try std.fs.path.resolve(gpa, &.{
+cur_file.mod.root.root_dir.path orelse ".",
+cur_file.mod.root.sub_path,
+});
 defer gpa.free(resolved_root_path);
 const sub_file_path = p: {

@@ -4164,7 +4140,7 @@ pub fn importFile(
 .tree = undefined,
 .zir = undefined,
 .status = .never_loaded,
-.pkg = cur_file.pkg,
+.mod = cur_file.mod,
 .root_decl = .none,
 };
 return ImportFileResult{

@@ -4177,9 +4153,11 @@ pub fn importFile(
 pub fn embedFile(mod: *Module, cur_file: *File, import_string: []const u8) !*EmbedFile {
 const gpa = mod.gpa;
-if (cur_file.pkg.table.get(import_string)) |pkg| {
-const resolved_path = try std.fs.path.resolve(gpa, &[_][]const u8{
-pkg.root_src_directory.path orelse ".", pkg.root_src_path,
+if (cur_file.mod.deps.get(import_string)) |pkg| {
+const resolved_path = try std.fs.path.resolve(gpa, &.{
+pkg.root.root_dir.path orelse ".",
+pkg.root.sub_path,
+pkg.root_src_path,
 });
 var keep_resolved_path = false;
 defer if (!keep_resolved_path) gpa.free(resolved_path);

@@ -4196,10 +4174,14 @@ pub fn embedFile(mod: *Module, cur_file: *File, import_string: []const u8) !*Emb
 // The resolved path is used as the key in the table, to detect if a file
 // refers to the same as another, despite different relative paths.
-const cur_pkg_dir_path = cur_file.pkg.root_src_directory.path orelse ".";
-const resolved_path = try std.fs.path.resolve(gpa, &[_][]const u8{
-cur_pkg_dir_path, cur_file.sub_file_path, "..", import_string,
+const resolved_path = try std.fs.path.resolve(gpa, &.{
+cur_file.mod.root.root_dir.path orelse ".",
+cur_file.mod.root.sub_path,
+cur_file.sub_file_path,
+"..",
+import_string,
 });
 var keep_resolved_path = false;
 defer if (!keep_resolved_path) gpa.free(resolved_path);

@@ -4207,7 +4189,10 @@ pub fn embedFile(mod: *Module, cur_file: *File, import_string: []const u8) !*Emb
 errdefer assert(mod.embed_table.remove(resolved_path));
 if (gop.found_existing) return gop.value_ptr.*;
-const resolved_root_path = try std.fs.path.resolve(gpa, &[_][]const u8{cur_pkg_dir_path});
+const resolved_root_path = try std.fs.path.resolve(gpa, &.{
+cur_file.mod.root.root_dir.path orelse ".",
+cur_file.mod.root.sub_path,
+});
 defer gpa.free(resolved_root_path);
 const sub_file_path = p: {

@@ -4225,12 +4210,13 @@ pub fn embedFile(mod: *Module, cur_file: *File, import_string: []const u8) !*Emb
 };
 errdefer gpa.free(sub_file_path);
-return newEmbedFile(mod, cur_file.pkg, sub_file_path, resolved_path, &keep_resolved_path, gop);
+return newEmbedFile(mod, cur_file.mod, sub_file_path, resolved_path, &keep_resolved_path, gop);
 }
+/// https://github.com/ziglang/zig/issues/14307
 fn newEmbedFile(
 mod: *Module,
-pkg: *Package,
+pkg: *Package.Module,
 sub_file_path: []const u8,
 resolved_path: []const u8,
 keep_resolved_path: *bool,

@@ -4241,7 +4227,7 @@ fn newEmbedFile(
 const new_file = try gpa.create(EmbedFile);
 errdefer gpa.destroy(new_file);
-var file = try pkg.root_src_directory.handle.openFile(sub_file_path, .{});
+var file = try pkg.root.openFile(sub_file_path, .{});
 defer file.close();
 const actual_stat = try file.stat();

@@ -4268,14 +4254,14 @@ fn newEmbedFile(
 .sub_file_path = sub_file_path,
 .bytes = bytes,
 .stat = stat,
-.pkg = pkg,
+.mod = pkg,
 .owner_decl = undefined, // Set by Sema immediately after this function returns.
 };
 return new_file;
 }
 pub fn detectEmbedFileUpdate(mod: *Module, embed_file: *EmbedFile) !void {
-var file = try embed_file.pkg.root_src_directory.handle.openFile(embed_file.sub_file_path, .{});
+var file = try embed_file.mod.root.openFile(embed_file.sub_file_path, .{});
 defer file.close();
 const stat = try file.stat();

@@ -4448,21 +4434,21 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
 gop.key_ptr.* = new_decl_index;
 // Exported decls, comptime decls, usingnamespace decls, and
 // test decls if in test mode, get analyzed.
-const decl_pkg = namespace.file_scope.pkg;
+const decl_mod = namespace.file_scope.mod;
 const want_analysis = is_exported or switch (decl_name_index) {
 0 => true, // comptime or usingnamespace decl
 1 => blk: {
 // test decl with no name. Skip the part where we check against
 // the test name filter.
 if (!comp.bin_file.options.is_test) break :blk false;
-if (decl_pkg != mod.main_mod) break :blk false;
+if (decl_mod != mod.main_mod) break :blk false;
 try mod.test_functions.put(gpa, new_decl_index, {});
 break :blk true;
 },
 else => blk: {
 if (!is_named_test) break :blk false;
 if (!comp.bin_file.options.is_test) break :blk false;
-if (decl_pkg != mod.main_mod) break :blk false;
+if (decl_mod != mod.main_mod) break :blk false;
 if (comp.test_filter) |test_filter| {
 if (mem.indexOf(u8, ip.stringToSlice(decl_name), test_filter) == null) {
 break :blk false;

@@ -5589,7 +5575,7 @@ pub fn populateTestFunctions(
 ) !void {
 const gpa = mod.gpa;
 const ip = &mod.intern_pool;
-const builtin_mod = mod.main_mod.table.get("builtin").?;
+const builtin_mod = mod.main_mod.deps.get("builtin").?;
 const builtin_file = (mod.importPkg(builtin_mod) catch unreachable).file;
 const root_decl = mod.declPtr(builtin_file.root_decl.unwrap().?);
 const builtin_namespace = mod.namespacePtr(root_decl.src_namespace);
@@ -13,12 +13,13 @@ pub const Path = struct {
 return .{ .root_dir = Cache.Directory.cwd() };
 }
-pub fn join(p: Path, allocator: Allocator, sub_path: []const u8) Allocator.Error!Path {
+pub fn join(p: Path, arena: Allocator, sub_path: []const u8) Allocator.Error!Path {
+if (sub_path.len == 0) return p;
 const parts: []const []const u8 =
 if (p.sub_path.len == 0) &.{sub_path} else &.{ p.sub_path, sub_path };
 return .{
 .root_dir = p.root_dir,
-.sub_path = try fs.path.join(allocator, parts),
+.sub_path = try fs.path.join(arena, parts),
 };
 }

@@ -28,7 +29,7 @@ pub const Path = struct {
 return p.root_dir.join(allocator, parts);
 }
-pub fn joinStringZ(p: Path, allocator: Allocator, sub_path: []const u8) Allocator.Error![]u8 {
+pub fn joinStringZ(p: Path, allocator: Allocator, sub_path: []const u8) Allocator.Error![:0]u8 {
 const parts: []const []const u8 =
 if (p.sub_path.len == 0) &.{sub_path} else &.{ p.sub_path, sub_path };
 return p.root_dir.joinZ(allocator, parts);

@@ -38,7 +39,7 @@ pub const Path = struct {
 p: Path,
 sub_path: []const u8,
 flags: fs.File.OpenFlags,
-) fs.File.OpenError!fs.File {
+) !fs.File {
 var buf: [fs.MAX_PATH_BYTES]u8 = undefined;
 const joined_path = if (p.sub_path.len == 0) sub_path else p: {
 break :p std.fmt.bufPrint(&buf, "{s}" ++ fs.path.sep_str ++ "{s}", .{

@@ -58,6 +59,30 @@ pub const Path = struct {
 return p.root_dir.handle.makeOpenPath(joined_path, opts);
 }
+pub fn statFile(p: Path, sub_path: []const u8) !fs.Dir.Stat {
+var buf: [fs.MAX_PATH_BYTES]u8 = undefined;
+const joined_path = if (p.sub_path.len == 0) sub_path else p: {
+break :p std.fmt.bufPrint(&buf, "{s}" ++ fs.path.sep_str ++ "{s}", .{
+p.sub_path, sub_path,
+}) catch return error.NameTooLong;
+};
+return p.root_dir.handle.statFile(joined_path);
+}
+pub fn atomicFile(
+p: Path,
+sub_path: []const u8,
+options: fs.Dir.AtomicFileOptions,
+) !fs.AtomicFile {
+var buf: [fs.MAX_PATH_BYTES]u8 = undefined;
+const joined_path = if (p.sub_path.len == 0) sub_path else p: {
+break :p std.fmt.bufPrint(&buf, "{s}" ++ fs.path.sep_str ++ "{s}", .{
+p.sub_path, sub_path,
+}) catch return error.NameTooLong;
+};
+return p.root_dir.handle.atomicFile(joined_path, options);
+}
 pub fn format(
 self: Path,
 comptime fmt_string: []const u8,

@@ -49,7 +49,7 @@ allow_missing_paths_field: bool,
 /// This will either be relative to `global_cache`, or to the build root of
 /// the root package.
 package_root: Package.Path,
-error_bundle: std.zig.ErrorBundle.Wip,
+error_bundle: ErrorBundle.Wip,
 manifest: ?Manifest,
 manifest_ast: std.zig.Ast,
 actual_hash: Manifest.Digest,

@@ -89,6 +89,31 @@ pub const JobQueue = struct {
 jq.all_fetches.deinit(gpa);
 jq.* = undefined;
 }
+/// Dumps all subsequent error bundles into the first one.
+pub fn consolidateErrors(jq: *JobQueue) !void {
+const root = &jq.all_fetches.items[0].error_bundle;
+for (jq.all_fetches.items[1..]) |fetch| {
+if (fetch.error_bundle.root_list.items.len > 0) {
+try root.addBundleAsRoots(fetch.error_bundle.tmpBundle());
+}
+}
+}
+/// Creates the dependencies.zig file and corresponding `Module` for the
+/// build runner to obtain via `@import("@dependencies")`.
+pub fn createDependenciesModule(
+jq: *JobQueue,
+arena: Allocator,
+local_cache_directory: Cache.Directory,
+basename: []const u8,
+) !*Package.Module {
+_ = jq;
+_ = arena;
+_ = local_cache_directory;
+_ = basename;
+@panic("TODO: createDependenciesModule");
+}
 };
 pub const Location = union(enum) {

@@ -502,7 +527,7 @@ pub fn workerRun(f: *Fetch) void {
 fn srcLoc(
 f: *Fetch,
 tok: std.zig.Ast.TokenIndex,
-) Allocator.Error!std.zig.ErrorBundle.SourceLocationIndex {
+) Allocator.Error!ErrorBundle.SourceLocationIndex {
 const ast = f.parent_manifest_ast orelse return .none;
 const eb = &f.error_bundle;
 const token_starts = ast.tokens.items(.start);

@@ -1258,3 +1283,4 @@ const Fetch = @This();
 const main = @import("../main.zig");
 const git = @import("../git.zig");
 const Package = @import("../Package.zig");
+const ErrorBundle = std.zig.ErrorBundle;

@@ -8,7 +8,7 @@ root: Package.Path,
 root_src_path: []const u8,
 /// The dependency table of this module. Shared dependencies such as 'std',
 /// 'builtin', and 'root' are not specified in every dependency table, but
-/// instead only in the table of `main_pkg`. `Module.importFile` is
+/// instead only in the table of `main_mod`. `Module.importFile` is
 /// responsible for detecting these names and using the correct package.
 deps: Deps = .{},
src/Sema.zig | 12

@@ -13075,9 +13075,11 @@ fn zirImport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
 return sema.fail(block, operand_src, "import of file outside package path: '{s}'", .{operand});
 },
 error.PackageNotFound => {
-const name = try block.getFileScope(mod).pkg.getName(sema.gpa, mod.*);
-defer sema.gpa.free(name);
-return sema.fail(block, operand_src, "no package named '{s}' available within package '{s}'", .{ operand, name });
+//const name = try block.getFileScope(mod).mod.getName(sema.gpa, mod.*);
+//defer sema.gpa.free(name);
+return sema.fail(block, operand_src, "no package named '{s}' available within package '{}'", .{
+operand, block.getFileScope(mod).mod.root,
+});
 },
 else => {
 // TODO: these errors are file system errors; make sure an update() will

@@ -36415,8 +36417,8 @@ fn getBuiltinDecl(sema: *Sema, block: *Block, name: []const u8) CompileError!Mod
 const mod = sema.mod;
 const ip = &mod.intern_pool;
-const std_pkg = mod.main_pkg.table.get("std").?;
-const std_file = (mod.importPkg(std_pkg) catch unreachable).file;
+const std_mod = mod.main_mod.deps.get("std").?;
+const std_file = (mod.importPkg(std_mod) catch unreachable).file;
 const opt_builtin_inst = (try sema.namespaceLookupRef(
 block,
 src,
@@ -892,21 +892,24 @@ pub const Object = struct {
 build_options.semver.patch,
 });
-// We fully resolve all paths at this point to avoid lack of source line info in stack
-// traces or lack of debugging information which, if relative paths were used, would
-// be very location dependent.
+// We fully resolve all paths at this point to avoid lack of
+// source line info in stack traces or lack of debugging
+// information which, if relative paths were used, would be
+// very location dependent.
 // TODO: the only concern I have with this is WASI as either host or target, should
 // we leave the paths as relative then?
 var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
-const compile_unit_dir = blk: {
-const path = d: {
-const mod = options.module orelse break :d ".";
-break :d mod.root_pkg.root_src_directory.path orelse ".";
-};
-if (std.fs.path.isAbsolute(path)) break :blk path;
-break :blk std.os.realpath(path, &buf) catch path; // If realpath fails, fallback to whatever path was
+const compile_unit_dir_z = blk: {
+if (options.module) |mod| {
+const d = try mod.root_mod.root.joinStringZ(builder.gpa, "");
+if (std.fs.path.isAbsolute(d)) break :blk d;
+const abs = std.fs.realpath(d, &buf) catch break :blk d;
+builder.gpa.free(d);
+break :blk try builder.gpa.dupeZ(u8, abs);
+}
+const cwd = try std.process.getCwd(&buf);
+break :blk try builder.gpa.dupeZ(u8, cwd);
 };
-const compile_unit_dir_z = try builder.gpa.dupeZ(u8, compile_unit_dir);
 defer builder.gpa.free(compile_unit_dir_z);
 builder.llvm.di_compile_unit = builder.llvm.di_builder.?.createCompileUnit(

@@ -2828,8 +2831,8 @@ pub const Object = struct {
 fn getStackTraceType(o: *Object) Allocator.Error!Type {
 const mod = o.module;
-const std_pkg = mod.main_pkg.table.get("std").?;
-const std_file = (mod.importPkg(std_pkg) catch unreachable).file;
+const std_mod = mod.main_mod.deps.get("std").?;
+const std_file = (mod.importPkg(std_mod) catch unreachable).file;
 const builtin_str = try mod.intern_pool.getOrPutString(mod.gpa, "builtin");
 const std_namespace = mod.namespacePtr(mod.declPtr(std_file.root_decl.unwrap().?).src_namespace);
@@ -1074,7 +1074,7 @@ fn buildSharedLib(
 .cache_mode = .whole,
 .target = comp.getTarget(),
 .root_name = lib.name,
-.main_pkg = null,
+.main_mod = null,
 .output_mode = .Lib,
 .link_mode = .Dynamic,
 .thread_pool = comp.thread_pool,

@@ -233,7 +233,7 @@ pub fn buildLibCXX(comp: *Compilation, prog_node: *std.Progress.Node) !void {
 .cache_mode = .whole,
 .target = target,
 .root_name = root_name,
-.main_pkg = null,
+.main_mod = null,
 .output_mode = output_mode,
 .thread_pool = comp.thread_pool,
 .libc_installation = comp.bin_file.options.libc_installation,

@@ -396,7 +396,7 @@ pub fn buildLibCXXABI(comp: *Compilation, prog_node: *std.Progress.Node) !void {
 .cache_mode = .whole,
 .target = target,
 .root_name = root_name,
-.main_pkg = null,
+.main_mod = null,
 .output_mode = output_mode,
 .thread_pool = comp.thread_pool,
 .libc_installation = comp.bin_file.options.libc_installation,

@@ -202,7 +202,7 @@ pub fn buildTsan(comp: *Compilation, prog_node: *std.Progress.Node) !void {
 .cache_mode = .whole,
 .target = target,
 .root_name = root_name,
-.main_pkg = null,
+.main_mod = null,
 .output_mode = output_mode,
 .thread_pool = comp.thread_pool,
 .libc_installation = comp.bin_file.options.libc_installation,

@@ -89,7 +89,7 @@ pub fn buildStaticLib(comp: *Compilation, prog_node: *std.Progress.Node) !void {
 .cache_mode = .whole,
 .target = target,
 .root_name = root_name,
-.main_pkg = null,
+.main_mod = null,
 .output_mode = output_mode,
 .thread_pool = comp.thread_pool,
 .libc_installation = comp.bin_file.options.libc_installation,
@@ -1880,7 +1880,7 @@ pub fn writeDbgInfoHeader(self: *Dwarf, module: *Module, low_pc: u64, high_pc: u
 },
 }
 // Write the form for the compile unit, which must match the abbrev table above.
-const name_strp = try self.strtab.insert(self.allocator, module.root_pkg.root_src_path);
+const name_strp = try self.strtab.insert(self.allocator, module.root_mod.root_src_path);
 var compile_unit_dir_buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
 const compile_unit_dir = resolveCompilationDir(module, &compile_unit_dir_buffer);
 const comp_dir_strp = try self.strtab.insert(self.allocator, compile_unit_dir);

@@ -1940,9 +1940,17 @@ fn resolveCompilationDir(module: *Module, buffer: *[std.fs.MAX_PATH_BYTES]u8) []
 // be very location dependent.
 // TODO: the only concern I have with this is WASI as either host or target, should
 // we leave the paths as relative then?
-const comp_dir_path = module.root_pkg.root_src_directory.path orelse ".";
-if (std.fs.path.isAbsolute(comp_dir_path)) return comp_dir_path;
-return std.os.realpath(comp_dir_path, buffer) catch comp_dir_path; // If realpath fails, fallback to whatever comp_dir_path was
+const root_dir_path = module.root_mod.root.root_dir.path orelse ".";
+const sub_path = module.root_mod.root.sub_path;
+const realpath = if (std.fs.path.isAbsolute(root_dir_path)) r: {
+@memcpy(buffer[0..root_dir_path.len], root_dir_path);
+break :r root_dir_path;
+} else std.fs.realpath(root_dir_path, buffer) catch return root_dir_path;
+const len = realpath.len + 1 + sub_path.len;
+if (buffer.len < len) return root_dir_path;
+buffer[realpath.len] = '/';
+@memcpy(buffer[realpath.len + 1 ..][0..sub_path.len], sub_path);
+return buffer[0..len];
 }
 fn writeAddrAssumeCapacity(self: *Dwarf, buf: *std.ArrayList(u8), addr: u64) void {

@@ -2664,7 +2672,7 @@ fn genIncludeDirsAndFileNames(self: *Dwarf, arena: Allocator) !struct {
 for (self.di_files.keys()) |dif| {
 const dir_path = d: {
 var buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
-const dir_path = dif.pkg.root_src_directory.path orelse ".";
+const dir_path = try dif.mod.root.joinString(arena, dif.mod.root.sub_path);
 const abs_dir_path = if (std.fs.path.isAbsolute(dir_path))
 dir_path
 else
@@ -929,15 +929,15 @@ pub fn populateMissingMetadata(self: *Elf) !void {
 if (self.base.options.module) |module| {
 if (self.zig_module_index == null and !self.base.options.use_llvm) {
-const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
+const index: File.Index = @intCast(try self.files.addOne(gpa));
 self.files.set(index, .{ .zig_module = .{
 .index = index,
-.path = module.main_pkg.root_src_path,
+.path = module.main_mod.root_src_path,
 } });
 self.zig_module_index = index;
 const zig_module = self.file(index).?.zig_module;
-const name_off = try self.strtab.insert(gpa, std.fs.path.stem(module.main_pkg.root_src_path));
+const name_off = try self.strtab.insert(gpa, std.fs.path.stem(module.main_mod.root_src_path));
 const symbol_index = try self.addSymbol();
 try zig_module.local_symbols.append(gpa, symbol_index);
 const symbol_ptr = self.symbol(symbol_index);
@@ -352,9 +352,12 @@ fn putFn(self: *Plan9, decl_index: Module.Decl.Index, out: FnDeclOutput) !void {
 // getting the full file path
 var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
-const dir = file.pkg.root_src_directory.path orelse try std.os.getcwd(&buf);
-const sub_path = try std.fs.path.join(arena, &.{ dir, file.sub_file_path });
-try self.addPathComponents(sub_path, &a);
+const full_path = try std.fs.path.join(arena, &.{
+file.mod.root.root_dir.path orelse try std.os.getcwd(&buf),
+file.mod.root.sub_path,
+file.sub_file_path,
+});
+try self.addPathComponents(full_path, &a);
 // null terminate
 try a.append(0);

src/main.zig | 113
@@ -897,7 +897,7 @@ fn buildOutputType(
 var override_local_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_LOCAL_CACHE_DIR");
 var override_global_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_GLOBAL_CACHE_DIR");
 var override_lib_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_LIB_DIR");
-var main_pkg_path: ?[]const u8 = null;
+var main_mod_path: ?[]const u8 = null;
 var clang_preprocessor_mode: Compilation.ClangPreprocessorMode = .no;
 var subsystem: ?std.Target.SubSystem = null;
 var major_subsystem_version: ?u32 = null;

@@ -1047,7 +1047,7 @@ fn buildOutputType(
 }
 root_deps_str = args_iter.nextOrFatal();
 } else if (mem.eql(u8, arg, "--main-mod-path")) {
-main_pkg_path = args_iter.nextOrFatal();
+main_mod_path = args_iter.nextOrFatal();
 } else if (mem.eql(u8, arg, "-cflags")) {
 extra_cflags.shrinkRetainingCapacity(0);
 while (true) {

@@ -3236,8 +3236,8 @@ fn buildOutputType(
 const main_mod: ?*Package.Module = if (root_src_file) |unresolved_src_path| blk: {
 const src_path = try introspect.resolvePath(arena, unresolved_src_path);
-if (main_pkg_path) |unresolved_main_pkg_path| {
-const p = try introspect.resolvePath(arena, unresolved_main_pkg_path);
+if (main_mod_path) |unresolved_main_mod_path| {
+const p = try introspect.resolvePath(arena, unresolved_main_mod_path);
 break :blk try Package.Module.create(arena, .{
 .root = .{
 .root_dir = Cache.Directory.cwd(),

@@ -3386,8 +3386,6 @@ fn buildOutputType(
 gimmeMoreOfThoseSweetSweetFileDescriptors();
-if (true) @panic("TODO restore Compilation logic");
 const comp = Compilation.create(gpa, .{
 .zig_lib_directory = zig_lib_directory,
 .local_cache_directory = local_cache_directory,

@@ -4628,6 +4626,8 @@ pub const usage_build =
 ;
 pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
+const work_around_btrfs_bug = builtin.os.tag == .linux and
+std.process.hasEnvVarConstant("ZIG_BTRFS_WORKAROUND");
 var color: Color = .auto;
 // We want to release all the locks before executing the child process, so we make a nice

@@ -4725,7 +4725,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
 const cwd_path = try process.getCwdAlloc(arena);
 const build_zig_basename = if (build_file) |bf| fs.path.basename(bf) else Package.build_zig_basename;
-const build_directory: Compilation.Directory = blk: {
+const build_root: Compilation.Directory = blk: {
 if (build_file) |bf| {
 if (fs.path.dirname(bf)) |dirname| {
 const dir = fs.cwd().openDir(dirname, .{}) catch |err| {

@@ -4761,7 +4761,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
 }
 }
 };
-child_argv.items[argv_index_build_file] = build_directory.path orelse cwd_path;
+child_argv.items[argv_index_build_file] = build_root.path orelse cwd_path;
 var global_cache_directory: Compilation.Directory = l: {
 const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena);

@@ -4781,9 +4781,9 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
 .path = local_cache_dir_path,
 };
 }
-const cache_dir_path = try build_directory.join(arena, &[_][]const u8{"zig-cache"});
+const cache_dir_path = try build_root.join(arena, &[_][]const u8{"zig-cache"});
 break :l .{
-.handle = try build_directory.handle.makeOpenPath("zig-cache", .{}),
+.handle = try build_root.handle.makeOpenPath("zig-cache", .{}),
 .path = cache_dir_path,
 };
 };
@@ -4824,74 +4824,77 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
 };
 var build_mod: Package.Module = .{
-.root = .{ .root_dir = build_directory },
+.root = .{ .root_dir = build_root },
 .root_src_path = build_zig_basename,
 };
 if (build_options.only_core_functionality) {
-const deps_pkg = try Package.createFilePkg(gpa, local_cache_directory, "dependencies.zig",
+const deps_mod = try Package.createFilePkg(gpa, local_cache_directory, "dependencies.zig",
 \\pub const packages = struct {};
 \\pub const root_deps: []const struct { []const u8, []const u8 } = &.{};
 \\
 );
-try main_mod.deps.put(arena, "@dependencies", deps_pkg);
+try main_mod.deps.put(arena, "@dependencies", deps_mod);
 } else {
 var http_client: std.http.Client = .{ .allocator = gpa };
 defer http_client.deinit();
-if (true) @panic("TODO restore package fetching logic");
-// Here we provide an import to the build runner that allows using reflection to find
-// all of the dependencies. Without this, there would be no way to use `@import` to
-// access dependencies by name, since `@import` requires string literals.
-var dependencies_source = std.ArrayList(u8).init(gpa);
-defer dependencies_source.deinit();
-var all_modules: Package.AllModules = .{};
-defer all_modules.deinit(gpa);
-var wip_errors: std.zig.ErrorBundle.Wip = undefined;
-try wip_errors.init(gpa);
-defer wip_errors.deinit();
 var progress: std.Progress = .{ .dont_print_on_dumb = true };
 const root_prog_node = progress.start("Fetch Packages", 0);
 defer root_prog_node.end();
-// Here we borrow main package's table and will replace it with a fresh
-// one after this process completes.
-const fetch_result = build_mod.fetchAndAddDependencies(
-&main_mod,
-arena,
-&thread_pool,
-&http_client,
-build_directory,
-global_cache_directory,
-local_cache_directory,
-&dependencies_source,
-&wip_errors,
-&all_modules,
-root_prog_node,
-null,
-);
-if (wip_errors.root_list.items.len > 0) {
-var errors = try wip_errors.toOwnedBundle("");
-defer errors.deinit(gpa);
+var job_queue: Package.Fetch.JobQueue = .{
+.http_client = &http_client,
+.thread_pool = &thread_pool,
+.global_cache = global_cache_directory,
+.recursive = true,
+.work_around_btrfs_bug = work_around_btrfs_bug,
+};
+defer job_queue.deinit();
+try job_queue.all_fetches.ensureUnusedCapacity(gpa, 1);
+var fetch: Package.Fetch = .{
+.arena = std.heap.ArenaAllocator.init(gpa),
+.location = .{ .relative_path = "" },
+.location_tok = 0,
+.hash_tok = 0,
+.parent_package_root = build_mod.root,
+.parent_manifest_ast = null,
+.prog_node = root_prog_node,
+.job_queue = &job_queue,
+.omit_missing_hash_error = true,
+.allow_missing_paths_field = true,
+.package_root = undefined,
+.error_bundle = undefined,
+.manifest = null,
+.manifest_ast = undefined,
+.actual_hash = undefined,
+.has_build_zig = false,
+.oom_flag = false,
+};
+job_queue.all_fetches.appendAssumeCapacity(&fetch);
+job_queue.wait_group.start();
+try job_queue.thread_pool.spawn(Package.Fetch.workerRun, .{&fetch});
+job_queue.wait_group.wait();
+try job_queue.consolidateErrors();
+if (fetch.error_bundle.root_list.items.len > 0) {
+var errors = try fetch.error_bundle.toOwnedBundle("");
 errors.renderToStdErr(renderOptions(color));
 process.exit(1);
 }
-try fetch_result;
-const deps_pkg = try Package.createFilePkg(
-gpa,
+const deps_mod = try job_queue.createDependenciesModule(
+arena,
 local_cache_directory,
 "dependencies.zig",
-dependencies_source.items,
 );
-mem.swap(Package.Table, &main_mod.table, &deps_pkg.table);
-try main_mod.add(gpa, "@dependencies", deps_pkg);
+try main_mod.deps.put(arena, "@dependencies", deps_mod);
 }
-try main_mod.add(gpa, "@build", &build_mod);
+try main_mod.deps.put(arena, "@build", &build_mod);
 const comp = Compilation.create(gpa, .{
 .zig_lib_directory = zig_lib_directory,
@@ -206,7 +206,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile, prog_node: *std.Progr
 .zig_lib_directory = comp.zig_lib_directory,
 .target = target,
 .root_name = "c",
-.main_pkg = null,
+.main_mod = null,
 .output_mode = .Lib,
 .link_mode = .Dynamic,
 .thread_pool = comp.thread_pool,