Mirror of https://github.com/zigzap/zap.git, synced 2025-10-21 15:44:10 +00:00
Update pkghash to use the new fs.dir changes
This commit is contained in:
parent 34431067a9
commit 60c910f096
2 changed files with 7 additions and 7 deletions
@@ -82,7 +82,7 @@ pub fn build(b: *std.build.Builder) !void {
         const example_run_step = b.step(ex_run_stepname, ex_run_stepdesc);
         const example_step = b.step(ex_name, ex_build_desc);

-        var example = b.addExecutable(.{
+        const example = b.addExecutable(.{
             .name = ex_name,
             .root_source_file = .{ .path = ex_src },
             .target = target,
@@ -206,7 +206,7 @@ pub fn build(b: *std.build.Builder) !void {
         .target = target,
         .optimize = optimize,
     });
-    var pkghash_step = b.step("pkghash", "Build pkghash");
+    const pkghash_step = b.step("pkghash", "Build pkghash");
     const pkghash_build_step = b.addInstallArtifact(pkghash_exe, .{});
     pkghash_step.dependOn(&pkghash_build_step.step);
     all_step.dependOn(&pkghash_build_step.step);
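The two var-to-const changes in the build script are independent of the fs.Dir migration: b.addExecutable and b.step return pointers that these locals never rebind, and recent Zig compilers reject a var local that is never mutated ("local variable is never mutated"), so const is the correct binding either way. A minimal sketch of the pattern, using placeholder names rather than this repository's actual build steps:

    const std = @import("std");

    pub fn build(b: *std.build.Builder) !void {
        const target = b.standardTargetOptions(.{});
        const optimize = b.standardOptimizeOption(.{});

        // addExecutable returns a pointer to the compile step; the binding is
        // never reassigned, so const is correct (and required by newer compilers).
        const exe = b.addExecutable(.{
            .name = "demo",
            .root_source_file = .{ .path = "src/main.zig" },
            .target = target,
            .optimize = optimize,
        });
        b.installArtifact(exe);

        // b.step also returns a pointer; mutating through it (dependOn) is fine
        // with a const binding, only rebinding the name would need var.
        const run_step = b.step("run", "Run the demo executable");
        run_step.dependOn(&b.addRunArtifact(exe).step);
    }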
@@ -266,8 +266,8 @@ pub fn cmdPkg(gpa: Allocator, arena: Allocator, args: []const []const u8) !void

         // computePackageHash will close the directory after completion
         // std.debug.print("abspath: {s}\n", .{cwd_absolute_path});
-        var cwd_copy = try fs.openIterableDirAbsolute(cwd_absolute_path, .{});
-        errdefer cwd_copy.dir.close();
+        var cwd_copy = try fs.openDirAbsolute(cwd_absolute_path, .{});
+        errdefer cwd_copy.close();

         var thread_pool: ThreadPool = undefined;
         try thread_pool.init(.{ .allocator = gpa });
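These two lines are the heart of the commit: std.fs.IterableDir was removed from the Zig standard library and directory iteration moved onto std.fs.Dir itself, so there is no wrapper to unwrap (cwd_copy.dir) or to close through a nested field. A minimal sketch of the before/after, assuming a std from around this change; the .iterate open flag is the usual way to request an iterable handle, whereas the hunk above opens with default flags:

    const std = @import("std");

    // Sketch only: list the immediate entries of an absolute path with the
    // post-IterableDir API.
    fn listEntries(abs_path: []const u8) !void {
        // Before the change:
        //   var iter = try std.fs.openIterableDirAbsolute(abs_path, .{});
        //   errdefer iter.dir.close(); // the plain handle lived behind .dir
        //   var it = iter.iterate();
        // After the change, fs.Dir iterates directly:
        var dir = try std.fs.openDirAbsolute(abs_path, .{ .iterate = true });
        defer dir.close();

        var it = dir.iterate();
        while (try it.next()) |entry| {
            std.debug.print("{s} ({s})\n", .{ entry.name, @tagName(entry.kind) });
        }
    }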
@@ -281,7 +281,7 @@ pub fn cmdPkg(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
         };
         break :blk try computePackageHashExcludingDirectories(
             &thread_pool,
-            .{ .dir = cwd_copy.dir },
+            cwd_copy,
             excluded_directories,
         );
     };
@@ -355,7 +355,7 @@ fn isExecutable(file: fs.File) !bool {

 pub fn computePackageHashExcludingDirectories(
     thread_pool: *ThreadPool,
-    pkg_dir: fs.IterableDir,
+    pkg_dir: fs.Dir,
     excluded_directories: []const []const u8,
 ) ![Manifest.Hash.digest_length]u8 {
     const gpa = thread_pool.allocator;
@@ -405,7 +405,7 @@ pub fn computePackageHashExcludingDirectories(
                 .failure = undefined, // to be populated by the worker
             };
             wait_group.start();
-            try thread_pool.spawn(workerHashFile, .{ pkg_dir.dir, hashed_file, &wait_group });
+            try thread_pool.spawn(workerHashFile, .{ pkg_dir, hashed_file, &wait_group });

             try all_files.append(hashed_file);
         }
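With the wrapper type gone, computePackageHashExcludingDirectories takes a plain fs.Dir and its caller and worker spawn stop reaching through .dir (the 281, 355, and 405 hunks above). The sketch below shows only that shape, walking a package directory while skipping excluded subtrees; the hashing, thread pool, and worker from the real function are omitted, and the exclusion test is a rough prefix match, not necessarily what pkghash does:

    const std = @import("std");
    const fs = std.fs;

    // Sketch only: report which files under pkg_dir would be hashed.
    fn walkPackageDir(
        gpa: std.mem.Allocator,
        pkg_dir: fs.Dir, // previously fs.IterableDir; walk() now lives on fs.Dir
        excluded_directories: []const []const u8,
    ) !void {
        var walker = try pkg_dir.walk(gpa);
        defer walker.deinit();

        outer: while (try walker.next()) |entry| {
            if (entry.kind != .file) continue;
            for (excluded_directories) |excluded| {
                // entry.path is relative to pkg_dir; skip anything under an
                // excluded top-level directory (rough prefix check).
                if (std.mem.startsWith(u8, entry.path, excluded)) continue :outer;
            }
            std.debug.print("would hash: {s}\n", .{entry.path});
        }
    }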