Allow only relative paths.

This commit makes the following changes:
* Disallow file:/// URIs
* Allow only relative paths in the .path field of build.zig.zon (see the example below)
* Remove the now-unneeded shlwapi dependency
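
As a quick illustration of the new rules, a build.zig.zon that pulls in a local dependency must now use a relative .path value. This is only an editorial sketch with a made-up package name and layout, not part of the commit itself:

    .{
        .name = "example",
        .version = "0.0.1",
        .dependencies = .{
            // Resolved relative to the directory containing this build.zig.zon.
            // Absolute paths and file:/// URLs are rejected while fetching.
            .mylib = .{ .path = "../mylib" },
            // Remote dependencies keep using .url plus .hash as before;
            // .hash is not allowed for path (directory) dependencies.
        },
    }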
Adam Goertz 2023-09-24 01:24:49 +00:00 committed by Andrew Kelley
parent b3cad98534
commit 2f0e5b00b0
5 changed files with 249 additions and 248 deletions

View file

@@ -336,7 +336,6 @@ pub fn build(b: *std.Build) !void {
             artifact.linkSystemLibrary("version");
             artifact.linkSystemLibrary("uuid");
             artifact.linkSystemLibrary("ole32");
-            artifact.linkSystemLibrary("shlwapi");
         }
     }
 }
@@ -713,7 +712,6 @@ fn addStaticLlvmOptionsToExe(exe: *std.Build.Step.Compile) !void {
         exe.linkSystemLibrary("version");
         exe.linkSystemLibrary("uuid");
         exe.linkSystemLibrary("ole32");
-        exe.linkSystemLibrary("shlwapi");
     }
 }

View file

@@ -150,7 +150,7 @@ pub fn parseWithoutScheme(text: []const u8) ParseError!Uri {
             std.debug.assert(reader.get().? == '/');
             std.debug.assert(reader.get().? == '/');
-            var authority = reader.readUntil(isAuthoritySeparator);
+            const authority = reader.readUntil(isAuthoritySeparator);
             if (authority.len == 0) {
                 if (reader.peekPrefix("/")) break :a else return error.InvalidFormat;
             }

View file

@@ -30,7 +30,6 @@ pub const gdi32 = @import("windows/gdi32.zig");
 pub const winmm = @import("windows/winmm.zig");
 pub const crypt32 = @import("windows/crypt32.zig");
 pub const nls = @import("windows/nls.zig");
-pub const shlwapi = @import("windows/shlwapi.zig");
 
 pub const self_process_handle = @as(HANDLE, @ptrFromInt(maxInt(usize)));

View file

@@ -1,13 +0,0 @@
-const std = @import("../../std.zig");
-const windows = std.os.windows;
-
-const DWORD = windows.DWORD;
-const WINAPI = windows.WINAPI;
-const HRESULT = windows.HRESULT;
-const LPCSTR = windows.LPCSTR;
-const LPSTR = windows.LPSTR;
-const LPWSTR = windows.LPWSTR;
-const LPCWSTR = windows.LPCWSTR;
-
-pub extern "shlwapi" fn PathCreateFromUrlW(pszUrl: LPCWSTR, pszPath: LPWSTR, pcchPath: *DWORD, dwFlags: DWORD) callconv(WINAPI) HRESULT;
-pub extern "shlwapi" fn PathCreateFromUrlA(pszUrl: LPCSTR, pszPath: LPSTR, pcchPath: *DWORD, dwFlags: DWORD) callconv(WINAPI) HRESULT;

View file

@@ -245,8 +245,6 @@ pub fn fetchAndAddDependencies(
         error.FileNotFound => {
             // Handle the same as no dependencies.
             if (this_hash) |hash| {
-                const pkg_dir_sub_path = "p" ++ fs.path.sep_str ++ hash[0..hex_multihash_len];
-                const build_root = try global_cache_directory.join(arena, &.{pkg_dir_sub_path});
                 try dependencies_source.writer().print(
                     \\ pub const {} = struct {{
                     \\ pub const build_root = "{}";
@@ -256,7 +254,7 @@ pub fn fetchAndAddDependencies(
                     \\
                 , .{
                     std.zig.fmtId(hash),
-                    std.zig.fmtEscapes(build_root),
+                    std.zig.fmtEscapes(pkg.root_src_directory.path.?),
                     std.zig.fmtEscapes(hash),
                 });
             } else {
@@ -312,19 +310,15 @@ pub fn fetchAndAddDependencies(
             try dependencies_source.writer().writeAll("pub const packages = struct {\n");
         }
 
-        const deps_list = manifest.dependencies.values();
-        for (manifest.dependencies.keys(), 0..) |name, i| {
-            const dep = deps_list[i];
-
-            const sub_pkg = try getCachedPackage(
-                http_client.allocator,
+        for (manifest.dependencies.keys(), manifest.dependencies.values()) |name, *dep| {
+            const sub_mod, const found_existing = try getCachedPackage(
+                arena,
                 global_cache_directory,
-                dep,
-                report,
+                dep.*,
                 all_modules,
                 root_prog_node,
-            ) orelse m: {
-                const mod = try fetchAndUnpack(
+            ) orelse .{
+                try fetchAndUnpack(
                     thread_pool,
                     http_client,
                     directory,
@@ -334,39 +328,58 @@ pub fn fetchAndAddDependencies(
                     all_modules,
                     root_prog_node,
                     name,
-                );
-
-                try mod.fetchAndAddDependencies(
-                    deps_pkg,
-                    arena,
-                    thread_pool,
-                    http_client,
-                    mod.root_src_directory,
-                    global_cache_directory,
-                    local_cache_directory,
-                    dependencies_source,
-                    error_bundle,
-                    all_modules,
-                    root_prog_node,
-                    dep.hash.?,
-                );
-
-                break :m mod;
+                ),
+                false,
             };
 
-            try pkg.add(gpa, name, sub_pkg);
-            if (deps_pkg.table.get(dep.hash.?)) |other_sub| {
-                // This should be the same package (and hence module) since it's the same hash
-                // TODO: dedup multiple versions of the same package
-                assert(other_sub == sub_pkg);
-            } else {
-                try deps_pkg.add(gpa, dep.hash.?, sub_pkg);
+            assert(dep.hash != null);
+
+            switch (sub_mod) {
+                .zig_pkg => |sub_pkg| {
+                    if (!found_existing) {
+                        try sub_pkg.fetchAndAddDependencies(
+                            deps_pkg,
+                            arena,
+                            thread_pool,
+                            http_client,
+                            sub_pkg.root_src_directory,
+                            global_cache_directory,
+                            local_cache_directory,
+                            dependencies_source,
+                            error_bundle,
+                            all_modules,
+                            root_prog_node,
+                            dep.hash.?,
+                        );
+                    }
+
+                    try pkg.add(gpa, name, sub_pkg);
+                    if (deps_pkg.table.get(dep.hash.?)) |other_sub| {
+                        // This should be the same package (and hence module) since it's the same hash
+                        // TODO: dedup multiple versions of the same package
+                        assert(other_sub == sub_pkg);
+                    } else {
+                        try deps_pkg.add(gpa, dep.hash.?, sub_pkg);
+                    }
+                },
+                .non_zig_pkg => |sub_pkg| {
+                    if (!found_existing) {
+                        try dependencies_source.writer().print(
+                            \\ pub const {} = struct {{
+                            \\ pub const build_root = "{}";
+                            \\ pub const deps: []const struct {{ []const u8, []const u8 }} = &.{{}};
+                            \\ }};
+                            \\
+                        , .{
+                            std.zig.fmtId(dep.hash.?),
+                            std.zig.fmtEscapes(sub_pkg.root_src_directory.path.?),
+                        });
+                    }
+                },
             }
         }
 
         if (this_hash) |hash| {
-            const pkg_dir_sub_path = "p" ++ fs.path.sep_str ++ hash[0..hex_multihash_len];
-            const build_root = try global_cache_directory.join(arena, &.{pkg_dir_sub_path});
             try dependencies_source.writer().print(
                 \\ pub const {} = struct {{
                 \\ pub const build_root = "{}";
@@ -375,7 +388,7 @@ pub fn fetchAndAddDependencies(
                 \\
             , .{
                 std.zig.fmtId(hash),
-                std.zig.fmtEscapes(build_root),
+                std.zig.fmtEscapes(pkg.root_src_directory.path.?),
                 std.zig.fmtEscapes(hash),
             });
         for (manifest.dependencies.keys(), manifest.dependencies.values()) |name, dep| {
@@ -485,44 +498,40 @@ const Report = struct {
     }
 };
 
-const FetchLocation = union(SourceType) {
+const FetchLocation = union(enum) {
     /// The absolute path to a file or directory.
     /// This may be a file that requires unpacking (such as a .tar.gz),
     /// or the path to the root directory of a package.
     file: []const u8,
     http_request: std.Uri,
 
-    pub fn init(gpa: Allocator, uri: std.Uri, directory: Compilation.Directory, dep: Manifest.Dependency, report: Report) !FetchLocation {
-        const source_type = getPackageSourceType(uri) catch
-            return report.fail(dep.location_tok, "Unknown scheme: {s}", .{uri.scheme});
-
-        return switch (source_type) {
-            .file => f: {
-                const path = if (builtin.os.tag == .windows) p: {
-                    var uri_str = std.ArrayList(u8).init(gpa);
-                    defer uri_str.deinit();
-                    try uri.format("+/", .{}, uri_str.writer());
-                    const uri_str_z = try gpa.dupeZ(u8, uri_str.items);
-                    defer gpa.free(uri_str_z);
-
-                    var buf: [std.os.windows.MAX_PATH:0]u8 = undefined;
-                    var buf_len: std.os.windows.DWORD = std.os.windows.MAX_PATH;
-                    const result = std.os.windows.shlwapi.PathCreateFromUrlA(uri_str_z, &buf, &buf_len, 0);
-                    if (result != std.os.windows.S_OK) return report.fail(dep.location_tok, "Invalid URI", .{});
-                    break :p try gpa.dupe(u8, buf[0..buf_len]);
-                } else try std.Uri.unescapeString(gpa, uri.path);
-                defer gpa.free(path);
-
-                const new_path = try fs.path.resolve(gpa, &.{ directory.path.?, path });
-                break :f .{ .file = new_path };
+    pub fn init(gpa: Allocator, directory: Compilation.Directory, dep: Manifest.Dependency, report: Report) !FetchLocation {
+        switch (dep.location) {
+            .url => |url| {
+                const uri = std.Uri.parse(url) catch |err| switch (err) {
+                    error.UnexpectedCharacter => return report.fail(dep.location_tok, "failed to parse dependency location as URI", .{}),
+                    else => return err,
+                };
+                if (ascii.eqlIgnoreCase(uri.scheme, "file")) {
+                    return report.fail(dep.location_tok, "'file' scheme is not allowed for URLs. Use '.path' instead", .{});
+                }
+                return .{ .http_request = uri };
             },
-            .http_request => r: {
-                break :r .{ .http_request = uri };
+            .path => |path| {
+                const unescaped = try std.Uri.unescapeString(gpa, path);
+                defer gpa.free(unescaped);
+                const unnormalized_path = try unnormalizePath(gpa, unescaped);
+                defer gpa.free(unnormalized_path);
+                if (fs.path.isAbsolute(unnormalized_path)) {
+                    return report.fail(dep.location_tok, "Absolute paths are not allowed. Use a relative path instead", .{});
+                }
+                const new_path = try fs.path.resolve(gpa, &.{ directory.path.?, unnormalized_path });
+                return .{ .file = new_path };
             },
-        };
+        }
     }
 
     pub fn deinit(f: *FetchLocation, gpa: Allocator) void {
@@ -533,41 +542,6 @@ const FetchLocation = union(SourceType) {
         f.* = undefined;
     }
 
-    const SourceType = enum {
-        file,
-        http_request,
-    };
-
-    fn getPackageSourceType(uri: std.Uri) error{UnknownScheme}!SourceType {
-        const package_source_map = std.ComptimeStringMap(
-            SourceType,
-            .{
-                .{ "file", .file },
-                .{ "http", .http_request },
-                .{ "https", .http_request },
-            },
-        );
-        return package_source_map.get(uri.scheme) orelse error.UnknownScheme;
-    }
-
-    pub fn isDirectory(path: []const u8, root_dir: Compilation.Directory) !bool {
-        return if (mem.endsWith(u8, path, std.fs.path.sep_str))
-            true
-        else if (std.fs.path.extension(path).len > 0)
-            false
-        else d: {
-            // It's common to write directories without a trailing '/'.
-            // This is some special casing logic to detect directories if
-            // the file type cannot be determined from the extension.
-            var dir = root_dir.handle.openDir(path, .{}) catch |err| switch (err) {
-                error.NotDir => break :d false,
-                else => break :d err,
-            };
-            defer dir.close();
-
-            break :d true;
-        };
-    }
-
     pub fn fetch(
         f: FetchLocation,
         gpa: Allocator,
@@ -578,25 +552,28 @@ const FetchLocation = union(SourceType) {
     ) !ReadableResource {
         switch (f) {
             .file => |file| {
-                const is_dir = isDirectory(file, root_dir) catch
-                    return report.fail(dep.location_tok, "File not found: {s}", .{file});
+                const is_dir = isDirectory(root_dir, file) catch |err| switch (err) {
+                    error.FileNotFound => return report.fail(dep.location_tok, "File not found: {s}", .{file}),
+                    else => return err,
+                };
 
-                return if (is_dir)
-                    .{
-                        .path = try gpa.dupe(u8, file),
-                        .resource = .{ .directory = try fs.openIterableDirAbsolute(file, .{}) },
-                    }
-                else
-                    .{
-                        .path = try gpa.dupe(u8, file),
-                        .resource = .{ .file = try fs.openFileAbsolute(file, .{}) },
-                    };
+                const owned_path = try gpa.dupe(u8, file);
+                errdefer gpa.free(owned_path);
+
+                return .{
+                    .path = owned_path,
+                    .resource = if (is_dir)
+                        .{ .directory = try fs.openIterableDirAbsolute(file, .{}) }
+                    else
+                        .{ .file = try fs.openFileAbsolute(file, .{}) },
+                };
             },
             .http_request => |uri| {
                 var h = std.http.Headers{ .allocator = gpa };
                 defer h.deinit();
                 var req = try http_client.request(.GET, uri, h, .{});
+                errdefer req.deinit();
 
                 try req.start(.{});
                 try req.wait();
@@ -638,10 +615,9 @@ const ReadableResource = struct {
         pkg_prog_node: *std.Progress.Node,
     ) !PackageLocation {
         switch (rr.resource) {
-            .directory => |dir| {
-                const actual_hash = try computePackageHash(thread_pool, dir);
+            .directory => {
                 return .{
-                    .hash = actual_hash,
+                    .hash = computePathHash(rr.path),
                     .dir_path = try allocator.dupe(u8, rr.path),
                 };
             },
@@ -739,11 +715,7 @@ const ReadableResource = struct {
     pub fn getFileType(rr: ReadableResource, dep: Manifest.Dependency, report: Report) !FileType {
         switch (rr.resource) {
             .file => {
-                return if (mem.endsWith(u8, rr.path, ".tar.gz"))
-                    .@"tar.gz"
-                else if (mem.endsWith(u8, rr.path, ".tar.xz"))
-                    .@"tar.xz"
-                else
+                return fileTypeFromPath(rr.path) orelse
                     return report.fail(dep.location_tok, "Unknown file type", .{});
             },
             .directory => return error.IsDir,
@@ -764,16 +736,40 @@ const ReadableResource = struct {
                     // whose content-disposition header is: 'attachment; filename="<project>-<sha>.tar.gz"'
                     const content_disposition = req.response.headers.getFirstValue("Content-Disposition") orelse
                         return report.fail(dep.location_tok, "Missing 'Content-Disposition' header for Content-Type=application/octet-stream", .{});
-                    if (mem.startsWith(u8, content_disposition, "attachment;") and
-                        mem.endsWith(u8, content_disposition, ".tar.gz\""))
-                    {
-                        break :ty .@"tar.gz";
-                    } else return report.fail(dep.location_tok, "Unsupported 'Content-Disposition' header value: '{s}' for Content-Type=application/octet-stream", .{content_disposition});
+                    break :ty getAttachmentType(content_disposition) orelse
+                        return report.fail(dep.location_tok, "Unsupported 'Content-Disposition' header value: '{s}' for Content-Type=application/octet-stream", .{content_disposition});
                 } else return report.fail(dep.location_tok, "Unrecognized value for 'Content-Type' header: {s}", .{content_type});
             },
         }
     }
 
+    fn fileTypeFromPath(file_path: []const u8) ?FileType {
+        return if (ascii.endsWithIgnoreCase(file_path, ".tar.gz"))
+            .@"tar.gz"
+        else if (ascii.endsWithIgnoreCase(file_path, ".tar.xz"))
+            .@"tar.xz"
+        else
+            null;
+    }
+
+    fn getAttachmentType(content_disposition: []const u8) ?FileType {
+        const disposition_type_end = ascii.indexOfIgnoreCase(content_disposition, "attachment;") orelse return null;
+
+        var value_start = ascii.indexOfIgnoreCasePos(content_disposition, disposition_type_end + 1, "filename") orelse return null;
+        value_start += "filename".len;
+        if (content_disposition[value_start] == '*') {
+            value_start += 1;
+        }
+        if (content_disposition[value_start] != '=') return null;
+        value_start += 1;
+
+        var value_end = mem.indexOfPos(u8, content_disposition, value_start, ";") orelse content_disposition.len;
+        if (content_disposition[value_end - 1] == '\"') {
+            value_end -= 1;
+        }
+        return fileTypeFromPath(content_disposition[value_start..value_end]);
+    }
+
     pub fn deinit(rr: *ReadableResource, gpa: Allocator) void {
         gpa.free(rr.path);
         switch (rr.resource) {
@@ -786,6 +782,8 @@
 };
 
 pub const PackageLocation = struct {
+    /// For packages that require unpacking, this is the hash of the package contents.
+    /// For directories, this is the hash of the absolute file path.
     hash: [Manifest.Hash.digest_length]u8,
     dir_path: []const u8,
@@ -797,13 +795,15 @@ pub const PackageLocation = struct {
 const hex_multihash_len = 2 * Manifest.multihash_len;
 const MultiHashHexDigest = [hex_multihash_len]u8;
 
+const DependencyModule = union(enum) {
+    zig_pkg: *Package,
+    non_zig_pkg: *Package,
+};
+
 /// This is to avoid creating multiple modules for the same build.zig file.
 /// If the value is `null`, the package is a known dependency, but has not yet
 /// been fetched.
-pub const AllModules = std.AutoHashMapUnmanaged(MultiHashHexDigest, ?union(enum) {
-    zig_pkg: *Package,
-    non_zig_pkg: void,
-});
+pub const AllModules = std.AutoHashMapUnmanaged(MultiHashHexDigest, ?DependencyModule);
 
 fn ProgressReader(comptime ReaderType: type) type {
     return struct {
@@ -847,15 +847,18 @@ fn ProgressReader(comptime ReaderType: type) type {
     };
 }
 
+/// Get a cached package if it exists.
+/// Returns `null` if the package has not been cached
+/// If the package exists in the cache, returns a pointer to the package and a
+/// boolean indicating whether this package has already been seen in the build
+/// (i.e. whether or not its transitive dependencies have been fetched).
 fn getCachedPackage(
     gpa: Allocator,
     global_cache_directory: Compilation.Directory,
     dep: Manifest.Dependency,
-    report: Report,
     all_modules: *AllModules,
     root_prog_node: *std.Progress.Node,
-) !?*Package {
-    _ = report;
+) !?struct { DependencyModule, bool } {
     const s = fs.path.sep_str;
     // Check if the expected_hash is already present in the global package
     // cache, and thereby avoid both fetching and unpacking.
@@ -874,27 +877,21 @@ fn getCachedPackage(
         const gop = try all_modules.getOrPut(gpa, hex_digest.*);
         if (gop.found_existing) {
             if (gop.value_ptr.*) |mod| {
-                return mod;
+                return .{ mod, true };
             }
         }
 
-        pkg_dir.access(build_zig_basename, .{}) catch {
-            gop.value_ptr.* = .non_zig_pkg;
-            return .{
-                .mod = null,
-                .found_existing = false,
-            };
-        };
+        root_prog_node.completeOne();
+
+        const is_zig_mod = if (pkg_dir.access(build_zig_basename, .{})) |_| true else |_| false;
 
         const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path});
         errdefer gpa.free(build_root);
 
-        root_prog_node.completeOne();
-
         const ptr = try gpa.create(Package);
         errdefer gpa.destroy(ptr);
 
-        const owned_src_path = try gpa.dupe(u8, build_zig_basename);
+        const owned_src_path = if (is_zig_mod) try gpa.dupe(u8, build_zig_basename) else "";
         errdefer gpa.free(owned_src_path);
 
         ptr.* = .{
@@ -906,8 +903,12 @@ fn getCachedPackage(
             .root_src_path = owned_src_path,
         };
 
-        gop.value_ptr.* = ptr;
-        return ptr;
+        gop.value_ptr.* = if (is_zig_mod)
+            .{ .zig_pkg = ptr }
+        else
+            .{ .non_zig_pkg = ptr };
+
+        return .{ gop.value_ptr.*.?, false };
     }
 
     return null;
@@ -918,14 +919,14 @@ fn fetchAndUnpack(
     http_client: *std.http.Client,
     directory: Compilation.Directory,
     global_cache_directory: Compilation.Directory,
-    dep: Manifest.Dependency,
+    dep: *Manifest.Dependency,
     report: Report,
     all_modules: *AllModules,
    root_prog_node: *std.Progress.Node,
     /// This does not have to be any form of canonical or fully-qualified name: it
     /// is only intended to be human-readable for progress reporting.
     name_for_prog: []const u8,
-) !*Package {
+) !DependencyModule {
     const gpa = http_client.allocator;
 
     var pkg_prog_node = root_prog_node.start(name_for_prog, 0);
@@ -933,66 +934,65 @@ fn fetchAndUnpack(
     pkg_prog_node.activate();
     pkg_prog_node.context.refresh();
 
-    const uri = switch (dep.location) {
-        .url => |url| std.Uri.parse(url) catch |err| switch (err) {
-            error.UnexpectedCharacter => return report.fail(dep.location_tok, "failed to parse dependency location as URI.", .{}),
-            else => return err,
-        },
-        .path => |path| std.Uri{
-            .scheme = "file",
-            .user = null,
-            .password = null,
-            .host = null,
-            .port = null,
-            .path = path,
-            .query = null,
-            .fragment = null,
-        },
-    };
-
-    var fetch_location = try FetchLocation.init(gpa, uri, directory, dep, report);
+    var fetch_location = try FetchLocation.init(gpa, directory, dep.*, report);
     defer fetch_location.deinit(gpa);
 
-    var readable_resource = try fetch_location.fetch(gpa, directory, http_client, dep, report);
+    var readable_resource = try fetch_location.fetch(gpa, directory, http_client, dep.*, report);
     defer readable_resource.deinit(gpa);
 
-    var package_location = try readable_resource.unpack(gpa, thread_pool, global_cache_directory, dep, report, &pkg_prog_node);
+    var package_location = try readable_resource.unpack(gpa, thread_pool, global_cache_directory, dep.*, report, &pkg_prog_node);
     defer package_location.deinit(gpa);
 
     const actual_hex = Manifest.hexDigest(package_location.hash);
-    if (dep.hash) |h| {
-        if (!mem.eql(u8, h, &actual_hex)) {
-            return report.fail(dep.hash_tok, "hash mismatch: expected: {s}, found: {s}", .{
-                h, actual_hex,
-            });
+    if (readable_resource.resource != .directory) {
+        if (dep.hash) |h| {
+            if (!mem.eql(u8, h, &actual_hex)) {
+                return report.fail(dep.hash_tok, "hash mismatch: expected: {s}, found: {s}", .{
+                    h, actual_hex,
+                });
+            }
+        } else {
+            const file_path = try report.directory.join(gpa, &.{Manifest.basename});
+            defer gpa.free(file_path);
+
+            const eb = report.error_bundle;
+            const notes_len = 1;
+            try Report.addErrorMessage(report.ast.*, file_path, eb, notes_len, .{
+                .tok = dep.location_tok,
+                .off = 0,
+                .msg = "dependency is missing hash field",
+            });
+            const notes_start = try eb.reserveNotes(notes_len);
+            eb.extra.items[notes_start] = @intFromEnum(try eb.addErrorMessage(.{
+                .msg = try eb.printString("expected .hash = \"{s}\",", .{&actual_hex}),
+            }));
+            return error.PackageFetchFailed;
         }
     } else {
-        const file_path = try report.directory.join(gpa, &.{Manifest.basename});
-        defer gpa.free(file_path);
-
-        const eb = report.error_bundle;
-        const notes_len = 1;
-        try Report.addErrorMessage(report.ast.*, file_path, eb, notes_len, .{
-            .tok = dep.location_tok,
-            .off = 0,
-            .msg = "dependency is missing hash field",
-        });
-        const notes_start = try eb.reserveNotes(notes_len);
-        eb.extra.items[notes_start] = @intFromEnum(try eb.addErrorMessage(.{
-            .msg = try eb.printString("expected .hash = \"{s}\",", .{&actual_hex}),
-        }));
-        return error.PackageFetchFailed;
+        if (dep.hash != null) {
+            return report.fail(dep.hash_tok, "hash not allowed for directory package", .{});
+        }
+
+        // Since directory dependencies don't provide a hash in build.zig.zon,
+        // set the hash here to be the hash of the absolute path to the dependency.
+        dep.hash = try gpa.dupe(u8, &actual_hex);
    }
 
-    const gop = try all_modules.getOrPut(gpa, actual_hex);
+    const build_zig_path = try std.fs.path.join(gpa, &.{ package_location.dir_path, build_zig_basename });
+    defer gpa.free(build_zig_path);
+    assert(fs.path.isAbsolute(build_zig_path));
 
-    if (gop.found_existing and gop.value_ptr.* != null) {
-        return gop.value_ptr.*.?;
-    } else {
-        const module = try create(gpa, package_location.dir_path, build_zig_basename);
-        gop.value_ptr.* = module;
-        return module;
-    }
+    global_cache_directory.handle.access(build_zig_path, .{}) catch |err| switch (err) {
+        error.FileNotFound => {
+            const module = try create(gpa, package_location.dir_path, "");
+            try all_modules.put(gpa, actual_hex, .{ .non_zig_pkg = module });
+            return .{ .non_zig_pkg = module };
+        },
+        else => return err,
+    };
+
+    const module = try create(gpa, package_location.dir_path, build_zig_basename);
+    try all_modules.put(gpa, actual_hex, .{ .zig_pkg = module });
+    return .{ .zig_pkg = module };
 }
 
 fn unpackTarball(
@@ -1092,6 +1092,22 @@ fn computePackageHash(
     return hasher.finalResult();
 }
 
+/// Compute the hash of a file path.
+fn computePathHash(path: []const u8) [Manifest.Hash.digest_length]u8 {
+    var hasher = Manifest.Hash.init(.{});
+    hasher.update(path);
+    return hasher.finalResult();
+}
+
+fn isDirectory(root_dir: Compilation.Directory, path: []const u8) !bool {
+    var dir = root_dir.handle.openDir(path, .{}) catch |err| switch (err) {
+        error.NotDir => return false,
+        else => return err,
+    };
+    defer dir.close();
+    return true;
+}
+
 /// Make a file system path identical independently of operating system path inconsistencies.
 /// This converts backslashes into forward slashes.
 fn normalizePath(arena: Allocator, fs_path: []const u8) ![]const u8 {
@@ -1110,6 +1126,25 @@ fn normalizePath(arena: Allocator, fs_path: []const u8) ![]const u8 {
     return normalized;
 }
 
+/// Make a OS-specific file system path
+/// This performs the inverse operation of normalizePath,
+/// converting forward slashes into backslashes on Windows
+fn unnormalizePath(arena: Allocator, fs_path: []const u8) ![]const u8 {
+    const canonical_sep = '/';
+
+    const unnormalized = try arena.dupe(u8, fs_path);
+    if (fs.path.sep == canonical_sep)
+        return unnormalized;
+
+    for (unnormalized) |*byte| {
+        switch (byte.*) {
+            canonical_sep => byte.* = fs.path.sep,
+            else => continue,
+        }
+    }
+
+    return unnormalized;
+}
+
 fn workerHashFile(dir: fs.Dir, hashed_file: *HashedFile, wg: *WaitGroup) void {
     defer wg.finish();
     hashed_file.failure = hashFileFallible(dir, hashed_file);
@@ -1172,36 +1207,18 @@ fn renameTmpIntoCache(
     }
 }
 
-fn isTarAttachment(content_disposition: []const u8) bool {
-    const disposition_type_end = ascii.indexOfIgnoreCase(content_disposition, "attachment;") orelse return false;
-
-    var value_start = ascii.indexOfIgnoreCasePos(content_disposition, disposition_type_end + 1, "filename") orelse return false;
-    value_start += "filename".len;
-    if (content_disposition[value_start] == '*') {
-        value_start += 1;
-    }
-    if (content_disposition[value_start] != '=') return false;
-    value_start += 1;
-
-    var value_end = mem.indexOfPos(u8, content_disposition, value_start, ";") orelse content_disposition.len;
-    if (content_disposition[value_end - 1] == '\"') {
-        value_end -= 1;
-    }
-    return ascii.endsWithIgnoreCase(content_disposition[value_start..value_end], ".tar.gz");
-}
-
-test "isTarAttachment" {
-    try std.testing.expect(isTarAttachment("attaChment; FILENAME=\"stuff.tar.gz\"; size=42"));
-    try std.testing.expect(isTarAttachment("attachment; filename*=\"stuff.tar.gz\""));
-    try std.testing.expect(isTarAttachment("ATTACHMENT; filename=\"stuff.tar.gz\""));
-    try std.testing.expect(isTarAttachment("attachment; FileName=\"stuff.tar.gz\""));
-    try std.testing.expect(isTarAttachment("attachment; FileName*=UTF-8\'\'xyz%2Fstuff.tar.gz"));
-
-    try std.testing.expect(!isTarAttachment("attachment FileName=\"stuff.tar.gz\""));
-    try std.testing.expect(!isTarAttachment("attachment; FileName=\"stuff.tar\""));
-    try std.testing.expect(!isTarAttachment("attachment; FileName\"stuff.gz\""));
-    try std.testing.expect(!isTarAttachment("attachment; size=42"));
-    try std.testing.expect(!isTarAttachment("inline; size=42"));
-    try std.testing.expect(!isTarAttachment("FileName=\"stuff.tar.gz\"; attachment;"));
-    try std.testing.expect(!isTarAttachment("FileName=\"stuff.tar.gz\";"));
+test "getAttachmentType" {
+    try std.testing.expectEqual(@as(?ReadableResource.FileType, .@"tar.gz"), ReadableResource.getAttachmentType("attaChment; FILENAME=\"stuff.tar.gz\"; size=42"));
+    try std.testing.expectEqual(@as(?ReadableResource.FileType, .@"tar.gz"), ReadableResource.getAttachmentType("attachment; filename*=\"stuff.tar.gz\""));
+    try std.testing.expectEqual(@as(?ReadableResource.FileType, .@"tar.xz"), ReadableResource.getAttachmentType("ATTACHMENT; filename=\"stuff.tar.xz\""));
+    try std.testing.expectEqual(@as(?ReadableResource.FileType, .@"tar.xz"), ReadableResource.getAttachmentType("attachment; FileName=\"stuff.tar.xz\""));
+    try std.testing.expectEqual(@as(?ReadableResource.FileType, .@"tar.gz"), ReadableResource.getAttachmentType("attachment; FileName*=UTF-8\'\'xyz%2Fstuff.tar.gz"));
+
+    try std.testing.expect(ReadableResource.getAttachmentType("attachment FileName=\"stuff.tar.gz\"") == null);
+    try std.testing.expect(ReadableResource.getAttachmentType("attachment; FileName=\"stuff.tar\"") == null);
+    try std.testing.expect(ReadableResource.getAttachmentType("attachment; FileName\"stuff.gz\"") == null);
+    try std.testing.expect(ReadableResource.getAttachmentType("attachment; size=42") == null);
+    try std.testing.expect(ReadableResource.getAttachmentType("inline; size=42") == null);
+    try std.testing.expect(ReadableResource.getAttachmentType("FileName=\"stuff.tar.gz\"; attachment;") == null);
+    try std.testing.expect(ReadableResource.getAttachmentType("FileName=\"stuff.tar.gz\";") == null);
 }