rename "nonce" to "fingerprint"
commit de43f5eb6a (parent 67904e925d)
7 changed files with 38 additions and 38 deletions
@@ -12,5 +12,5 @@
         },
     },
     .paths = .{""},
-    .nonce = 0xc1ce108124179e16,
+    .fingerprint = 0xc1ce108124179e16,
 }
@@ -22,24 +22,24 @@ Zig package namespace.

 Must be a valid bare Zig identifier (don't `@` me), limited to 32 bytes.

-Together with `nonce`, this represents a globally unique package identifier.
+Together with `fingerprint`, this represents a globally unique package identifier.

-### `nonce`
+### `fingerprint`

 Together with `name`, this represents a globally unique package identifier. This
 field is auto-initialized by the toolchain when the package is first created,
 and then *never changes*. This allows Zig to unambiguously detect when one
 package is an updated version of another.

-When forking a Zig project, this nonce should be regenerated if the upstream
+When forking a Zig project, this fingerprint should be regenerated if the upstream
 project is still maintained. Otherwise, the fork is *hostile*, attempting to
-take control over the original project's identity. The nonce can be regenerated
+take control over the original project's identity. The fingerprint can be regenerated
 by deleting the field and running `zig build`.

 This 64-bit integer is the combination of a 32-bit id component and a 32-bit
 checksum.

-The id component within the nonce has these restrictions:
+The id component within the fingerprint has these restrictions:

 `0x00000000` is reserved for legacy packages.
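The documentation above says the fingerprint is a 64-bit value made of a 32-bit id and a 32-bit checksum. As a rough sketch (not part of this commit), the test below splits the example value from the first hunk into those two components; it relies on Zig packed structs placing their first field in the least significant bits, matching the `Fingerprint` layout introduced later in this diff.

// Sketch only, not part of the commit. In a packed struct(u64) the first
// field occupies the least significant bits, so `id` is the low half and
// `checksum` the high half of the integer written in build.zig.zon.
const std = @import("std");

const Fingerprint = packed struct(u64) {
    id: u32,
    checksum: u32,
};

test "decompose the example fingerprint" {
    const fp: Fingerprint = @bitCast(@as(u64, 0xc1ce108124179e16));
    try std.testing.expectEqual(@as(u32, 0x24179e16), fp.id);
    try std.testing.expectEqual(@as(u32, 0xc1ce1081), fp.checksum);
}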
@@ -24,7 +24,7 @@
     // original project's identity. Thus it is recommended to leave the comment
     // on the following line intact, so that it shows up in code reviews that
     // modify the field.
-    .nonce = .NONCE, // Changing this has security and trust implications.
+    .fingerprint = .FINGERPRINT, // Changing this has security and trust implications.

     // Tracks the earliest Zig version that the package considers to be a
     // supported use case.
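For context, `.FINGERPRINT` is a placeholder that `zig init` replaces with a freshly generated value (see the template substitution hunk near the end of this diff), so a generated manifest ends up with a line roughly like the one below; the hex value here is made up, since each package gets a random id.

    .fingerprint = 0x52fd93da9a6d74c6, // Changing this has security and trust implications.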
@@ -10,25 +10,25 @@ pub const multihash_len = 1 + 1 + Hash.Algo.digest_length;
 pub const multihash_hex_digest_len = 2 * multihash_len;
 pub const MultiHashHexDigest = [multihash_hex_digest_len]u8;

-pub const Nonce = packed struct(u64) {
+pub const Fingerprint = packed struct(u64) {
     id: u32,
     checksum: u32,

-    pub fn generate(name: []const u8) Nonce {
+    pub fn generate(name: []const u8) Fingerprint {
         return .{
             .id = std.crypto.random.intRangeLessThan(u32, 1, 0xffffffff),
             .checksum = std.hash.Crc32.hash(name),
         };
     }

-    pub fn validate(n: Nonce, name: []const u8) bool {
+    pub fn validate(n: Fingerprint, name: []const u8) bool {
         switch (n.id) {
             0x00000000, 0xffffffff => return false,
             else => return std.hash.Crc32.hash(name) == n.checksum,
         }
     }

-    pub fn int(n: Nonce) u64 {
+    pub fn int(n: Fingerprint) u64 {
         return @bitCast(n);
     }
 };
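A short usage sketch of the new API (illustrative only, not part of the commit; it assumes the `Fingerprint` type from the hunk above is in scope, and the package name is made up):

const std = @import("std");

test "generate, validate, and serialize a fingerprint" {
    // generate() never produces an id of 0x00000000 or 0xffffffff, so a
    // freshly generated fingerprint always validates against its own name.
    const fp = Fingerprint.generate("example_pkg");
    try std.testing.expect(fp.validate("example_pkg"));

    // int() yields the u64 literal that gets written into build.zig.zon;
    // bit-casting it back recovers the same id/checksum pair.
    const roundtrip: Fingerprint = @bitCast(fp.int());
    try std.testing.expect(roundtrip.validate("example_pkg"));
}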
@@ -44,7 +44,7 @@ omit_missing_hash_error: bool,
 /// which specifies inclusion rules. This is intended to be true for the first
 /// fetch task and false for the recursive dependencies.
 allow_missing_paths_field: bool,
-allow_missing_nonce: bool,
+allow_missing_fingerprint: bool,
 allow_name_string: bool,
 /// If true and URL points to a Git repository, will use the latest commit.
 use_latest_commit: bool,
@@ -649,7 +649,7 @@ fn loadManifest(f: *Fetch, pkg_root: Cache.Path) RunError!void {

     f.manifest = try Manifest.parse(arena, ast.*, .{
         .allow_missing_paths_field = f.allow_missing_paths_field,
-        .allow_missing_nonce = f.allow_missing_nonce,
+        .allow_missing_fingerprint = f.allow_missing_fingerprint,
         .allow_name_string = f.allow_name_string,
     });
     const manifest = &f.manifest.?;
@@ -752,7 +752,7 @@ fn queueJobsForDeps(f: *Fetch) RunError!void {
             .job_queue = f.job_queue,
             .omit_missing_hash_error = false,
             .allow_missing_paths_field = true,
-            .allow_missing_nonce = true,
+            .allow_missing_fingerprint = true,
             .allow_name_string = true,
             .use_latest_commit = false,
@@ -2323,7 +2323,7 @@ const TestFetchBuilder = struct {
             .job_queue = &self.job_queue,
             .omit_missing_hash_error = true,
             .allow_missing_paths_field = false,
-            .allow_missing_nonce = true, // so we can keep using the old testdata .tar.gz
+            .allow_missing_fingerprint = true, // so we can keep using the old testdata .tar.gz
             .allow_name_string = true, // so we can keep using the old testdata .tar.gz
             .use_latest_commit = true,
@@ -52,7 +52,7 @@ pub const ParseOptions = struct {
     /// Deprecated, to be removed after 0.14.0 is tagged.
     allow_name_string: bool = true,
     /// Deprecated, to be removed after 0.14.0 is tagged.
-    allow_missing_nonce: bool = true,
+    allow_missing_fingerprint: bool = true,
 };

 pub const Error = Allocator.Error;
@@ -81,7 +81,7 @@ pub fn parse(gpa: Allocator, ast: Ast, options: ParseOptions) Error!Manifest {
         .paths = .{},
         .allow_missing_paths_field = options.allow_missing_paths_field,
         .allow_name_string = options.allow_name_string,
-        .allow_missing_nonce = options.allow_missing_nonce,
+        .allow_missing_fingerprint = options.allow_missing_fingerprint,
         .minimum_zig_version = null,
         .buf = .{},
     };
@@ -157,7 +157,7 @@ const Parse = struct {
     paths: std.StringArrayHashMapUnmanaged(void),
     allow_missing_paths_field: bool,
     allow_name_string: bool,
-    allow_missing_nonce: bool,
+    allow_missing_fingerprint: bool,
     minimum_zig_version: ?std.SemanticVersion,

     const InnerError = error{ ParseFailure, OutOfMemory };
@@ -175,7 +175,7 @@ const Parse = struct {
         var have_name = false;
         var have_version = false;
         var have_included_paths = false;
-        var nonce: ?Package.Nonce = null;
+        var fingerprint: ?Package.Fingerprint = null;

         for (struct_init.ast.fields) |field_init| {
             const name_token = ast.firstToken(field_init) - 2;
@@ -192,8 +192,8 @@ const Parse = struct {
             } else if (mem.eql(u8, field_name, "name")) {
                 p.name = try parseName(p, field_init);
                 have_name = true;
-            } else if (mem.eql(u8, field_name, "nonce")) {
-                nonce = try parseNonce(p, field_init);
+            } else if (mem.eql(u8, field_name, "fingerprint")) {
+                fingerprint = try parseFingerprint(p, field_init);
             } else if (mem.eql(u8, field_name, "version")) {
                 p.version_node = field_init;
                 const version_text = try parseString(p, field_init);
@@ -220,16 +220,16 @@ const Parse = struct {
         if (!have_name) {
             try appendError(p, main_token, "missing top-level 'name' field", .{});
         } else {
-            if (nonce) |n| {
+            if (fingerprint) |n| {
                 if (!n.validate(p.name)) {
-                    return fail(p, main_token, "invalid nonce: 0x{x}; if this is a new or forked package, use this value: 0x{x}", .{
-                        n.int(), Package.Nonce.generate(p.name).int(),
+                    return fail(p, main_token, "invalid fingerprint: 0x{x}; if this is a new or forked package, use this value: 0x{x}", .{
+                        n.int(), Package.Fingerprint.generate(p.name).int(),
                     });
                 }
                 p.id = n.id;
-            } else if (!p.allow_missing_nonce) {
-                try appendError(p, main_token, "missing top-level 'nonce' field; suggested value: 0x{x}", .{
-                    Package.Nonce.generate(p.name).int(),
+            } else if (!p.allow_missing_fingerprint) {
+                try appendError(p, main_token, "missing top-level 'fingerprint' field; suggested value: 0x{x}", .{
+                    Package.Fingerprint.generate(p.name).int(),
                 });
             } else {
                 p.id = 0;
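The branch above is where the `Fingerprint.validate` check from earlier in this diff is applied to a parsed manifest: a declared value whose checksum no longer matches the package name is rejected, and a freshly generated value is suggested instead. A rough sketch of that flow (illustrative only; the declared value and package name are made up, and it assumes the same `Fingerprint` type is in scope):

const std = @import("std");

test "mismatched fingerprints are rejected and a new one is suggested" {
    // Made-up declared value: checksum 0x11111111, id 0x22222222.
    const declared: Fingerprint = @bitCast(@as(u64, 0x1111111122222222));
    if (!declared.validate("my_pkg")) {
        // This mirrors the parse error path: report the bad value and
        // suggest a regenerated one for this package name.
        const suggested = Fingerprint.generate("my_pkg").int();
        std.debug.print("invalid: 0x{x}, suggested: 0x{x}\n", .{ declared.int(), suggested });
    }
}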
@@ -385,7 +385,7 @@ const Parse = struct {
         }
     }

-    fn parseNonce(p: *Parse, node: Ast.Node.Index) !Package.Nonce {
+    fn parseFingerprint(p: *Parse, node: Ast.Node.Index) !Package.Fingerprint {
        const ast = p.ast;
        const node_tags = ast.nodes.items(.tag);
        const main_tokens = ast.nodes.items(.main_token);
src/main.zig (20 changed lines)
@@ -4752,10 +4752,10 @@ fn cmdInit(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
     };
     var ok_count: usize = 0;

-    const nonce: Package.Nonce = .generate(sanitized_root_name);
+    const fingerprint: Package.Fingerprint = .generate(sanitized_root_name);

     for (template_paths) |template_path| {
-        if (templates.write(arena, fs.cwd(), sanitized_root_name, template_path, nonce)) |_| {
+        if (templates.write(arena, fs.cwd(), sanitized_root_name, template_path, fingerprint)) |_| {
             std.log.info("created {s}", .{template_path});
             ok_count += 1;
         } else |err| switch (err) {
@@ -5225,7 +5225,7 @@ fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
         .job_queue = &job_queue,
         .omit_missing_hash_error = true,
         .allow_missing_paths_field = false,
-        .allow_missing_nonce = false,
+        .allow_missing_fingerprint = false,
         .allow_name_string = false,
         .use_latest_commit = false,
@@ -7127,7 +7127,7 @@ fn cmdFetch(
         .job_queue = &job_queue,
         .omit_missing_hash_error = true,
         .allow_missing_paths_field = false,
-        .allow_missing_nonce = true,
+        .allow_missing_fingerprint = true,
         .allow_name_string = true,
         .use_latest_commit = true,
@@ -7468,10 +7468,10 @@ fn loadManifest(
         0,
     ) catch |err| switch (err) {
         error.FileNotFound => {
-            const nonce: Package.Nonce = .generate(options.root_name);
+            const fingerprint: Package.Fingerprint = .generate(options.root_name);
             var templates = findTemplates(gpa, arena);
             defer templates.deinit();
-            templates.write(arena, options.dir, options.root_name, Package.Manifest.basename, nonce) catch |e| {
+            templates.write(arena, options.dir, options.root_name, Package.Manifest.basename, fingerprint) catch |e| {
                 fatal("unable to write {s}: {s}", .{
                     Package.Manifest.basename, @errorName(e),
                 });
@@ -7529,7 +7529,7 @@ const Templates = struct {
         out_dir: fs.Dir,
         root_name: []const u8,
         template_path: []const u8,
-        nonce: Package.Nonce,
+        fingerprint: Package.Fingerprint,
     ) !void {
         if (fs.path.dirname(template_path)) |dirname| {
             out_dir.makePath(dirname) catch |err| {
@@ -7555,9 +7555,9 @@ const Templates = struct {
                 try templates.buffer.appendSlice(root_name);
                 i += ".NAME".len;
                 continue;
-            } else if (std.mem.startsWith(u8, contents[i..], ".NONCE")) {
-                try templates.buffer.writer().print("0x{x}", .{nonce.int()});
-                i += ".NONCE".len;
+            } else if (std.mem.startsWith(u8, contents[i..], ".FINGERPRINT")) {
+                try templates.buffer.writer().print("0x{x}", .{fingerprint.int()});
+                i += ".FINGERPRINT".len;
                 continue;
             } else if (std.mem.startsWith(u8, contents[i..], ".ZIGVER")) {
                 try templates.buffer.appendSlice(build_options.version);