coerce vectors to arrays rather than inline for

Andrew Kelley 2025-09-05 15:00:30 -07:00
parent 9b74651cd2
commit 5ec0a7d8a5
8 changed files with 72 additions and 116 deletions
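
The pattern applied in each file below is the same: wherever std previously walked a @Vector element by element with a comptime-unrolled inline for, the value is now coerced once to a fixed-size array (const array: [vector.len]vector.child = value;) and handled by the ordinary runtime array code path. A minimal sketch of the coercion, using an invented helper name that is not part of this commit:

    const std = @import("std");

    // Hypothetical illustration: a @Vector coerces to an array of the same
    // length and child type, so a plain runtime `for` can replace an
    // `inline for` over indices.
    fn sumElements(v: anytype) u32 {
        const info = @typeInfo(@TypeOf(v)).vector;
        const array: [info.len]info.child = v;
        var total: u32 = 0;
        for (array) |elem| total += elem;
        return total;
    }

    test "vector coerced to array" {
        const v: @Vector(4, u32) = .{ 1, 2, 3, 4 };
        try std.testing.expectEqual(@as(u32, 10), sumElements(v));
    }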


@@ -1370,19 +1370,12 @@ pub fn printValue(
         },
         .array => {
             if (!is_any) @compileError("cannot format array without a specifier (i.e. {s} or {any})");
-            if (max_depth == 0) return w.writeAll("{ ... }");
-            try w.writeAll("{ ");
-            for (value, 0..) |elem, i| {
-                try w.printValue(fmt, options, elem, max_depth - 1);
-                if (i < value.len - 1) {
-                    try w.writeAll(", ");
-                }
-            }
-            try w.writeAll(" }");
+            return printArray(w, fmt, options, &value, max_depth);
         },
-        .vector => {
+        .vector => |vector| {
             if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
-            return printVector(w, fmt, options, value, max_depth);
+            const array: [vector.len]vector.child = value;
+            return printArray(w, fmt, options, &array, max_depth);
         },
         .@"fn" => @compileError("unable to format function body type, use '*const " ++ @typeName(T) ++ "' for a function pointer type"),
         .type => {
@@ -1436,12 +1429,25 @@ pub fn printVector(
     value: anytype,
     max_depth: usize,
 ) Error!void {
-    const len = @typeInfo(@TypeOf(value)).vector.len;
+    const vector = @typeInfo(@TypeOf(value)).vector;
+    const array: [vector.len]vector.child = value;
+    return printArray(w, fmt, options, &array, max_depth);
+}
+
+pub fn printArray(
+    w: *Writer,
+    comptime fmt: []const u8,
+    options: std.fmt.Options,
+    ptr_to_array: anytype,
+    max_depth: usize,
+) Error!void {
     if (max_depth == 0) return w.writeAll("{ ... }");
     try w.writeAll("{ ");
-    inline for (0..len) |i| {
-        try w.printValue(fmt, options, value[i], max_depth - 1);
-        if (i < len - 1) try w.writeAll(", ");
+    for (ptr_to_array, 0..) |elem, i| {
+        try w.printValue(fmt, options, elem, max_depth - 1);
+        if (i < ptr_to_array.len - 1) {
+            try w.writeAll(", ");
+        }
     }
     try w.writeAll(" }");
 }
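
Hedged usage sketch (not part of the diff): with the .vector case above coercing to an array, a vector formatted with the {any} specifier is rendered by printArray, using the same "{ ", ", ", " }" separators it writes for arrays. Assuming std.fmt.bufPrint as the entry point:

    const std = @import("std");

    test "vector formats like an array" {
        var buf: [64]u8 = undefined;
        const s = try std.fmt.bufPrint(&buf, "{any}", .{@as(@Vector(4, u8), .{ 1, 2, 3, 4 })});
        try std.testing.expectEqualStrings("{ 1, 2, 3, 4 }", s);
    }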


@@ -215,7 +215,7 @@ fn ChaChaVecImpl(comptime rounds_nb: usize, comptime degree: comptime_int) type
             }
         }
 
-        fn hashToBytes(comptime dm: usize, out: *[64 * dm]u8, x: BlockVec) void {
+        fn hashToBytes(comptime dm: usize, out: *[64 * dm]u8, x: *const BlockVec) void {
             inline for (0..dm) |d| {
                 for (0..4) |i| {
                     mem.writeInt(u32, out[64 * d + 16 * i + 0 ..][0..4], x[i][0 + 4 * d], .little);
@@ -242,7 +242,7 @@ fn ChaChaVecImpl(comptime rounds_nb: usize, comptime degree: comptime_int) type
                 while (degree >= d and i + 64 * d <= in.len) : (i += 64 * d) {
                     chacha20Core(x[0..], ctx);
                     contextFeedback(&x, ctx);
-                    hashToBytes(d, buf[0 .. 64 * d], x);
+                    hashToBytes(d, buf[0 .. 64 * d], &x);
 
                     var xout = out[i..];
                     const xin = in[i..];
@@ -266,7 +266,7 @@ fn ChaChaVecImpl(comptime rounds_nb: usize, comptime degree: comptime_int) type
             if (i < in.len) {
                 chacha20Core(x[0..], ctx);
                 contextFeedback(&x, ctx);
-                hashToBytes(1, buf[0..64], x);
+                hashToBytes(1, buf[0..64], &x);
 
                 var xout = out[i..];
                 const xin = in[i..];
@@ -284,7 +284,7 @@ fn ChaChaVecImpl(comptime rounds_nb: usize, comptime degree: comptime_int) type
                 while (degree >= d and i + 64 * d <= out.len) : (i += 64 * d) {
                     chacha20Core(x[0..], ctx);
                     contextFeedback(&x, ctx);
-                    hashToBytes(d, out[i..][0 .. 64 * d], x);
+                    hashToBytes(d, out[i..][0 .. 64 * d], &x);
                     inline for (0..d) |d_| {
                         if (count64) {
                             const next = @addWithOverflow(ctx[3][4 * d_], d);
@@ -301,7 +301,7 @@ fn ChaChaVecImpl(comptime rounds_nb: usize, comptime degree: comptime_int) type
                 contextFeedback(&x, ctx);
 
                 var buf: [64]u8 = undefined;
-                hashToBytes(1, buf[0..], x);
+                hashToBytes(1, buf[0..], &x);
                 @memcpy(out[i..], buf[0 .. out.len - i]);
             }
         }
@@ -394,7 +394,7 @@ fn ChaChaNonVecImpl(comptime rounds_nb: usize) type {
             }
         }
 
-        fn hashToBytes(out: *[64]u8, x: BlockVec) void {
+        fn hashToBytes(out: *[64]u8, x: *const BlockVec) void {
             for (0..4) |i| {
                 mem.writeInt(u32, out[16 * i + 0 ..][0..4], x[i * 4 + 0], .little);
                 mem.writeInt(u32, out[16 * i + 4 ..][0..4], x[i * 4 + 1], .little);
@@ -417,7 +417,7 @@ fn ChaChaNonVecImpl(comptime rounds_nb: usize) type {
             while (i + 64 <= in.len) : (i += 64) {
                 chacha20Core(x[0..], ctx);
                 contextFeedback(&x, ctx);
-                hashToBytes(buf[0..], x);
+                hashToBytes(buf[0..], &x);
 
                 var xout = out[i..];
                 const xin = in[i..];
@@ -438,7 +438,7 @@ fn ChaChaNonVecImpl(comptime rounds_nb: usize) type {
             if (i < in.len) {
                 chacha20Core(x[0..], ctx);
                 contextFeedback(&x, ctx);
-                hashToBytes(buf[0..], x);
+                hashToBytes(buf[0..], &x);
 
                 var xout = out[i..];
                 const xin = in[i..];
@@ -455,7 +455,7 @@ fn ChaChaNonVecImpl(comptime rounds_nb: usize) type {
             while (i + 64 <= out.len) : (i += 64) {
                 chacha20Core(x[0..], ctx);
                 contextFeedback(&x, ctx);
-                hashToBytes(out[i..][0..64], x);
+                hashToBytes(out[i..][0..64], &x);
                 if (count64) {
                     const next = @addWithOverflow(ctx[12], 1);
                     ctx[12] = next[0];
@@ -469,7 +469,7 @@ fn ChaChaNonVecImpl(comptime rounds_nb: usize) type {
                 contextFeedback(&x, ctx);
 
                 var buf: [64]u8 = undefined;
-                hashToBytes(buf[0..], x);
+                hashToBytes(buf[0..], &x);
                 @memcpy(out[i..], buf[0 .. out.len - i]);
             }
         }
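
The only change in this file is the hashToBytes parameter switching from BlockVec by value to *const BlockVec, with call sites passing &x; the function bodies are untouched because a pointer to an array (or to an array of vectors) can be indexed directly. A tiny hedged aside on that idiom, using an invented Block type rather than the file's real BlockVec:

    // Illustration only; Block stands in for the file's BlockVec.
    const Block = [4]@Vector(4, u32);

    fn firstLane(x: *const Block) u32 {
        return x[0][0]; // single-item pointers to arrays support indexing
    }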


@@ -440,10 +440,11 @@ pub fn innerParse(
             }
         },
-        .vector => |vecInfo| {
+        .vector => |vector_info| {
             switch (try source.peekNextTokenType()) {
                 .array_begin => {
-                    return internalParseVector(T, vecInfo.child, vecInfo.len, allocator, source, options);
+                    const A = [vector_info.len]vector_info.child;
+                    return try internalParseArray(A, vector_info.child, allocator, source, options);
                 },
                 else => return error.UnexpectedToken,
             }
@@ -535,26 +536,6 @@ fn internalParseArray(
     return r;
 }
 
-fn internalParseVector(
-    comptime T: type,
-    comptime Child: type,
-    comptime len: comptime_int,
-    allocator: Allocator,
-    source: anytype,
-    options: ParseOptions,
-) !T {
-    assert(.array_begin == try source.next());
-
-    var r: T = undefined;
-    inline for (0..len) |i| {
-        r[i] = try innerParse(Child, allocator, source, options);
-    }
-
-    if (.array_end != try source.next()) return error.UnexpectedToken;
-
-    return r;
-}
-
 /// This is an internal function called recursively
 /// during the implementation of `parseFromValueLeaky`.
 /// It is exposed primarily to enable custom `jsonParseFromValue()` methods to call back into the `parseFromValue*` system,
@@ -587,12 +568,12 @@ pub fn innerParseFromValue(
                     if (@round(f) != f) return error.InvalidNumber;
                     if (f > @as(@TypeOf(f), @floatFromInt(std.math.maxInt(T)))) return error.Overflow;
                     if (f < @as(@TypeOf(f), @floatFromInt(std.math.minInt(T)))) return error.Overflow;
-                    return @as(T, @intFromFloat(f));
+                    return @intFromFloat(f);
                 },
                 .integer => |i| {
                     if (i > std.math.maxInt(T)) return error.Overflow;
                     if (i < std.math.minInt(T)) return error.Overflow;
-                    return @as(T, @intCast(i));
+                    return @intCast(i);
                 },
                 .number_string, .string => |s| {
                     return sliceToInt(T, s);
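
With internalParseVector removed, a @Vector target type is parsed by building the [len]child array type and reusing internalParseArray. A hedged round-trip sketch through the public std.json API:

    const std = @import("std");

    test "JSON array parses into a @Vector via the array path" {
        const parsed = try std.json.parseFromSlice(@Vector(3, u8), std.testing.allocator, "[1,2,3]", .{});
        defer parsed.deinit();
        try std.testing.expect(@reduce(.And, parsed.value == @Vector(3, u8){ 1, 2, 3 }));
    }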


@@ -742,12 +742,7 @@ pub fn eql(a: anytype, b: @TypeOf(a)) bool {
                 if (!eql(e, b[i])) return false;
             return true;
         },
-        .vector => |info| {
-            inline for (0..info.len) |i| {
-                if (a[i] != b[i]) return false;
-            }
-            return true;
-        },
+        .vector => return @reduce(.And, a == b),
         .pointer => |info| {
             return switch (info.size) {
                 .one, .many, .c => a == b,
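
The one-line replacement works because == on two vectors produces a vector of bools, and @reduce(.And, ...) folds that into a single bool. The same check in isolation:

    const std = @import("std");

    test "element-wise vector equality via @reduce" {
        const a: @Vector(4, i32) = .{ 1, 2, 3, 4 };
        const b: @Vector(4, i32) = .{ 1, 2, 3, 4 };
        // a == b yields @Vector(4, bool); .And reduces it to one bool.
        try std.testing.expect(@reduce(.And, a == b));
    }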


@@ -135,14 +135,9 @@ fn expectEqualInner(comptime T: type, expected: T, actual: T) !void {
         .array => |array| try expectEqualSlices(array.child, &expected, &actual),
         .vector => |info| {
-            inline for (0..info.len) |i| {
-                if (expected[i] != actual[i]) {
-                    print("index {d} incorrect. expected {any}, found {any}\n", .{
-                        i, expected[i], actual[i],
-                    });
-                    return error.TestExpectedEqual;
-                }
-            }
+            const expect_array: [info.len]info.child = expected;
+            const actual_array: [info.len]info.child = actual;
+            try expectEqualSlices(info.child, &expect_array, &actual_array);
         },
         .@"struct" => |structType| {
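
Coercing both sides to arrays lets expectEqualSlices do the reporting, so a vector mismatch now produces the standard slice diff instead of the hand-rolled per-index message. A hedged usage sketch:

    const std = @import("std");

    test "expectEqual on vectors goes through the slice comparison" {
        const a: @Vector(3, u16) = .{ 1, 2, 3 };
        try std.testing.expectEqual(a, @Vector(3, u16){ 1, 2, 3 });
    }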


@@ -157,13 +157,11 @@ pub fn valueArbitraryDepth(self: *Serializer, val: anytype, options: ValueOption
             }
         },
         .array => {
-            var container = try self.beginTuple(
-                .{ .whitespace_style = .{ .fields = val.len } },
-            );
-            for (val) |item_val| {
-                try container.fieldArbitraryDepth(item_val, options);
-            }
-            try container.end();
+            try valueArbitraryDepthArray(self, @TypeOf(val), &val, options);
+        },
+        .vector => |vector| {
+            const array: [vector.len]vector.child = val;
+            try valueArbitraryDepthArray(self, @TypeOf(array), &array, options);
         },
         .@"struct" => |@"struct"| if (@"struct".is_tuple) {
             var container = try self.beginTuple(
@@ -231,20 +229,21 @@ pub fn valueArbitraryDepth(self: *Serializer, val: anytype, options: ValueOption
         } else {
             try self.writer.writeAll("null");
         },
-        .vector => |vector| {
-            var container = try self.beginTuple(
-                .{ .whitespace_style = .{ .fields = vector.len } },
-            );
-            inline for (0..vector.len) |i| {
-                try container.fieldArbitraryDepth(val[i], options);
-            }
-            try container.end();
-        },
         else => comptime unreachable,
     }
 }
 
+fn valueArbitraryDepthArray(s: *Serializer, comptime A: type, array: *const A, options: ValueOptions) Error!void {
+    var container = try s.beginTuple(
+        .{ .whitespace_style = .{ .fields = array.len } },
+    );
+    for (array) |elem| {
+        try container.fieldArbitraryDepth(elem, options);
+    }
+    try container.end();
+}
+
 /// Serialize an integer.
 pub fn int(self: *Serializer, val: anytype) Error!void {
     try self.writer.printInt(val, 10, .lower, .{});


@@ -430,8 +430,12 @@ pub fn free(gpa: Allocator, value: anytype) void {
                 .many, .c => comptime unreachable,
             }
         },
-        .array => for (value) |item| {
-            free(gpa, item);
+        .array => {
+            freeArray(gpa, @TypeOf(value), &value);
+        },
+        .vector => |vector| {
+            const array: [vector.len]vector.child = value;
+            freeArray(gpa, @TypeOf(array), &array);
         },
         .@"struct" => |@"struct"| inline for (@"struct".fields) |field| {
             free(gpa, @field(value, field.name));
@@ -446,12 +450,15 @@ pub fn free(gpa: Allocator, value: anytype) void {
         .optional => if (value) |some| {
             free(gpa, some);
         },
-        .vector => |vector| inline for (0..vector.len) |i| free(gpa, value[i]),
         .void => {},
         else => comptime unreachable,
     }
 }
 
+fn freeArray(gpa: Allocator, comptime A: type, array: *const A) void {
+    for (array) |elem| free(gpa, elem);
+}
+
 fn requiresAllocator(T: type) bool {
     _ = valid_types;
     return switch (@typeInfo(T)) {
@@ -521,12 +528,15 @@ const Parser = struct {
                 else => comptime unreachable,
             },
             .array => return self.parseArray(T, node),
+            .vector => |vector| {
+                const A = [vector.len]vector.child;
+                return try self.parseArray(A, node);
+            },
             .@"struct" => |@"struct"| if (@"struct".is_tuple)
                 return self.parseTuple(T, node)
             else
                 return self.parseStruct(T, node),
             .@"union" => return self.parseUnion(T, node),
-            .vector => return self.parseVector(T, node),
             else => comptime unreachable,
         }
@@ -999,33 +1009,6 @@ const Parser = struct {
         }
     }
 
-    fn parseVector(self: *@This(), T: type, node: Zoir.Node.Index) !T {
-        const vector_info = @typeInfo(T).vector;
-
-        const nodes: Zoir.Node.Index.Range = switch (node.get(self.zoir)) {
-            .array_literal => |nodes| nodes,
-            .empty_literal => .{ .start = node, .len = 0 },
-            else => return error.WrongType,
-        };
-
-        var result: T = undefined;
-
-        if (nodes.len != vector_info.len) {
-            return self.failNodeFmt(
-                node,
-                "expected {} vector elements; found {}",
-                .{ vector_info.len, nodes.len },
-            );
-        }
-
-        inline for (0..vector_info.len) |i| {
-            errdefer inline for (0..i) |j| free(self.gpa, result[j]);
-            result[i] = try self.parseExpr(vector_info.child, nodes.at(@intCast(i)));
-        }
-
-        return result;
-    }
-
     fn failTokenFmt(
         self: @This(),
         token: Ast.TokenIndex,
@@ -3206,7 +3189,7 @@ test "std.zon vector" {
         fromSlice(@Vector(2, f32), gpa, ".{0.5}", &diag, .{}),
     );
     try std.testing.expectFmt(
-        "1:2: error: expected 2 vector elements; found 1\n",
+        "1:2: error: expected 2 array elements; found 1\n",
         "{f}",
         .{diag},
    );
@@ -3221,7 +3204,7 @@ test "std.zon vector" {
         fromSlice(@Vector(2, f32), gpa, ".{0.5, 1.5, 2.5}", &diag, .{}),
    );
     try std.testing.expectFmt(
-        "1:2: error: expected 2 vector elements; found 3\n",
+        "1:13: error: index 2 outside of array of length 2\n",
         "{f}",
         .{diag},
    );
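
The two expected error strings change because @Vector values are now parsed through parseArray, which reports length mismatches in array terms and flags an extra element at its own source location. A hedged sketch of the success path, assuming the fromSlice signature shown in the tests above accepts a null diagnostics argument:

    const std = @import("std");

    test "ZON tuple parses into a @Vector via the array path" {
        const v = try std.zon.parse.fromSlice(@Vector(2, f32), std.testing.allocator, ".{ 0.5, 1.5 }", null, .{});
        try std.testing.expect(@reduce(.And, v == @Vector(2, f32){ 0.5, 1.5 }));
    }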


@@ -139,10 +139,7 @@ fn expectVectorsEqual(a: anytype, b: anytype) !void {
     const len_a = @typeInfo(@TypeOf(a)).vector.len;
     const len_b = @typeInfo(@TypeOf(b)).vector.len;
     try expect(len_a == len_b);
-
-    inline for (0..len_a) |i| {
-        try expect(a[i] == b[i]);
-    }
+    try expect(@reduce(.And, a == b));
 }
 
 test "@ctz" {