Add some new fuzz tests.

This commit is contained in:
Kendall Condon 2025-05-01 16:56:54 -04:00
parent e66b269333
commit b905c65661
3 changed files with 48 additions and 14 deletions

View file

@ -517,3 +517,20 @@ test isNumberFormattedLikeAnInteger {
try std.testing.expect(!isNumberFormattedLikeAnInteger("1e10"));
try std.testing.expect(!isNumberFormattedLikeAnInteger("1E10"));
}
// Registers fuzzTestOne as a fuzz entry point; the fuzzer supplies
// arbitrary byte slices as `input`.
test "fuzz" {
try std.testing.fuzz({}, fuzzTestOne, .{});
}
/// Fuzz driver: scans arbitrary bytes as complete input and checks that
/// the number of emitted tokens never exceeds the input length.
fn fuzzTestOne(_: void, input: []const u8) !void {
    // All scanner allocations come out of this fixed stack buffer; if the
    // scanner needs more than this, next() errors and we bail out below.
    var backing: [16384]u8 = undefined;
    var fixed: std.heap.FixedBufferAllocator = .init(&backing);
    var scanner = Scanner.initCompleteInput(fixed.allocator(), input);
    // Property: there are at most input.len tokens.
    var token_count: usize = 0;
    while (true) {
        // A scan error on fuzz input is expected; just stop.
        const token = scanner.next() catch return;
        if (token == .end_of_document) break;
        token_count += 1;
        if (token_count > input.len) return error.Overflow;
    }
}

View file

@ -6451,3 +6451,19 @@ fn testError(source: [:0]const u8, expected_errors: []const Error) !void {
try std.testing.expectEqual(expected, tree.errors[i].tag);
}
}
// Registers fuzzTestOneParse as a fuzz entry point; the fuzzer supplies
// arbitrary byte slices as `input`.
test "fuzz ast parse" {
try std.testing.fuzz({}, fuzzTestOneParse, .{});
}
/// Fuzz driver for Ast.parse: byte 0 selects the parse mode (even -> .zig,
/// odd -> .zon) and the remaining bytes are the source text.
fn fuzzTestOneParse(_: void, input: []const u8) !void {
    // Need at least the mode byte.
    if (input.len == 0) return;
    const mode: std.zig.Ast.Mode = switch (input[0] & 1) {
        0 => .zig,
        else => .zon,
    };
    const source_bytes = input[1..];
    var fba: std.heap.FixedBufferAllocator = .init(&fixed_buffer_mem);
    const gpa = fba.allocator();
    // An input too large for the fixed buffer simply doesn't fit; skip it.
    const source = gpa.dupeZ(u8, source_bytes) catch return;
    // Parse failures (including OOM inside parse) are expected on fuzz input.
    _ = std.zig.Ast.parse(gpa, source, mode) catch return;
}

View file

@ -1721,10 +1721,14 @@ fn testTokenize(source: [:0]const u8, expected_token_tags: []const Token.Tag) !v
try std.testing.expectEqual(source.len, last_token.loc.end);
}
fn testPropertiesUpheld(context: void, source: []const u8) anyerror!void {
_ = context;
const source0 = try std.testing.allocator.dupeZ(u8, source);
defer std.testing.allocator.free(source0);
fn testPropertiesUpheld(_: void, source: []const u8) !void {
var source0_buf: [512]u8 = undefined;
if (source.len + 1 > source0_buf.len)
return;
@memcpy(source0_buf[0..source.len], source);
source0_buf[source.len] = 0;
const source0 = source0_buf[0..source.len :0];
var tokenizer = Tokenizer.init(source0);
var tokenization_failed = false;
while (true) {
@ -1750,18 +1754,15 @@ fn testPropertiesUpheld(context: void, source: []const u8) anyerror!void {
}
}
if (source0.len > 0) for (source0, source0[1..][0..source0.len]) |cur, next| {
if (tokenization_failed) return;
for (source0) |cur| {
// Property: No null byte allowed except at end.
if (cur == 0) {
try std.testing.expect(tokenization_failed);
return error.TestUnexpectedResult;
}
// Property: No ASCII control characters other than \n and \t are allowed.
if (std.ascii.isControl(cur) and cur != '\n' and cur != '\t') {
try std.testing.expect(tokenization_failed);
// Property: No ASCII control characters other than \n, \t, and \r are allowed.
if (std.ascii.isControl(cur) and cur != '\n' and cur != '\t' and cur != '\r') {
return error.TestUnexpectedResult;
}
// Property: All '\r' must be followed by '\n'.
if (cur == '\r' and next != '\n') {
try std.testing.expect(tokenization_failed);
}
};
}
}