std: Support user-provided jsonParse method. Unify json.Parser and json.parse* (#15705)

parent 423d7b848b
commit 32cb9462ff

11 changed files with 471 additions and 696 deletions
lib/std/json.zig

@@ -1,19 +1,73 @@
 //! JSON parsing and stringification conforming to RFC 8259. https://datatracker.ietf.org/doc/html/rfc8259
 //!
-//! The low-level `Scanner` API reads from an input slice or successive slices of inputs,
+//! The low-level `Scanner` API produces `Token`s from an input slice or successive slices of inputs,
 //! The `Reader` API connects a `std.io.Reader` to a `Scanner`.
 //!
-//! The high-level `parseFromSlice` and `parseFromTokenSource` deserializes a JSON document into a Zig type.
-//! The high-level `Parser` parses any JSON document into a dynamically typed `ValueTree` that has its own memory arena.
+//! The high-level `parseFromSlice` and `parseFromTokenSource` deserialize a JSON document into a Zig type.
+//! Parse into a dynamically-typed `Value` to load any JSON value for runtime inspection.
 //!
 //! The low-level `writeStream` emits syntax-conformant JSON tokens to a `std.io.Writer`.
-//! The high-level `stringify` serializes a Zig type into JSON.
+//! The high-level `stringify` serializes a Zig or `Value` type into JSON.
+
+const testing = @import("std").testing;
+const ArrayList = @import("std").ArrayList;
+
+test Scanner {
+    var scanner = Scanner.initCompleteInput(testing.allocator, "{\"foo\": 123}\n");
+    defer scanner.deinit();
+    try testing.expectEqual(Token.object_begin, try scanner.next());
+    try testing.expectEqualSlices(u8, "foo", (try scanner.next()).string);
+    try testing.expectEqualSlices(u8, "123", (try scanner.next()).number);
+    try testing.expectEqual(Token.object_end, try scanner.next());
+    try testing.expectEqual(Token.end_of_document, try scanner.next());
+}
+
+test parseFromSlice {
+    var parsed_str = try parseFromSlice([]const u8, testing.allocator, "\"a\\u0020b\"", .{});
+    defer parsed_str.deinit();
+    try testing.expectEqualSlices(u8, "a b", parsed_str.value);
+
+    const T = struct { a: i32 = -1, b: [2]u8 };
+    var parsed_struct = try parseFromSlice(T, testing.allocator, "{\"b\":\"xy\"}", .{});
+    defer parsed_struct.deinit();
+    try testing.expectEqual(@as(i32, -1), parsed_struct.value.a); // default value
+    try testing.expectEqualSlices(u8, "xy", parsed_struct.value.b[0..]);
+}
+
+test Value {
+    var parsed = try parseFromSlice(Value, testing.allocator, "{\"anything\": \"goes\"}", .{});
+    defer parsed.deinit();
+    try testing.expectEqualSlices(u8, "goes", parsed.value.object.get("anything").?.string);
+}
+
+test writeStream {
+    var out = ArrayList(u8).init(testing.allocator);
+    defer out.deinit();
+    var write_stream = writeStream(out.writer(), 99);
+    try write_stream.beginObject();
+    try write_stream.objectField("foo");
+    try write_stream.emitNumber(123);
+    try write_stream.endObject();
+    const expected =
+        \\{
+        \\ "foo": 123
+        \\}
+    ;
+    try testing.expectEqualSlices(u8, expected, out.items);
+}
+
+test stringify {
+    var out = ArrayList(u8).init(testing.allocator);
+    defer out.deinit();
+
+    const T = struct { a: i32, b: []const u8 };
+    try stringify(T{ .a = 123, .b = "xy" }, .{}, out.writer());
+    try testing.expectEqualSlices(u8, "{\"a\":123,\"b\":\"xy\"}", out.items);
+}
+
-pub const ValueTree = @import("json/dynamic.zig").ValueTree;
 pub const ObjectMap = @import("json/dynamic.zig").ObjectMap;
 pub const Array = @import("json/dynamic.zig").Array;
 pub const Value = @import("json/dynamic.zig").Value;
-pub const Parser = @import("json/dynamic.zig").Parser;

 pub const validate = @import("json/scanner.zig").validate;
 pub const Error = @import("json/scanner.zig").Error;
@@ -30,9 +84,11 @@ pub const isNumberFormattedLikeAnInteger = @import("json/scanner.zig").isNumberF

 pub const ParseOptions = @import("json/static.zig").ParseOptions;
 pub const parseFromSlice = @import("json/static.zig").parseFromSlice;
+pub const parseFromSliceLeaky = @import("json/static.zig").parseFromSliceLeaky;
 pub const parseFromTokenSource = @import("json/static.zig").parseFromTokenSource;
+pub const parseFromTokenSourceLeaky = @import("json/static.zig").parseFromTokenSourceLeaky;
 pub const ParseError = @import("json/static.zig").ParseError;
-pub const parseFree = @import("json/static.zig").parseFree;
+pub const Parsed = @import("json/static.zig").Parsed;

 pub const StringifyOptions = @import("json/stringify.zig").StringifyOptions;
 pub const encodeJsonString = @import("json/stringify.zig").encodeJsonString;
@@ -45,6 +101,9 @@ pub const writeStream = @import("json/write_stream.zig").writeStream;

 // Deprecations
 pub const parse = @compileError("Deprecated; use parseFromSlice() or parseFromTokenSource() instead.");
+pub const parseFree = @compileError("Deprecated; call Parsed(T).deinit() instead.");
+pub const Parser = @compileError("Deprecated; use parseFromSlice(Value) or parseFromTokenSource(Value) instead.");
+pub const ValueTree = @compileError("Deprecated; use Parsed(Value) instead.");
 pub const StreamingParser = @compileError("Deprecated; use json.Scanner or json.Reader instead.");
 pub const TokenStream = @compileError("Deprecated; use json.Scanner or json.Reader instead.");
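
Note: for code migrating off the removed Parser/ValueTree API, a minimal before/after sketch (the `input` parameter is a placeholder, not from this diff):

const std = @import("std");

// Before this commit (now a compile error):
//   var parser = std.json.Parser.init(allocator, .alloc_if_needed);
//   defer parser.deinit();
//   var tree = try parser.parse(input);
//   defer tree.deinit();
//   const root = tree.root;

// After this commit:
fn migrated(allocator: std.mem.Allocator, input: []const u8) !void {
    var parsed = try std.json.parseFromSlice(std.json.Value, allocator, input, .{});
    defer parsed.deinit(); // one call frees the arena owning every nested Value
    const root = parsed.value;
    _ = root;
}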

lib/std/json/dynamic.zig
@@ -8,26 +8,20 @@ const Allocator = std.mem.Allocator;
 const StringifyOptions = @import("./stringify.zig").StringifyOptions;
 const stringify = @import("./stringify.zig").stringify;

+const ParseOptions = @import("./static.zig").ParseOptions;
+const ParseError = @import("./static.zig").ParseError;
+
 const JsonScanner = @import("./scanner.zig").Scanner;
 const AllocWhen = @import("./scanner.zig").AllocWhen;
 const Token = @import("./scanner.zig").Token;
 const isNumberFormattedLikeAnInteger = @import("./scanner.zig").isNumberFormattedLikeAnInteger;

-pub const ValueTree = struct {
-    arena: *ArenaAllocator,
-    root: Value,
-
-    pub fn deinit(self: *ValueTree) void {
-        self.arena.deinit();
-        self.arena.child_allocator.destroy(self.arena);
-    }
-};
-
 pub const ObjectMap = StringArrayHashMap(Value);
 pub const Array = ArrayList(Value);

-/// Represents a JSON value
-/// Currently only supports numbers that fit into i64 or f64.
+/// Represents any JSON value, potentially containing other JSON values.
+/// A .float value may be an approximation of the original value.
+/// Arbitrary precision numbers can be represented by .number_string values.
 pub const Value = union(enum) {
     null,
     bool: bool,
@@ -38,6 +32,33 @@ pub const Value = union(enum) {
     array: Array,
     object: ObjectMap,

+    pub fn parseFromNumberSlice(s: []const u8) Value {
+        if (!isNumberFormattedLikeAnInteger(s)) {
+            const f = std.fmt.parseFloat(f64, s) catch unreachable;
+            if (std.math.isFinite(f)) {
+                return Value{ .float = f };
+            } else {
+                return Value{ .number_string = s };
+            }
+        }
+        if (std.fmt.parseInt(i64, s, 10)) |i| {
+            return Value{ .integer = i };
+        } else |e| {
+            switch (e) {
+                error.Overflow => return Value{ .number_string = s },
+                error.InvalidCharacter => unreachable,
+            }
+        }
+    }
+
+    pub fn dump(self: Value) void {
+        std.debug.getStderrMutex().lock();
+        defer std.debug.getStderrMutex().unlock();
+
+        const stderr = std.io.getStdErr().writer();
+        stringify(self, .{}, stderr) catch return;
+    }
+
     pub fn jsonStringify(
         value: @This(),
         options: StringifyOptions,
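
Note: `parseFromNumberSlice` picks the variant from the token's shape: integer-looking text that fits an i64 becomes .integer, other finite numbers become .float, and anything that would lose information (i64 overflow, non-finite float) is kept verbatim as .number_string. A small illustrative test (hypothetical, not part of the diff):

const std = @import("std");
const Value = std.json.Value;

test "number classification sketch" {
    // Fits in i64 -> .integer
    try std.testing.expectEqual(@as(i64, 123), Value.parseFromNumberSlice("123").integer);
    // Has a fraction -> .float
    try std.testing.expectEqual(@as(f64, 1.5), Value.parseFromNumberSlice("1.5").float);
    // Integer too big for i64 -> kept verbatim as .number_string
    try std.testing.expectEqualStrings(
        "123456789123456789123456789",
        Value.parseFromNumberSlice("123456789123456789123456789").number_string,
    );
}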

@@ -80,265 +101,99 @@ pub const Value = union(enum) {
         }
     }

-    pub fn dump(self: Value) void {
-        std.debug.getStderrMutex().lock();
-        defer std.debug.getStderrMutex().unlock();
-
-        const stderr = std.io.getStdErr().writer();
-        stringify(self, .{}, stderr) catch return;
-    }
-};
-
-/// A non-stream JSON parser which constructs a tree of Value's.
-pub const Parser = struct {
-    allocator: Allocator,
-    state: State,
-    alloc_when: AllocWhen,
-    // Stores parent nodes and un-combined Values.
-    stack: Array,
-
-    const State = enum {
-        object_key,
-        object_value,
-        array_value,
-        simple,
-    };
-
-    pub fn init(allocator: Allocator, alloc_when: AllocWhen) Parser {
-        return Parser{
-            .allocator = allocator,
-            .state = .simple,
-            .alloc_when = alloc_when,
-            .stack = Array.init(allocator),
-        };
-    }
-
-    pub fn deinit(p: *Parser) void {
-        p.stack.deinit();
-    }
-
-    pub fn reset(p: *Parser) void {
-        p.state = .simple;
-        p.stack.shrinkRetainingCapacity(0);
-    }
-
-    pub fn parse(p: *Parser, input: []const u8) !ValueTree {
-        var scanner = JsonScanner.initCompleteInput(p.allocator, input);
-        defer scanner.deinit();
-
-        var arena = try p.allocator.create(ArenaAllocator);
-        errdefer p.allocator.destroy(arena);
-
-        arena.* = ArenaAllocator.init(p.allocator);
-        errdefer arena.deinit();
-
-        const allocator = arena.allocator();
-
-        while (true) {
-            const token = try scanner.nextAlloc(allocator, p.alloc_when);
-            if (token == .end_of_document) break;
-            try p.transition(allocator, token);
-        }
-
-        debug.assert(p.stack.items.len == 1);
-
-        return ValueTree{
-            .arena = arena,
-            .root = p.stack.items[0],
-        };
-    }
-
-    // Even though p.allocator exists, we take an explicit allocator so that allocation state
-    // can be cleaned up on error correctly during a `parse` on call.
-    fn transition(p: *Parser, allocator: Allocator, token: Token) !void {
-        switch (p.state) {
-            .object_key => switch (token) {
-                .object_end => {
-                    if (p.stack.items.len == 1) {
-                        return;
-                    }
-
-                    var value = p.stack.pop();
-                    try p.pushToParent(&value);
-                },
-                .string => |s| {
-                    try p.stack.append(Value{ .string = s });
-                    p.state = .object_value;
-                },
-                .allocated_string => |s| {
-                    try p.stack.append(Value{ .string = s });
-                    p.state = .object_value;
-                },
-                else => unreachable,
-            },
-            .object_value => {
-                var object = &p.stack.items[p.stack.items.len - 2].object;
-                var key = p.stack.items[p.stack.items.len - 1].string;
-
-                switch (token) {
-                    .object_begin => {
-                        try p.stack.append(Value{ .object = ObjectMap.init(allocator) });
-                        p.state = .object_key;
-                    },
-                    .array_begin => {
-                        try p.stack.append(Value{ .array = Array.init(allocator) });
-                        p.state = .array_value;
-                    },
-                    .string => |s| {
-                        try object.put(key, Value{ .string = s });
-                        _ = p.stack.pop();
-                        p.state = .object_key;
-                    },
-                    .allocated_string => |s| {
-                        try object.put(key, Value{ .string = s });
-                        _ = p.stack.pop();
-                        p.state = .object_key;
-                    },
-                    .number => |slice| {
-                        try object.put(key, try p.parseNumber(slice));
-                        _ = p.stack.pop();
-                        p.state = .object_key;
-                    },
-                    .allocated_number => |slice| {
-                        try object.put(key, try p.parseNumber(slice));
-                        _ = p.stack.pop();
-                        p.state = .object_key;
-                    },
-                    .true => {
-                        try object.put(key, Value{ .bool = true });
-                        _ = p.stack.pop();
-                        p.state = .object_key;
-                    },
-                    .false => {
-                        try object.put(key, Value{ .bool = false });
-                        _ = p.stack.pop();
-                        p.state = .object_key;
-                    },
-                    .null => {
-                        try object.put(key, .null);
-                        _ = p.stack.pop();
-                        p.state = .object_key;
-                    },
-                    .object_end, .array_end, .end_of_document => unreachable,
-                    .partial_number, .partial_string, .partial_string_escaped_1, .partial_string_escaped_2, .partial_string_escaped_3, .partial_string_escaped_4 => unreachable,
-                }
-            },
-            .array_value => {
-                var array = &p.stack.items[p.stack.items.len - 1].array;
-
-                switch (token) {
-                    .array_end => {
-                        if (p.stack.items.len == 1) {
-                            return;
-                        }
-
-                        var value = p.stack.pop();
-                        try p.pushToParent(&value);
-                    },
-                    .object_begin => {
-                        try p.stack.append(Value{ .object = ObjectMap.init(allocator) });
-                        p.state = .object_key;
-                    },
-                    .array_begin => {
-                        try p.stack.append(Value{ .array = Array.init(allocator) });
-                        p.state = .array_value;
-                    },
-                    .string => |s| {
-                        try array.append(Value{ .string = s });
-                    },
-                    .allocated_string => |s| {
-                        try array.append(Value{ .string = s });
-                    },
-                    .number => |slice| {
-                        try array.append(try p.parseNumber(slice));
-                    },
-                    .allocated_number => |slice| {
-                        try array.append(try p.parseNumber(slice));
-                    },
-                    .true => {
-                        try array.append(Value{ .bool = true });
-                    },
-                    .false => {
-                        try array.append(Value{ .bool = false });
-                    },
-                    .null => {
-                        try array.append(.null);
-                    },
-                    .object_end, .end_of_document => unreachable,
-                    .partial_number, .partial_string, .partial_string_escaped_1, .partial_string_escaped_2, .partial_string_escaped_3, .partial_string_escaped_4 => unreachable,
-                }
-            },
-            .simple => switch (token) {
-                .object_begin => {
-                    try p.stack.append(Value{ .object = ObjectMap.init(allocator) });
-                    p.state = .object_key;
-                },
-                .array_begin => {
-                    try p.stack.append(Value{ .array = Array.init(allocator) });
-                    p.state = .array_value;
-                },
-                .string => |s| {
-                    try p.stack.append(Value{ .string = s });
-                },
-                .allocated_string => |s| {
-                    try p.stack.append(Value{ .string = s });
-                },
-                .number => |slice| {
-                    try p.stack.append(try p.parseNumber(slice));
-                },
-                .allocated_number => |slice| {
-                    try p.stack.append(try p.parseNumber(slice));
-                },
-                .true => {
-                    try p.stack.append(Value{ .bool = true });
-                },
-                .false => {
-                    try p.stack.append(Value{ .bool = false });
-                },
-                .null => {
-                    try p.stack.append(.null);
-                },
-                .object_end, .array_end, .end_of_document => unreachable,
-                .partial_number, .partial_string, .partial_string_escaped_1, .partial_string_escaped_2, .partial_string_escaped_3, .partial_string_escaped_4 => unreachable,
-            },
-        }
-    }
-
-    fn pushToParent(p: *Parser, value: *const Value) !void {
-        switch (p.stack.items[p.stack.items.len - 1]) {
-            // Object Parent -> [ ..., object, <key>, value ]
-            .string => |key| {
-                _ = p.stack.pop();
-
-                var object = &p.stack.items[p.stack.items.len - 1].object;
-                try object.put(key, value.*);
-                p.state = .object_key;
-            },
-            // Array Parent -> [ ..., <array>, value ]
-            .array => |*array| {
-                try array.append(value.*);
-                p.state = .array_value;
-            },
-            else => {
-                unreachable;
-            },
-        }
-    }
-
-    fn parseNumber(p: *Parser, slice: []const u8) !Value {
-        _ = p;
-        return if (isNumberFormattedLikeAnInteger(slice))
-            Value{
-                .integer = std.fmt.parseInt(i64, slice, 10) catch |e| switch (e) {
-                    error.Overflow => return Value{ .number_string = slice },
-                    error.InvalidCharacter => |err| return err,
-                },
-            }
-        else
-            Value{ .float = try std.fmt.parseFloat(f64, slice) };
-    }
-};
+    pub fn jsonParse(allocator: Allocator, source: anytype, options: ParseOptions) ParseError(@TypeOf(source.*))!@This() {
+        _ = options;
+        // The grammar of the stack is:
+        // (.array | .object .string)*
+        var stack = Array.init(allocator);
+        defer stack.deinit();
+
+        while (true) {
+            // Assert the stack grammar at the top of the stack.
+            debug.assert(stack.items.len == 0 or
+                stack.items[stack.items.len - 1] == .array or
+                (stack.items[stack.items.len - 2] == .object and stack.items[stack.items.len - 1] == .string));
+
+            switch (try source.nextAlloc(allocator, .alloc_if_needed)) {
+                inline .string, .allocated_string => |s| {
+                    return try handleCompleteValue(&stack, allocator, source, Value{ .string = s }) orelse continue;
+                },
+                inline .number, .allocated_number => |slice| {
+                    return try handleCompleteValue(&stack, allocator, source, Value.parseFromNumberSlice(slice)) orelse continue;
+                },
+                .null => return try handleCompleteValue(&stack, allocator, source, .null) orelse continue,
+                .true => return try handleCompleteValue(&stack, allocator, source, Value{ .bool = true }) orelse continue,
+                .false => return try handleCompleteValue(&stack, allocator, source, Value{ .bool = false }) orelse continue,
+
+                .object_begin => {
+                    switch (try source.nextAlloc(allocator, .alloc_if_needed)) {
+                        .object_end => return try handleCompleteValue(&stack, allocator, source, Value{ .object = ObjectMap.init(allocator) }) orelse continue,
+                        inline .string, .allocated_string => |key| {
+                            try stack.appendSlice(&[_]Value{
+                                Value{ .object = ObjectMap.init(allocator) },
+                                Value{ .string = key },
+                            });
+                        },
+                        else => unreachable,
+                    }
+                },
+                .array_begin => {
+                    try stack.append(Value{ .array = Array.init(allocator) });
+                },
+                .array_end => return try handleCompleteValue(&stack, allocator, source, stack.pop()) orelse continue,
+                else => unreachable,
+            }
+        }
+    }
+};
+
+fn handleCompleteValue(stack: *Array, allocator: Allocator, source: anytype, value_: Value) !?Value {
+    if (stack.items.len == 0) return value_;
+    var value = value_;
+    while (true) {
+        // Assert the stack grammar at the top of the stack.
+        debug.assert(stack.items[stack.items.len - 1] == .array or
+            (stack.items[stack.items.len - 2] == .object and stack.items[stack.items.len - 1] == .string));
+        switch (stack.items[stack.items.len - 1]) {
+            .string => |key| {
+                // stack: [..., .object, .string]
+                _ = stack.pop();
+
+                // stack: [..., .object]
+                var object = &stack.items[stack.items.len - 1].object;
+                try object.put(key, value);
+
+                // This is an invalid state to leave the stack in,
+                // so we have to process the next token before we return.
+                switch (try source.nextAlloc(allocator, .alloc_if_needed)) {
+                    .object_end => {
+                        // This object is complete.
+                        value = stack.pop();
+                        // Effectively recurse now that we have a complete value.
+                        if (stack.items.len == 0) return value;
+                        continue;
+                    },
+                    inline .string, .allocated_string => |next_key| {
+                        // We've got another key.
+                        try stack.append(Value{ .string = next_key });
+                        // stack: [..., .object, .string]
+                        return null;
+                    },
+                    else => unreachable,
+                }
+            },
+            .array => |*array| {
+                // stack: [..., .array]
+                try array.append(value);
+                return null;
+            },
+            else => unreachable,
+        }
+    }
+}
+
 test {
     _ = @import("dynamic_test.zig");
 }
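
Note: to make the `(.array | .object .string)*` stack grammar concrete, the comments in this hypothetical test (not part of the diff) trace `Value.jsonParse` over a small document:

const std = @import("std");

test "stack grammar walkthrough" {
    // Parsing {"a":[1]} drives Value.jsonParse through these stack states:
    //   .object_begin, key "a" -> stack: [.object, .string "a"]
    //   .array_begin           -> stack: [.object, .string "a", .array]
    //   .number "1"            -> appended to the open array (stack unchanged)
    //   .array_end             -> array popped; handleCompleteValue stores it
    //                             under "a", then .object_end completes the root.
    var parsed = try std.json.parseFromSlice(std.json.Value, std.testing.allocator, "{\"a\":[1]}", .{});
    defer parsed.deinit();
    try std.testing.expectEqual(@as(i64, 1), parsed.value.object.get("a").?.array.items[0].integer);
}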

lib/std/json/dynamic_test.zig
@@ -5,12 +5,14 @@ const testing = std.testing;
 const ObjectMap = @import("dynamic.zig").ObjectMap;
 const Array = @import("dynamic.zig").Array;
 const Value = @import("dynamic.zig").Value;
-const Parser = @import("dynamic.zig").Parser;
+const parseFromSlice = @import("static.zig").parseFromSlice;
+const parseFromSliceLeaky = @import("static.zig").parseFromSliceLeaky;
+const parseFromTokenSource = @import("static.zig").parseFromTokenSource;
+
+const jsonReader = @import("scanner.zig").reader;

 test "json.parser.dynamic" {
-    var p = Parser.init(testing.allocator, .alloc_if_needed);
-    defer p.deinit();
-
     const s =
         \\{
         \\ "Image": {

@@ -31,10 +33,10 @@ test "json.parser.dynamic" {
         \\}
     ;

-    var tree = try p.parse(s);
-    defer tree.deinit();
+    var parsed = try parseFromSlice(Value, testing.allocator, s, .{});
+    defer parsed.deinit();

-    var root = tree.root;
+    var root = parsed.value;

     var image = root.object.get("Image").?;

@@ -98,22 +100,22 @@ test "write json then parse it" {

     try jw.endObject();

-    var parser = Parser.init(testing.allocator, .alloc_if_needed);
-    defer parser.deinit();
-    var tree = try parser.parse(fixed_buffer_stream.getWritten());
-    defer tree.deinit();
+    fixed_buffer_stream = std.io.fixedBufferStream(fixed_buffer_stream.getWritten());
+    var json_reader = jsonReader(testing.allocator, fixed_buffer_stream.reader());
+    defer json_reader.deinit();
+    var parsed = try parseFromTokenSource(Value, testing.allocator, &json_reader, .{});
+    defer parsed.deinit();

-    try testing.expect(tree.root.object.get("f").?.bool == false);
-    try testing.expect(tree.root.object.get("t").?.bool == true);
-    try testing.expect(tree.root.object.get("int").?.integer == 1234);
-    try testing.expect(tree.root.object.get("array").?.array.items[0].null == {});
-    try testing.expect(tree.root.object.get("array").?.array.items[1].float == 12.34);
-    try testing.expect(mem.eql(u8, tree.root.object.get("str").?.string, "hello"));
+    try testing.expect(parsed.value.object.get("f").?.bool == false);
+    try testing.expect(parsed.value.object.get("t").?.bool == true);
+    try testing.expect(parsed.value.object.get("int").?.integer == 1234);
+    try testing.expect(parsed.value.object.get("array").?.array.items[0].null == {});
+    try testing.expect(parsed.value.object.get("array").?.array.items[1].float == 12.34);
+    try testing.expect(mem.eql(u8, parsed.value.object.get("str").?.string, "hello"));
 }

-fn testParse(arena_allocator: std.mem.Allocator, json_str: []const u8) !Value {
-    var p = Parser.init(arena_allocator, .alloc_if_needed);
-    return (try p.parse(json_str)).root;
+fn testParse(allocator: std.mem.Allocator, json_str: []const u8) !Value {
+    return parseFromSliceLeaky(Value, allocator, json_str, .{});
 }

 test "parsing empty string gives appropriate error" {

@@ -122,22 +124,16 @@ test "parsing empty string gives appropriate error" {
     try testing.expectError(error.UnexpectedEndOfInput, testParse(arena_allocator.allocator(), ""));
 }

-test "parse tree should not contain dangling pointers" {
-    var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
-    defer arena_allocator.deinit();
-
-    var p = Parser.init(arena_allocator.allocator(), .alloc_if_needed);
-    defer p.deinit();
-
-    var tree = try p.parse("[]");
-    defer tree.deinit();
+test "Value.array allocator should still be usable after parsing" {
+    var parsed = try parseFromSlice(Value, std.testing.allocator, "[]", .{});
+    defer parsed.deinit();

     // Allocation should succeed
     var i: usize = 0;
     while (i < 100) : (i += 1) {
-        try tree.root.array.append(Value{ .integer = 100 });
+        try parsed.value.array.append(Value{ .integer = 100 });
     }
-    try testing.expectEqual(tree.root.array.items.len, 100);
+    try testing.expectEqual(parsed.value.array.items.len, 100);
 }

 test "integer after float has proper type" {

@@ -184,45 +180,6 @@ test "escaped characters" {
     try testing.expectEqualSlices(u8, obj.get("surrogatepair").?.string, "😂");
 }

-test "string copy option" {
-    const input =
-        \\{
-        \\ "noescape": "aą😂",
-        \\ "simple": "\\\/\n\r\t\f\b\"",
-        \\ "unicode": "\u0105",
-        \\ "surrogatepair": "\ud83d\ude02"
-        \\}
-    ;
-
-    var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
-    defer arena_allocator.deinit();
-    const allocator = arena_allocator.allocator();
-
-    var parser = Parser.init(allocator, .alloc_if_needed);
-    const tree_nocopy = try parser.parse(input);
-    const obj_nocopy = tree_nocopy.root.object;
-
-    parser = Parser.init(allocator, .alloc_always);
-    const tree_copy = try parser.parse(input);
-    const obj_copy = tree_copy.root.object;
-
-    for ([_][]const u8{ "noescape", "simple", "unicode", "surrogatepair" }) |field_name| {
-        try testing.expectEqualSlices(u8, obj_nocopy.get(field_name).?.string, obj_copy.get(field_name).?.string);
-    }
-
-    const nocopy_addr = &obj_nocopy.get("noescape").?.string[0];
-    const copy_addr = &obj_copy.get("noescape").?.string[0];
-
-    var found_nocopy = false;
-    for (input, 0..) |_, index| {
-        try testing.expect(copy_addr != &input[index]);
-        if (nocopy_addr == &input[index]) {
-            found_nocopy = true;
-        }
-    }
-    try testing.expect(found_nocopy);
-}
-
 test "Value.jsonStringify" {
     {
         var buffer: [10]u8 = undefined;

lib/std/json/static.zig

@@ -1,6 +1,7 @@
 const std = @import("std");
 const assert = std.debug.assert;
 const Allocator = std.mem.Allocator;
+const ArenaAllocator = std.heap.ArenaAllocator;
 const ArrayList = std.ArrayList;

 const Scanner = @import("./scanner.zig").Scanner;

@@ -27,27 +28,79 @@ pub const ParseOptions = struct {
     max_value_len: ?usize = null,
 };

-/// Parses the json document from s and returns the result.
-/// The provided allocator is used both for temporary allocations during parsing the document,
-/// and also to allocate any pointer values in the return type.
-/// If T contains any pointers, free the memory with `std.json.parseFree`.
+pub fn Parsed(comptime T: type) type {
+    return struct {
+        arena: *ArenaAllocator,
+        value: T,
+
+        pub fn deinit(self: @This()) void {
+            const allocator = self.arena.child_allocator;
+            self.arena.deinit();
+            allocator.destroy(self.arena);
+        }
+    };
+}
+
+/// Parses the json document from `s` and returns the result packaged in a `std.json.Parsed`.
+/// You must call `deinit()` of the returned object to clean up allocated resources.
 /// Note that `error.BufferUnderrun` is not actually possible to return from this function.
-pub fn parseFromSlice(comptime T: type, allocator: Allocator, s: []const u8, options: ParseOptions) ParseError(T, Scanner)!T {
+pub fn parseFromSlice(
+    comptime T: type,
+    allocator: Allocator,
+    s: []const u8,
+    options: ParseOptions,
+) ParseError(Scanner)!Parsed(T) {
     var scanner = Scanner.initCompleteInput(allocator, s);
     defer scanner.deinit();

     return parseFromTokenSource(T, allocator, &scanner, options);
 }

+/// Parses the json document from `s` and returns the result.
+/// Allocations made during this operation are not carefully tracked and may not be possible to individually clean up.
+/// It is recommended to use a `std.heap.ArenaAllocator` or similar.
+pub fn parseFromSliceLeaky(
+    comptime T: type,
+    allocator: Allocator,
+    s: []const u8,
+    options: ParseOptions,
+) ParseError(Scanner)!T {
+    var scanner = Scanner.initCompleteInput(allocator, s);
+    defer scanner.deinit();
+
+    return parseFromTokenSourceLeaky(T, allocator, &scanner, options);
+}
+
 /// `scanner_or_reader` must be either a `*std.json.Scanner` with complete input or a `*std.json.Reader`.
-/// allocator is used to allocate the data of T if necessary,
-/// such as if T is `*u32` or `[]u32`.
-/// If T contains any pointers, free the memory with `std.json.parseFree`.
-/// If T contains no pointers, the allocator may sometimes be used for temporary allocations,
-/// but no call to `std.json.parseFree` will be necessary;
-/// all temporary allocations will be freed before this function returns.
 /// Note that `error.BufferUnderrun` is not actually possible to return from this function.
-pub fn parseFromTokenSource(comptime T: type, allocator: Allocator, scanner_or_reader: anytype, options: ParseOptions) ParseError(T, @TypeOf(scanner_or_reader.*))!T {
+pub fn parseFromTokenSource(
+    comptime T: type,
+    allocator: Allocator,
+    scanner_or_reader: anytype,
+    options: ParseOptions,
+) ParseError(@TypeOf(scanner_or_reader.*))!Parsed(T) {
+    var parsed = Parsed(T){
+        .arena = try allocator.create(ArenaAllocator),
+        .value = undefined,
+    };
+    errdefer allocator.destroy(parsed.arena);
+    parsed.arena.* = ArenaAllocator.init(allocator);
+    errdefer parsed.arena.deinit();
+
+    parsed.value = try parseFromTokenSourceLeaky(T, parsed.arena.allocator(), scanner_or_reader, options);
+
+    return parsed;
+}
+
+/// `scanner_or_reader` must be either a `*std.json.Scanner` with complete input or a `*std.json.Reader`.
+/// Allocations made during this operation are not carefully tracked and may not be possible to individually clean up.
+/// It is recommended to use a `std.heap.ArenaAllocator` or similar.
+pub fn parseFromTokenSourceLeaky(
+    comptime T: type,
+    allocator: Allocator,
+    scanner_or_reader: anytype,
+    options: ParseOptions,
+) ParseError(@TypeOf(scanner_or_reader.*))!T {
     if (@TypeOf(scanner_or_reader.*) == Scanner) {
         assert(scanner_or_reader.is_end_of_input);
     }
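
Note: the arena ownership contract of `Parsed(T)` versus the caller-managed `Leaky` variants can be seen in a small usage sketch (the `Config` type is hypothetical, not from this diff):

const std = @import("std");

const Config = struct { name: []const u8, port: u16 };

fn loadConfig(gpa: std.mem.Allocator) !void {
    // parseFromSlice: the result owns an arena; one deinit frees everything.
    const parsed = try std.json.parseFromSlice(Config, gpa, "{\"name\":\"srv\",\"port\":8080}", .{});
    defer parsed.deinit();
    std.debug.print("{s}:{d}\n", .{ parsed.value.name, parsed.value.port });

    // parseFromSliceLeaky: the caller supplies an arena and frees it wholesale.
    var arena = std.heap.ArenaAllocator.init(gpa);
    defer arena.deinit();
    const config = try std.json.parseFromSliceLeaky(Config, arena.allocator(), "{\"name\":\"srv\",\"port\":8080}", .{});
    _ = config;
}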

@@ -61,80 +114,30 @@ pub fn parseFromTokenSource(comptime T: type, allocator: Allocator, scanner_or_r
         }
     }

-    const r = try parseInternal(T, allocator, scanner_or_reader, resolved_options);
-    errdefer parseFree(T, allocator, r);
+    const value = try parseInternal(T, allocator, scanner_or_reader, resolved_options);

     assert(.end_of_document == try scanner_or_reader.next());

-    return r;
+    return value;
 }

-/// The error set that will be returned from parsing T from *Source.
-/// Note that this may contain error.BufferUnderrun, but that error will never actually be returned.
-pub fn ParseError(comptime T: type, comptime Source: type) type {
-    // `inferred_types` is used to avoid infinite recursion for recursive type definitions.
-    const inferred_types = [_]type{};
-    // A few of these will either always be present or present enough of the time that
-    // omitting them is more confusing than always including them.
-    return error{UnexpectedToken} || Source.NextError || Source.PeekError ||
-        ParseInternalErrorImpl(T, Source, &inferred_types);
-}
-
-fn ParseInternalErrorImpl(comptime T: type, comptime Source: type, comptime inferred_types: []const type) type {
-    for (inferred_types) |ty| {
-        if (T == ty) return error{};
-    }
-
-    switch (@typeInfo(T)) {
-        .Bool => return error{},
-        .Float, .ComptimeFloat => return Source.AllocError || std.fmt.ParseFloatError,
-        .Int, .ComptimeInt => {
-            return Source.AllocError || error{ InvalidNumber, Overflow } ||
-                std.fmt.ParseIntError || std.fmt.ParseFloatError;
-        },
-        .Optional => |optional_info| return ParseInternalErrorImpl(optional_info.child, Source, inferred_types ++ [_]type{T}),
-        .Enum => return Source.AllocError || error{InvalidEnumTag},
-        .Union => |unionInfo| {
-            if (unionInfo.tag_type) |_| {
-                var errors = Source.AllocError || error{UnknownField};
-                for (unionInfo.fields) |u_field| {
-                    errors = errors || ParseInternalErrorImpl(u_field.type, Source, inferred_types ++ [_]type{T});
-                }
-                return errors;
-            } else {
-                @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
-            }
-        },
-        .Struct => |structInfo| {
-            var errors = Scanner.AllocError || error{
-                DuplicateField,
-                UnknownField,
-                MissingField,
-            };
-            for (structInfo.fields) |field| {
-                errors = errors || ParseInternalErrorImpl(field.type, Source, inferred_types ++ [_]type{T});
-            }
-            return errors;
-        },
-        .Array => |arrayInfo| {
-            return error{LengthMismatch} ||
-                ParseInternalErrorImpl(arrayInfo.child, Source, inferred_types ++ [_]type{T});
-        },
-        .Vector => |vecInfo| {
-            return error{LengthMismatch} ||
-                ParseInternalErrorImpl(vecInfo.child, Source, inferred_types ++ [_]type{T});
-        },
-        .Pointer => |ptrInfo| {
-            switch (ptrInfo.size) {
-                .One, .Slice => {
-                    return ParseInternalErrorImpl(ptrInfo.child, Source, inferred_types ++ [_]type{T});
-                },
-                else => @compileError("Unable to parse into type '" ++ @typeName(T) ++ "'"),
-            }
-        },
-        else => return error{},
-    }
-    unreachable;
+/// The error set that will be returned when parsing from `*Source`.
+/// Note that this may contain `error.BufferUnderrun`, but that error will never actually be returned.
+pub fn ParseError(comptime Source: type) type {
+    // A few of these will either always be present or present enough of the time that
+    // omitting them is more confusing than always including them.
+    return error{
+        UnexpectedToken,
+        InvalidNumber,
+        Overflow,
+        InvalidEnumTag,
+        DuplicateField,
+        UnknownField,
+        MissingField,
+        LengthMismatch,
+    } ||
+        std.fmt.ParseIntError || std.fmt.ParseFloatError ||
+        Source.NextError || Source.PeekError || Source.AllocError;
 }

 fn parseInternal(
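
Note: because the error set no longer depends on `T`, it can be spelled out without threading the target type through every signature; a hypothetical sketch:

const std = @import("std");

// The error set is now a function of the token source alone.
fn parseU32(allocator: std.mem.Allocator, s: []const u8) std.json.ParseError(std.json.Scanner)!u32 {
    return std.json.parseFromSliceLeaky(u32, allocator, s, .{});
}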

@@ -142,7 +145,7 @@ fn parseInternal(
     allocator: Allocator,
     source: anytype,
     options: ParseOptions,
-) ParseError(T, @TypeOf(source.*))!T {
+) ParseError(@TypeOf(source.*))!T {
     switch (@typeInfo(T)) {
         .Bool => {
             return switch (try source.next()) {

@@ -155,8 +158,7 @@ fn parseInternal(
             const token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
             defer freeAllocated(allocator, token);
             const slice = switch (token) {
-                .number, .string => |slice| slice,
-                .allocated_number, .allocated_string => |slice| slice,
+                inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                 else => return error.UnexpectedToken,
             };
             return try std.fmt.parseFloat(T, slice);

@@ -165,8 +167,7 @@ fn parseInternal(
             const token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
             defer freeAllocated(allocator, token);
             const slice = switch (token) {
-                .number, .string => |slice| slice,
-                .allocated_number, .allocated_string => |slice| slice,
+                inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                 else => return error.UnexpectedToken,
             };
             if (isNumberFormattedLikeAnInteger(slice))

@@ -189,11 +190,14 @@ fn parseInternal(
             }
         },
         .Enum => |enumInfo| {
+            if (comptime std.meta.trait.hasFn("jsonParse")(T)) {
+                return T.jsonParse(allocator, source, options);
+            }
+
             const token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
             defer freeAllocated(allocator, token);
             const slice = switch (token) {
-                .number, .string => |slice| slice,
-                .allocated_number, .allocated_string => |slice| slice,
+                inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                 else => return error.UnexpectedToken,
             };
             // Check for a named value.

@@ -204,30 +208,18 @@ fn parseInternal(
             return try std.meta.intToEnum(T, n);
         },
         .Union => |unionInfo| {
-            const UnionTagType = unionInfo.tag_type orelse @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
+            if (comptime std.meta.trait.hasFn("jsonParse")(T)) {
+                return T.jsonParse(allocator, source, options);
+            }
+
+            if (unionInfo.tag_type == null) @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
+
             if (.object_begin != try source.next()) return error.UnexpectedToken;

             var result: ?T = null;
-            errdefer {
-                if (result) |r| {
-                    inline for (unionInfo.fields) |u_field| {
-                        if (r == @field(UnionTagType, u_field.name)) {
-                            parseFree(u_field.type, allocator, @field(r, u_field.name));
-                        }
-                    }
-                }
-            }
-
             var name_token: ?Token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
-            errdefer {
-                if (name_token) |t| {
-                    freeAllocated(allocator, t);
-                }
-            }
             const field_name = switch (name_token.?) {
-                .string => |slice| slice,
-                .allocated_string => |slice| slice,
+                inline .string, .allocated_string => |slice| slice,
                 else => return error.UnexpectedToken,
             };

@@ -265,13 +257,6 @@ fn parseInternal(

             var r: T = undefined;
             var fields_seen: usize = 0;
-            errdefer {
-                inline for (0..structInfo.fields.len) |i| {
-                    if (i < fields_seen) {
-                        parseFree(structInfo.fields[i].type, allocator, r[i]);
-                    }
-                }
-            }
             inline for (0..structInfo.fields.len) |i| {
                 r[i] = try parseInternal(structInfo.fields[i].type, allocator, source, options);
                 fields_seen = i + 1;

@@ -282,29 +267,20 @@ fn parseInternal(
                 return r;
             }

+            if (comptime std.meta.trait.hasFn("jsonParse")(T)) {
+                return T.jsonParse(allocator, source, options);
+            }
+
             if (.object_begin != try source.next()) return error.UnexpectedToken;

             var r: T = undefined;
             var fields_seen = [_]bool{false} ** structInfo.fields.len;
-            errdefer {
-                inline for (structInfo.fields, 0..) |field, i| {
-                    if (fields_seen[i]) {
-                        parseFree(field.type, allocator, @field(r, field.name));
-                    }
-                }
-            }

             while (true) {
                 var name_token: ?Token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
-                errdefer {
-                    if (name_token) |t| {
-                        freeAllocated(allocator, t);
-                    }
-                }
                 const field_name = switch (name_token.?) {
                     .object_end => break, // No more fields.
-                    .string => |slice| slice,
-                    .allocated_string => |slice| slice,
+                    inline .string, .allocated_string => |slice| slice,
                     else => return error.UnexpectedToken,
                 };
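
Note: the `hasFn("jsonParse")` checks above are the hook behind the commit title: a struct, union, or enum may take over its own parsing. A minimal hypothetical sketch (the `Timestamp` type and its wire format are invented for illustration):

const std = @import("std");

// A type that accepts JSON numbers *or* strings for the same field.
const Timestamp = struct {
    millis: i64,

    pub fn jsonParse(
        allocator: std.mem.Allocator,
        source: anytype,
        options: std.json.ParseOptions,
    ) !Timestamp {
        _ = options;
        switch (try source.nextAlloc(allocator, .alloc_if_needed)) {
            inline .number, .allocated_number, .string, .allocated_string => |slice| {
                return .{ .millis = try std.fmt.parseInt(i64, slice, 10) };
            },
            else => return error.UnexpectedToken,
        }
    }
};

test "custom jsonParse sketch" {
    const ts = try std.json.parseFromSliceLeaky(Timestamp, std.testing.allocator, "\"1234\"", .{});
    try std.testing.expectEqual(@as(i64, 1234), ts.millis);
}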

@@ -319,18 +295,13 @@ fn parseInternal(
                 if (fields_seen[i]) {
                     switch (options.duplicate_field_behavior) {
                         .use_first => {
-                            // Parse and then delete the redundant value.
+                            // Parse and ignore the redundant value.
                             // We don't want to skip the value, because we want type checking.
-                            const ignored_value = try parseInternal(field.type, allocator, source, options);
-                            parseFree(field.type, allocator, ignored_value);
+                            _ = try parseInternal(field.type, allocator, source, options);
                             break;
                         },
                         .@"error" => return error.DuplicateField,
-                        .use_last => {
-                            // Delete the stale value. We're about to get a new one.
-                            parseFree(field.type, allocator, @field(r, field.name));
-                            fields_seen[i] = false;
-                        },
+                        .use_last => {},
                     }
                 }
                 @field(r, field.name) = try parseInternal(field.type, allocator, source, options);

@@ -428,7 +399,6 @@ fn parseInternal(
             switch (ptrInfo.size) {
                 .One => {
                     const r: *ptrInfo.child = try allocator.create(ptrInfo.child);
-                    errdefer allocator.destroy(r);
                     r.* = try parseInternal(ptrInfo.child, allocator, source, options);
                     return r;
                 },

@@ -439,13 +409,6 @@ fn parseInternal(

                             // Typical array.
                             var arraylist = ArrayList(ptrInfo.child).init(allocator);
-                            errdefer {
-                                while (arraylist.popOrNull()) |v| {
-                                    parseFree(ptrInfo.child, allocator, v);
-                                }
-                                arraylist.deinit();
-                            }

                             while (true) {
                                 switch (try source.peekNextTokenType()) {
                                     .array_end => {

@@ -473,14 +436,21 @@ fn parseInternal(
                     if (ptrInfo.sentinel) |sentinel_ptr| {
                         // Use our own array list so we can append the sentinel.
                         var value_list = ArrayList(u8).init(allocator);
-                        errdefer value_list.deinit();
                         _ = try source.allocNextIntoArrayList(&value_list, .alloc_always);
                         return try value_list.toOwnedSliceSentinel(@ptrCast(*const u8, sentinel_ptr).*);
                     }
+                    if (ptrInfo.is_const) {
+                        switch (try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?)) {
+                            inline .string, .allocated_string => |slice| return slice,
+                            else => unreachable,
+                        }
+                    } else {
+                        // Have to allocate to get a mutable copy.
                     switch (try source.nextAllocMax(allocator, .alloc_always, options.max_value_len.?)) {
                         .allocated_string => |slice| return slice,
                         else => unreachable,
                     }
+                    }
                 },
                 else => return error.UnexpectedToken,
             }
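
Note: the new `is_const` branch means a `[]const u8` target can borrow unescaped string bytes rather than always copying, while `[]u8` still forces an allocated mutable copy. A hypothetical illustration:

const std = @import("std");

test "const vs mutable string slices" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();

    // []const u8 may point directly at the document bytes when no unescaping is needed.
    const borrowed = try std.json.parseFromSliceLeaky([]const u8, arena.allocator(), "\"plain\"", .{});
    try std.testing.expectEqualStrings("plain", borrowed);

    // []u8 always gets its own mutable buffer.
    const owned = try std.json.parseFromSliceLeaky([]u8, arena.allocator(), "\"plain\"", .{});
    owned[0] = 'P';
    try std.testing.expectEqualStrings("Plain", owned);
}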

@@ -505,13 +475,6 @@ fn parseInternalArray(

     var r: T = undefined;
     var i: usize = 0;
-    errdefer {
-        // Without the len check `r[i]` is not allowed
-        if (len > 0) while (true) : (i -= 1) {
-            parseFree(Child, allocator, r[i]);
-            if (i == 0) break;
-        };
-    }
     while (i < len) : (i += 1) {
         r[i] = try parseInternal(Child, allocator, source, options);
     }

@@ -530,92 +493,6 @@ fn freeAllocated(allocator: Allocator, token: Token) void {
     }
 }

-/// Releases resources created by parseFromSlice() or parseFromTokenSource().
-pub fn parseFree(comptime T: type, allocator: Allocator, value: T) void {
-    switch (@typeInfo(T)) {
-        .Bool, .Float, .ComptimeFloat, .Int, .ComptimeInt, .Enum => {},
-        .Optional => {
-            if (value) |v| {
-                return parseFree(@TypeOf(v), allocator, v);
-            }
-        },
-        .Union => |unionInfo| {
-            if (unionInfo.tag_type) |UnionTagType| {
-                inline for (unionInfo.fields) |u_field| {
-                    if (value == @field(UnionTagType, u_field.name)) {
-                        parseFree(u_field.type, allocator, @field(value, u_field.name));
-                        break;
-                    }
-                }
-            } else {
-                unreachable;
-            }
-        },
-        .Struct => |structInfo| {
-            inline for (structInfo.fields) |field| {
-                var should_free = true;
-                if (field.default_value) |default| {
-                    switch (@typeInfo(field.type)) {
-                        // We must not attempt to free pointers to struct default values
-                        .Pointer => |fieldPtrInfo| {
-                            const field_value = @field(value, field.name);
-                            const field_ptr = switch (fieldPtrInfo.size) {
-                                .One => field_value,
-                                .Slice => field_value.ptr,
-                                else => unreachable, // Other pointer types are not parseable
-                            };
-                            const field_addr = @ptrToInt(field_ptr);
-
-                            const casted_default = @ptrCast(*const field.type, @alignCast(@alignOf(field.type), default)).*;
-                            const default_ptr = switch (fieldPtrInfo.size) {
-                                .One => casted_default,
-                                .Slice => casted_default.ptr,
-                                else => unreachable, // Other pointer types are not parseable
-                            };
-                            const default_addr = @ptrToInt(default_ptr);
-
-                            if (field_addr == default_addr) {
-                                should_free = false;
-                            }
-                        },
-                        else => {},
-                    }
-                }
-                if (should_free) {
-                    parseFree(field.type, allocator, @field(value, field.name));
-                }
-            }
-        },
-        .Array => |arrayInfo| {
-            for (value) |v| {
-                parseFree(arrayInfo.child, allocator, v);
-            }
-        },
-        .Vector => |vecInfo| {
-            var i: usize = 0;
-            while (i < vecInfo.len) : (i += 1) {
-                parseFree(vecInfo.child, allocator, value[i]);
-            }
-        },
-        .Pointer => |ptrInfo| {
-            switch (ptrInfo.size) {
-                .One => {
-                    parseFree(ptrInfo.child, allocator, value.*);
-                    allocator.destroy(value);
-                },
-                .Slice => {
-                    for (value) |v| {
-                        parseFree(ptrInfo.child, allocator, v);
-                    }
-                    allocator.free(value);
-                },
-                else => unreachable,
-            }
-        },
-        else => unreachable,
-    }
-}
-
 test {
     _ = @import("./static_test.zig");
 }

lib/std/json/static_test.zig

@@ -1,29 +1,31 @@
 const std = @import("std");
 const testing = std.testing;
+const ArenaAllocator = std.heap.ArenaAllocator;

 const parseFromSlice = @import("./static.zig").parseFromSlice;
+const parseFromSliceLeaky = @import("./static.zig").parseFromSliceLeaky;
 const parseFromTokenSource = @import("./static.zig").parseFromTokenSource;
-const parseFree = @import("./static.zig").parseFree;
+const parseFromTokenSourceLeaky = @import("./static.zig").parseFromTokenSourceLeaky;
 const ParseOptions = @import("./static.zig").ParseOptions;
 const JsonScanner = @import("./scanner.zig").Scanner;
 const jsonReader = @import("./scanner.zig").reader;

 test "parse" {
-    try testing.expectEqual(false, try parseFromSlice(bool, testing.allocator, "false", .{}));
-    try testing.expectEqual(true, try parseFromSlice(bool, testing.allocator, "true", .{}));
-    try testing.expectEqual(@as(u1, 1), try parseFromSlice(u1, testing.allocator, "1", .{}));
-    try testing.expectError(error.Overflow, parseFromSlice(u1, testing.allocator, "50", .{}));
-    try testing.expectEqual(@as(u64, 42), try parseFromSlice(u64, testing.allocator, "42", .{}));
-    try testing.expectEqual(@as(f64, 42), try parseFromSlice(f64, testing.allocator, "42.0", .{}));
-    try testing.expectEqual(@as(?bool, null), try parseFromSlice(?bool, testing.allocator, "null", .{}));
-    try testing.expectEqual(@as(?bool, true), try parseFromSlice(?bool, testing.allocator, "true", .{}));
+    try testing.expectEqual(false, try parseFromSliceLeaky(bool, testing.allocator, "false", .{}));
+    try testing.expectEqual(true, try parseFromSliceLeaky(bool, testing.allocator, "true", .{}));
+    try testing.expectEqual(@as(u1, 1), try parseFromSliceLeaky(u1, testing.allocator, "1", .{}));
+    try testing.expectError(error.Overflow, parseFromSliceLeaky(u1, testing.allocator, "50", .{}));
+    try testing.expectEqual(@as(u64, 42), try parseFromSliceLeaky(u64, testing.allocator, "42", .{}));
+    try testing.expectEqual(@as(f64, 42), try parseFromSliceLeaky(f64, testing.allocator, "42.0", .{}));
+    try testing.expectEqual(@as(?bool, null), try parseFromSliceLeaky(?bool, testing.allocator, "null", .{}));
+    try testing.expectEqual(@as(?bool, true), try parseFromSliceLeaky(?bool, testing.allocator, "true", .{}));

-    try testing.expectEqual(@as([3]u8, "foo".*), try parseFromSlice([3]u8, testing.allocator, "\"foo\"", .{}));
-    try testing.expectEqual(@as([3]u8, "foo".*), try parseFromSlice([3]u8, testing.allocator, "[102, 111, 111]", .{}));
-    try testing.expectEqual(@as([0]u8, undefined), try parseFromSlice([0]u8, testing.allocator, "[]", .{}));
+    try testing.expectEqual(@as([3]u8, "foo".*), try parseFromSliceLeaky([3]u8, testing.allocator, "\"foo\"", .{}));
+    try testing.expectEqual(@as([3]u8, "foo".*), try parseFromSliceLeaky([3]u8, testing.allocator, "[102, 111, 111]", .{}));
+    try testing.expectEqual(@as([0]u8, undefined), try parseFromSliceLeaky([0]u8, testing.allocator, "[]", .{}));

-    try testing.expectEqual(@as(u64, 12345678901234567890), try parseFromSlice(u64, testing.allocator, "\"12345678901234567890\"", .{}));
-    try testing.expectEqual(@as(f64, 123.456), try parseFromSlice(f64, testing.allocator, "\"123.456\"", .{}));
+    try testing.expectEqual(@as(u64, 12345678901234567890), try parseFromSliceLeaky(u64, testing.allocator, "\"12345678901234567890\"", .{}));
+    try testing.expectEqual(@as(f64, 123.456), try parseFromSliceLeaky(f64, testing.allocator, "\"123.456\"", .{}));
 }

 test "parse into enum" {
@ -32,37 +34,37 @@ test "parse into enum" {
|
||||||
Bar,
|
Bar,
|
||||||
@"with\\escape",
|
@"with\\escape",
|
||||||
};
|
};
|
||||||
try testing.expectEqual(@as(T, .Foo), try parseFromSlice(T, testing.allocator, "\"Foo\"", .{}));
|
try testing.expectEqual(@as(T, .Foo), try parseFromSliceLeaky(T, testing.allocator, "\"Foo\"", .{}));
|
||||||
try testing.expectEqual(@as(T, .Foo), try parseFromSlice(T, testing.allocator, "42", .{}));
|
try testing.expectEqual(@as(T, .Foo), try parseFromSliceLeaky(T, testing.allocator, "42", .{}));
|
||||||
try testing.expectEqual(@as(T, .@"with\\escape"), try parseFromSlice(T, testing.allocator, "\"with\\\\escape\"", .{}));
|
try testing.expectEqual(@as(T, .@"with\\escape"), try parseFromSliceLeaky(T, testing.allocator, "\"with\\\\escape\"", .{}));
|
||||||
try testing.expectError(error.InvalidEnumTag, parseFromSlice(T, testing.allocator, "5", .{}));
|
try testing.expectError(error.InvalidEnumTag, parseFromSliceLeaky(T, testing.allocator, "5", .{}));
|
||||||
try testing.expectError(error.InvalidEnumTag, parseFromSlice(T, testing.allocator, "\"Qux\"", .{}));
|
try testing.expectError(error.InvalidEnumTag, parseFromSliceLeaky(T, testing.allocator, "\"Qux\"", .{}));
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parse into that allocates a slice" {
|
test "parse into that allocates a slice" {
|
||||||
{
|
{
|
||||||
// string as string
|
// string as string
|
||||||
const r = try parseFromSlice([]u8, testing.allocator, "\"foo\"", .{});
|
const parsed = try parseFromSlice([]u8, testing.allocator, "\"foo\"", .{});
|
||||||
defer parseFree([]u8, testing.allocator, r);
|
defer parsed.deinit();
|
||||||
try testing.expectEqualSlices(u8, "foo", r);
|
try testing.expectEqualSlices(u8, "foo", parsed.value);
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
// string as array of u8 integers
|
// string as array of u8 integers
|
||||||
const r = try parseFromSlice([]u8, testing.allocator, "[102, 111, 111]", .{});
|
const parsed = try parseFromSlice([]u8, testing.allocator, "[102, 111, 111]", .{});
|
||||||
defer parseFree([]u8, testing.allocator, r);
|
defer parsed.deinit();
|
||||||
try testing.expectEqualSlices(u8, "foo", r);
|
try testing.expectEqualSlices(u8, "foo", parsed.value);
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
const r = try parseFromSlice([]u8, testing.allocator, "\"with\\\\escape\"", .{});
|
const parsed = try parseFromSlice([]u8, testing.allocator, "\"with\\\\escape\"", .{});
|
||||||
defer parseFree([]u8, testing.allocator, r);
|
defer parsed.deinit();
|
||||||
try testing.expectEqualSlices(u8, "with\\escape", r);
|
try testing.expectEqualSlices(u8, "with\\escape", parsed.value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parse into sentinel slice" {
|
test "parse into sentinel slice" {
|
||||||
const result = try parseFromSlice([:0]const u8, testing.allocator, "\"\\n\"", .{});
|
const parsed = try parseFromSlice([:0]const u8, testing.allocator, "\"\\n\"", .{});
|
||||||
defer parseFree([:0]const u8, testing.allocator, result);
|
defer parsed.deinit();
|
||||||
try testing.expect(std.mem.eql(u8, result, "\n"));
|
try testing.expect(std.mem.eql(u8, parsed.value, "\n"));
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parse into tagged union" {
|
test "parse into tagged union" {
|
||||||
|
|
@ -72,9 +74,12 @@ test "parse into tagged union" {
|
||||||
float: f64,
|
float: f64,
|
||||||
string: []const u8,
|
string: []const u8,
|
||||||
};
|
};
|
||||||
try testing.expectEqual(T{ .float = 1.5 }, try parseFromSlice(T, testing.allocator, "{\"float\":1.5}", .{}));
|
try testing.expectEqual(T{ .float = 1.5 }, try parseFromSliceLeaky(T, testing.allocator, "{\"float\":1.5}", .{}));
|
||||||
try testing.expectEqual(T{ .int = 1 }, try parseFromSlice(T, testing.allocator, "{\"int\":1}", .{}));
|
try testing.expectEqual(T{ .int = 1 }, try parseFromSliceLeaky(T, testing.allocator, "{\"int\":1}", .{}));
|
||||||
try testing.expectEqual(T{ .nothing = {} }, try parseFromSlice(T, testing.allocator, "{\"nothing\":{}}", .{}));
|
try testing.expectEqual(T{ .nothing = {} }, try parseFromSliceLeaky(T, testing.allocator, "{\"nothing\":{}}", .{}));
|
||||||
|
const parsed = try parseFromSlice(T, testing.allocator, "{\"string\":\"foo\"}", .{});
|
||||||
|
defer parsed.deinit();
|
||||||
|
try testing.expectEqualSlices(u8, "foo", parsed.value.string);
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parse into tagged union errors" {
|
test "parse into tagged union errors" {
|
||||||
|
|
@ -84,42 +89,36 @@ test "parse into tagged union errors" {
|
||||||
float: f64,
|
float: f64,
|
||||||
string: []const u8,
|
string: []const u8,
|
||||||
};
|
};
|
||||||
try testing.expectError(error.UnexpectedToken, parseFromSlice(T, testing.allocator, "42", .{}));
|
var arena = ArenaAllocator.init(testing.allocator);
|
||||||
try testing.expectError(error.UnexpectedToken, parseFromSlice(T, testing.allocator, "{}", .{}));
|
defer arena.deinit();
|
||||||
try testing.expectError(error.UnknownField, parseFromSlice(T, testing.allocator, "{\"bogus\":1}", .{}));
|
try testing.expectError(error.UnexpectedToken, parseFromSliceLeaky(T, arena.allocator(), "42", .{}));
|
||||||
try testing.expectError(error.UnexpectedToken, parseFromSlice(T, testing.allocator, "{\"int\":1, \"int\":1", .{}));
|
try testing.expectError(error.SyntaxError, parseFromSliceLeaky(T, arena.allocator(), "{\"int\":1} 42", .{}));
|
||||||
try testing.expectError(error.UnexpectedToken, parseFromSlice(T, testing.allocator, "{\"int\":1, \"float\":1.0}", .{}));
|
try testing.expectError(error.UnexpectedToken, parseFromSliceLeaky(T, arena.allocator(), "{}", .{}));
|
||||||
try testing.expectError(error.UnexpectedToken, parseFromSlice(T, testing.allocator, "{\"nothing\":null}", .{}));
|
try testing.expectError(error.UnknownField, parseFromSliceLeaky(T, arena.allocator(), "{\"bogus\":1}", .{}));
|
||||||
try testing.expectError(error.UnexpectedToken, parseFromSlice(T, testing.allocator, "{\"nothing\":{\"no\":0}}", .{}));
|
try testing.expectError(error.UnexpectedToken, parseFromSliceLeaky(T, arena.allocator(), "{\"int\":1, \"int\":1", .{}));
|
||||||
|
try testing.expectError(error.UnexpectedToken, parseFromSliceLeaky(T, arena.allocator(), "{\"int\":1, \"float\":1.0}", .{}));
|
||||||
|
try testing.expectError(error.UnexpectedToken, parseFromSliceLeaky(T, arena.allocator(), "{\"nothing\":null}", .{}));
|
||||||
|
try testing.expectError(error.UnexpectedToken, parseFromSliceLeaky(T, arena.allocator(), "{\"nothing\":{\"no\":0}}", .{}));
|
||||||
|
|
||||||
// Allocator failure
|
// Allocator failure
|
||||||
var fail_alloc = testing.FailingAllocator.init(testing.allocator, 0);
|
var fail_alloc = testing.FailingAllocator.init(testing.allocator, 0);
|
||||||
const failing_allocator = fail_alloc.allocator();
|
try testing.expectError(error.OutOfMemory, parseFromSlice(T, fail_alloc.allocator(), "{\"string\"\"foo\"}", .{}));
|
||||||
try testing.expectError(error.OutOfMemory, parseFromSlice(T, failing_allocator, "{\"string\"\"foo\"}", .{}));
|
|
||||||
}
|
|
||||||
|
|
||||||
test "parseFree descends into tagged union" {
|
|
||||||
const T = union(enum) {
|
|
||||||
nothing,
|
|
||||||
int: i32,
|
|
||||||
float: f64,
|
|
||||||
string: []const u8,
|
|
||||||
};
|
|
||||||
const r = try parseFromSlice(T, testing.allocator, "{\"string\":\"foo\"}", .{});
|
|
||||||
try testing.expectEqualSlices(u8, "foo", r.string);
|
|
||||||
parseFree(T, testing.allocator, r);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parse into struct with no fields" {
|
test "parse into struct with no fields" {
|
||||||
const T = struct {};
|
const T = struct {};
|
||||||
try testing.expectEqual(T{}, try parseFromSlice(T, testing.allocator, "{}", .{}));
|
const parsed = try parseFromSlice(T, testing.allocator, "{}", .{});
|
||||||
|
defer parsed.deinit();
|
||||||
|
try testing.expectEqual(T{}, parsed.value);
|
||||||
}
|
}
|
||||||
|
|
||||||
const test_const_value: usize = 123;
|
const test_const_value: usize = 123;
|
||||||
|
|
||||||
test "parse into struct with default const pointer field" {
|
test "parse into struct with default const pointer field" {
|
||||||
const T = struct { a: *const usize = &test_const_value };
|
const T = struct { a: *const usize = &test_const_value };
|
||||||
try testing.expectEqual(T{}, try parseFromSlice(T, testing.allocator, "{}", .{}));
|
const parsed = try parseFromSlice(T, testing.allocator, "{}", .{});
|
||||||
|
defer parsed.deinit();
|
||||||
|
try testing.expectEqual(T{}, parsed.value);
|
||||||
}
|
}
|
||||||
|
|
||||||
const test_default_usize: usize = 123;
|
const test_default_usize: usize = 123;
|
||||||
|
|
@ -138,10 +137,9 @@ test "freeing parsed structs with pointers to default values" {
|
||||||
str_slice: []const []const u8 = &test_default_str_slice,
|
str_slice: []const []const u8 = &test_default_str_slice,
|
||||||
};
|
};
|
||||||
|
|
||||||
const parsed = try parseFromSlice(T, testing.allocator, "{}", .{});
|
var parsed = try parseFromSlice(T, testing.allocator, "{}", .{});
|
||||||
try testing.expectEqual(T{}, parsed);
|
try testing.expectEqual(T{}, parsed.value);
|
||||||
// This will panic if it tries to free global constants:
|
defer parsed.deinit();
|
||||||
parseFree(T, testing.allocator, parsed);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parse into struct where destination and source lengths mismatch" {
|
test "parse into struct where destination and source lengths mismatch" {
|
||||||
|
|
@ -201,8 +199,9 @@ test "parse into struct with misc fields" {
|
||||||
\\ }
|
\\ }
|
||||||
\\}
|
\\}
|
||||||
;
|
;
|
||||||
const r = try parseFromSlice(T, testing.allocator, document_str, .{});
|
const parsed = try parseFromSlice(T, testing.allocator, document_str, .{});
|
||||||
defer parseFree(T, testing.allocator, r);
|
defer parsed.deinit();
|
||||||
|
const r = &parsed.value;
|
||||||
try testing.expectEqual(@as(i64, 420), r.int);
|
try testing.expectEqual(@as(i64, 420), r.int);
|
||||||
try testing.expectEqual(@as(f64, 3.14), r.float);
|
try testing.expectEqual(@as(f64, 3.14), r.float);
|
||||||
try testing.expectEqual(true, r.@"with\\escape");
|
try testing.expectEqual(true, r.@"with\\escape");
|
||||||
|
|
@ -238,24 +237,20 @@ test "parse into struct with strings and arrays with sentinels" {
|
||||||
\\ "simple_data": [4, 5, 6]
|
\\ "simple_data": [4, 5, 6]
|
||||||
\\}
|
\\}
|
||||||
;
|
;
|
||||||
const r = try parseFromSlice(T, testing.allocator, document_str, .{});
|
const parsed = try parseFromSlice(T, testing.allocator, document_str, .{});
|
||||||
defer parseFree(T, testing.allocator, r);
|
defer parsed.deinit();
|
||||||
|
|
||||||
try testing.expectEqualSentinel(u8, 0, "zig", r.language);
|
try testing.expectEqualSentinel(u8, 0, "zig", parsed.value.language);
|
||||||
|
|
||||||
const data = [_:99]i32{ 1, 2, 3 };
|
const data = [_:99]i32{ 1, 2, 3 };
|
||||||
try testing.expectEqualSentinel(i32, 99, data[0..data.len], r.data);
|
try testing.expectEqualSentinel(i32, 99, data[0..data.len], parsed.value.data);
|
||||||
|
|
||||||
// Make sure that arrays who aren't supposed to have a sentinel still parse without one.
|
// Make sure that arrays who aren't supposed to have a sentinel still parse without one.
|
||||||
try testing.expectEqual(@as(?i32, null), std.meta.sentinel(@TypeOf(r.simple_data)));
|
try testing.expectEqual(@as(?i32, null), std.meta.sentinel(@TypeOf(parsed.value.simple_data)));
|
||||||
try testing.expectEqual(@as(?u8, null), std.meta.sentinel(@TypeOf(r.language_without_sentinel)));
|
try testing.expectEqual(@as(?u8, null), std.meta.sentinel(@TypeOf(parsed.value.language_without_sentinel)));
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parse into struct with duplicate field" {
|
test "parse into struct with duplicate field" {
|
||||||
// allow allocator to detect double frees by keeping bucket in use
|
|
||||||
const ballast = try testing.allocator.alloc(u64, 1);
|
|
||||||
defer testing.allocator.free(ballast);
|
|
||||||
|
|
||||||
const options_first = ParseOptions{ .duplicate_field_behavior = .use_first };
|
const options_first = ParseOptions{ .duplicate_field_behavior = .use_first };
|
||||||
const options_last = ParseOptions{ .duplicate_field_behavior = .use_last };
|
const options_last = ParseOptions{ .duplicate_field_behavior = .use_last };
|
||||||
|
|
||||||
|
|
@ -266,9 +261,12 @@ test "parse into struct with duplicate field" {
|
||||||
try testing.expectError(error.InvalidNumber, parseFromSlice(T1, testing.allocator, str, options_first));
|
try testing.expectError(error.InvalidNumber, parseFromSlice(T1, testing.allocator, str, options_first));
|
||||||
try testing.expectError(error.InvalidNumber, parseFromSlice(T1, testing.allocator, str, options_last));
|
try testing.expectError(error.InvalidNumber, parseFromSlice(T1, testing.allocator, str, options_last));
|
||||||
|
|
||||||
|
var arena = ArenaAllocator.init(testing.allocator);
|
||||||
|
defer arena.deinit();
|
||||||
|
|
||||||
const T2 = struct { a: f64 };
|
const T2 = struct { a: f64 };
|
||||||
try testing.expectEqual(T2{ .a = 1.0 }, try parseFromSlice(T2, testing.allocator, str, options_first));
|
try testing.expectEqual(T2{ .a = 1.0 }, try parseFromSliceLeaky(T2, arena.allocator(), str, options_first));
|
||||||
try testing.expectEqual(T2{ .a = 0.25 }, try parseFromSlice(T2, testing.allocator, str, options_last));
|
try testing.expectEqual(T2{ .a = 0.25 }, try parseFromSliceLeaky(T2, arena.allocator(), str, options_last));
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parse into struct ignoring unknown fields" {
|
test "parse into struct ignoring unknown fields" {
|
||||||
|
|
@ -302,11 +300,11 @@ test "parse into struct ignoring unknown fields" {
|
||||||
\\ "language": "zig"
|
\\ "language": "zig"
|
||||||
\\}
|
\\}
|
||||||
;
|
;
|
||||||
const r = try parseFromSlice(T, testing.allocator, str, .{ .ignore_unknown_fields = true });
|
const parsed = try parseFromSlice(T, testing.allocator, str, .{ .ignore_unknown_fields = true });
|
||||||
defer parseFree(T, testing.allocator, r);
|
defer parsed.deinit();
|
||||||
|
|
||||||
try testing.expectEqual(@as(i64, 420), r.int);
|
try testing.expectEqual(@as(i64, 420), parsed.value.int);
|
||||||
try testing.expectEqualSlices(u8, "zig", r.language);
|
try testing.expectEqualSlices(u8, "zig", parsed.value.language);
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parse into tuple" {
|
test "parse into tuple" {
|
||||||
|
|
@ -343,8 +341,9 @@ test "parse into tuple" {
|
||||||
\\ {"float": 12.34}
|
\\ {"float": 12.34}
|
||||||
\\]
|
\\]
|
||||||
;
|
;
|
||||||
const r = try parseFromSlice(T, testing.allocator, str, .{});
|
const parsed = try parseFromSlice(T, testing.allocator, str, .{});
|
||||||
defer parseFree(T, testing.allocator, r);
|
defer parsed.deinit();
|
||||||
|
const r = parsed.value;
|
||||||
try testing.expectEqual(@as(i64, 420), r[0]);
|
try testing.expectEqual(@as(i64, 420), r[0]);
|
||||||
try testing.expectEqual(@as(f64, 3.14), r[1]);
|
try testing.expectEqual(@as(f64, 3.14), r[1]);
|
||||||
try testing.expectEqual(true, r[2]);
|
try testing.expectEqual(true, r[2]);
|
||||||
|
|
@ -368,10 +367,10 @@ test "parse into recursive union definition" {
|
||||||
values: ParseIntoRecursiveUnionDefinitionValue,
|
values: ParseIntoRecursiveUnionDefinitionValue,
|
||||||
};
|
};
|
||||||
|
|
||||||
const r = try parseFromSlice(T, testing.allocator, "{\"values\":{\"array\":[{\"integer\":58}]}}", .{});
|
const parsed = try parseFromSlice(T, testing.allocator, "{\"values\":{\"array\":[{\"integer\":58}]}}", .{});
|
||||||
defer parseFree(T, testing.allocator, r);
|
defer parsed.deinit();
|
||||||
|
|
||||||
try testing.expectEqual(@as(i64, 58), r.values.array[0].integer);
|
try testing.expectEqual(@as(i64, 58), parsed.value.values.array[0].integer);
|
||||||
}
|
}
|
||||||
|
|
||||||
const ParseIntoDoubleRecursiveUnionValueFirst = union(enum) {
|
const ParseIntoDoubleRecursiveUnionValueFirst = union(enum) {
|
||||||
|
|
@ -389,29 +388,37 @@ test "parse into double recursive union definition" {
|
||||||
values: ParseIntoDoubleRecursiveUnionValueFirst,
|
values: ParseIntoDoubleRecursiveUnionValueFirst,
|
||||||
};
|
};
|
||||||
|
|
||||||
const r = try parseFromSlice(T, testing.allocator, "{\"values\":{\"array\":[{\"array\":[{\"integer\":58}]}]}}", .{});
|
const parsed = try parseFromSlice(T, testing.allocator, "{\"values\":{\"array\":[{\"array\":[{\"integer\":58}]}]}}", .{});
|
||||||
defer parseFree(T, testing.allocator, r);
|
defer parsed.deinit();
|
||||||
|
|
||||||
try testing.expectEqual(@as(i64, 58), r.values.array[0].array[0].integer);
|
try testing.expectEqual(@as(i64, 58), parsed.value.values.array[0].array[0].integer);
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parse exponential into int" {
|
test "parse exponential into int" {
|
||||||
const T = struct { int: i64 };
|
const T = struct { int: i64 };
|
||||||
const r = try parseFromSlice(T, testing.allocator, "{ \"int\": 4.2e2 }", .{});
|
const r = try parseFromSliceLeaky(T, testing.allocator, "{ \"int\": 4.2e2 }", .{});
|
||||||
try testing.expectEqual(@as(i64, 420), r.int);
|
try testing.expectEqual(@as(i64, 420), r.int);
|
||||||
try testing.expectError(error.InvalidNumber, parseFromSlice(T, testing.allocator, "{ \"int\": 0.042e2 }", .{}));
|
try testing.expectError(error.InvalidNumber, parseFromSliceLeaky(T, testing.allocator, "{ \"int\": 0.042e2 }", .{}));
|
||||||
try testing.expectError(error.Overflow, parseFromSlice(T, testing.allocator, "{ \"int\": 18446744073709551616.0 }", .{}));
|
try testing.expectError(error.Overflow, parseFromSliceLeaky(T, testing.allocator, "{ \"int\": 18446744073709551616.0 }", .{}));
|
||||||
}
|
}
|
||||||
|
|
||||||
test "parseFromTokenSource" {
|
test "parseFromTokenSource" {
|
||||||
|
{
|
||||||
var scanner = JsonScanner.initCompleteInput(testing.allocator, "123");
|
var scanner = JsonScanner.initCompleteInput(testing.allocator, "123");
|
||||||
defer scanner.deinit();
|
defer scanner.deinit();
|
||||||
try testing.expectEqual(@as(u32, 123), try parseFromTokenSource(u32, testing.allocator, &scanner, .{}));
|
var parsed = try parseFromTokenSource(u32, testing.allocator, &scanner, .{});
|
||||||
|
defer parsed.deinit();
|
||||||
|
try testing.expectEqual(@as(u32, 123), parsed.value);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
var stream = std.io.fixedBufferStream("123");
|
var stream = std.io.fixedBufferStream("123");
|
||||||
var json_reader = jsonReader(std.testing.allocator, stream.reader());
|
var json_reader = jsonReader(std.testing.allocator, stream.reader());
|
||||||
defer json_reader.deinit();
|
defer json_reader.deinit();
|
||||||
try testing.expectEqual(@as(u32, 123), try parseFromTokenSource(u32, testing.allocator, &json_reader, .{}));
|
var parsed = try parseFromTokenSource(u32, testing.allocator, &json_reader, .{});
|
||||||
|
defer parsed.deinit();
|
||||||
|
try testing.expectEqual(@as(u32, 123), parsed.value);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
test "max_value_len" {
|
test "max_value_len" {
|
||||||
|
|
@ -429,9 +436,9 @@ test "parse into vector" {
|
||||||
\\ "vec_i32": [4, 5, 6, 7]
|
\\ "vec_i32": [4, 5, 6, 7]
|
||||||
\\}
|
\\}
|
||||||
;
|
;
|
||||||
const r = try parseFromSlice(T, testing.allocator, s, .{});
|
const parsed = try parseFromSlice(T, testing.allocator, s, .{});
|
||||||
defer parseFree(T, testing.allocator, r);
|
defer parsed.deinit();
|
||||||
try testing.expectApproxEqAbs(@as(f32, 1.5), r.vec_f32[0], 0.0000001);
|
try testing.expectApproxEqAbs(@as(f32, 1.5), parsed.value.vec_f32[0], 0.0000001);
|
||||||
try testing.expectApproxEqAbs(@as(f32, 2.5), r.vec_f32[1], 0.0000001);
|
try testing.expectApproxEqAbs(@as(f32, 2.5), parsed.value.vec_f32[1], 0.0000001);
|
||||||
try testing.expectEqual(@Vector(4, i32){ 4, 5, 6, 7 }, r.vec_i32);
|
try testing.expectEqual(@Vector(4, i32){ 4, 5, 6, 7 }, parsed.value.vec_i32);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
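Note: the rewritten tests above trace the commit's new API seam: `parseFromSlice` and `parseFromTokenSource` return a managed `Parsed(T)` that must be `deinit()`ed, while the `...Leaky` variants hand back the bare value and leave ownership with the caller's allocator — `testing.allocator` when the result allocates nothing, an `ArenaAllocator` otherwise. A condensed sketch of the two call shapes (illustrative, not part of the diff):

const std = @import("std");

test "managed vs. leaky parsing" {
    // Managed: the wrapper's internal arena owns the parsed string.
    const parsed = try std.json.parseFromSlice([]const u8, std.testing.allocator, "\"hi\"", .{});
    defer parsed.deinit();
    try std.testing.expectEqualSlices(u8, "hi", parsed.value);

    // Leaky: allocations land in a caller-owned arena; the value is returned bare.
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();
    const value = try std.json.parseFromSliceLeaky([]const u8, arena.allocator(), "\"hi\"", .{});
    try std.testing.expectEqualSlices(u8, "hi", value);
}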

@@ -1,8 +1,9 @@
 const std = @import("std");
 const testing = std.testing;
-const Parser = @import("./dynamic.zig").Parser;
+const parseFromSlice = @import("./static.zig").parseFromSlice;
 const validate = @import("./scanner.zig").validate;
 const JsonScanner = @import("./scanner.zig").Scanner;
+const Value = @import("./dynamic.zig").Value;

 // Support for JSONTestSuite.zig
 pub fn ok(s: []const u8) !void {
@@ -26,10 +27,8 @@ fn testLowLevelScanner(s: []const u8) !void {
     }
 }
 fn testHighLevelDynamicParser(s: []const u8) !void {
-    var p = Parser.init(testing.allocator, .alloc_if_needed);
-    defer p.deinit();
-    var tree = try p.parse(s);
-    defer tree.deinit();
+    var parsed = try parseFromSlice(Value, testing.allocator, s, .{});
+    defer parsed.deinit();
 }

 // Additional tests not part of test JSONTestSuite.
@@ -47,15 +46,12 @@ test "n_object_closed_missing_value" {
 fn roundTrip(s: []const u8) !void {
     try testing.expect(try validate(testing.allocator, s));

-    var p = Parser.init(testing.allocator, .alloc_if_needed);
-    defer p.deinit();
+    var parsed = try parseFromSlice(Value, testing.allocator, s, .{});
+    defer parsed.deinit();

-    var tree = try p.parse(s);
-    defer tree.deinit();
-
     var buf: [256]u8 = undefined;
     var fbs = std.io.fixedBufferStream(&buf);
-    try tree.root.jsonStringify(.{}, fbs.writer());
+    try parsed.value.jsonStringify(.{}, fbs.writer());

     try testing.expectEqualStrings(s, fbs.getWritten());
 }

@@ -20,15 +20,16 @@ pub fn main() !void {
     // Required for json parsing.
     @setEvalBranchQuota(10000);

-    var registry = try std.json.parseFromSlice(g.Registry, allocator, spec, .{});
-    const core_reg = switch (registry) {
-        .core => |core_reg| core_reg,
-        .extension => return error.TODOSpirVExtensionSpec,
+    var scanner = std.json.Scanner.initCompleteInput(allocator, spec);
+    var diagnostics = std.json.Diagnostics{};
+    scanner.enableDiagnostics(&diagnostics);
+    var parsed = std.json.parseFromTokenSource(g.CoreRegistry, allocator, &scanner, .{}) catch |err| {
+        std.debug.print("line,col: {},{}\n", .{ diagnostics.getLine(), diagnostics.getColumn() });
+        return err;
     };

     var bw = std.io.bufferedWriter(std.io.getStdOut().writer());
-    try render(bw.writer(), allocator, core_reg);
+    try render(bw.writer(), allocator, parsed.value);
     try bw.flush();
 }
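Note: the change above trades the one-call `parseFromSlice` for an explicit `Scanner` solely to attach `Diagnostics`, so a failed parse of the SPIR-V grammar reports an exact line and column. The same pattern factored into a helper (a sketch, assuming the result type is exported as `std.json.Parsed`):

const std = @import("std");

// Generic version of the diagnostics pattern used in the hunk above.
fn parseWithDiagnostics(comptime T: type, allocator: std.mem.Allocator, text: []const u8) !std.json.Parsed(T) {
    var scanner = std.json.Scanner.initCompleteInput(allocator, text);
    defer scanner.deinit();
    var diagnostics = std.json.Diagnostics{};
    scanner.enableDiagnostics(&diagnostics);
    return std.json.parseFromTokenSource(T, allocator, &scanner, .{}) catch |err| {
        // The diagnostics pin the failure to an exact spot in the document.
        std.debug.print("line,col: {},{}\n", .{ diagnostics.getLine(), diagnostics.getColumn() });
        return err;
    };
}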

@@ -1,6 +1,9 @@
 //! See https://www.khronos.org/registry/spir-v/specs/unified1/MachineReadableGrammar.html
 //! and the files in https://github.com/KhronosGroup/SPIRV-Headers/blob/master/include/spirv/unified1/
 //! Note: Non-canonical casing in these structs used to match SPIR-V spec json.
+
+const std = @import("std");
+
 pub const Registry = union(enum) {
     core: CoreRegistry,
     extension: ExtensionRegistry,
@@ -79,6 +82,20 @@ pub const Enumerant = struct {
     value: union(enum) {
         bitflag: []const u8, // Hexadecimal representation of the value
         int: u31,
+
+        pub fn jsonParse(
+            allocator: std.mem.Allocator,
+            source: anytype,
+            options: std.json.ParseOptions,
+        ) std.json.ParseError(@TypeOf(source.*))!@This() {
+            _ = options;
+            switch (try source.nextAlloc(allocator, .alloc_if_needed)) {
+                inline .string, .allocated_string => |s| return @This(){ .bitflag = s },
+                inline .number, .allocated_number => |s| return @This(){ .int = try std.fmt.parseInt(u31, s, 10) },
+                else => return error.UnexpectedToken,
+            }
+        }
+        pub const jsonStringify = @compileError("not supported");
     },
     capabilities: [][]const u8 = &[_][]const u8{},
     /// Valid for .ValueEnum and .BitEnum
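Note: this hunk is the commit's headline feature in action — a type declares `pub fn jsonParse` with exactly this signature, and the static parser calls it instead of its reflection-based default, here so one value can arrive as either a JSON string or a JSON number. The same hook on a hypothetical `Id` union (illustrative, not part of the diff):

const std = @import("std");

const Id = union(enum) {
    name: []const u8,
    number: u32,

    pub fn jsonParse(
        allocator: std.mem.Allocator,
        source: anytype,
        options: std.json.ParseOptions,
    ) std.json.ParseError(@TypeOf(source.*))!@This() {
        _ = options;
        // Accept a bare string or number where the default union parser
        // would require {"name": ...} / {"number": ...} object syntax.
        switch (try source.nextAlloc(allocator, .alloc_if_needed)) {
            inline .string, .allocated_string => |s| return @This(){ .name = s },
            inline .number, .allocated_number => |s| return @This(){ .number = try std.fmt.parseInt(u32, s, 10) },
            else => return error.UnexpectedToken,
        }
    }
};

test Id {
    var parsed = try std.json.parseFromSlice(Id, std.testing.allocator, "42", .{});
    defer parsed.deinit();
    try std.testing.expectEqual(@as(u32, 42), parsed.value.number);
}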

@@ -624,9 +624,9 @@ pub fn main() anyerror!void {
         },
     };

-    var parser = json.Parser.init(allocator, .alloc_if_needed);
-    const tree = try parser.parse(json_text);
-    const root_map = &tree.root.object;
+    const parsed = try json.parseFromSlice(json.Value, allocator, json_text, .{});
+    defer parsed.deinit();
+    const root_map = &parsed.value.object;

     var all_objects = std.ArrayList(*json.ObjectMap).init(allocator);
     {

@@ -1054,14 +1054,14 @@ fn processOneTarget(job: Job) anyerror!void {
     var json_parse_progress = progress_node.start("parse JSON", 0);
     json_parse_progress.activate();

-    var parser = json.Parser.init(arena, .alloc_if_needed);
-    const tree = try parser.parse(json_text);
+    const parsed = try json.parseFromSlice(json.Value, arena, json_text, .{});
+    defer parsed.deinit();
+    const root_map = &parsed.value.object;
     json_parse_progress.end();

     var render_progress = progress_node.start("render zig code", 0);
     render_progress.activate();

-    const root_map = &tree.root.object;
     var features_table = std.StringHashMap(Feature).init(arena);
     var all_features = std.ArrayList(Feature).init(arena);
     var all_cpus = std.ArrayList(Cpu).init(arena);

@@ -74,7 +74,13 @@ pub fn main() !void {

     const registry_path = try fs.path.join(allocator, &.{ spirv_headers_root, "include", "spirv", "unified1", "spirv.core.grammar.json" });
     const registry_json = try std.fs.cwd().readFileAlloc(allocator, registry_path, std.math.maxInt(usize));
-    const registry = try std.json.parseFromSlice(g.CoreRegistry, allocator, registry_json, .{});
+    var scanner = std.json.Scanner.initCompleteInput(allocator, registry_json);
+    var diagnostics = std.json.Diagnostics{};
+    scanner.enableDiagnostics(&diagnostics);
+    const registry = std.json.parseFromTokenSourceLeaky(g.CoreRegistry, allocator, &scanner, .{}) catch |err| {
+        std.debug.print("line,col: {},{}\n", .{ diagnostics.getLine(), diagnostics.getColumn() });
+        return err;
+    };

     const capabilities = for (registry.operand_kinds) |opkind| {
         if (std.mem.eql(u8, opkind.kind, "Capability"))