author     Andrew Kelley <andrew@ziglang.org>    2022-07-07 00:06:11 -0700
committer  Andrew Kelley <andrew@ziglang.org>    2022-07-07 00:06:11 -0700
commit     e5549de0de8a67d52c0b8acf1fb86c1a5aab2962 (patch)
tree       a979a6597b103043f81429ebf38ebfbdd6c79464 /lib/std
parent     f0fde1d9c2fcd1142ad1343dc2dd788435e743ef (diff)
parent     c9006d9479c619d9ed555164831e11a04d88d382 (diff)
Merge remote-tracking branch 'origin/master' into llvm14
Diffstat (limited to 'lib/std')
-rw-r--r--  lib/std/json.zig        1118
-rw-r--r--  lib/std/json/test.zig    823
2 files changed, 981 insertions, 960 deletions
diff --git a/lib/std/json.zig b/lib/std/json.zig
index d20797e131..9f678fb4aa 100644
--- a/lib/std/json.zig
+++ b/lib/std/json.zig
@@ -2,6 +2,7 @@
//
// https://tools.ietf.org/html/rfc8259
+const builtin = @import("builtin");
const std = @import("std.zig");
const debug = std.debug;
const assert = debug.assert;
@@ -72,22 +73,6 @@ fn encodesTo(decoded: []const u8, encoded: []const u8) bool {
return true;
}
-test "encodesTo" {
- // same
- try testing.expectEqual(true, encodesTo("false", "false"));
- // totally different
- try testing.expectEqual(false, encodesTo("false", "true"));
- // different lengths
- try testing.expectEqual(false, encodesTo("false", "other"));
- // with escape
- try testing.expectEqual(true, encodesTo("\\", "\\\\"));
- try testing.expectEqual(true, encodesTo("with\nescape", "with\\nescape"));
- // with unicode
- try testing.expectEqual(true, encodesTo("ą", "\\u0105"));
- try testing.expectEqual(true, encodesTo("😂", "\\ud83d\\ude02"));
- try testing.expectEqual(true, encodesTo("withąunicode😂", "with\\u0105unicode\\ud83d\\ude02"));
-}
-
/// A single token slice into the parent string.
///
/// Use `token.slice()` on the input at the current position to get the current slice.
@@ -1100,15 +1085,6 @@ pub const StreamingParser = struct {
}
};
-test "json.serialize issue #5959" {
- var parser: StreamingParser = undefined;
- // StreamingParser has multiple internal fields set to undefined. This causes issues when using
- // expectEqual so these are zeroed. We are testing for equality here only because this is a
- // known small test reproduction which hits the relevant LLVM issue.
- std.mem.set(u8, @ptrCast([*]u8, &parser)[0..@sizeOf(StreamingParser)], 0);
- try std.testing.expectEqual(parser, parser);
-}
-
/// A small wrapper over a StreamingParser for full slices. Returns a stream of json Tokens.
pub const TokenStream = struct {
i: usize,
@@ -1164,80 +1140,6 @@ pub const TokenStream = struct {
}
};
-fn checkNext(p: *TokenStream, id: std.meta.Tag(Token)) !void {
- const token = (p.next() catch unreachable).?;
- try testing.expect(std.meta.activeTag(token) == id);
-}
-
-test "json.token" {
- const s =
- \\{
- \\ "Image": {
- \\ "Width": 800,
- \\ "Height": 600,
- \\ "Title": "View from 15th Floor",
- \\ "Thumbnail": {
- \\ "Url": "http://www.example.com/image/481989943",
- \\ "Height": 125,
- \\ "Width": 100
- \\ },
- \\ "Animated" : false,
- \\ "IDs": [116, 943, 234, 38793]
- \\ }
- \\}
- ;
-
- var p = TokenStream.init(s);
-
- try checkNext(&p, .ObjectBegin);
- try checkNext(&p, .String); // Image
- try checkNext(&p, .ObjectBegin);
- try checkNext(&p, .String); // Width
- try checkNext(&p, .Number);
- try checkNext(&p, .String); // Height
- try checkNext(&p, .Number);
- try checkNext(&p, .String); // Title
- try checkNext(&p, .String);
- try checkNext(&p, .String); // Thumbnail
- try checkNext(&p, .ObjectBegin);
- try checkNext(&p, .String); // Url
- try checkNext(&p, .String);
- try checkNext(&p, .String); // Height
- try checkNext(&p, .Number);
- try checkNext(&p, .String); // Width
- try checkNext(&p, .Number);
- try checkNext(&p, .ObjectEnd);
- try checkNext(&p, .String); // Animated
- try checkNext(&p, .False);
- try checkNext(&p, .String); // IDs
- try checkNext(&p, .ArrayBegin);
- try checkNext(&p, .Number);
- try checkNext(&p, .Number);
- try checkNext(&p, .Number);
- try checkNext(&p, .Number);
- try checkNext(&p, .ArrayEnd);
- try checkNext(&p, .ObjectEnd);
- try checkNext(&p, .ObjectEnd);
-
- try testing.expect((try p.next()) == null);
-}
-
-test "json.token mismatched close" {
- var p = TokenStream.init("[102, 111, 111 }");
- try checkNext(&p, .ArrayBegin);
- try checkNext(&p, .Number);
- try checkNext(&p, .Number);
- try checkNext(&p, .Number);
- try testing.expectError(error.UnexpectedClosingBrace, p.next());
-}
-
-test "json.token premature object close" {
- var p = TokenStream.init("{ \"key\": }");
- try checkNext(&p, .ObjectBegin);
- try checkNext(&p, .String);
- try testing.expectError(error.InvalidValueBegin, p.next());
-}
-
/// Validate a JSON string. This does not limit number precision so a decoder may not necessarily
/// be able to decode the string even if this returns true.
pub fn validate(s: []const u8) bool {
@@ -1255,15 +1157,6 @@ pub fn validate(s: []const u8) bool {
return p.complete;
}
-test "json.validate" {
- try testing.expectEqual(true, validate("{}"));
- try testing.expectEqual(true, validate("[]"));
- try testing.expectEqual(true, validate("[{[[[[{}]]]]}]"));
- try testing.expectEqual(false, validate("{]"));
- try testing.expectEqual(false, validate("[}"));
- try testing.expectEqual(false, validate("{{{{[]}}}]"));
-}
-
const Allocator = std.mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;
const ArrayList = std.ArrayList;
@@ -1352,67 +1245,6 @@ pub const Value = union(enum) {
}
};
-test "Value.jsonStringify" {
- {
- var buffer: [10]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buffer);
- try @as(Value, .Null).jsonStringify(.{}, fbs.writer());
- try testing.expectEqualSlices(u8, fbs.getWritten(), "null");
- }
- {
- var buffer: [10]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buffer);
- try (Value{ .Bool = true }).jsonStringify(.{}, fbs.writer());
- try testing.expectEqualSlices(u8, fbs.getWritten(), "true");
- }
- {
- var buffer: [10]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buffer);
- try (Value{ .Integer = 42 }).jsonStringify(.{}, fbs.writer());
- try testing.expectEqualSlices(u8, fbs.getWritten(), "42");
- }
- {
- var buffer: [10]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buffer);
- try (Value{ .NumberString = "43" }).jsonStringify(.{}, fbs.writer());
- try testing.expectEqualSlices(u8, fbs.getWritten(), "43");
- }
- {
- var buffer: [10]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buffer);
- try (Value{ .Float = 42 }).jsonStringify(.{}, fbs.writer());
- try testing.expectEqualSlices(u8, fbs.getWritten(), "4.2e+01");
- }
- {
- var buffer: [10]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buffer);
- try (Value{ .String = "weeee" }).jsonStringify(.{}, fbs.writer());
- try testing.expectEqualSlices(u8, fbs.getWritten(), "\"weeee\"");
- }
- {
- var buffer: [10]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buffer);
- var vals = [_]Value{
- .{ .Integer = 1 },
- .{ .Integer = 2 },
- .{ .NumberString = "3" },
- };
- try (Value{
- .Array = Array.fromOwnedSlice(undefined, &vals),
- }).jsonStringify(.{}, fbs.writer());
- try testing.expectEqualSlices(u8, fbs.getWritten(), "[1,2,3]");
- }
- {
- var buffer: [10]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buffer);
- var obj = ObjectMap.init(testing.allocator);
- defer obj.deinit();
- try obj.putNoClobber("a", .{ .String = "b" });
- try (Value{ .Object = obj }).jsonStringify(.{}, fbs.writer());
- try testing.expectEqualSlices(u8, fbs.getWritten(), "{\"a\":\"b\"}");
- }
-}
-
/// parse tokens from a stream, returning `false` if they do not decode to `value`
fn parsesTo(comptime T: type, value: T, tokens: *TokenStream, options: ParseOptions) !bool {
// TODO: should be able to write this function to not require an allocator
@@ -1503,59 +1335,6 @@ fn skipValue(tokens: *TokenStream) SkipValueError!void {
}
}
-test "skipValue" {
- var ts = TokenStream.init("false");
- try skipValue(&ts);
- ts = TokenStream.init("true");
- try skipValue(&ts);
- ts = TokenStream.init("null");
- try skipValue(&ts);
- ts = TokenStream.init("42");
- try skipValue(&ts);
- ts = TokenStream.init("42.0");
- try skipValue(&ts);
- ts = TokenStream.init("\"foo\"");
- try skipValue(&ts);
- ts = TokenStream.init("[101, 111, 121]");
- try skipValue(&ts);
- ts = TokenStream.init("{}");
- try skipValue(&ts);
- ts = TokenStream.init("{\"foo\": \"bar\"}");
- try skipValue(&ts);
-
- { // An absurd number of nestings
- const nestings = StreamingParser.default_max_nestings + 1;
-
- ts = TokenStream.init("[" ** nestings ++ "]" ** nestings);
- try testing.expectError(error.TooManyNestedItems, skipValue(&ts));
- }
-
- { // Would a number token cause problems in a deeply-nested array?
- const nestings = StreamingParser.default_max_nestings;
- const deeply_nested_array = "[" ** nestings ++ "0.118, 999, 881.99, 911.9, 725, 3" ++ "]" ** nestings;
-
- ts = TokenStream.init(deeply_nested_array);
- try skipValue(&ts);
-
- ts = TokenStream.init("[" ++ deeply_nested_array ++ "]");
- try testing.expectError(error.TooManyNestedItems, skipValue(&ts));
- }
-
- // Mismatched brace/square bracket
- ts = TokenStream.init("[102, 111, 111}");
- try testing.expectError(error.UnexpectedClosingBrace, skipValue(&ts));
-
- { // should fail if no value found (e.g. immediate close of object)
- var empty_object = TokenStream.init("{}");
- assert(.ObjectBegin == (try empty_object.next()).?);
- try testing.expectError(error.UnexpectedJsonDepth, skipValue(&empty_object));
-
- var empty_array = TokenStream.init("[]");
- assert(.ArrayBegin == (try empty_array.next()).?);
- try testing.expectError(error.UnexpectedJsonDepth, skipValue(&empty_array));
- }
-}
-
fn ParseInternalError(comptime T: type) type {
// `inferred_types` is used to avoid infinite recursion for recursive type definitions.
const inferred_types = [_]type{};
@@ -1981,440 +1760,6 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
}
}
-test "parse" {
- var ts = TokenStream.init("false");
- try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{}));
- ts = TokenStream.init("true");
- try testing.expectEqual(true, try parse(bool, &ts, ParseOptions{}));
- ts = TokenStream.init("1");
- try testing.expectEqual(@as(u1, 1), try parse(u1, &ts, ParseOptions{}));
- ts = TokenStream.init("50");
- try testing.expectError(error.Overflow, parse(u1, &ts, ParseOptions{}));
- ts = TokenStream.init("42");
- try testing.expectEqual(@as(u64, 42), try parse(u64, &ts, ParseOptions{}));
- ts = TokenStream.init("42.0");
- try testing.expectEqual(@as(f64, 42), try parse(f64, &ts, ParseOptions{}));
- ts = TokenStream.init("null");
- try testing.expectEqual(@as(?bool, null), try parse(?bool, &ts, ParseOptions{}));
- ts = TokenStream.init("true");
- try testing.expectEqual(@as(?bool, true), try parse(?bool, &ts, ParseOptions{}));
-
- ts = TokenStream.init("\"foo\"");
- try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &ts, ParseOptions{}));
- ts = TokenStream.init("[102, 111, 111]");
- try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &ts, ParseOptions{}));
- ts = TokenStream.init("[]");
- try testing.expectEqual(@as([0]u8, undefined), try parse([0]u8, &ts, ParseOptions{}));
-}
-
-test "parse into enum" {
- const T = enum(u32) {
- Foo = 42,
- Bar,
- @"with\\escape",
- };
- var ts = TokenStream.init("\"Foo\"");
- try testing.expectEqual(@as(T, .Foo), try parse(T, &ts, ParseOptions{}));
- ts = TokenStream.init("42");
- try testing.expectEqual(@as(T, .Foo), try parse(T, &ts, ParseOptions{}));
- ts = TokenStream.init("\"with\\\\escape\"");
- try testing.expectEqual(@as(T, .@"with\\escape"), try parse(T, &ts, ParseOptions{}));
- ts = TokenStream.init("5");
- try testing.expectError(error.InvalidEnumTag, parse(T, &ts, ParseOptions{}));
- ts = TokenStream.init("\"Qux\"");
- try testing.expectError(error.InvalidEnumTag, parse(T, &ts, ParseOptions{}));
-}
-
-test "parse with trailing data" {
- var ts = TokenStream.init("falsed");
- try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{ .allow_trailing_data = true }));
- ts = TokenStream.init("falsed");
- try testing.expectError(error.InvalidTopLevelTrailing, parse(bool, &ts, ParseOptions{ .allow_trailing_data = false }));
- // trailing whitespace is okay
- ts = TokenStream.init("false \n");
- try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{ .allow_trailing_data = false }));
-}
-
-test "parse into that allocates a slice" {
- var ts = TokenStream.init("\"foo\"");
- try testing.expectError(error.AllocatorRequired, parse([]u8, &ts, ParseOptions{}));
-
- const options = ParseOptions{ .allocator = testing.allocator };
- {
- ts = TokenStream.init("\"foo\"");
- const r = try parse([]u8, &ts, options);
- defer parseFree([]u8, r, options);
- try testing.expectEqualSlices(u8, "foo", r);
- }
- {
- ts = TokenStream.init("[102, 111, 111]");
- const r = try parse([]u8, &ts, options);
- defer parseFree([]u8, r, options);
- try testing.expectEqualSlices(u8, "foo", r);
- }
- {
- ts = TokenStream.init("\"with\\\\escape\"");
- const r = try parse([]u8, &ts, options);
- defer parseFree([]u8, r, options);
- try testing.expectEqualSlices(u8, "with\\escape", r);
- }
-}
-
-test "parse into tagged union" {
- {
- const T = union(enum) {
- int: i32,
- float: f64,
- string: []const u8,
- };
- var ts = TokenStream.init("1.5");
- try testing.expectEqual(T{ .float = 1.5 }, try parse(T, &ts, ParseOptions{}));
- }
-
- { // failing allocations should be bubbled up instantly without trying next member
- var fail_alloc = testing.FailingAllocator.init(testing.allocator, 0);
- const options = ParseOptions{ .allocator = fail_alloc.allocator() };
- const T = union(enum) {
- // both fields here match the input
- string: []const u8,
- array: [3]u8,
- };
- var ts = TokenStream.init("[1,2,3]");
- try testing.expectError(error.OutOfMemory, parse(T, &ts, options));
- }
-
- {
- // if multiple matches possible, takes first option
- const T = union(enum) {
- x: u8,
- y: u8,
- };
- var ts = TokenStream.init("42");
- try testing.expectEqual(T{ .x = 42 }, try parse(T, &ts, ParseOptions{}));
- }
-
- { // needs to back out when first union member doesn't match
- const T = union(enum) {
- A: struct { x: u32 },
- B: struct { y: u32 },
- };
- var ts = TokenStream.init("{\"y\":42}");
- try testing.expectEqual(T{ .B = .{ .y = 42 } }, try parse(T, &ts, ParseOptions{}));
- }
-}
-
-test "parse union bubbles up AllocatorRequired" {
- { // string member first in union (and not matching)
- const T = union(enum) {
- string: []const u8,
- int: i32,
- };
- var ts = TokenStream.init("42");
- try testing.expectError(error.AllocatorRequired, parse(T, &ts, ParseOptions{}));
- }
-
- { // string member not first in union (and matching)
- const T = union(enum) {
- int: i32,
- float: f64,
- string: []const u8,
- };
- var ts = TokenStream.init("\"foo\"");
- try testing.expectError(error.AllocatorRequired, parse(T, &ts, ParseOptions{}));
- }
-}
-
-test "parseFree descends into tagged union" {
- var fail_alloc = testing.FailingAllocator.init(testing.allocator, 1);
- const options = ParseOptions{ .allocator = fail_alloc.allocator() };
- const T = union(enum) {
- int: i32,
- float: f64,
- string: []const u8,
- };
- // use a string with unicode escape so we know result can't be a reference to global constant
- var ts = TokenStream.init("\"with\\u0105unicode\"");
- const r = try parse(T, &ts, options);
- try testing.expectEqual(std.meta.Tag(T).string, @as(std.meta.Tag(T), r));
- try testing.expectEqualSlices(u8, "withąunicode", r.string);
- try testing.expectEqual(@as(usize, 0), fail_alloc.deallocations);
- parseFree(T, r, options);
- try testing.expectEqual(@as(usize, 1), fail_alloc.deallocations);
-}
-
-test "parse with comptime field" {
- {
- const T = struct {
- comptime a: i32 = 0,
- b: bool,
- };
- var ts = TokenStream.init(
- \\{
- \\ "a": 0,
- \\ "b": true
- \\}
- );
- try testing.expectEqual(T{ .a = 0, .b = true }, try parse(T, &ts, ParseOptions{}));
- }
-
- { // string comptime values currently require an allocator
- const T = union(enum) {
- foo: struct {
- comptime kind: []const u8 = "boolean",
- b: bool,
- },
- bar: struct {
- comptime kind: []const u8 = "float",
- b: f64,
- },
- };
-
- const options = ParseOptions{
- .allocator = std.testing.allocator,
- };
-
- var ts = TokenStream.init(
- \\{
- \\ "kind": "float",
- \\ "b": 1.0
- \\}
- );
- const r = try parse(T, &ts, options);
-
- // check that parseFree doesn't try to free comptime fields
- parseFree(T, r, options);
- }
-}
-
-test "parse into struct with no fields" {
- const T = struct {};
- var ts = TokenStream.init("{}");
- try testing.expectEqual(T{}, try parse(T, &ts, ParseOptions{}));
-}
-
-test "parse into struct with misc fields" {
- @setEvalBranchQuota(10000);
- const options = ParseOptions{ .allocator = testing.allocator };
- const T = struct {
- int: i64,
- float: f64,
- @"with\\escape": bool,
- @"withąunicode😂": bool,
- language: []const u8,
- optional: ?bool,
- default_field: i32 = 42,
- static_array: [3]f64,
- dynamic_array: []f64,
-
- complex: struct {
- nested: []const u8,
- },
-
- veryComplex: []struct {
- foo: []const u8,
- },
-
- a_union: Union,
- const Union = union(enum) {
- x: u8,
- float: f64,
- string: []const u8,
- };
- };
- var ts = TokenStream.init(
- \\{
- \\ "int": 420,
- \\ "float": 3.14,
- \\ "with\\escape": true,
- \\ "with\u0105unicode\ud83d\ude02": false,
- \\ "language": "zig",
- \\ "optional": null,
- \\ "static_array": [66.6, 420.420, 69.69],
- \\ "dynamic_array": [66.6, 420.420, 69.69],
- \\ "complex": {
- \\ "nested": "zig"
- \\ },
- \\ "veryComplex": [
- \\ {
- \\ "foo": "zig"
- \\ }, {
- \\ "foo": "rocks"
- \\ }
- \\ ],
- \\ "a_union": 100000
- \\}
- );
- const r = try parse(T, &ts, options);
- defer parseFree(T, r, options);
- try testing.expectEqual(@as(i64, 420), r.int);
- try testing.expectEqual(@as(f64, 3.14), r.float);
- try testing.expectEqual(true, r.@"with\\escape");
- try testing.expectEqual(false, r.@"withąunicode😂");
- try testing.expectEqualSlices(u8, "zig", r.language);
- try testing.expectEqual(@as(?bool, null), r.optional);
- try testing.expectEqual(@as(i32, 42), r.default_field);
- try testing.expectEqual(@as(f64, 66.6), r.static_array[0]);
- try testing.expectEqual(@as(f64, 420.420), r.static_array[1]);
- try testing.expectEqual(@as(f64, 69.69), r.static_array[2]);
- try testing.expectEqual(@as(usize, 3), r.dynamic_array.len);
- try testing.expectEqual(@as(f64, 66.6), r.dynamic_array[0]);
- try testing.expectEqual(@as(f64, 420.420), r.dynamic_array[1]);
- try testing.expectEqual(@as(f64, 69.69), r.dynamic_array[2]);
- try testing.expectEqualSlices(u8, r.complex.nested, "zig");
- try testing.expectEqualSlices(u8, "zig", r.veryComplex[0].foo);
- try testing.expectEqualSlices(u8, "rocks", r.veryComplex[1].foo);
- try testing.expectEqual(T.Union{ .float = 100000 }, r.a_union);
-}
-
-test "parse into struct with strings and arrays with sentinels" {
- @setEvalBranchQuota(10000);
- const options = ParseOptions{ .allocator = testing.allocator };
- const T = struct {
- language: [:0]const u8,
- language_without_sentinel: []const u8,
- data: [:99]const i32,
- simple_data: []const i32,
- };
- var ts = TokenStream.init(
- \\{
- \\ "language": "zig",
- \\ "language_without_sentinel": "zig again!",
- \\ "data": [1, 2, 3],
- \\ "simple_data": [4, 5, 6]
- \\}
- );
- const r = try parse(T, &ts, options);
- defer parseFree(T, r, options);
-
- try testing.expectEqualSentinel(u8, 0, "zig", r.language);
-
- const data = [_:99]i32{ 1, 2, 3 };
- try testing.expectEqualSentinel(i32, 99, data[0..data.len], r.data);
-
- // Make sure that arrays who aren't supposed to have a sentinel still parse without one.
- try testing.expectEqual(@as(?i32, null), std.meta.sentinel(@TypeOf(r.simple_data)));
- try testing.expectEqual(@as(?u8, null), std.meta.sentinel(@TypeOf(r.language_without_sentinel)));
-}
-
-test "parse into struct with duplicate field" {
- // allow allocator to detect double frees by keeping bucket in use
- const ballast = try testing.allocator.alloc(u64, 1);
- defer testing.allocator.free(ballast);
-
- const options_first = ParseOptions{ .allocator = testing.allocator, .duplicate_field_behavior = .UseFirst };
-
- const options_last = ParseOptions{
- .allocator = testing.allocator,
- .duplicate_field_behavior = .UseLast,
- };
-
- const str = "{ \"a\": 1, \"a\": 0.25 }";
-
- const T1 = struct { a: *u64 };
- // both .UseFirst and .UseLast should fail because second "a" value isn't a u64
- var ts = TokenStream.init(str);
- try testing.expectError(error.InvalidNumber, parse(T1, &ts, options_first));
- ts = TokenStream.init(str);
- try testing.expectError(error.InvalidNumber, parse(T1, &ts, options_last));
-
- const T2 = struct { a: f64 };
- ts = TokenStream.init(str);
- try testing.expectEqual(T2{ .a = 1.0 }, try parse(T2, &ts, options_first));
- ts = TokenStream.init(str);
- try testing.expectEqual(T2{ .a = 0.25 }, try parse(T2, &ts, options_last));
-
- const T3 = struct { comptime a: f64 = 1.0 };
- // .UseFirst should succeed because second "a" value is unconditionally ignored (even though != 1.0)
- const t3 = T3{ .a = 1.0 };
- ts = TokenStream.init(str);
- try testing.expectEqual(t3, try parse(T3, &ts, options_first));
- // .UseLast should fail because second "a" value is 0.25 which is not equal to default value of 1.0
- ts = TokenStream.init(str);
- try testing.expectError(error.UnexpectedValue, parse(T3, &ts, options_last));
-}
-
-test "parse into struct ignoring unknown fields" {
- const T = struct {
- int: i64,
- language: []const u8,
- };
-
- const ops = ParseOptions{
- .allocator = testing.allocator,
- .ignore_unknown_fields = true,
- };
-
- var ts = TokenStream.init(
- \\{
- \\ "int": 420,
- \\ "float": 3.14,
- \\ "with\\escape": true,
- \\ "with\u0105unicode\ud83d\ude02": false,
- \\ "optional": null,
- \\ "static_array": [66.6, 420.420, 69.69],
- \\ "dynamic_array": [66.6, 420.420, 69.69],
- \\ "complex": {
- \\ "nested": "zig"
- \\ },
- \\ "veryComplex": [
- \\ {
- \\ "foo": "zig"
- \\ }, {
- \\ "foo": "rocks"
- \\ }
- \\ ],
- \\ "a_union": 100000,
- \\ "language": "zig"
- \\}
- );
- const r = try parse(T, &ts, ops);
- defer parseFree(T, r, ops);
-
- try testing.expectEqual(@as(i64, 420), r.int);
- try testing.expectEqualSlices(u8, "zig", r.language);
-}
-
-const ParseIntoRecursiveUnionDefinitionValue = union(enum) {
- integer: i64,
- array: []const ParseIntoRecursiveUnionDefinitionValue,
-};
-
-test "parse into recursive union definition" {
- const T = struct {
- values: ParseIntoRecursiveUnionDefinitionValue,
- };
- const ops = ParseOptions{ .allocator = testing.allocator };
-
- var ts = TokenStream.init("{\"values\":[58]}");
- const r = try parse(T, &ts, ops);
- defer parseFree(T, r, ops);
-
- try testing.expectEqual(@as(i64, 58), r.values.array[0].integer);
-}
-
-const ParseIntoDoubleRecursiveUnionValueFirst = union(enum) {
- integer: i64,
- array: []const ParseIntoDoubleRecursiveUnionValueSecond,
-};
-
-const ParseIntoDoubleRecursiveUnionValueSecond = union(enum) {
- boolean: bool,
- array: []const ParseIntoDoubleRecursiveUnionValueFirst,
-};
-
-test "parse into double recursive union definition" {
- const T = struct {
- values: ParseIntoDoubleRecursiveUnionValueFirst,
- };
- const ops = ParseOptions{ .allocator = testing.allocator };
-
- var ts = TokenStream.init("{\"values\":[[58]]}");
- const r = try parse(T, &ts, ops);
- defer parseFree(T, r, ops);
-
- try testing.expectEqual(@as(i64, 58), r.values.array[0].array[0].integer);
-}
-
/// A non-stream JSON parser which constructs a tree of Value's.
pub const Parser = struct {
allocator: Allocator,
@@ -2719,219 +2064,6 @@ pub fn unescapeValidString(output: []u8, input: []const u8) UnescapeValidStringE
assert(outIndex == output.len);
}
-test "json.parser.dynamic" {
- var p = Parser.init(testing.allocator, false);
- defer p.deinit();
-
- const s =
- \\{
- \\ "Image": {
- \\ "Width": 800,
- \\ "Height": 600,
- \\ "Title": "View from 15th Floor",
- \\ "Thumbnail": {
- \\ "Url": "http://www.example.com/image/481989943",
- \\ "Height": 125,
- \\ "Width": 100
- \\ },
- \\ "Animated" : false,
- \\ "IDs": [116, 943, 234, 38793],
- \\ "ArrayOfObject": [{"n": "m"}],
- \\ "double": 1.3412,
- \\ "LargeInt": 18446744073709551615
- \\ }
- \\}
- ;
-
- var tree = try p.parse(s);
- defer tree.deinit();
-
- var root = tree.root;
-
- var image = root.Object.get("Image").?;
-
- const width = image.Object.get("Width").?;
- try testing.expect(width.Integer == 800);
-
- const height = image.Object.get("Height").?;
- try testing.expect(height.Integer == 600);
-
- const title = image.Object.get("Title").?;
- try testing.expect(mem.eql(u8, title.String, "View from 15th Floor"));
-
- const animated = image.Object.get("Animated").?;
- try testing.expect(animated.Bool == false);
-
- const array_of_object = image.Object.get("ArrayOfObject").?;
- try testing.expect(array_of_object.Array.items.len == 1);
-
- const obj0 = array_of_object.Array.items[0].Object.get("n").?;
- try testing.expect(mem.eql(u8, obj0.String, "m"));
-
- const double = image.Object.get("double").?;
- try testing.expect(double.Float == 1.3412);
-
- const large_int = image.Object.get("LargeInt").?;
- try testing.expect(mem.eql(u8, large_int.NumberString, "18446744073709551615"));
-}
-
-test {
- _ = @import("json/test.zig");
- _ = @import("json/write_stream.zig");
-}
-
-test "write json then parse it" {
- var out_buffer: [1000]u8 = undefined;
-
- var fixed_buffer_stream = std.io.fixedBufferStream(&out_buffer);
- const out_stream = fixed_buffer_stream.writer();
- var jw = writeStream(out_stream, 4);
-
- try jw.beginObject();
-
- try jw.objectField("f");
- try jw.emitBool(false);
-
- try jw.objectField("t");
- try jw.emitBool(true);
-
- try jw.objectField("int");
- try jw.emitNumber(1234);
-
- try jw.objectField("array");
- try jw.beginArray();
-
- try jw.arrayElem();
- try jw.emitNull();
-
- try jw.arrayElem();
- try jw.emitNumber(12.34);
-
- try jw.endArray();
-
- try jw.objectField("str");
- try jw.emitString("hello");
-
- try jw.endObject();
-
- var parser = Parser.init(testing.allocator, false);
- defer parser.deinit();
- var tree = try parser.parse(fixed_buffer_stream.getWritten());
- defer tree.deinit();
-
- try testing.expect(tree.root.Object.get("f").?.Bool == false);
- try testing.expect(tree.root.Object.get("t").?.Bool == true);
- try testing.expect(tree.root.Object.get("int").?.Integer == 1234);
- try testing.expect(tree.root.Object.get("array").?.Array.items[0].Null == {});
- try testing.expect(tree.root.Object.get("array").?.Array.items[1].Float == 12.34);
- try testing.expect(mem.eql(u8, tree.root.Object.get("str").?.String, "hello"));
-}
-
-fn testParse(arena_allocator: std.mem.Allocator, json_str: []const u8) !Value {
- var p = Parser.init(arena_allocator, false);
- return (try p.parse(json_str)).root;
-}
-
-test "parsing empty string gives appropriate error" {
- var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
- defer arena_allocator.deinit();
- try testing.expectError(error.UnexpectedEndOfJson, testParse(arena_allocator.allocator(), ""));
-}
-
-test "integer after float has proper type" {
- var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
- defer arena_allocator.deinit();
- const json = try testParse(arena_allocator.allocator(),
- \\{
- \\ "float": 3.14,
- \\ "ints": [1, 2, 3]
- \\}
- );
- try std.testing.expect(json.Object.get("ints").?.Array.items[0] == .Integer);
-}
-
-test "parse exponential into int" {
- const T = struct { int: i64 };
- var ts = TokenStream.init("{ \"int\": 4.2e2 }");
- const r = try parse(T, &ts, ParseOptions{});
- try testing.expectEqual(@as(i64, 420), r.int);
- ts = TokenStream.init("{ \"int\": 0.042e2 }");
- try testing.expectError(error.InvalidNumber, parse(T, &ts, ParseOptions{}));
- ts = TokenStream.init("{ \"int\": 18446744073709551616.0 }");
- try testing.expectError(error.Overflow, parse(T, &ts, ParseOptions{}));
-}
-
-test "escaped characters" {
- var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
- defer arena_allocator.deinit();
- const input =
- \\{
- \\ "backslash": "\\",
- \\ "forwardslash": "\/",
- \\ "newline": "\n",
- \\ "carriagereturn": "\r",
- \\ "tab": "\t",
- \\ "formfeed": "\f",
- \\ "backspace": "\b",
- \\ "doublequote": "\"",
- \\ "unicode": "\u0105",
- \\ "surrogatepair": "\ud83d\ude02"
- \\}
- ;
-
- const obj = (try testParse(arena_allocator.allocator(), input)).Object;
-
- try testing.expectEqualSlices(u8, obj.get("backslash").?.String, "\\");
- try testing.expectEqualSlices(u8, obj.get("forwardslash").?.String, "/");
- try testing.expectEqualSlices(u8, obj.get("newline").?.String, "\n");
- try testing.expectEqualSlices(u8, obj.get("carriagereturn").?.String, "\r");
- try testing.expectEqualSlices(u8, obj.get("tab").?.String, "\t");
- try testing.expectEqualSlices(u8, obj.get("formfeed").?.String, "\x0C");
- try testing.expectEqualSlices(u8, obj.get("backspace").?.String, "\x08");
- try testing.expectEqualSlices(u8, obj.get("doublequote").?.String, "\"");
- try testing.expectEqualSlices(u8, obj.get("unicode").?.String, "ą");
- try testing.expectEqualSlices(u8, obj.get("surrogatepair").?.String, "😂");
-}
-
-test "string copy option" {
- const input =
- \\{
- \\ "noescape": "aą😂",
- \\ "simple": "\\\/\n\r\t\f\b\"",
- \\ "unicode": "\u0105",
- \\ "surrogatepair": "\ud83d\ude02"
- \\}
- ;
-
- var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
- defer arena_allocator.deinit();
- const allocator = arena_allocator.allocator();
-
- var parser = Parser.init(allocator, false);
- const tree_nocopy = try parser.parse(input);
- const obj_nocopy = tree_nocopy.root.Object;
-
- parser = Parser.init(allocator, true);
- const tree_copy = try parser.parse(input);
- const obj_copy = tree_copy.root.Object;
-
- for ([_][]const u8{ "noescape", "simple", "unicode", "surrogatepair" }) |field_name| {
- try testing.expectEqualSlices(u8, obj_nocopy.get(field_name).?.String, obj_copy.get(field_name).?.String);
- }
-
- const nocopy_addr = &obj_nocopy.get("noescape").?.String[0];
- const copy_addr = &obj_copy.get("noescape").?.String[0];
-
- var found_nocopy = false;
- for (input) |_, index| {
- try testing.expect(copy_addr != &input[index]);
- if (nocopy_addr == &input[index]) {
- found_nocopy = true;
- }
- }
- try testing.expect(found_nocopy);
-}
-
pub const StringifyOptions = struct {
pub const Whitespace = struct {
/// How many indentation levels deep are we?
@@ -3213,60 +2345,102 @@ pub fn stringify(
unreachable;
}
-fn teststringify(expected: []const u8, value: anytype, options: StringifyOptions) !void {
- const ValidationWriter = struct {
- const Self = @This();
- pub const Writer = std.io.Writer(*Self, Error, write);
- pub const Error = error{
- TooMuchData,
- DifferentData,
- };
+// Same as `stringify` but accepts an Allocator and stores result in dynamically allocated memory instead of using a Writer.
+// Caller owns returned memory.
+pub fn stringifyAlloc(allocator: std.mem.Allocator, value: anytype, options: StringifyOptions) ![]const u8 {
+ var list = std.ArrayList(u8).init(allocator);
+ errdefer list.deinit();
+ try stringify(value, options, list.writer());
+ return list.toOwnedSlice();
+}
- expected_remaining: []const u8,
+test {
+ if (builtin.zig_backend != .stage1) {
+ // https://github.com/ziglang/zig/issues/8442
+ _ = @import("json/test.zig");
+ }
+ _ = @import("json/write_stream.zig");
+}
- fn init(exp: []const u8) Self {
- return .{ .expected_remaining = exp };
- }
+test "stringify null optional fields" {
+ const MyStruct = struct {
+ optional: ?[]const u8 = null,
+ required: []const u8 = "something",
+ another_optional: ?[]const u8 = null,
+ another_required: []const u8 = "something else",
+ };
+ try teststringify(
+ \\{"optional":null,"required":"something","another_optional":null,"another_required":"something else"}
+ ,
+ MyStruct{},
+ StringifyOptions{},
+ );
+ try teststringify(
+ \\{"required":"something","another_required":"something else"}
+ ,
+ MyStruct{},
+ StringifyOptions{ .emit_null_optional_fields = false },
+ );
- pub fn writer(self: *Self) Writer {
- return .{ .context = self };
- }
+ var ts = TokenStream.init(
+ \\{"required":"something","another_required":"something else"}
+ );
+ try std.testing.expect(try parsesTo(MyStruct, MyStruct{}, &ts, .{
+ .allocator = std.testing.allocator,
+ }));
+}
- fn write(self: *Self, bytes: []const u8) Error!usize {
- if (self.expected_remaining.len < bytes.len) {
- std.debug.print(
- \\====== expected this output: =========
- \\{s}
- \\======== instead found this: =========
- \\{s}
- \\======================================
- , .{
- self.expected_remaining,
- bytes,
- });
- return error.TooMuchData;
- }
- if (!mem.eql(u8, self.expected_remaining[0..bytes.len], bytes)) {
- std.debug.print(
- \\====== expected this output: =========
- \\{s}
- \\======== instead found this: =========
- \\{s}
- \\======================================
- , .{
- self.expected_remaining[0..bytes.len],
- bytes,
- });
- return error.DifferentData;
- }
- self.expected_remaining = self.expected_remaining[bytes.len..];
- return bytes.len;
- }
- };
+test "skipValue" {
+ var ts = TokenStream.init("false");
+ try skipValue(&ts);
+ ts = TokenStream.init("true");
+ try skipValue(&ts);
+ ts = TokenStream.init("null");
+ try skipValue(&ts);
+ ts = TokenStream.init("42");
+ try skipValue(&ts);
+ ts = TokenStream.init("42.0");
+ try skipValue(&ts);
+ ts = TokenStream.init("\"foo\"");
+ try skipValue(&ts);
+ ts = TokenStream.init("[101, 111, 121]");
+ try skipValue(&ts);
+ ts = TokenStream.init("{}");
+ try skipValue(&ts);
+ ts = TokenStream.init("{\"foo\": \"bar\"}");
+ try skipValue(&ts);
- var vos = ValidationWriter.init(expected);
- try stringify(value, options, vos.writer());
- if (vos.expected_remaining.len > 0) return error.NotEnoughData;
+ { // An absurd number of nestings
+ const nestings = StreamingParser.default_max_nestings + 1;
+
+ ts = TokenStream.init("[" ** nestings ++ "]" ** nestings);
+ try testing.expectError(error.TooManyNestedItems, skipValue(&ts));
+ }
+
+ { // Would a number token cause problems in a deeply-nested array?
+ const nestings = StreamingParser.default_max_nestings;
+ const deeply_nested_array = "[" ** nestings ++ "0.118, 999, 881.99, 911.9, 725, 3" ++ "]" ** nestings;
+
+ ts = TokenStream.init(deeply_nested_array);
+ try skipValue(&ts);
+
+ ts = TokenStream.init("[" ++ deeply_nested_array ++ "]");
+ try testing.expectError(error.TooManyNestedItems, skipValue(&ts));
+ }
+
+ // Mismatched brace/square bracket
+ ts = TokenStream.init("[102, 111, 111}");
+ try testing.expectError(error.UnexpectedClosingBrace, skipValue(&ts));
+
+ { // should fail if no value found (e.g. immediate close of object)
+ var empty_object = TokenStream.init("{}");
+ assert(.ObjectBegin == (try empty_object.next()).?);
+ try testing.expectError(error.UnexpectedJsonDepth, skipValue(&empty_object));
+
+ var empty_array = TokenStream.init("[]");
+ assert(.ArrayBegin == (try empty_array.next()).?);
+ try testing.expectError(error.UnexpectedJsonDepth, skipValue(&empty_array));
+ }
}
test "stringify basic types" {
@@ -3423,50 +2597,74 @@ test "stringify vector" {
try teststringify("[1,1]", @splat(2, @as(u32, 1)), StringifyOptions{});
}
-test "stringify null optional fields" {
- const MyStruct = struct {
- optional: ?[]const u8 = null,
- required: []const u8 = "something",
- another_optional: ?[]const u8 = null,
- another_required: []const u8 = "something else",
- };
- try teststringify(
- \\{"optional":null,"required":"something","another_optional":null,"another_required":"something else"}
- ,
- MyStruct{},
- StringifyOptions{},
- );
- try teststringify(
- \\{"required":"something","another_required":"something else"}
- ,
- MyStruct{},
- StringifyOptions{ .emit_null_optional_fields = false },
- );
+fn teststringify(expected: []const u8, value: anytype, options: StringifyOptions) !void {
+ const ValidationWriter = struct {
+ const Self = @This();
+ pub const Writer = std.io.Writer(*Self, Error, write);
+ pub const Error = error{
+ TooMuchData,
+ DifferentData,
+ };
- var ts = TokenStream.init(
- \\{"required":"something","another_required":"something else"}
- );
- try std.testing.expect(try parsesTo(MyStruct, MyStruct{}, &ts, .{
- .allocator = std.testing.allocator,
- }));
-}
+ expected_remaining: []const u8,
-// Same as `stringify` but accepts an Allocator and stores result in dynamically allocated memory instead of using a Writer.
-// Caller owns returned memory.
-pub fn stringifyAlloc(allocator: std.mem.Allocator, value: anytype, options: StringifyOptions) ![]const u8 {
- var list = std.ArrayList(u8).init(allocator);
- errdefer list.deinit();
- try stringify(value, options, list.writer());
- return list.toOwnedSlice();
-}
+ fn init(exp: []const u8) Self {
+ return .{ .expected_remaining = exp };
+ }
-test "stringify alloc" {
- const allocator = std.testing.allocator;
- const expected =
- \\{"foo":"bar","answer":42,"my_friend":"sammy"}
- ;
- const actual = try stringifyAlloc(allocator, .{ .foo = "bar", .answer = 42, .my_friend = "sammy" }, .{});
- defer allocator.free(actual);
+ pub fn writer(self: *Self) Writer {
+ return .{ .context = self };
+ }
- try std.testing.expectEqualStrings(expected, actual);
+ fn write(self: *Self, bytes: []const u8) Error!usize {
+ if (self.expected_remaining.len < bytes.len) {
+ std.debug.print(
+ \\====== expected this output: =========
+ \\{s}
+ \\======== instead found this: =========
+ \\{s}
+ \\======================================
+ , .{
+ self.expected_remaining,
+ bytes,
+ });
+ return error.TooMuchData;
+ }
+ if (!mem.eql(u8, self.expected_remaining[0..bytes.len], bytes)) {
+ std.debug.print(
+ \\====== expected this output: =========
+ \\{s}
+ \\======== instead found this: =========
+ \\{s}
+ \\======================================
+ , .{
+ self.expected_remaining[0..bytes.len],
+ bytes,
+ });
+ return error.DifferentData;
+ }
+ self.expected_remaining = self.expected_remaining[bytes.len..];
+ return bytes.len;
+ }
+ };
+
+ var vos = ValidationWriter.init(expected);
+ try stringify(value, options, vos.writer());
+ if (vos.expected_remaining.len > 0) return error.NotEnoughData;
+}
+
+test "encodesTo" {
+ // same
+ try testing.expectEqual(true, encodesTo("false", "false"));
+ // totally different
+ try testing.expectEqual(false, encodesTo("false", "true"));
+ // different lengths
+ try testing.expectEqual(false, encodesTo("false", "other"));
+ // with escape
+ try testing.expectEqual(true, encodesTo("\\", "\\\\"));
+ try testing.expectEqual(true, encodesTo("with\nescape", "with\\nescape"));
+ // with unicode
+ try testing.expectEqual(true, encodesTo("ą", "\\u0105"));
+ try testing.expectEqual(true, encodesTo("😂", "\\ud83d\\ude02"));
+ try testing.expectEqual(true, encodesTo("withąunicode😂", "with\\u0105unicode\\ud83d\\ude02"));
}
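
A minimal usage sketch for the `stringifyAlloc` helper added above, assuming the default `StringifyOptions`; the struct literal and test name here are illustrative and not part of this commit:

const std = @import("std");

test "stringifyAlloc usage sketch" {
    const allocator = std.testing.allocator;
    // Serialize an anonymous struct into allocator-owned memory.
    const json_text = try std.json.stringifyAlloc(allocator, .{ .name = "zig", .tagged = true }, .{});
    // Caller owns the returned slice and must free it.
    defer allocator.free(json_text);
    try std.testing.expectEqualStrings("{\"name\":\"zig\",\"tagged\":true}", json_text);
}
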
diff --git a/lib/std/json/test.zig b/lib/std/json/test.zig
index c13d6b8417..df0403cf5f 100644
--- a/lib/std/json/test.zig
+++ b/lib/std/json/test.zig
@@ -6,6 +6,23 @@
const std = @import("../std.zig");
const json = std.json;
const testing = std.testing;
+const TokenStream = std.json.TokenStream;
+const parse = std.json.parse;
+const ParseOptions = std.json.ParseOptions;
+const parseFree = std.json.parseFree;
+const Parser = std.json.Parser;
+const mem = std.mem;
+const writeStream = std.json.writeStream;
+const Value = std.json.Value;
+const StringifyOptions = std.json.StringifyOptions;
+const stringify = std.json.stringify;
+const stringifyAlloc = std.json.stringifyAlloc;
+const StreamingParser = std.json.StreamingParser;
+const Token = std.json.Token;
+const validate = std.json.validate;
+const Array = std.json.Array;
+const ObjectMap = std.json.ObjectMap;
+const assert = std.debug.assert;
fn testNonStreaming(s: []const u8) !void {
var p = json.Parser.init(testing.allocator, false);
@@ -2004,3 +2021,809 @@ test "out of UTF-16 range" {
try utf8Error("\"\xfe\x80\x80\x80\"");
try utf8Error("\"\xff\x80\x80\x80\"");
}
+
+test "parse" {
+ var ts = TokenStream.init("false");
+ try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{}));
+ ts = TokenStream.init("true");
+ try testing.expectEqual(true, try parse(bool, &ts, ParseOptions{}));
+ ts = TokenStream.init("1");
+ try testing.expectEqual(@as(u1, 1), try parse(u1, &ts, ParseOptions{}));
+ ts = TokenStream.init("50");
+ try testing.expectError(error.Overflow, parse(u1, &ts, ParseOptions{}));
+ ts = TokenStream.init("42");
+ try testing.expectEqual(@as(u64, 42), try parse(u64, &ts, ParseOptions{}));
+ ts = TokenStream.init("42.0");
+ try testing.expectEqual(@as(f64, 42), try parse(f64, &ts, ParseOptions{}));
+ ts = TokenStream.init("null");
+ try testing.expectEqual(@as(?bool, null), try parse(?bool, &ts, ParseOptions{}));
+ ts = TokenStream.init("true");
+ try testing.expectEqual(@as(?bool, true), try parse(?bool, &ts, ParseOptions{}));
+
+ ts = TokenStream.init("\"foo\"");
+ try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &ts, ParseOptions{}));
+ ts = TokenStream.init("[102, 111, 111]");
+ try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &ts, ParseOptions{}));
+ ts = TokenStream.init("[]");
+ try testing.expectEqual(@as([0]u8, undefined), try parse([0]u8, &ts, ParseOptions{}));
+}
+
+test "parse into enum" {
+ const T = enum(u32) {
+ Foo = 42,
+ Bar,
+ @"with\\escape",
+ };
+ var ts = TokenStream.init("\"Foo\"");
+ try testing.expectEqual(@as(T, .Foo), try parse(T, &ts, ParseOptions{}));
+ ts = TokenStream.init("42");
+ try testing.expectEqual(@as(T, .Foo), try parse(T, &ts, ParseOptions{}));
+ ts = TokenStream.init("\"with\\\\escape\"");
+ try testing.expectEqual(@as(T, .@"with\\escape"), try parse(T, &ts, ParseOptions{}));
+ ts = TokenStream.init("5");
+ try testing.expectError(error.InvalidEnumTag, parse(T, &ts, ParseOptions{}));
+ ts = TokenStream.init("\"Qux\"");
+ try testing.expectError(error.InvalidEnumTag, parse(T, &ts, ParseOptions{}));
+}
+
+test "parse with trailing data" {
+ var ts = TokenStream.init("falsed");
+ try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{ .allow_trailing_data = true }));
+ ts = TokenStream.init("falsed");
+ try testing.expectError(error.InvalidTopLevelTrailing, parse(bool, &ts, ParseOptions{ .allow_trailing_data = false }));
+ // trailing whitespace is okay
+ ts = TokenStream.init("false \n");
+ try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{ .allow_trailing_data = false }));
+}
+
+test "parse into that allocates a slice" {
+ var ts = TokenStream.init("\"foo\"");
+ try testing.expectError(error.AllocatorRequired, parse([]u8, &ts, ParseOptions{}));
+
+ const options = ParseOptions{ .allocator = testing.allocator };
+ {
+ ts = TokenStream.init("\"foo\"");
+ const r = try parse([]u8, &ts, options);
+ defer parseFree([]u8, r, options);
+ try testing.expectEqualSlices(u8, "foo", r);
+ }
+ {
+ ts = TokenStream.init("[102, 111, 111]");
+ const r = try parse([]u8, &ts, options);
+ defer parseFree([]u8, r, options);
+ try testing.expectEqualSlices(u8, "foo", r);
+ }
+ {
+ ts = TokenStream.init("\"with\\\\escape\"");
+ const r = try parse([]u8, &ts, options);
+ defer parseFree([]u8, r, options);
+ try testing.expectEqualSlices(u8, "with\\escape", r);
+ }
+}
+
+test "parse into tagged union" {
+ {
+ const T = union(enum) {
+ int: i32,
+ float: f64,
+ string: []const u8,
+ };
+ var ts = TokenStream.init("1.5");
+ try testing.expectEqual(T{ .float = 1.5 }, try parse(T, &ts, ParseOptions{}));
+ }
+
+ { // failing allocations should be bubbled up instantly without trying next member
+ var fail_alloc = testing.FailingAllocator.init(testing.allocator, 0);
+ const options = ParseOptions{ .allocator = fail_alloc.allocator() };
+ const T = union(enum) {
+ // both fields here match the input
+ string: []const u8,
+ array: [3]u8,
+ };
+ var ts = TokenStream.init("[1,2,3]");
+ try testing.expectError(error.OutOfMemory, parse(T, &ts, options));
+ }
+
+ {
+ // if multiple matches possible, takes first option
+ const T = union(enum) {
+ x: u8,
+ y: u8,
+ };
+ var ts = TokenStream.init("42");
+ try testing.expectEqual(T{ .x = 42 }, try parse(T, &ts, ParseOptions{}));
+ }
+
+ { // needs to back out when first union member doesn't match
+ const T = union(enum) {
+ A: struct { x: u32 },
+ B: struct { y: u32 },
+ };
+ var ts = TokenStream.init("{\"y\":42}");
+ try testing.expectEqual(T{ .B = .{ .y = 42 } }, try parse(T, &ts, ParseOptions{}));
+ }
+}
+
+test "parse union bubbles up AllocatorRequired" {
+ { // string member first in union (and not matching)
+ const T = union(enum) {
+ string: []const u8,
+ int: i32,
+ };
+ var ts = TokenStream.init("42");
+ try testing.expectError(error.AllocatorRequired, parse(T, &ts, ParseOptions{}));
+ }
+
+ { // string member not first in union (and matching)
+ const T = union(enum) {
+ int: i32,
+ float: f64,
+ string: []const u8,
+ };
+ var ts = TokenStream.init("\"foo\"");
+ try testing.expectError(error.AllocatorRequired, parse(T, &ts, ParseOptions{}));
+ }
+}
+
+test "parseFree descends into tagged union" {
+ var fail_alloc = testing.FailingAllocator.init(testing.allocator, 1);
+ const options = ParseOptions{ .allocator = fail_alloc.allocator() };
+ const T = union(enum) {
+ int: i32,
+ float: f64,
+ string: []const u8,
+ };
+ // use a string with unicode escape so we know result can't be a reference to global constant
+ var ts = TokenStream.init("\"with\\u0105unicode\"");
+ const r = try parse(T, &ts, options);
+ try testing.expectEqual(std.meta.Tag(T).string, @as(std.meta.Tag(T), r));
+ try testing.expectEqualSlices(u8, "withąunicode", r.string);
+ try testing.expectEqual(@as(usize, 0), fail_alloc.deallocations);
+ parseFree(T, r, options);
+ try testing.expectEqual(@as(usize, 1), fail_alloc.deallocations);
+}
+
+test "parse with comptime field" {
+ {
+ const T = struct {
+ comptime a: i32 = 0,
+ b: bool,
+ };
+ var ts = TokenStream.init(
+ \\{
+ \\ "a": 0,
+ \\ "b": true
+ \\}
+ );
+ try testing.expectEqual(T{ .a = 0, .b = true }, try parse(T, &ts, ParseOptions{}));
+ }
+
+ { // string comptime values currently require an allocator
+ const T = union(enum) {
+ foo: struct {
+ comptime kind: []const u8 = "boolean",
+ b: bool,
+ },
+ bar: struct {
+ comptime kind: []const u8 = "float",
+ b: f64,
+ },
+ };
+
+ const options = ParseOptions{
+ .allocator = std.testing.allocator,
+ };
+
+ var ts = TokenStream.init(
+ \\{
+ \\ "kind": "float",
+ \\ "b": 1.0
+ \\}
+ );
+ const r = try parse(T, &ts, options);
+
+ // check that parseFree doesn't try to free comptime fields
+ parseFree(T, r, options);
+ }
+}
+
+test "parse into struct with no fields" {
+ const T = struct {};
+ var ts = TokenStream.init("{}");
+ try testing.expectEqual(T{}, try parse(T, &ts, ParseOptions{}));
+}
+
+test "parse into struct with misc fields" {
+ @setEvalBranchQuota(10000);
+ const options = ParseOptions{ .allocator = testing.allocator };
+ const T = struct {
+ int: i64,
+ float: f64,
+ @"with\\escape": bool,
+ @"withąunicode😂": bool,
+ language: []const u8,
+ optional: ?bool,
+ default_field: i32 = 42,
+ static_array: [3]f64,
+ dynamic_array: []f64,
+
+ complex: struct {
+ nested: []const u8,
+ },
+
+ veryComplex: []struct {
+ foo: []const u8,
+ },
+
+ a_union: Union,
+ const Union = union(enum) {
+ x: u8,
+ float: f64,
+ string: []const u8,
+ };
+ };
+ var ts = TokenStream.init(
+ \\{
+ \\ "int": 420,
+ \\ "float": 3.14,
+ \\ "with\\escape": true,
+ \\ "with\u0105unicode\ud83d\ude02": false,
+ \\ "language": "zig",
+ \\ "optional": null,
+ \\ "static_array": [66.6, 420.420, 69.69],
+ \\ "dynamic_array": [66.6, 420.420, 69.69],
+ \\ "complex": {
+ \\ "nested": "zig"
+ \\ },
+ \\ "veryComplex": [
+ \\ {
+ \\ "foo": "zig"
+ \\ }, {
+ \\ "foo": "rocks"
+ \\ }
+ \\ ],
+ \\ "a_union": 100000
+ \\}
+ );
+ const r = try parse(T, &ts, options);
+ defer parseFree(T, r, options);
+ try testing.expectEqual(@as(i64, 420), r.int);
+ try testing.expectEqual(@as(f64, 3.14), r.float);
+ try testing.expectEqual(true, r.@"with\\escape");
+ try testing.expectEqual(false, r.@"withąunicode😂");
+ try testing.expectEqualSlices(u8, "zig", r.language);
+ try testing.expectEqual(@as(?bool, null), r.optional);
+ try testing.expectEqual(@as(i32, 42), r.default_field);
+ try testing.expectEqual(@as(f64, 66.6), r.static_array[0]);
+ try testing.expectEqual(@as(f64, 420.420), r.static_array[1]);
+ try testing.expectEqual(@as(f64, 69.69), r.static_array[2]);
+ try testing.expectEqual(@as(usize, 3), r.dynamic_array.len);
+ try testing.expectEqual(@as(f64, 66.6), r.dynamic_array[0]);
+ try testing.expectEqual(@as(f64, 420.420), r.dynamic_array[1]);
+ try testing.expectEqual(@as(f64, 69.69), r.dynamic_array[2]);
+ try testing.expectEqualSlices(u8, r.complex.nested, "zig");
+ try testing.expectEqualSlices(u8, "zig", r.veryComplex[0].foo);
+ try testing.expectEqualSlices(u8, "rocks", r.veryComplex[1].foo);
+ try testing.expectEqual(T.Union{ .float = 100000 }, r.a_union);
+}
+
+test "parse into struct with strings and arrays with sentinels" {
+ @setEvalBranchQuota(10000);
+ const options = ParseOptions{ .allocator = testing.allocator };
+ const T = struct {
+ language: [:0]const u8,
+ language_without_sentinel: []const u8,
+ data: [:99]const i32,
+ simple_data: []const i32,
+ };
+ var ts = TokenStream.init(
+ \\{
+ \\ "language": "zig",
+ \\ "language_without_sentinel": "zig again!",
+ \\ "data": [1, 2, 3],
+ \\ "simple_data": [4, 5, 6]
+ \\}
+ );
+ const r = try parse(T, &ts, options);
+ defer parseFree(T, r, options);
+
+ try testing.expectEqualSentinel(u8, 0, "zig", r.language);
+
+ const data = [_:99]i32{ 1, 2, 3 };
+ try testing.expectEqualSentinel(i32, 99, data[0..data.len], r.data);
+
+ // Make sure that arrays who aren't supposed to have a sentinel still parse without one.
+ try testing.expectEqual(@as(?i32, null), std.meta.sentinel(@TypeOf(r.simple_data)));
+ try testing.expectEqual(@as(?u8, null), std.meta.sentinel(@TypeOf(r.language_without_sentinel)));
+}
+
+test "parse into struct with duplicate field" {
+ // allow allocator to detect double frees by keeping bucket in use
+ const ballast = try testing.allocator.alloc(u64, 1);
+ defer testing.allocator.free(ballast);
+
+ const options_first = ParseOptions{ .allocator = testing.allocator, .duplicate_field_behavior = .UseFirst };
+
+ const options_last = ParseOptions{
+ .allocator = testing.allocator,
+ .duplicate_field_behavior = .UseLast,
+ };
+
+ const str = "{ \"a\": 1, \"a\": 0.25 }";
+
+ const T1 = struct { a: *u64 };
+ // both .UseFirst and .UseLast should fail because second "a" value isn't a u64
+ var ts = TokenStream.init(str);
+ try testing.expectError(error.InvalidNumber, parse(T1, &ts, options_first));
+ ts = TokenStream.init(str);
+ try testing.expectError(error.InvalidNumber, parse(T1, &ts, options_last));
+
+ const T2 = struct { a: f64 };
+ ts = TokenStream.init(str);
+ try testing.expectEqual(T2{ .a = 1.0 }, try parse(T2, &ts, options_first));
+ ts = TokenStream.init(str);
+ try testing.expectEqual(T2{ .a = 0.25 }, try parse(T2, &ts, options_last));
+
+ const T3 = struct { comptime a: f64 = 1.0 };
+ // .UseFirst should succeed because second "a" value is unconditionally ignored (even though != 1.0)
+ const t3 = T3{ .a = 1.0 };
+ ts = TokenStream.init(str);
+ try testing.expectEqual(t3, try parse(T3, &ts, options_first));
+ // .UseLast should fail because second "a" value is 0.25 which is not equal to default value of 1.0
+ ts = TokenStream.init(str);
+ try testing.expectError(error.UnexpectedValue, parse(T3, &ts, options_last));
+}
+
+test "parse into struct ignoring unknown fields" {
+ const T = struct {
+ int: i64,
+ language: []const u8,
+ };
+
+ const ops = ParseOptions{
+ .allocator = testing.allocator,
+ .ignore_unknown_fields = true,
+ };
+
+ var ts = TokenStream.init(
+ \\{
+ \\ "int": 420,
+ \\ "float": 3.14,
+ \\ "with\\escape": true,
+ \\ "with\u0105unicode\ud83d\ude02": false,
+ \\ "optional": null,
+ \\ "static_array": [66.6, 420.420, 69.69],
+ \\ "dynamic_array": [66.6, 420.420, 69.69],
+ \\ "complex": {
+ \\ "nested": "zig"
+ \\ },
+ \\ "veryComplex": [
+ \\ {
+ \\ "foo": "zig"
+ \\ }, {
+ \\ "foo": "rocks"
+ \\ }
+ \\ ],
+ \\ "a_union": 100000,
+ \\ "language": "zig"
+ \\}
+ );
+ const r = try parse(T, &ts, ops);
+ defer parseFree(T, r, ops);
+
+ try testing.expectEqual(@as(i64, 420), r.int);
+ try testing.expectEqualSlices(u8, "zig", r.language);
+}
+
+const ParseIntoRecursiveUnionDefinitionValue = union(enum) {
+ integer: i64,
+ array: []const ParseIntoRecursiveUnionDefinitionValue,
+};
+
+test "parse into recursive union definition" {
+ const T = struct {
+ values: ParseIntoRecursiveUnionDefinitionValue,
+ };
+ const ops = ParseOptions{ .allocator = testing.allocator };
+
+ var ts = TokenStream.init("{\"values\":[58]}");
+ const r = try parse(T, &ts, ops);
+ defer parseFree(T, r, ops);
+
+ try testing.expectEqual(@as(i64, 58), r.values.array[0].integer);
+}
+
+const ParseIntoDoubleRecursiveUnionValueFirst = union(enum) {
+ integer: i64,
+ array: []const ParseIntoDoubleRecursiveUnionValueSecond,
+};
+
+const ParseIntoDoubleRecursiveUnionValueSecond = union(enum) {
+ boolean: bool,
+ array: []const ParseIntoDoubleRecursiveUnionValueFirst,
+};
+
+test "parse into double recursive union definition" {
+ const T = struct {
+ values: ParseIntoDoubleRecursiveUnionValueFirst,
+ };
+ const ops = ParseOptions{ .allocator = testing.allocator };
+
+ var ts = TokenStream.init("{\"values\":[[58]]}");
+ const r = try parse(T, &ts, ops);
+ defer parseFree(T, r, ops);
+
+ try testing.expectEqual(@as(i64, 58), r.values.array[0].array[0].integer);
+}
+
+test "json.parser.dynamic" {
+ var p = Parser.init(testing.allocator, false);
+ defer p.deinit();
+
+ const s =
+ \\{
+ \\ "Image": {
+ \\ "Width": 800,
+ \\ "Height": 600,
+ \\ "Title": "View from 15th Floor",
+ \\ "Thumbnail": {
+ \\ "Url": "http://www.example.com/image/481989943",
+ \\ "Height": 125,
+ \\ "Width": 100
+ \\ },
+ \\ "Animated" : false,
+ \\ "IDs": [116, 943, 234, 38793],
+ \\ "ArrayOfObject": [{"n": "m"}],
+ \\ "double": 1.3412,
+ \\ "LargeInt": 18446744073709551615
+ \\ }
+ \\}
+ ;
+
+ var tree = try p.parse(s);
+ defer tree.deinit();
+
+ var root = tree.root;
+
+ var image = root.Object.get("Image").?;
+
+ const width = image.Object.get("Width").?;
+ try testing.expect(width.Integer == 800);
+
+ const height = image.Object.get("Height").?;
+ try testing.expect(height.Integer == 600);
+
+ const title = image.Object.get("Title").?;
+ try testing.expect(mem.eql(u8, title.String, "View from 15th Floor"));
+
+ const animated = image.Object.get("Animated").?;
+ try testing.expect(animated.Bool == false);
+
+ const array_of_object = image.Object.get("ArrayOfObject").?;
+ try testing.expect(array_of_object.Array.items.len == 1);
+
+ const obj0 = array_of_object.Array.items[0].Object.get("n").?;
+ try testing.expect(mem.eql(u8, obj0.String, "m"));
+
+ const double = image.Object.get("double").?;
+ try testing.expect(double.Float == 1.3412);
+
+ const large_int = image.Object.get("LargeInt").?;
+ try testing.expect(mem.eql(u8, large_int.NumberString, "18446744073709551615"));
+}
+
+test "write json then parse it" {
+ var out_buffer: [1000]u8 = undefined;
+
+ var fixed_buffer_stream = std.io.fixedBufferStream(&out_buffer);
+ const out_stream = fixed_buffer_stream.writer();
+ var jw = writeStream(out_stream, 4);
+
+ try jw.beginObject();
+
+ try jw.objectField("f");
+ try jw.emitBool(false);
+
+ try jw.objectField("t");
+ try jw.emitBool(true);
+
+ try jw.objectField("int");
+ try jw.emitNumber(1234);
+
+ try jw.objectField("array");
+ try jw.beginArray();
+
+ try jw.arrayElem();
+ try jw.emitNull();
+
+ try jw.arrayElem();
+ try jw.emitNumber(12.34);
+
+ try jw.endArray();
+
+ try jw.objectField("str");
+ try jw.emitString("hello");
+
+ try jw.endObject();
+
+ var parser = Parser.init(testing.allocator, false);
+ defer parser.deinit();
+ var tree = try parser.parse(fixed_buffer_stream.getWritten());
+ defer tree.deinit();
+
+ try testing.expect(tree.root.Object.get("f").?.Bool == false);
+ try testing.expect(tree.root.Object.get("t").?.Bool == true);
+ try testing.expect(tree.root.Object.get("int").?.Integer == 1234);
+ try testing.expect(tree.root.Object.get("array").?.Array.items[0].Null == {});
+ try testing.expect(tree.root.Object.get("array").?.Array.items[1].Float == 12.34);
+ try testing.expect(mem.eql(u8, tree.root.Object.get("str").?.String, "hello"));
+}
+
+fn testParse(arena_allocator: std.mem.Allocator, json_str: []const u8) !Value {
+ var p = Parser.init(arena_allocator, false);
+ return (try p.parse(json_str)).root;
+}
+
+test "parsing empty string gives appropriate error" {
+ var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
+ defer arena_allocator.deinit();
+ try testing.expectError(error.UnexpectedEndOfJson, testParse(arena_allocator.allocator(), ""));
+}
+
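+// A float appearing earlier in the document must not cause later whole numbers
+// to be parsed as floats.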
+test "integer after float has proper type" {
+ var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
+ defer arena_allocator.deinit();
+ const parsed = try testParse(arena_allocator.allocator(),
+ \\{
+ \\ "float": 3.14,
+ \\ "ints": [1, 2, 3]
+ \\}
+ );
+ try std.testing.expect(parsed.Object.get("ints").?.Array.items[0] == .Integer);
+}
+
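+// Exponent notation is accepted for integer targets only when the value is a
+// whole number that fits the target type.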
+test "parse exponential into int" {
+ const T = struct { int: i64 };
+ var ts = TokenStream.init("{ \"int\": 4.2e2 }");
+ const r = try parse(T, &ts, ParseOptions{});
+ try testing.expectEqual(@as(i64, 420), r.int);
+ ts = TokenStream.init("{ \"int\": 0.042e2 }");
+ try testing.expectError(error.InvalidNumber, parse(T, &ts, ParseOptions{}));
+ ts = TokenStream.init("{ \"int\": 18446744073709551616.0 }");
+ try testing.expectError(error.Overflow, parse(T, &ts, ParseOptions{}));
+}
+
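+// Every JSON string escape, including \u escapes and a surrogate pair, decodes
+// to the expected bytes.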
+test "escaped characters" {
+ var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
+ defer arena_allocator.deinit();
+ const input =
+ \\{
+ \\ "backslash": "\\",
+ \\ "forwardslash": "\/",
+ \\ "newline": "\n",
+ \\ "carriagereturn": "\r",
+ \\ "tab": "\t",
+ \\ "formfeed": "\f",
+ \\ "backspace": "\b",
+ \\ "doublequote": "\"",
+ \\ "unicode": "\u0105",
+ \\ "surrogatepair": "\ud83d\ude02"
+ \\}
+ ;
+
+ const obj = (try testParse(arena_allocator.allocator(), input)).Object;
+
+ try testing.expectEqualSlices(u8, obj.get("backslash").?.String, "\\");
+ try testing.expectEqualSlices(u8, obj.get("forwardslash").?.String, "/");
+ try testing.expectEqualSlices(u8, obj.get("newline").?.String, "\n");
+ try testing.expectEqualSlices(u8, obj.get("carriagereturn").?.String, "\r");
+ try testing.expectEqualSlices(u8, obj.get("tab").?.String, "\t");
+ try testing.expectEqualSlices(u8, obj.get("formfeed").?.String, "\x0C");
+ try testing.expectEqualSlices(u8, obj.get("backspace").?.String, "\x08");
+ try testing.expectEqualSlices(u8, obj.get("doublequote").?.String, "\"");
+ try testing.expectEqualSlices(u8, obj.get("unicode").?.String, "ą");
+ try testing.expectEqualSlices(u8, obj.get("surrogatepair").?.String, "😂");
+}
+
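+// With the copy option disabled the parser may return strings that alias the
+// input buffer; with it enabled every string is duplicated into the allocator.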
+test "string copy option" {
+ const input =
+ \\{
+ \\ "noescape": "aą😂",
+ \\ "simple": "\\\/\n\r\t\f\b\"",
+ \\ "unicode": "\u0105",
+ \\ "surrogatepair": "\ud83d\ude02"
+ \\}
+ ;
+
+ var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
+ defer arena_allocator.deinit();
+ const allocator = arena_allocator.allocator();
+
+ var parser = Parser.init(allocator, false);
+ const tree_nocopy = try parser.parse(input);
+ const obj_nocopy = tree_nocopy.root.Object;
+
+ parser = Parser.init(allocator, true);
+ const tree_copy = try parser.parse(input);
+ const obj_copy = tree_copy.root.Object;
+
+ for ([_][]const u8{ "noescape", "simple", "unicode", "surrogatepair" }) |field_name| {
+ try testing.expectEqualSlices(u8, obj_nocopy.get(field_name).?.String, obj_copy.get(field_name).?.String);
+ }
+
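+    // The no-copy string should point into `input`, while the copied string
+    // must not alias it anywhere.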
+ const nocopy_addr = &obj_nocopy.get("noescape").?.String[0];
+ const copy_addr = &obj_copy.get("noescape").?.String[0];
+
+ var found_nocopy = false;
+ for (input) |_, index| {
+ try testing.expect(copy_addr != &input[index]);
+ if (nocopy_addr == &input[index]) {
+ found_nocopy = true;
+ }
+ }
+ try testing.expect(found_nocopy);
+}
+
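+// stringifyAlloc returns a heap-allocated string that the caller must free.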
+test "stringify alloc" {
+ const allocator = std.testing.allocator;
+ const expected =
+ \\{"foo":"bar","answer":42,"my_friend":"sammy"}
+ ;
+ const actual = try stringifyAlloc(allocator, .{ .foo = "bar", .answer = 42, .my_friend = "sammy" }, .{});
+ defer allocator.free(actual);
+
+ try std.testing.expectEqualStrings(expected, actual);
+}
+
+test "json.serialize issue #5959" {
+ var parser: StreamingParser = undefined;
+    // StreamingParser has multiple internal fields set to undefined, which causes issues when
+    // using expectEqual, so these are zeroed first. We only test for equality here because this
+    // is a known small reproduction that hits the relevant LLVM issue.
+ std.mem.set(u8, @ptrCast([*]u8, &parser)[0..@sizeOf(StreamingParser)], 0);
+ try std.testing.expectEqual(parser, parser);
+}
+
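+// Test helper: asserts that the next token produced by the stream has the
+// expected tag.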
+fn checkNext(p: *TokenStream, id: std.meta.Tag(Token)) !void {
+ const token = (p.next() catch unreachable).?;
+ try testing.expect(std.meta.activeTag(token) == id);
+}
+
+test "json.token" {
+ const s =
+ \\{
+ \\ "Image": {
+ \\ "Width": 800,
+ \\ "Height": 600,
+ \\ "Title": "View from 15th Floor",
+ \\ "Thumbnail": {
+ \\ "Url": "http://www.example.com/image/481989943",
+ \\ "Height": 125,
+ \\ "Width": 100
+ \\ },
+ \\ "Animated" : false,
+ \\ "IDs": [116, 943, 234, 38793]
+ \\ }
+ \\}
+ ;
+
+ var p = TokenStream.init(s);
+
+ try checkNext(&p, .ObjectBegin);
+ try checkNext(&p, .String); // Image
+ try checkNext(&p, .ObjectBegin);
+ try checkNext(&p, .String); // Width
+ try checkNext(&p, .Number);
+ try checkNext(&p, .String); // Height
+ try checkNext(&p, .Number);
+ try checkNext(&p, .String); // Title
+ try checkNext(&p, .String);
+ try checkNext(&p, .String); // Thumbnail
+ try checkNext(&p, .ObjectBegin);
+ try checkNext(&p, .String); // Url
+ try checkNext(&p, .String);
+ try checkNext(&p, .String); // Height
+ try checkNext(&p, .Number);
+ try checkNext(&p, .String); // Width
+ try checkNext(&p, .Number);
+ try checkNext(&p, .ObjectEnd);
+ try checkNext(&p, .String); // Animated
+ try checkNext(&p, .False);
+ try checkNext(&p, .String); // IDs
+ try checkNext(&p, .ArrayBegin);
+ try checkNext(&p, .Number);
+ try checkNext(&p, .Number);
+ try checkNext(&p, .Number);
+ try checkNext(&p, .Number);
+ try checkNext(&p, .ArrayEnd);
+ try checkNext(&p, .ObjectEnd);
+ try checkNext(&p, .ObjectEnd);
+
+ try testing.expect((try p.next()) == null);
+}
+
+test "json.token mismatched close" {
+ var p = TokenStream.init("[102, 111, 111 }");
+ try checkNext(&p, .ArrayBegin);
+ try checkNext(&p, .Number);
+ try checkNext(&p, .Number);
+ try checkNext(&p, .Number);
+ try testing.expectError(error.UnexpectedClosingBrace, p.next());
+}
+
+test "json.token premature object close" {
+ var p = TokenStream.init("{ \"key\": }");
+ try checkNext(&p, .ObjectBegin);
+ try checkNext(&p, .String);
+ try testing.expectError(error.InvalidValueBegin, p.next());
+}
+
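+// validate reports whether the input is well-formed JSON as a bool.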
+test "json.validate" {
+ try testing.expectEqual(true, validate("{}"));
+ try testing.expectEqual(true, validate("[]"));
+ try testing.expectEqual(true, validate("[{[[[[{}]]]]}]"));
+ try testing.expectEqual(false, validate("{]"));
+ try testing.expectEqual(false, validate("[}"));
+ try testing.expectEqual(false, validate("{{{{[]}}}]"));
+}
+
+test "Value.jsonStringify" {
+ {
+ var buffer: [10]u8 = undefined;
+ var fbs = std.io.fixedBufferStream(&buffer);
+ try @as(Value, .Null).jsonStringify(.{}, fbs.writer());
+ try testing.expectEqualSlices(u8, fbs.getWritten(), "null");
+ }
+ {
+ var buffer: [10]u8 = undefined;
+ var fbs = std.io.fixedBufferStream(&buffer);
+ try (Value{ .Bool = true }).jsonStringify(.{}, fbs.writer());
+ try testing.expectEqualSlices(u8, fbs.getWritten(), "true");
+ }
+ {
+ var buffer: [10]u8 = undefined;
+ var fbs = std.io.fixedBufferStream(&buffer);
+ try (Value{ .Integer = 42 }).jsonStringify(.{}, fbs.writer());
+ try testing.expectEqualSlices(u8, fbs.getWritten(), "42");
+ }
+ {
+ var buffer: [10]u8 = undefined;
+ var fbs = std.io.fixedBufferStream(&buffer);
+ try (Value{ .NumberString = "43" }).jsonStringify(.{}, fbs.writer());
+ try testing.expectEqualSlices(u8, fbs.getWritten(), "43");
+ }
+ {
+ var buffer: [10]u8 = undefined;
+ var fbs = std.io.fixedBufferStream(&buffer);
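+        // The default float formatting is scientific notation, hence "4.2e+01".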
+ try (Value{ .Float = 42 }).jsonStringify(.{}, fbs.writer());
+ try testing.expectEqualSlices(u8, fbs.getWritten(), "4.2e+01");
+ }
+ {
+ var buffer: [10]u8 = undefined;
+ var fbs = std.io.fixedBufferStream(&buffer);
+ try (Value{ .String = "weeee" }).jsonStringify(.{}, fbs.writer());
+ try testing.expectEqualSlices(u8, fbs.getWritten(), "\"weeee\"");
+ }
+ {
+ var buffer: [10]u8 = undefined;
+ var fbs = std.io.fixedBufferStream(&buffer);
+ var vals = [_]Value{
+ .{ .Integer = 1 },
+ .{ .Integer = 2 },
+ .{ .NumberString = "3" },
+ };
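+        // Passing `undefined` as the allocator is acceptable here: the list is
+        // only read while stringifying, never resized or freed.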
+ try (Value{
+ .Array = Array.fromOwnedSlice(undefined, &vals),
+ }).jsonStringify(.{}, fbs.writer());
+ try testing.expectEqualSlices(u8, fbs.getWritten(), "[1,2,3]");
+ }
+ {
+ var buffer: [10]u8 = undefined;
+ var fbs = std.io.fixedBufferStream(&buffer);
+ var obj = ObjectMap.init(testing.allocator);
+ defer obj.deinit();
+ try obj.putNoClobber("a", .{ .String = "b" });
+ try (Value{ .Object = obj }).jsonStringify(.{}, fbs.writer());
+ try testing.expectEqualSlices(u8, fbs.getWritten(), "{\"a\":\"b\"}");
+ }
+}