Merge pull request #24505 from ziglang/json
update std.json and std.zon to new I/O API
This commit is contained in:
@@ -290,12 +290,14 @@ pub fn main() !void {
|
||||
};
|
||||
defer depfile.close();
|
||||
|
||||
const depfile_writer = depfile.deprecatedWriter();
|
||||
var depfile_buffered_writer = std.io.bufferedWriter(depfile_writer);
|
||||
var depfile_buffer: [1024]u8 = undefined;
|
||||
var depfile_writer = depfile.writer(&depfile_buffer);
|
||||
switch (options.depfile_fmt) {
|
||||
.json => {
|
||||
var write_stream = std.json.writeStream(depfile_buffered_writer.writer(), .{ .whitespace = .indent_2 });
|
||||
defer write_stream.deinit();
|
||||
var write_stream: std.json.Stringify = .{
|
||||
.writer = &depfile_writer.interface,
|
||||
.options = .{ .whitespace = .indent_2 },
|
||||
};
|
||||
|
||||
try write_stream.beginArray();
|
||||
for (dependencies_list.items) |dep_path| {
|
||||
@@ -304,7 +306,7 @@ pub fn main() !void {
|
||||
try write_stream.endArray();
|
||||
},
|
||||
}
|
||||
try depfile_buffered_writer.flush();
|
||||
try depfile_writer.interface.flush();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -161,17 +161,19 @@ pub fn formatEscapeString(path: Path, writer: *std.io.Writer) std.io.Writer.Erro
|
||||
}
|
||||
}
|
||||
|
||||
/// Deprecated, use double quoted escape to print paths.
|
||||
pub fn fmtEscapeChar(path: Path) std.fmt.Formatter(Path, formatEscapeChar) {
|
||||
return .{ .data = path };
|
||||
}
|
||||
|
||||
/// Deprecated, use double quoted escape to print paths.
|
||||
pub fn formatEscapeChar(path: Path, writer: *std.io.Writer) std.io.Writer.Error!void {
|
||||
if (path.root_dir.path) |p| {
|
||||
try std.zig.charEscape(p, writer);
|
||||
if (path.sub_path.len > 0) try std.zig.charEscape(fs.path.sep_str, writer);
|
||||
for (p) |byte| try std.zig.charEscape(byte, writer);
|
||||
if (path.sub_path.len > 0) try writer.writeByte(fs.path.sep);
|
||||
}
|
||||
if (path.sub_path.len > 0) {
|
||||
try std.zig.charEscape(path.sub_path, writer);
|
||||
for (path.sub_path) |byte| try std.zig.charEscape(byte, writer);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -990,9 +990,9 @@ pub fn discardDelimiterLimit(r: *Reader, delimiter: u8, limit: Limit) DiscardDel
|
||||
/// Returns `error.EndOfStream` if and only if there are fewer than `n` bytes
|
||||
/// remaining.
|
||||
///
|
||||
/// Asserts buffer capacity is at least `n`.
|
||||
/// If the end of stream is not encountered, asserts buffer capacity is at
|
||||
/// least `n`.
|
||||
pub fn fill(r: *Reader, n: usize) Error!void {
|
||||
assert(n <= r.buffer.len);
|
||||
if (r.seek + n <= r.end) {
|
||||
@branchHint(.likely);
|
||||
return;
|
||||
|
||||
101
lib/std/json.zig
101
lib/std/json.zig
@@ -10,8 +10,8 @@
|
||||
//! The high-level `stringify` serializes a Zig or `Value` type into JSON.
|
||||
|
||||
const builtin = @import("builtin");
|
||||
const testing = @import("std").testing;
|
||||
const ArrayList = @import("std").ArrayList;
|
||||
const std = @import("std");
|
||||
const testing = std.testing;
|
||||
|
||||
test Scanner {
|
||||
var scanner = Scanner.initCompleteInput(testing.allocator, "{\"foo\": 123}\n");
|
||||
@@ -41,11 +41,13 @@ test Value {
|
||||
try testing.expectEqualSlices(u8, "goes", parsed.value.object.get("anything").?.string);
|
||||
}
|
||||
|
||||
test writeStream {
|
||||
var out = ArrayList(u8).init(testing.allocator);
|
||||
test Stringify {
|
||||
var out: std.io.Writer.Allocating = .init(testing.allocator);
|
||||
var write_stream: Stringify = .{
|
||||
.writer = &out.writer,
|
||||
.options = .{ .whitespace = .indent_2 },
|
||||
};
|
||||
defer out.deinit();
|
||||
var write_stream = writeStream(out.writer(), .{ .whitespace = .indent_2 });
|
||||
defer write_stream.deinit();
|
||||
try write_stream.beginObject();
|
||||
try write_stream.objectField("foo");
|
||||
try write_stream.write(123);
|
||||
@@ -55,16 +57,7 @@ test writeStream {
|
||||
\\ "foo": 123
|
||||
\\}
|
||||
;
|
||||
try testing.expectEqualSlices(u8, expected, out.items);
|
||||
}
|
||||
|
||||
test stringify {
|
||||
var out = ArrayList(u8).init(testing.allocator);
|
||||
defer out.deinit();
|
||||
|
||||
const T = struct { a: i32, b: []const u8 };
|
||||
try stringify(T{ .a = 123, .b = "xy" }, .{}, out.writer());
|
||||
try testing.expectEqualSlices(u8, "{\"a\":123,\"b\":\"xy\"}", out.items);
|
||||
try testing.expectEqualSlices(u8, expected, out.getWritten());
|
||||
}
|
||||
|
||||
pub const ObjectMap = @import("json/dynamic.zig").ObjectMap;
|
||||
@@ -73,18 +66,18 @@ pub const Value = @import("json/dynamic.zig").Value;
|
||||
|
||||
pub const ArrayHashMap = @import("json/hashmap.zig").ArrayHashMap;
|
||||
|
||||
pub const validate = @import("json/scanner.zig").validate;
|
||||
pub const Error = @import("json/scanner.zig").Error;
|
||||
pub const reader = @import("json/scanner.zig").reader;
|
||||
pub const default_buffer_size = @import("json/scanner.zig").default_buffer_size;
|
||||
pub const Token = @import("json/scanner.zig").Token;
|
||||
pub const TokenType = @import("json/scanner.zig").TokenType;
|
||||
pub const Diagnostics = @import("json/scanner.zig").Diagnostics;
|
||||
pub const AllocWhen = @import("json/scanner.zig").AllocWhen;
|
||||
pub const default_max_value_len = @import("json/scanner.zig").default_max_value_len;
|
||||
pub const Reader = @import("json/scanner.zig").Reader;
|
||||
pub const Scanner = @import("json/scanner.zig").Scanner;
|
||||
pub const isNumberFormattedLikeAnInteger = @import("json/scanner.zig").isNumberFormattedLikeAnInteger;
|
||||
pub const Scanner = @import("json/Scanner.zig");
|
||||
pub const validate = Scanner.validate;
|
||||
pub const Error = Scanner.Error;
|
||||
pub const reader = Scanner.reader;
|
||||
pub const default_buffer_size = Scanner.default_buffer_size;
|
||||
pub const Token = Scanner.Token;
|
||||
pub const TokenType = Scanner.TokenType;
|
||||
pub const Diagnostics = Scanner.Diagnostics;
|
||||
pub const AllocWhen = Scanner.AllocWhen;
|
||||
pub const default_max_value_len = Scanner.default_max_value_len;
|
||||
pub const Reader = Scanner.Reader;
|
||||
pub const isNumberFormattedLikeAnInteger = Scanner.isNumberFormattedLikeAnInteger;
|
||||
|
||||
pub const ParseOptions = @import("json/static.zig").ParseOptions;
|
||||
pub const Parsed = @import("json/static.zig").Parsed;
|
||||
@@ -99,27 +92,49 @@ pub const innerParseFromValue = @import("json/static.zig").innerParseFromValue;
|
||||
pub const ParseError = @import("json/static.zig").ParseError;
|
||||
pub const ParseFromValueError = @import("json/static.zig").ParseFromValueError;
|
||||
|
||||
pub const StringifyOptions = @import("json/stringify.zig").StringifyOptions;
|
||||
pub const stringify = @import("json/stringify.zig").stringify;
|
||||
pub const stringifyMaxDepth = @import("json/stringify.zig").stringifyMaxDepth;
|
||||
pub const stringifyArbitraryDepth = @import("json/stringify.zig").stringifyArbitraryDepth;
|
||||
pub const stringifyAlloc = @import("json/stringify.zig").stringifyAlloc;
|
||||
pub const writeStream = @import("json/stringify.zig").writeStream;
|
||||
pub const writeStreamMaxDepth = @import("json/stringify.zig").writeStreamMaxDepth;
|
||||
pub const writeStreamArbitraryDepth = @import("json/stringify.zig").writeStreamArbitraryDepth;
|
||||
pub const WriteStream = @import("json/stringify.zig").WriteStream;
|
||||
pub const encodeJsonString = @import("json/stringify.zig").encodeJsonString;
|
||||
pub const encodeJsonStringChars = @import("json/stringify.zig").encodeJsonStringChars;
|
||||
pub const Stringify = @import("json/Stringify.zig");
|
||||
|
||||
pub const Formatter = @import("json/fmt.zig").Formatter;
|
||||
pub const fmt = @import("json/fmt.zig").fmt;
|
||||
/// Returns a formatter that formats the given value using stringify.
|
||||
pub fn fmt(value: anytype, options: Stringify.Options) Formatter(@TypeOf(value)) {
|
||||
return Formatter(@TypeOf(value)){ .value = value, .options = options };
|
||||
}
|
||||
|
||||
test fmt {
|
||||
const expectFmt = std.testing.expectFmt;
|
||||
try expectFmt("123", "{f}", .{fmt(@as(u32, 123), .{})});
|
||||
try expectFmt(
|
||||
\\{"num":927,"msg":"hello","sub":{"mybool":true}}
|
||||
, "{f}", .{fmt(struct {
|
||||
num: u32,
|
||||
msg: []const u8,
|
||||
sub: struct {
|
||||
mybool: bool,
|
||||
},
|
||||
}{
|
||||
.num = 927,
|
||||
.msg = "hello",
|
||||
.sub = .{ .mybool = true },
|
||||
}, .{})});
|
||||
}
|
||||
|
||||
/// Formats the given value using stringify.
|
||||
pub fn Formatter(comptime T: type) type {
|
||||
return struct {
|
||||
value: T,
|
||||
options: Stringify.Options,
|
||||
|
||||
pub fn format(self: @This(), writer: *std.Io.Writer) std.Io.Writer.Error!void {
|
||||
try Stringify.value(self.value, self.options, writer);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
test {
|
||||
_ = @import("json/test.zig");
|
||||
_ = @import("json/scanner.zig");
|
||||
_ = Scanner;
|
||||
_ = @import("json/dynamic.zig");
|
||||
_ = @import("json/hashmap.zig");
|
||||
_ = @import("json/static.zig");
|
||||
_ = @import("json/stringify.zig");
|
||||
_ = Stringify;
|
||||
_ = @import("json/JSONTestSuite_test.zig");
|
||||
}
|
||||
|
||||
1767
lib/std/json/Scanner.zig
Normal file
1767
lib/std/json/Scanner.zig
Normal file
File diff suppressed because it is too large
Load Diff
999
lib/std/json/Stringify.zig
Normal file
999
lib/std/json/Stringify.zig
Normal file
@@ -0,0 +1,999 @@
|
||||
//! Writes JSON ([RFC8259](https://tools.ietf.org/html/rfc8259)) formatted data
|
||||
//! to a stream.
|
||||
//!
|
||||
//! The sequence of method calls to write JSON content must follow this grammar:
|
||||
//! ```
|
||||
//! <once> = <value>
|
||||
//! <value> =
|
||||
//! | <object>
|
||||
//! | <array>
|
||||
//! | write
|
||||
//! | print
|
||||
//! | <writeRawStream>
|
||||
//! <object> = beginObject ( <field> <value> )* endObject
|
||||
//! <field> = objectField | objectFieldRaw | <objectFieldRawStream>
|
||||
//! <array> = beginArray ( <value> )* endArray
|
||||
//! <writeRawStream> = beginWriteRaw ( stream.writeAll )* endWriteRaw
|
||||
//! <objectFieldRawStream> = beginObjectFieldRaw ( stream.writeAll )* endObjectFieldRaw
|
||||
//! ```
|
||||
|
||||
const std = @import("../std.zig");
|
||||
const assert = std.debug.assert;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArrayList = std.ArrayList;
|
||||
const BitStack = std.BitStack;
|
||||
const Stringify = @This();
|
||||
const Writer = std.io.Writer;
|
||||
|
||||
const IndentationMode = enum(u1) {
|
||||
object = 0,
|
||||
array = 1,
|
||||
};
|
||||
|
||||
writer: *Writer,
|
||||
options: Options = .{},
|
||||
indent_level: usize = 0,
|
||||
next_punctuation: enum {
|
||||
the_beginning,
|
||||
none,
|
||||
comma,
|
||||
colon,
|
||||
} = .the_beginning,
|
||||
|
||||
nesting_stack: switch (safety_checks) {
|
||||
.checked_to_fixed_depth => |fixed_buffer_size| [(fixed_buffer_size + 7) >> 3]u8,
|
||||
.assumed_correct => void,
|
||||
} = switch (safety_checks) {
|
||||
.checked_to_fixed_depth => @splat(0),
|
||||
.assumed_correct => {},
|
||||
},
|
||||
|
||||
raw_streaming_mode: if (build_mode_has_safety)
|
||||
enum { none, value, objectField }
|
||||
else
|
||||
void = if (build_mode_has_safety) .none else {},
|
||||
|
||||
const build_mode_has_safety = switch (@import("builtin").mode) {
|
||||
.Debug, .ReleaseSafe => true,
|
||||
.ReleaseFast, .ReleaseSmall => false,
|
||||
};
|
||||
|
||||
/// The `safety_checks_hint` parameter determines how much memory is used to enable assertions that the above grammar is being followed,
|
||||
/// e.g. tripping an assertion rather than allowing `endObject` to emit the final `}` in `[[[]]}`.
|
||||
/// "Depth" in this context means the depth of nested `[]` or `{}` expressions
|
||||
/// (or equivalently the amount of recursion on the `<value>` grammar expression above).
|
||||
/// For example, emitting the JSON `[[[]]]` requires a depth of 3.
|
||||
/// If `.checked_to_fixed_depth` is used, there is additionally an assertion that the nesting depth never exceeds the given limit.
|
||||
/// `.checked_to_fixed_depth` embeds the storage required in the `Stringify` struct.
|
||||
/// `.assumed_correct` requires no space and performs none of these assertions.
|
||||
/// In `ReleaseFast` and `ReleaseSmall` mode, the given `safety_checks_hint` is ignored and is always treated as `.assumed_correct`.
|
||||
const safety_checks_hint: union(enum) {
|
||||
/// Rounded up to the nearest multiple of 8.
|
||||
checked_to_fixed_depth: usize,
|
||||
assumed_correct,
|
||||
} = .{ .checked_to_fixed_depth = 256 };
|
||||
|
||||
const safety_checks: @TypeOf(safety_checks_hint) = if (build_mode_has_safety)
|
||||
safety_checks_hint
|
||||
else
|
||||
.assumed_correct;
|
||||
|
||||
pub const Error = Writer.Error;
|
||||
|
||||
pub fn beginArray(self: *Stringify) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
try self.valueStart();
|
||||
try self.writer.writeByte('[');
|
||||
try self.pushIndentation(.array);
|
||||
self.next_punctuation = .none;
|
||||
}
|
||||
|
||||
pub fn beginObject(self: *Stringify) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
try self.valueStart();
|
||||
try self.writer.writeByte('{');
|
||||
try self.pushIndentation(.object);
|
||||
self.next_punctuation = .none;
|
||||
}
|
||||
|
||||
pub fn endArray(self: *Stringify) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
self.popIndentation(.array);
|
||||
switch (self.next_punctuation) {
|
||||
.none => {},
|
||||
.comma => {
|
||||
try self.indent();
|
||||
},
|
||||
.the_beginning, .colon => unreachable,
|
||||
}
|
||||
try self.writer.writeByte(']');
|
||||
self.valueDone();
|
||||
}
|
||||
|
||||
pub fn endObject(self: *Stringify) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
self.popIndentation(.object);
|
||||
switch (self.next_punctuation) {
|
||||
.none => {},
|
||||
.comma => {
|
||||
try self.indent();
|
||||
},
|
||||
.the_beginning, .colon => unreachable,
|
||||
}
|
||||
try self.writer.writeByte('}');
|
||||
self.valueDone();
|
||||
}
|
||||
|
||||
fn pushIndentation(self: *Stringify, mode: IndentationMode) !void {
|
||||
switch (safety_checks) {
|
||||
.checked_to_fixed_depth => {
|
||||
BitStack.pushWithStateAssumeCapacity(&self.nesting_stack, &self.indent_level, @intFromEnum(mode));
|
||||
},
|
||||
.assumed_correct => {
|
||||
self.indent_level += 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
fn popIndentation(self: *Stringify, expected_mode: IndentationMode) void {
|
||||
switch (safety_checks) {
|
||||
.checked_to_fixed_depth => {
|
||||
assert(BitStack.popWithState(&self.nesting_stack, &self.indent_level) == @intFromEnum(expected_mode));
|
||||
},
|
||||
.assumed_correct => {
|
||||
self.indent_level -= 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn indent(self: *Stringify) !void {
|
||||
var char: u8 = ' ';
|
||||
const n_chars = switch (self.options.whitespace) {
|
||||
.minified => return,
|
||||
.indent_1 => 1 * self.indent_level,
|
||||
.indent_2 => 2 * self.indent_level,
|
||||
.indent_3 => 3 * self.indent_level,
|
||||
.indent_4 => 4 * self.indent_level,
|
||||
.indent_8 => 8 * self.indent_level,
|
||||
.indent_tab => blk: {
|
||||
char = '\t';
|
||||
break :blk self.indent_level;
|
||||
},
|
||||
};
|
||||
try self.writer.writeByte('\n');
|
||||
try self.writer.splatByteAll(char, n_chars);
|
||||
}
|
||||
|
||||
fn valueStart(self: *Stringify) !void {
|
||||
if (self.isObjectKeyExpected()) |is_it| assert(!is_it); // Call objectField*(), not write(), for object keys.
|
||||
return self.valueStartAssumeTypeOk();
|
||||
}
|
||||
fn objectFieldStart(self: *Stringify) !void {
|
||||
if (self.isObjectKeyExpected()) |is_it| assert(is_it); // Expected write(), not objectField*().
|
||||
return self.valueStartAssumeTypeOk();
|
||||
}
|
||||
fn valueStartAssumeTypeOk(self: *Stringify) !void {
|
||||
assert(!self.isComplete()); // JSON document already complete.
|
||||
switch (self.next_punctuation) {
|
||||
.the_beginning => {
|
||||
// No indentation for the very beginning.
|
||||
},
|
||||
.none => {
|
||||
// First item in a container.
|
||||
try self.indent();
|
||||
},
|
||||
.comma => {
|
||||
// Subsequent item in a container.
|
||||
try self.writer.writeByte(',');
|
||||
try self.indent();
|
||||
},
|
||||
.colon => {
|
||||
try self.writer.writeByte(':');
|
||||
if (self.options.whitespace != .minified) {
|
||||
try self.writer.writeByte(' ');
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
fn valueDone(self: *Stringify) void {
|
||||
self.next_punctuation = .comma;
|
||||
}
|
||||
|
||||
// Only when safety is enabled:
|
||||
fn isObjectKeyExpected(self: *const Stringify) ?bool {
|
||||
switch (safety_checks) {
|
||||
.checked_to_fixed_depth => return self.indent_level > 0 and
|
||||
BitStack.peekWithState(&self.nesting_stack, self.indent_level) == @intFromEnum(IndentationMode.object) and
|
||||
self.next_punctuation != .colon,
|
||||
.assumed_correct => return null,
|
||||
}
|
||||
}
|
||||
fn isComplete(self: *const Stringify) bool {
|
||||
return self.indent_level == 0 and self.next_punctuation == .comma;
|
||||
}
|
||||
|
||||
/// An alternative to calling `write` that formats a value with `std.fmt`.
|
||||
/// This function does the usual punctuation and indentation formatting
|
||||
/// assuming the resulting formatted string represents a single complete value;
|
||||
/// e.g. `"1"`, `"[]"`, `"[1,2]"`, not `"1,2"`.
|
||||
/// This function may be useful for doing your own number formatting.
|
||||
pub fn print(self: *Stringify, comptime fmt: []const u8, args: anytype) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
try self.valueStart();
|
||||
try self.writer.print(fmt, args);
|
||||
self.valueDone();
|
||||
}
|
||||
|
||||
test print {
|
||||
var out_buf: [1024]u8 = undefined;
|
||||
var out: Writer = .fixed(&out_buf);
|
||||
|
||||
var w: Stringify = .{ .writer = &out, .options = .{ .whitespace = .indent_2 } };
|
||||
|
||||
try w.beginObject();
|
||||
try w.objectField("a");
|
||||
try w.print("[ ]", .{});
|
||||
try w.objectField("b");
|
||||
try w.beginArray();
|
||||
try w.print("[{s}] ", .{"[]"});
|
||||
try w.print(" {}", .{12345});
|
||||
try w.endArray();
|
||||
try w.endObject();
|
||||
|
||||
const expected =
|
||||
\\{
|
||||
\\ "a": [ ],
|
||||
\\ "b": [
|
||||
\\ [[]] ,
|
||||
\\ 12345
|
||||
\\ ]
|
||||
\\}
|
||||
;
|
||||
try std.testing.expectEqualStrings(expected, out.buffered());
|
||||
}
|
||||
|
||||
/// An alternative to calling `write` that allows you to write directly to the `.writer` field, e.g. with `.writer.writeAll()`.
|
||||
/// Call `beginWriteRaw()`, then write a complete value (including any quotes if necessary) directly to the `.writer` field,
|
||||
/// then call `endWriteRaw()`.
|
||||
/// This can be useful for streaming very long strings into the output without needing it all buffered in memory.
|
||||
pub fn beginWriteRaw(self: *Stringify) !void {
|
||||
if (build_mode_has_safety) {
|
||||
assert(self.raw_streaming_mode == .none);
|
||||
self.raw_streaming_mode = .value;
|
||||
}
|
||||
try self.valueStart();
|
||||
}
|
||||
|
||||
/// See `beginWriteRaw`.
|
||||
pub fn endWriteRaw(self: *Stringify) void {
|
||||
if (build_mode_has_safety) {
|
||||
assert(self.raw_streaming_mode == .value);
|
||||
self.raw_streaming_mode = .none;
|
||||
}
|
||||
self.valueDone();
|
||||
}
|
||||
|
||||
/// See `Stringify` for when to call this method.
|
||||
/// `key` is the string content of the property name.
|
||||
/// Surrounding quotes will be added and any special characters will be escaped.
|
||||
/// See also `objectFieldRaw`.
|
||||
pub fn objectField(self: *Stringify, key: []const u8) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
try self.objectFieldStart();
|
||||
try encodeJsonString(key, self.options, self.writer);
|
||||
self.next_punctuation = .colon;
|
||||
}
|
||||
/// See `Stringify` for when to call this method.
|
||||
/// `quoted_key` is the complete bytes of the key including quotes and any necessary escape sequences.
|
||||
/// A few assertions are performed on the given value to ensure that the caller of this function understands the API contract.
|
||||
/// See also `objectField`.
|
||||
pub fn objectFieldRaw(self: *Stringify, quoted_key: []const u8) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
assert(quoted_key.len >= 2 and quoted_key[0] == '"' and quoted_key[quoted_key.len - 1] == '"'); // quoted_key should be "quoted".
|
||||
try self.objectFieldStart();
|
||||
try self.writer.writeAll(quoted_key);
|
||||
self.next_punctuation = .colon;
|
||||
}
|
||||
|
||||
/// In the rare case that you need to write very long object field names,
|
||||
/// this is an alternative to `objectField` and `objectFieldRaw` that allows you to write directly to the `.writer` field
|
||||
/// similar to `beginWriteRaw`.
|
||||
/// Call `endObjectFieldRaw()` when you're done.
|
||||
pub fn beginObjectFieldRaw(self: *Stringify) !void {
|
||||
if (build_mode_has_safety) {
|
||||
assert(self.raw_streaming_mode == .none);
|
||||
self.raw_streaming_mode = .objectField;
|
||||
}
|
||||
try self.objectFieldStart();
|
||||
}
|
||||
|
||||
/// See `beginObjectFieldRaw`.
|
||||
pub fn endObjectFieldRaw(self: *Stringify) void {
|
||||
if (build_mode_has_safety) {
|
||||
assert(self.raw_streaming_mode == .objectField);
|
||||
self.raw_streaming_mode = .none;
|
||||
}
|
||||
self.next_punctuation = .colon;
|
||||
}
|
||||
|
||||
/// Renders the given Zig value as JSON.
|
||||
///
|
||||
/// Supported types:
|
||||
/// * Zig `bool` -> JSON `true` or `false`.
|
||||
/// * Zig `?T` -> `null` or the rendering of `T`.
|
||||
/// * Zig `i32`, `u64`, etc. -> JSON number or string.
|
||||
/// * When option `emit_nonportable_numbers_as_strings` is true, if the value is outside the range `+-1<<53` (the precise integer range of f64), it is rendered as a JSON string in base 10. Otherwise, it is rendered as JSON number.
|
||||
/// * Zig floats -> JSON number or string.
|
||||
/// * If the value cannot be precisely represented by an f64, it is rendered as a JSON string. Otherwise, it is rendered as JSON number.
|
||||
/// * TODO: Float rendering will likely change in the future, e.g. to remove the unnecessary "e+00".
|
||||
/// * Zig `[]const u8`, `[]u8`, `*[N]u8`, `@Vector(N, u8)`, and similar -> JSON string.
|
||||
/// * See `Options.emit_strings_as_arrays`.
|
||||
/// * If the content is not valid UTF-8, rendered as an array of numbers instead.
|
||||
/// * Zig `[]T`, `[N]T`, `*[N]T`, `@Vector(N, T)`, and similar -> JSON array of the rendering of each item.
|
||||
/// * Zig tuple -> JSON array of the rendering of each item.
|
||||
/// * Zig `struct` -> JSON object with each field in declaration order.
|
||||
/// * If the struct declares a method `pub fn jsonStringify(self: *@This(), jw: anytype) !void`, it is called to do the serialization instead of the default behavior. The given `jw` is a pointer to this `Stringify`. See `std.json.Value` for an example.
|
||||
/// * See `Options.emit_null_optional_fields`.
|
||||
/// * Zig `union(enum)` -> JSON object with one field named for the active tag and a value representing the payload.
|
||||
/// * If the payload is `void`, then the emitted value is `{}`.
|
||||
/// * If the union declares a method `pub fn jsonStringify(self: *@This(), jw: anytype) !void`, it is called to do the serialization instead of the default behavior. The given `jw` is a pointer to this `Stringify`.
|
||||
/// * Zig `enum` -> JSON string naming the active tag.
|
||||
/// * If the enum declares a method `pub fn jsonStringify(self: *@This(), jw: anytype) !void`, it is called to do the serialization instead of the default behavior. The given `jw` is a pointer to this `Stringify`.
|
||||
/// * If the enum is non-exhaustive, unnamed values are rendered as integers.
|
||||
/// * Zig untyped enum literal -> JSON string naming the active tag.
|
||||
/// * Zig error -> JSON string naming the error.
|
||||
/// * Zig `*T` -> the rendering of `T`. Note there is no guard against circular-reference infinite recursion.
|
||||
///
|
||||
/// See also alternative functions `print` and `beginWriteRaw`.
|
||||
/// For writing object field names, use `objectField` instead.
|
||||
pub fn write(self: *Stringify, v: anytype) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
const T = @TypeOf(v);
|
||||
switch (@typeInfo(T)) {
|
||||
.int => {
|
||||
try self.valueStart();
|
||||
if (self.options.emit_nonportable_numbers_as_strings and
|
||||
(v <= -(1 << 53) or v >= (1 << 53)))
|
||||
{
|
||||
try self.writer.print("\"{}\"", .{v});
|
||||
} else {
|
||||
try self.writer.print("{}", .{v});
|
||||
}
|
||||
self.valueDone();
|
||||
return;
|
||||
},
|
||||
.comptime_int => {
|
||||
return self.write(@as(std.math.IntFittingRange(v, v), v));
|
||||
},
|
||||
.float, .comptime_float => {
|
||||
if (@as(f64, @floatCast(v)) == v) {
|
||||
try self.valueStart();
|
||||
try self.writer.print("{}", .{@as(f64, @floatCast(v))});
|
||||
self.valueDone();
|
||||
return;
|
||||
}
|
||||
try self.valueStart();
|
||||
try self.writer.print("\"{}\"", .{v});
|
||||
self.valueDone();
|
||||
return;
|
||||
},
|
||||
|
||||
.bool => {
|
||||
try self.valueStart();
|
||||
try self.writer.writeAll(if (v) "true" else "false");
|
||||
self.valueDone();
|
||||
return;
|
||||
},
|
||||
.null => {
|
||||
try self.valueStart();
|
||||
try self.writer.writeAll("null");
|
||||
self.valueDone();
|
||||
return;
|
||||
},
|
||||
.optional => {
|
||||
if (v) |payload| {
|
||||
return try self.write(payload);
|
||||
} else {
|
||||
return try self.write(null);
|
||||
}
|
||||
},
|
||||
.@"enum" => |enum_info| {
|
||||
if (std.meta.hasFn(T, "jsonStringify")) {
|
||||
return v.jsonStringify(self);
|
||||
}
|
||||
|
||||
if (!enum_info.is_exhaustive) {
|
||||
inline for (enum_info.fields) |field| {
|
||||
if (v == @field(T, field.name)) {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
return self.write(@intFromEnum(v));
|
||||
}
|
||||
}
|
||||
|
||||
return self.stringValue(@tagName(v));
|
||||
},
|
||||
.enum_literal => {
|
||||
return self.stringValue(@tagName(v));
|
||||
},
|
||||
.@"union" => {
|
||||
if (std.meta.hasFn(T, "jsonStringify")) {
|
||||
return v.jsonStringify(self);
|
||||
}
|
||||
|
||||
const info = @typeInfo(T).@"union";
|
||||
if (info.tag_type) |UnionTagType| {
|
||||
try self.beginObject();
|
||||
inline for (info.fields) |u_field| {
|
||||
if (v == @field(UnionTagType, u_field.name)) {
|
||||
try self.objectField(u_field.name);
|
||||
if (u_field.type == void) {
|
||||
// void v is {}
|
||||
try self.beginObject();
|
||||
try self.endObject();
|
||||
} else {
|
||||
try self.write(@field(v, u_field.name));
|
||||
}
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
unreachable; // No active tag?
|
||||
}
|
||||
try self.endObject();
|
||||
return;
|
||||
} else {
|
||||
@compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
|
||||
}
|
||||
},
|
||||
.@"struct" => |S| {
|
||||
if (std.meta.hasFn(T, "jsonStringify")) {
|
||||
return v.jsonStringify(self);
|
||||
}
|
||||
|
||||
if (S.is_tuple) {
|
||||
try self.beginArray();
|
||||
} else {
|
||||
try self.beginObject();
|
||||
}
|
||||
inline for (S.fields) |Field| {
|
||||
// don't include void fields
|
||||
if (Field.type == void) continue;
|
||||
|
||||
var emit_field = true;
|
||||
|
||||
// don't include optional fields that are null when emit_null_optional_fields is set to false
|
||||
if (@typeInfo(Field.type) == .optional) {
|
||||
if (self.options.emit_null_optional_fields == false) {
|
||||
if (@field(v, Field.name) == null) {
|
||||
emit_field = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (emit_field) {
|
||||
if (!S.is_tuple) {
|
||||
try self.objectField(Field.name);
|
||||
}
|
||||
try self.write(@field(v, Field.name));
|
||||
}
|
||||
}
|
||||
if (S.is_tuple) {
|
||||
try self.endArray();
|
||||
} else {
|
||||
try self.endObject();
|
||||
}
|
||||
return;
|
||||
},
|
||||
.error_set => return self.stringValue(@errorName(v)),
|
||||
.pointer => |ptr_info| switch (ptr_info.size) {
|
||||
.one => switch (@typeInfo(ptr_info.child)) {
|
||||
.array => {
|
||||
// Coerce `*[N]T` to `[]const T`.
|
||||
const Slice = []const std.meta.Elem(ptr_info.child);
|
||||
return self.write(@as(Slice, v));
|
||||
},
|
||||
else => {
|
||||
return self.write(v.*);
|
||||
},
|
||||
},
|
||||
.many, .slice => {
|
||||
if (ptr_info.size == .many and ptr_info.sentinel() == null)
|
||||
@compileError("unable to stringify type '" ++ @typeName(T) ++ "' without sentinel");
|
||||
const slice = if (ptr_info.size == .many) std.mem.span(v) else v;
|
||||
|
||||
if (ptr_info.child == u8) {
|
||||
// This is a []const u8, or some similar Zig string.
|
||||
if (!self.options.emit_strings_as_arrays and std.unicode.utf8ValidateSlice(slice)) {
|
||||
return self.stringValue(slice);
|
||||
}
|
||||
}
|
||||
|
||||
try self.beginArray();
|
||||
for (slice) |x| {
|
||||
try self.write(x);
|
||||
}
|
||||
try self.endArray();
|
||||
return;
|
||||
},
|
||||
else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
|
||||
},
|
||||
.array => {
|
||||
// Coerce `[N]T` to `*const [N]T` (and then to `[]const T`).
|
||||
return self.write(&v);
|
||||
},
|
||||
.vector => |info| {
|
||||
const array: [info.len]info.child = v;
|
||||
return self.write(&array);
|
||||
},
|
||||
else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
|
||||
}
|
||||
unreachable;
|
||||
}
|
||||
|
||||
fn stringValue(self: *Stringify, s: []const u8) !void {
|
||||
try self.valueStart();
|
||||
try encodeJsonString(s, self.options, self.writer);
|
||||
self.valueDone();
|
||||
}
|
||||
|
||||
pub const Options = struct {
|
||||
/// Controls the whitespace emitted.
|
||||
/// The default `.minified` is a compact encoding with no whitespace between tokens.
|
||||
/// Any setting other than `.minified` will use newlines, indentation, and a space after each ':'.
|
||||
/// `.indent_1` means 1 space for each indentation level, `.indent_2` means 2 spaces, etc.
|
||||
/// `.indent_tab` uses a tab for each indentation level.
|
||||
whitespace: enum {
|
||||
minified,
|
||||
indent_1,
|
||||
indent_2,
|
||||
indent_3,
|
||||
indent_4,
|
||||
indent_8,
|
||||
indent_tab,
|
||||
} = .minified,
|
||||
|
||||
/// Should optional fields with null value be written?
|
||||
emit_null_optional_fields: bool = true,
|
||||
|
||||
/// Arrays/slices of u8 are typically encoded as JSON strings.
|
||||
/// This option emits them as arrays of numbers instead.
|
||||
/// Does not affect calls to `objectField*()`.
|
||||
emit_strings_as_arrays: bool = false,
|
||||
|
||||
/// Should unicode characters be escaped in strings?
|
||||
escape_unicode: bool = false,
|
||||
|
||||
/// When true, renders numbers outside the range `+-1<<53` (the precise integer range of f64) as JSON strings in base 10.
|
||||
emit_nonportable_numbers_as_strings: bool = false,
|
||||
};
|
||||
|
||||
/// Writes the given value to the `Writer` writer.
|
||||
/// See `Stringify` for how the given value is serialized into JSON.
|
||||
/// The maximum nesting depth of the output JSON document is 256.
|
||||
pub fn value(v: anytype, options: Options, writer: *Writer) Error!void {
|
||||
var s: Stringify = .{ .writer = writer, .options = options };
|
||||
try s.write(v);
|
||||
}
|
||||
|
||||
test value {
    // Serialize into a growable in-memory writer.
    var out: std.io.Writer.Allocating = .init(std.testing.allocator);
    const writer = &out.writer;
    defer out.deinit();

    const T = struct { a: i32, b: []const u8 };
    try value(T{ .a = 123, .b = "xy" }, .{}, writer);
    try std.testing.expectEqualSlices(u8, "{\"a\":123,\"b\":\"xy\"}", out.getWritten());

    // Integers beyond f64's exact range become strings only when requested.
    try testStringify("9999999999999999", 9999999999999999, .{});
    try testStringify("\"9999999999999999\"", 9999999999999999, .{ .emit_nonportable_numbers_as_strings = true });

    // Vectors render like arrays; u8 vectors render like strings unless
    // `emit_strings_as_arrays` is set.
    try testStringify("[1,1]", @as(@Vector(2, u32), @splat(1)), .{});
    try testStringify("\"AA\"", @as(@Vector(2, u8), @splat('A')), .{});
    try testStringify("[65,65]", @as(@Vector(2, u8), @splat('A')), .{ .emit_strings_as_arrays = true });

    // void field
    try testStringify("{\"foo\":42}", struct {
        foo: u32,
        bar: void = {},
    }{ .foo = 42 }, .{});

    const Tuple = struct { []const u8, usize };
    try testStringify("[\"foo\",42]", Tuple{ "foo", 42 }, .{});

    // Stringification also works at comptime.
    comptime {
        testStringify("false", false, .{}) catch unreachable;
        const MyStruct = struct { foo: u32 };
        testStringify("[{\"foo\":42},{\"foo\":100},{\"foo\":1000}]", [_]MyStruct{
            MyStruct{ .foo = 42 },
            MyStruct{ .foo = 100 },
            MyStruct{ .foo = 1000 },
        }, .{}) catch unreachable;
    }
}
|
||||
|
||||
/// Calls `value` and stores the result in dynamically allocated memory instead
/// of taking a writer.
///
/// Caller owns returned memory.
pub fn valueAlloc(gpa: Allocator, v: anytype, options: Options) error{OutOfMemory}![]u8 {
    var allocating = std.io.Writer.Allocating.init(gpa);
    defer allocating.deinit();
    // A growable in-memory writer can only fail with OutOfMemory.
    value(v, options, &allocating.writer) catch return error.OutOfMemory;
    return allocating.toOwnedSlice();
}
|
||||
|
||||
test valueAlloc {
    const gpa = std.testing.allocator;

    const json_text = try valueAlloc(gpa, .{ .foo = "bar", .answer = 42, .my_friend = "sammy" }, .{});
    defer gpa.free(json_text);

    try std.testing.expectEqualStrings(
        \\{"foo":"bar","answer":42,"my_friend":"sammy"}
    , json_text);
}
|
||||
|
||||
/// Emits `codepoint` as a JSON `\uXXXX` escape. Codepoints outside the Basic
/// Multilingual Plane are emitted as a UTF-16 surrogate pair (two `\uXXXX`
/// sequences). Asserts `codepoint` is a valid Unicode scalar position.
fn outputUnicodeEscape(codepoint: u21, w: *Writer) Error!void {
    assert(codepoint <= 0x10FFFF);
    if (codepoint > 0xFFFF) {
        // Not in the BMP: represent as a 12-character sequence encoding the
        // UTF-16 surrogate pair.
        const reduced: u21 = codepoint - 0x10000;
        const high = @as(u16, @intCast(reduced >> 10)) + 0xD800;
        const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00;
        try w.writeAll("\\u");
        try w.printInt(high, 16, .lower, .{ .width = 4, .fill = '0' });
        try w.writeAll("\\u");
        try w.printInt(low, 16, .lower, .{ .width = 4, .fill = '0' });
    } else {
        // BMP codepoints (U+0000..U+FFFF) use a single six-character
        // sequence: backslash, 'u', and four hex digits.
        try w.writeAll("\\u");
        try w.printInt(codepoint, 16, .lower, .{ .width = 4, .fill = '0' });
    }
}
|
||||
|
||||
/// Emits the mandatory JSON escape for byte `c`: a two-character shorthand
/// (`\\`, `\"`, `\b`, `\f`, `\n`, `\r`, `\t`) when one exists, otherwise a
/// `\uXXXX` escape via `outputUnicodeEscape`.
fn outputSpecialEscape(c: u8, writer: *Writer) Error!void {
    const shorthand: ?[]const u8 = switch (c) {
        '\\' => "\\\\",
        '\"' => "\\\"",
        0x08 => "\\b",
        0x0C => "\\f",
        '\n' => "\\n",
        '\r' => "\\r",
        '\t' => "\\t",
        else => null,
    };
    if (shorthand) |s| {
        try writer.writeAll(s);
    } else {
        try outputUnicodeEscape(c, writer);
    }
}
|
||||
|
||||
/// Write `string` to `writer` as a JSON encoded string.
/// Emits the surrounding double quotes; escaping of the contents is
/// controlled by `options` (see `encodeJsonStringChars`).
pub fn encodeJsonString(string: []const u8, options: Options, writer: *Writer) Error!void {
    try writer.writeByte('\"');
    try encodeJsonStringChars(string, options, writer);
    try writer.writeByte('\"');
}
|
||||
|
||||
/// Write `chars` to `writer` as JSON encoded string characters.
/// Does not emit surrounding quotes.
/// When `options.escape_unicode` is set, every non-ASCII codepoint is emitted
/// as a `\uXXXX` escape; in that mode `chars` must be valid UTF-8 (decode
/// failures hit `unreachable`).
pub fn encodeJsonStringChars(chars: []const u8, options: Options, writer: *Writer) Error!void {
    // Bytes in chars[write_cursor..i] need no escaping and are flushed in a
    // single batched write whenever an escape (or the end) is reached.
    var write_cursor: usize = 0;
    var i: usize = 0;
    if (options.escape_unicode) {
        while (i < chars.len) : (i += 1) {
            switch (chars[i]) {
                // normal ascii character
                0x20...0x21, 0x23...0x5B, 0x5D...0x7E => {},
                0x00...0x1F, '\\', '\"' => {
                    // Always must escape these.
                    try writer.writeAll(chars[write_cursor..i]);
                    try outputSpecialEscape(chars[i], writer);
                    write_cursor = i + 1;
                },
                0x7F...0xFF => {
                    // Non-ASCII lead byte: flush the pending run, then escape
                    // the whole (possibly multi-byte) codepoint as \uXXXX.
                    try writer.writeAll(chars[write_cursor..i]);
                    const ulen = std.unicode.utf8ByteSequenceLength(chars[i]) catch unreachable;
                    const codepoint = std.unicode.utf8Decode(chars[i..][0..ulen]) catch unreachable;
                    try outputUnicodeEscape(codepoint, writer);
                    // Skip over the continuation bytes of this sequence.
                    i += ulen - 1;
                    write_cursor = i + 1;
                },
            }
        }
    } else {
        while (i < chars.len) : (i += 1) {
            switch (chars[i]) {
                // normal bytes
                0x20...0x21, 0x23...0x5B, 0x5D...0xFF => {},
                0x00...0x1F, '\\', '\"' => {
                    // Always must escape these.
                    try writer.writeAll(chars[write_cursor..i]);
                    try outputSpecialEscape(chars[i], writer);
                    write_cursor = i + 1;
                },
            }
        }
    }
    // Flush the trailing run of unescaped bytes.
    try writer.writeAll(chars[write_cursor..chars.len]);
}
|
||||
|
||||
// Smoke test: drive the shared write-stream scenario through a Stringify
// backed by a fixed-buffer writer with two-space indentation.
test "json write stream" {
    var buffer: [1024]u8 = undefined;
    var fixed: Writer = .fixed(&buffer);
    var stream: Stringify = .{ .writer = &fixed, .options = .{ .whitespace = .indent_2 } };
    try testBasicWriteStream(&stream);
}
|
||||
|
||||
/// Drives `w` through a representative document (nested object, raw field
/// name, array, int, float) and checks the indent_2 rendering.
/// Assumes `w.writer` is a fixed-buffer writer: the buffer is rewound up
/// front and the output compared via `buffered()` at the end.
fn testBasicWriteStream(w: *Stringify) !void {
    // Rewind the fixed writer so the helper starts from an empty buffer.
    w.writer.end = 0;

    try w.beginObject();

    try w.objectField("object");
    var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_allocator.deinit();
    try w.write(try getJsonObject(arena_allocator.allocator()));

    // Field name supplied pre-quoted/escaped by the caller.
    try w.objectFieldRaw("\"string\"");
    try w.write("This is a string");

    try w.objectField("array");
    try w.beginArray();
    try w.write("Another string");
    try w.write(@as(i32, 1));
    try w.write(@as(f32, 3.5));
    try w.endArray();

    try w.objectField("int");
    try w.write(@as(i32, 10));

    try w.objectField("float");
    try w.write(@as(f32, 3.5));

    try w.endObject();

    const expected =
        \\{
        \\  "object": {
        \\    "one": 1,
        \\    "two": 2
        \\  },
        \\  "string": "This is a string",
        \\  "array": [
        \\    "Another string",
        \\    1,
        \\    3.5
        \\  ],
        \\  "int": 10,
        \\  "float": 3.5
        \\}
    ;
    try std.testing.expectEqualStrings(expected, w.writer.buffered());
}
|
||||
|
||||
/// Builds the small `std.json.Value` object `{"one": 1, "two": 2.0}` used by
/// the write-stream tests. The returned value's map is backed by `allocator`;
/// freeing it is the caller's (typically an arena's) responsibility.
fn getJsonObject(allocator: std.mem.Allocator) !std.json.Value {
    var v: std.json.Value = .{ .object = std.json.ObjectMap.init(allocator) };
    // The literal coerces directly to i64; the previous
    // `@as(i64, @intCast(1))` was a redundant double cast.
    try v.object.put("one", std.json.Value{ .integer = 1 });
    try v.object.put("two", std.json.Value{ .float = 2.0 });
    return v;
}
|
||||
|
||||
test "stringify null optional fields" {
    const MyStruct = struct {
        optional: ?[]const u8 = null,
        required: []const u8 = "something",
        another_optional: ?[]const u8 = null,
        another_required: []const u8 = "something else",
    };
    // Default: null optionals are emitted explicitly.
    try testStringify(
        \\{"optional":null,"required":"something","another_optional":null,"another_required":"something else"}
    ,
        MyStruct{},
        .{},
    );
    // With emit_null_optional_fields = false they are omitted entirely.
    try testStringify(
        \\{"required":"something","another_required":"something else"}
    ,
        MyStruct{},
        .{ .emit_null_optional_fields = false },
    );
}
|
||||
|
||||
// Scalar round-trips: bools, nulls, integers, floats, and errors (errors
// serialize as their name in a JSON string).
test "stringify basic types" {
    try testStringify("false", false, .{});
    try testStringify("true", true, .{});
    try testStringify("null", @as(?u8, null), .{});
    try testStringify("null", @as(?*u32, null), .{});
    try testStringify("42", 42, .{});
    try testStringify("42", 42.0, .{});
    try testStringify("42", @as(u8, 42), .{});
    try testStringify("42", @as(u128, 42), .{});
    try testStringify("9999999999999999", 9999999999999999, .{});
    try testStringify("42", @as(f32, 42), .{});
    try testStringify("42", @as(f64, 42), .{});
    try testStringify("\"ItBroke\"", @as(anyerror, error.ItBroke), .{});
    try testStringify("\"ItBroke\"", error.ItBroke, .{});
}
|
||||
|
||||
// String escaping across the Unicode range, with and without
// `escape_unicode`. Without it, non-ASCII passes through verbatim; with it,
// codepoints become \uXXXX escapes (surrogate pairs above U+FFFF).
test "stringify string" {
    try testStringify("\"hello\"", "hello", .{});
    try testStringify("\"with\\nescapes\\r\"", "with\nescapes\r", .{});
    try testStringify("\"with\\nescapes\\r\"", "with\nescapes\r", .{ .escape_unicode = true });
    try testStringify("\"with unicode\\u0001\"", "with unicode\u{1}", .{});
    try testStringify("\"with unicode\\u0001\"", "with unicode\u{1}", .{ .escape_unicode = true });
    try testStringify("\"with unicode\u{80}\"", "with unicode\u{80}", .{});
    try testStringify("\"with unicode\\u0080\"", "with unicode\u{80}", .{ .escape_unicode = true });
    try testStringify("\"with unicode\u{FF}\"", "with unicode\u{FF}", .{});
    try testStringify("\"with unicode\\u00ff\"", "with unicode\u{FF}", .{ .escape_unicode = true });
    try testStringify("\"with unicode\u{100}\"", "with unicode\u{100}", .{});
    try testStringify("\"with unicode\\u0100\"", "with unicode\u{100}", .{ .escape_unicode = true });
    try testStringify("\"with unicode\u{800}\"", "with unicode\u{800}", .{});
    try testStringify("\"with unicode\\u0800\"", "with unicode\u{800}", .{ .escape_unicode = true });
    try testStringify("\"with unicode\u{8000}\"", "with unicode\u{8000}", .{});
    try testStringify("\"with unicode\\u8000\"", "with unicode\u{8000}", .{ .escape_unicode = true });
    try testStringify("\"with unicode\u{D799}\"", "with unicode\u{D799}", .{});
    try testStringify("\"with unicode\\ud799\"", "with unicode\u{D799}", .{ .escape_unicode = true });
    try testStringify("\"with unicode\u{10000}\"", "with unicode\u{10000}", .{});
    try testStringify("\"with unicode\\ud800\\udc00\"", "with unicode\u{10000}", .{ .escape_unicode = true });
    try testStringify("\"with unicode\u{10FFFF}\"", "with unicode\u{10FFFF}", .{});
    try testStringify("\"with unicode\\udbff\\udfff\"", "with unicode\u{10FFFF}", .{ .escape_unicode = true });
}
|
||||
|
||||
// Null-terminated many-item pointers serialize like slices of their
// contents up to the sentinel.
test "stringify many-item sentinel-terminated string" {
    try testStringify("\"hello\"", @as([*:0]const u8, "hello"), .{});
    try testStringify("\"with\\nescapes\\r\"", @as([*:0]const u8, "with\nescapes\r"), .{ .escape_unicode = true });
    try testStringify("\"with unicode\\u0001\"", @as([*:0]const u8, "with unicode\u{1}"), .{ .escape_unicode = true });
}
|
||||
|
||||
// Exhaustive enums serialize as their tag name in a JSON string.
test "stringify enums" {
    const E = enum {
        foo,
        bar,
    };
    try testStringify("\"foo\"", E.foo, .{});
    try testStringify("\"bar\"", E.bar, .{});
}
|
||||
|
||||
// Named tags of a non-exhaustive enum serialize as strings; unnamed values
// fall back to their integer representation.
test "stringify non-exhaustive enum" {
    const E = enum(u8) {
        foo = 0,
        _,
    };
    try testStringify("\"foo\"", E.foo, .{});
    try testStringify("1", @as(E, @enumFromInt(1)), .{});
}
|
||||
|
||||
// Enum literals (no concrete enum type) also serialize as their name.
test "stringify enum literals" {
    try testStringify("\"foo\"", .foo, .{});
    try testStringify("\"bar\"", .bar, .{});
}
|
||||
|
||||
// Tagged unions serialize as a single-key object: {"tag": payload}.
// A void payload renders as an empty object.
test "stringify tagged unions" {
    const T = union(enum) {
        nothing,
        foo: u32,
        bar: bool,
    };
    try testStringify("{\"nothing\":{}}", T{ .nothing = {} }, .{});
    try testStringify("{\"foo\":42}", T{ .foo = 42 }, .{});
    try testStringify("{\"bar\":true}", T{ .bar = true }, .{});
}
|
||||
|
||||
// Plain structs serialize as objects keyed by field name.
test "stringify struct" {
    try testStringify("{\"foo\":42}", struct {
        foo: u32,
    }{ .foo = 42 }, .{});
}
|
||||
|
||||
// `emit_strings_as_arrays` turns u8 arrays/slices/vectors into arrays of
// byte values instead of JSON strings.
test "emit_strings_as_arrays" {
    // Should only affect string values, not object keys.
    try testStringify("{\"foo\":\"bar\"}", .{ .foo = "bar" }, .{});
    try testStringify("{\"foo\":[98,97,114]}", .{ .foo = "bar" }, .{ .emit_strings_as_arrays = true });
    // Should *not* affect these types:
    try testStringify("\"foo\"", @as(enum { foo, bar }, .foo), .{ .emit_strings_as_arrays = true });
    try testStringify("\"ItBroke\"", error.ItBroke, .{ .emit_strings_as_arrays = true });
    // Should work on these:
    try testStringify("\"bar\"", @Vector(3, u8){ 'b', 'a', 'r' }, .{});
    try testStringify("[98,97,114]", @Vector(3, u8){ 'b', 'a', 'r' }, .{ .emit_strings_as_arrays = true });
    try testStringify("\"bar\"", [3]u8{ 'b', 'a', 'r' }, .{});
    try testStringify("[98,97,114]", [3]u8{ 'b', 'a', 'r' }, .{ .emit_strings_as_arrays = true });
}
|
||||
|
||||
// Renders the same struct under three whitespace modes: four-space indent,
// tab indent, and minified.
test "stringify struct with indentation" {
    try testStringify(
        \\{
        \\    "foo": 42,
        \\    "bar": [
        \\        1,
        \\        2,
        \\        3
        \\    ]
        \\}
    ,
        struct {
            foo: u32,
            bar: [3]u32,
        }{
            .foo = 42,
            .bar = .{ 1, 2, 3 },
        },
        .{ .whitespace = .indent_4 },
    );
    try testStringify(
        "{\n\t\"foo\": 42,\n\t\"bar\": [\n\t\t1,\n\t\t2,\n\t\t3\n\t]\n}",
        struct {
            foo: u32,
            bar: [3]u32,
        }{
            .foo = 42,
            .bar = .{ 1, 2, 3 },
        },
        .{ .whitespace = .indent_tab },
    );
    try testStringify(
        \\{"foo":42,"bar":[1,2,3]}
    ,
        struct {
            foo: u32,
            bar: [3]u32,
        }{
            .foo = 42,
            .bar = .{ 1, 2, 3 },
        },
        .{ .whitespace = .minified },
    );
}
|
||||
|
||||
// Arrays of structs serialize as JSON arrays of objects.
test "stringify array of structs" {
    const MyStruct = struct {
        foo: u32,
    };
    try testStringify("[{\"foo\":42},{\"foo\":100},{\"foo\":1000}]", [_]MyStruct{
        MyStruct{ .foo = 42 },
        MyStruct{ .foo = 100 },
        MyStruct{ .foo = 1000 },
    }, .{});
}
|
||||
|
||||
// A type with a `jsonStringify` method takes full control of its own
// serialization; the struct's field value is intentionally ignored here.
// (Removed the unused `const Self = @This();` declaration.)
test "stringify struct with custom stringifier" {
    try testStringify("[\"something special\",42]", struct {
        foo: u32,
        pub fn jsonStringify(v: @This(), jws: anytype) !void {
            _ = v;
            try jws.beginArray();
            try jws.write("something special");
            try jws.write(42);
            try jws.endArray();
        }
    }{ .foo = 42 }, .{});
}
|
||||
|
||||
/// Test helper: serializes `v` with `options` into a fixed stack buffer and
/// asserts the produced JSON equals `expected`.
fn testStringify(expected: []const u8, v: anytype, options: Options) !void {
    var backing: [4096]u8 = undefined;
    var fixed_writer: Writer = .fixed(&backing);
    try value(v, options, &fixed_writer);
    try std.testing.expectEqualStrings(expected, fixed_writer.buffered());
}
|
||||
|
||||
// Verifies the begin/end *Raw API: between the begin/end calls the caller
// writes pre-encoded JSON (split across multiple writes here) directly to
// the underlying writer.
test "raw streaming" {
    var out_buf: [1024]u8 = undefined;
    var out: Writer = .fixed(&out_buf);

    var w: Stringify = .{ .writer = &out, .options = .{ .whitespace = .indent_2 } };
    try w.beginObject();
    try w.beginObjectFieldRaw();
    try w.writer.writeAll("\"long");
    try w.writer.writeAll(" key\"");
    w.endObjectFieldRaw();
    try w.beginWriteRaw();
    try w.writer.writeAll("\"long");
    try w.writer.writeAll(" value\"");
    w.endWriteRaw();
    try w.endObject();

    const expected =
        \\{
        \\  "long key": "long value"
        \\}
    ;
    try std.testing.expectEqualStrings(expected, w.writer.buffered());
}
|
||||
@@ -4,17 +4,12 @@ const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
const ArrayList = std.ArrayList;
|
||||
const StringArrayHashMap = std.StringArrayHashMap;
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const StringifyOptions = @import("./stringify.zig").StringifyOptions;
|
||||
const stringify = @import("./stringify.zig").stringify;
|
||||
const json = std.json;
|
||||
|
||||
const ParseOptions = @import("./static.zig").ParseOptions;
|
||||
const ParseError = @import("./static.zig").ParseError;
|
||||
|
||||
const JsonScanner = @import("./scanner.zig").Scanner;
|
||||
const AllocWhen = @import("./scanner.zig").AllocWhen;
|
||||
const Token = @import("./scanner.zig").Token;
|
||||
const isNumberFormattedLikeAnInteger = @import("./scanner.zig").isNumberFormattedLikeAnInteger;
|
||||
const isNumberFormattedLikeAnInteger = @import("Scanner.zig").isNumberFormattedLikeAnInteger;
|
||||
|
||||
pub const ObjectMap = StringArrayHashMap(Value);
|
||||
pub const Array = ArrayList(Value);
|
||||
@@ -52,12 +47,11 @@ pub const Value = union(enum) {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn dump(self: Value) void {
|
||||
std.debug.lockStdErr();
|
||||
defer std.debug.unlockStdErr();
|
||||
pub fn dump(v: Value) void {
|
||||
const w = std.debug.lockStderrWriter(&.{});
|
||||
defer std.debug.unlockStderrWriter();
|
||||
|
||||
const stderr = std.fs.File.stderr().deprecatedWriter();
|
||||
stringify(self, .{}, stderr) catch return;
|
||||
json.Stringify.value(v, .{}, w) catch return;
|
||||
}
|
||||
|
||||
pub fn jsonStringify(value: @This(), jws: anytype) !void {
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
const std = @import("std");
|
||||
const json = std.json;
|
||||
const mem = std.mem;
|
||||
const testing = std.testing;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Writer = std.io.Writer;
|
||||
|
||||
const ObjectMap = @import("dynamic.zig").ObjectMap;
|
||||
const Array = @import("dynamic.zig").Array;
|
||||
@@ -14,8 +16,7 @@ const parseFromTokenSource = @import("static.zig").parseFromTokenSource;
|
||||
const parseFromValueLeaky = @import("static.zig").parseFromValueLeaky;
|
||||
const ParseOptions = @import("static.zig").ParseOptions;
|
||||
|
||||
const jsonReader = @import("scanner.zig").reader;
|
||||
const JsonReader = @import("scanner.zig").Reader;
|
||||
const Scanner = @import("Scanner.zig");
|
||||
|
||||
test "json.parser.dynamic" {
|
||||
const s =
|
||||
@@ -70,14 +71,10 @@ test "json.parser.dynamic" {
|
||||
try testing.expect(mem.eql(u8, large_int.number_string, "18446744073709551615"));
|
||||
}
|
||||
|
||||
const writeStream = @import("./stringify.zig").writeStream;
|
||||
test "write json then parse it" {
|
||||
var out_buffer: [1000]u8 = undefined;
|
||||
|
||||
var fixed_buffer_stream = std.io.fixedBufferStream(&out_buffer);
|
||||
const out_stream = fixed_buffer_stream.writer();
|
||||
var jw = writeStream(out_stream, .{});
|
||||
defer jw.deinit();
|
||||
var fixed_writer: Writer = .fixed(&out_buffer);
|
||||
var jw: json.Stringify = .{ .writer = &fixed_writer, .options = .{} };
|
||||
|
||||
try jw.beginObject();
|
||||
|
||||
@@ -101,8 +98,8 @@ test "write json then parse it" {
|
||||
|
||||
try jw.endObject();
|
||||
|
||||
fixed_buffer_stream = std.io.fixedBufferStream(fixed_buffer_stream.getWritten());
|
||||
var json_reader = jsonReader(testing.allocator, fixed_buffer_stream.reader());
|
||||
var fbs: std.Io.Reader = .fixed(fixed_writer.buffered());
|
||||
var json_reader: Scanner.Reader = .init(testing.allocator, &fbs);
|
||||
defer json_reader.deinit();
|
||||
var parsed = try parseFromTokenSource(Value, testing.allocator, &json_reader, .{});
|
||||
defer parsed.deinit();
|
||||
@@ -242,10 +239,9 @@ test "Value.jsonStringify" {
|
||||
.{ .object = obj },
|
||||
};
|
||||
var buffer: [0x1000]u8 = undefined;
|
||||
var fbs = std.io.fixedBufferStream(&buffer);
|
||||
var fixed_writer: Writer = .fixed(&buffer);
|
||||
|
||||
var jw = writeStream(fbs.writer(), .{ .whitespace = .indent_1 });
|
||||
defer jw.deinit();
|
||||
var jw: json.Stringify = .{ .writer = &fixed_writer, .options = .{ .whitespace = .indent_1 } };
|
||||
try jw.write(array);
|
||||
|
||||
const expected =
|
||||
@@ -266,7 +262,7 @@ test "Value.jsonStringify" {
|
||||
\\ }
|
||||
\\]
|
||||
;
|
||||
try testing.expectEqualStrings(expected, fbs.getWritten());
|
||||
try testing.expectEqualStrings(expected, fixed_writer.buffered());
|
||||
}
|
||||
|
||||
test "parseFromValue(std.json.Value,...)" {
|
||||
@@ -334,8 +330,8 @@ test "polymorphic parsing" {
|
||||
test "long object value" {
|
||||
const value = "01234567890123456789";
|
||||
const doc = "{\"key\":\"" ++ value ++ "\"}";
|
||||
var fbs = std.io.fixedBufferStream(doc);
|
||||
var reader = smallBufferJsonReader(testing.allocator, fbs.reader());
|
||||
var fbs: std.Io.Reader = .fixed(doc);
|
||||
var reader = smallBufferJsonReader(testing.allocator, &fbs);
|
||||
defer reader.deinit();
|
||||
var parsed = try parseFromTokenSource(Value, testing.allocator, &reader, .{});
|
||||
defer parsed.deinit();
|
||||
@@ -367,8 +363,8 @@ test "many object keys" {
|
||||
\\ "k5": "v5"
|
||||
\\}
|
||||
;
|
||||
var fbs = std.io.fixedBufferStream(doc);
|
||||
var reader = smallBufferJsonReader(testing.allocator, fbs.reader());
|
||||
var fbs: std.Io.Reader = .fixed(doc);
|
||||
var reader = smallBufferJsonReader(testing.allocator, &fbs);
|
||||
defer reader.deinit();
|
||||
var parsed = try parseFromTokenSource(Value, testing.allocator, &reader, .{});
|
||||
defer parsed.deinit();
|
||||
@@ -382,8 +378,8 @@ test "many object keys" {
|
||||
|
||||
test "negative zero" {
|
||||
const doc = "-0";
|
||||
var fbs = std.io.fixedBufferStream(doc);
|
||||
var reader = smallBufferJsonReader(testing.allocator, fbs.reader());
|
||||
var fbs: std.Io.Reader = .fixed(doc);
|
||||
var reader = smallBufferJsonReader(testing.allocator, &fbs);
|
||||
defer reader.deinit();
|
||||
var parsed = try parseFromTokenSource(Value, testing.allocator, &reader, .{});
|
||||
defer parsed.deinit();
|
||||
@@ -391,6 +387,6 @@ test "negative zero" {
|
||||
try testing.expect(std.math.isNegativeZero(parsed.value.float));
|
||||
}
|
||||
|
||||
fn smallBufferJsonReader(allocator: Allocator, io_reader: anytype) JsonReader(16, @TypeOf(io_reader)) {
|
||||
return JsonReader(16, @TypeOf(io_reader)).init(allocator, io_reader);
|
||||
fn smallBufferJsonReader(allocator: Allocator, io_reader: anytype) Scanner.Reader {
|
||||
return .init(allocator, io_reader);
|
||||
}
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
const std = @import("../std.zig");
|
||||
const assert = std.debug.assert;
|
||||
|
||||
const stringify = @import("stringify.zig").stringify;
|
||||
const StringifyOptions = @import("stringify.zig").StringifyOptions;
|
||||
|
||||
/// Returns a formatter that formats the given value using stringify.
|
||||
pub fn fmt(value: anytype, options: StringifyOptions) Formatter(@TypeOf(value)) {
|
||||
return Formatter(@TypeOf(value)){ .value = value, .options = options };
|
||||
}
|
||||
|
||||
/// Formats the given value using stringify.
|
||||
pub fn Formatter(comptime T: type) type {
|
||||
return struct {
|
||||
value: T,
|
||||
options: StringifyOptions,
|
||||
|
||||
pub fn format(self: @This(), writer: *std.io.Writer) std.io.Writer.Error!void {
|
||||
try stringify(self.value, self.options, writer);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
test fmt {
|
||||
const expectFmt = std.testing.expectFmt;
|
||||
try expectFmt("123", "{}", .{fmt(@as(u32, 123), .{})});
|
||||
try expectFmt(
|
||||
\\{"num":927,"msg":"hello","sub":{"mybool":true}}
|
||||
, "{}", .{fmt(struct {
|
||||
num: u32,
|
||||
msg: []const u8,
|
||||
sub: struct {
|
||||
mybool: bool,
|
||||
},
|
||||
}{
|
||||
.num = 927,
|
||||
.msg = "hello",
|
||||
.sub = .{ .mybool = true },
|
||||
}, .{})});
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
const std = @import("std");
|
||||
const json = std.json;
|
||||
const testing = std.testing;
|
||||
|
||||
const ArrayHashMap = @import("hashmap.zig").ArrayHashMap;
|
||||
@@ -7,10 +8,9 @@ const parseFromSlice = @import("static.zig").parseFromSlice;
|
||||
const parseFromSliceLeaky = @import("static.zig").parseFromSliceLeaky;
|
||||
const parseFromTokenSource = @import("static.zig").parseFromTokenSource;
|
||||
const parseFromValue = @import("static.zig").parseFromValue;
|
||||
const stringifyAlloc = @import("stringify.zig").stringifyAlloc;
|
||||
const Value = @import("dynamic.zig").Value;
|
||||
|
||||
const jsonReader = @import("./scanner.zig").reader;
|
||||
const Scanner = @import("Scanner.zig");
|
||||
|
||||
const T = struct {
|
||||
i: i32,
|
||||
@@ -39,8 +39,8 @@ test "parse json hashmap while streaming" {
|
||||
\\ "xyz": {"i": 1, "s": "w"}
|
||||
\\}
|
||||
;
|
||||
var stream = std.io.fixedBufferStream(doc);
|
||||
var json_reader = jsonReader(testing.allocator, stream.reader());
|
||||
var stream: std.Io.Reader = .fixed(doc);
|
||||
var json_reader: Scanner.Reader = .init(testing.allocator, &stream);
|
||||
|
||||
var parsed = try parseFromTokenSource(
|
||||
ArrayHashMap(T),
|
||||
@@ -89,7 +89,7 @@ test "stringify json hashmap" {
|
||||
var value = ArrayHashMap(T){};
|
||||
defer value.deinit(testing.allocator);
|
||||
{
|
||||
const doc = try stringifyAlloc(testing.allocator, value, .{});
|
||||
const doc = try json.Stringify.valueAlloc(testing.allocator, value, .{});
|
||||
defer testing.allocator.free(doc);
|
||||
try testing.expectEqualStrings("{}", doc);
|
||||
}
|
||||
@@ -98,7 +98,7 @@ test "stringify json hashmap" {
|
||||
try value.map.put(testing.allocator, "xyz", .{ .i = 1, .s = "w" });
|
||||
|
||||
{
|
||||
const doc = try stringifyAlloc(testing.allocator, value, .{});
|
||||
const doc = try json.Stringify.valueAlloc(testing.allocator, value, .{});
|
||||
defer testing.allocator.free(doc);
|
||||
try testing.expectEqualStrings(
|
||||
\\{"abc":{"i":0,"s":"d"},"xyz":{"i":1,"s":"w"}}
|
||||
@@ -107,7 +107,7 @@ test "stringify json hashmap" {
|
||||
|
||||
try testing.expect(value.map.swapRemove("abc"));
|
||||
{
|
||||
const doc = try stringifyAlloc(testing.allocator, value, .{});
|
||||
const doc = try json.Stringify.valueAlloc(testing.allocator, value, .{});
|
||||
defer testing.allocator.free(doc);
|
||||
try testing.expectEqualStrings(
|
||||
\\{"xyz":{"i":1,"s":"w"}}
|
||||
@@ -116,7 +116,7 @@ test "stringify json hashmap" {
|
||||
|
||||
try testing.expect(value.map.swapRemove("xyz"));
|
||||
{
|
||||
const doc = try stringifyAlloc(testing.allocator, value, .{});
|
||||
const doc = try json.Stringify.valueAlloc(testing.allocator, value, .{});
|
||||
defer testing.allocator.free(doc);
|
||||
try testing.expectEqualStrings("{}", doc);
|
||||
}
|
||||
@@ -129,7 +129,7 @@ test "stringify json hashmap whitespace" {
|
||||
try value.map.put(testing.allocator, "xyz", .{ .i = 1, .s = "w" });
|
||||
|
||||
{
|
||||
const doc = try stringifyAlloc(testing.allocator, value, .{ .whitespace = .indent_2 });
|
||||
const doc = try json.Stringify.valueAlloc(testing.allocator, value, .{ .whitespace = .indent_2 });
|
||||
defer testing.allocator.free(doc);
|
||||
try testing.expectEqualStrings(
|
||||
\\{
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,13 +1,11 @@
|
||||
const std = @import("std");
|
||||
const JsonScanner = @import("./scanner.zig").Scanner;
|
||||
const jsonReader = @import("./scanner.zig").reader;
|
||||
const JsonReader = @import("./scanner.zig").Reader;
|
||||
const Token = @import("./scanner.zig").Token;
|
||||
const TokenType = @import("./scanner.zig").TokenType;
|
||||
const Diagnostics = @import("./scanner.zig").Diagnostics;
|
||||
const Error = @import("./scanner.zig").Error;
|
||||
const validate = @import("./scanner.zig").validate;
|
||||
const isNumberFormattedLikeAnInteger = @import("./scanner.zig").isNumberFormattedLikeAnInteger;
|
||||
const Scanner = @import("Scanner.zig");
|
||||
const Token = Scanner.Token;
|
||||
const TokenType = Scanner.TokenType;
|
||||
const Diagnostics = Scanner.Diagnostics;
|
||||
const Error = Scanner.Error;
|
||||
const validate = Scanner.validate;
|
||||
const isNumberFormattedLikeAnInteger = Scanner.isNumberFormattedLikeAnInteger;
|
||||
|
||||
const example_document_str =
|
||||
\\{
|
||||
@@ -36,7 +34,7 @@ fn expectPeekNext(scanner_or_reader: anytype, expected_token_type: TokenType, ex
|
||||
}
|
||||
|
||||
test "token" {
|
||||
var scanner = JsonScanner.initCompleteInput(std.testing.allocator, example_document_str);
|
||||
var scanner = Scanner.initCompleteInput(std.testing.allocator, example_document_str);
|
||||
defer scanner.deinit();
|
||||
|
||||
try expectNext(&scanner, .object_begin);
|
||||
@@ -138,23 +136,25 @@ fn testAllTypes(source: anytype, large_buffer: bool) !void {
|
||||
}
|
||||
|
||||
test "peek all types" {
|
||||
var scanner = JsonScanner.initCompleteInput(std.testing.allocator, all_types_test_case);
|
||||
var scanner = Scanner.initCompleteInput(std.testing.allocator, all_types_test_case);
|
||||
defer scanner.deinit();
|
||||
try testAllTypes(&scanner, true);
|
||||
|
||||
var stream = std.io.fixedBufferStream(all_types_test_case);
|
||||
var json_reader = jsonReader(std.testing.allocator, stream.reader());
|
||||
var stream: std.Io.Reader = .fixed(all_types_test_case);
|
||||
var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
|
||||
defer json_reader.deinit();
|
||||
try testAllTypes(&json_reader, true);
|
||||
|
||||
var tiny_stream = std.io.fixedBufferStream(all_types_test_case);
|
||||
var tiny_json_reader = JsonReader(1, @TypeOf(tiny_stream.reader())).init(std.testing.allocator, tiny_stream.reader());
|
||||
var tiny_buffer: [1]u8 = undefined;
|
||||
var tiny_stream: std.testing.Reader = .init(&tiny_buffer, &.{.{ .buffer = all_types_test_case }});
|
||||
tiny_stream.artificial_limit = .limited(1);
|
||||
var tiny_json_reader: Scanner.Reader = .init(std.testing.allocator, &tiny_stream.interface);
|
||||
defer tiny_json_reader.deinit();
|
||||
try testAllTypes(&tiny_json_reader, false);
|
||||
}
|
||||
|
||||
test "token mismatched close" {
|
||||
var scanner = JsonScanner.initCompleteInput(std.testing.allocator, "[102, 111, 111 }");
|
||||
var scanner = Scanner.initCompleteInput(std.testing.allocator, "[102, 111, 111 }");
|
||||
defer scanner.deinit();
|
||||
try expectNext(&scanner, .array_begin);
|
||||
try expectNext(&scanner, Token{ .number = "102" });
|
||||
@@ -164,15 +164,15 @@ test "token mismatched close" {
|
||||
}
|
||||
|
||||
test "token premature object close" {
|
||||
var scanner = JsonScanner.initCompleteInput(std.testing.allocator, "{ \"key\": }");
|
||||
var scanner = Scanner.initCompleteInput(std.testing.allocator, "{ \"key\": }");
|
||||
defer scanner.deinit();
|
||||
try expectNext(&scanner, .object_begin);
|
||||
try expectNext(&scanner, Token{ .string = "key" });
|
||||
try std.testing.expectError(error.SyntaxError, scanner.next());
|
||||
}
|
||||
|
||||
test "JsonScanner basic" {
|
||||
var scanner = JsonScanner.initCompleteInput(std.testing.allocator, example_document_str);
|
||||
test "Scanner basic" {
|
||||
var scanner = Scanner.initCompleteInput(std.testing.allocator, example_document_str);
|
||||
defer scanner.deinit();
|
||||
|
||||
while (true) {
|
||||
@@ -181,10 +181,10 @@ test "JsonScanner basic" {
|
||||
}
|
||||
}
|
||||
|
||||
test "JsonReader basic" {
|
||||
var stream = std.io.fixedBufferStream(example_document_str);
|
||||
test "Scanner.Reader basic" {
|
||||
var stream: std.Io.Reader = .fixed(example_document_str);
|
||||
|
||||
var json_reader = jsonReader(std.testing.allocator, stream.reader());
|
||||
var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
|
||||
defer json_reader.deinit();
|
||||
|
||||
while (true) {
|
||||
@@ -215,7 +215,7 @@ const number_test_items = blk: {
|
||||
|
||||
test "numbers" {
|
||||
for (number_test_items) |number_str| {
|
||||
var scanner = JsonScanner.initCompleteInput(std.testing.allocator, number_str);
|
||||
var scanner = Scanner.initCompleteInput(std.testing.allocator, number_str);
|
||||
defer scanner.deinit();
|
||||
|
||||
const token = try scanner.next();
|
||||
@@ -243,10 +243,10 @@ const string_test_cases = .{
|
||||
|
||||
test "strings" {
|
||||
inline for (string_test_cases) |tuple| {
|
||||
var stream = std.io.fixedBufferStream("\"" ++ tuple[0] ++ "\"");
|
||||
var stream: std.Io.Reader = .fixed("\"" ++ tuple[0] ++ "\"");
|
||||
var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
|
||||
defer arena.deinit();
|
||||
var json_reader = jsonReader(std.testing.allocator, stream.reader());
|
||||
var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
|
||||
defer json_reader.deinit();
|
||||
|
||||
const token = try json_reader.nextAlloc(arena.allocator(), .alloc_if_needed);
|
||||
@@ -289,7 +289,7 @@ test "nesting" {
|
||||
}
|
||||
|
||||
fn expectMaybeError(document_str: []const u8, maybe_error: ?Error) !void {
|
||||
var scanner = JsonScanner.initCompleteInput(std.testing.allocator, document_str);
|
||||
var scanner = Scanner.initCompleteInput(std.testing.allocator, document_str);
|
||||
defer scanner.deinit();
|
||||
|
||||
while (true) {
|
||||
@@ -352,12 +352,12 @@ fn expectEqualTokens(expected_token: Token, actual_token: Token) !void {
|
||||
}
|
||||
|
||||
fn testTinyBufferSize(document_str: []const u8) !void {
|
||||
var tiny_stream = std.io.fixedBufferStream(document_str);
|
||||
var normal_stream = std.io.fixedBufferStream(document_str);
|
||||
var tiny_stream: std.Io.Reader = .fixed(document_str);
|
||||
var normal_stream: std.Io.Reader = .fixed(document_str);
|
||||
|
||||
var tiny_json_reader = JsonReader(1, @TypeOf(tiny_stream.reader())).init(std.testing.allocator, tiny_stream.reader());
|
||||
var tiny_json_reader: Scanner.Reader = .init(std.testing.allocator, &tiny_stream);
|
||||
defer tiny_json_reader.deinit();
|
||||
var normal_json_reader = JsonReader(0x1000, @TypeOf(normal_stream.reader())).init(std.testing.allocator, normal_stream.reader());
|
||||
var normal_json_reader: Scanner.Reader = .init(std.testing.allocator, &normal_stream);
|
||||
defer normal_json_reader.deinit();
|
||||
|
||||
expectEqualStreamOfTokens(&normal_json_reader, &tiny_json_reader) catch |err| {
|
||||
@@ -397,13 +397,13 @@ test "validate" {
|
||||
}
|
||||
|
||||
fn testSkipValue(s: []const u8) !void {
|
||||
var scanner = JsonScanner.initCompleteInput(std.testing.allocator, s);
|
||||
var scanner = Scanner.initCompleteInput(std.testing.allocator, s);
|
||||
defer scanner.deinit();
|
||||
try scanner.skipValue();
|
||||
try expectEqualTokens(.end_of_document, try scanner.next());
|
||||
|
||||
var stream = std.io.fixedBufferStream(s);
|
||||
var json_reader = jsonReader(std.testing.allocator, stream.reader());
|
||||
var stream: std.Io.Reader = .fixed(s);
|
||||
var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
|
||||
defer json_reader.deinit();
|
||||
try json_reader.skipValue();
|
||||
try expectEqualTokens(.end_of_document, try json_reader.next());
|
||||
@@ -441,7 +441,7 @@ fn testEnsureStackCapacity(do_ensure: bool) !void {
|
||||
try input_string.appendNTimes(std.testing.allocator, ']', nestings);
|
||||
defer input_string.deinit(std.testing.allocator);
|
||||
|
||||
var scanner = JsonScanner.initCompleteInput(failing_allocator, input_string.items);
|
||||
var scanner = Scanner.initCompleteInput(failing_allocator, input_string.items);
|
||||
defer scanner.deinit();
|
||||
|
||||
if (do_ensure) {
|
||||
@@ -473,17 +473,17 @@ fn testDiagnosticsFromSource(expected_error: ?anyerror, line: u64, col: u64, byt
|
||||
try std.testing.expectEqual(byte_offset, diagnostics.getByteOffset());
|
||||
}
|
||||
fn testDiagnostics(expected_error: ?anyerror, line: u64, col: u64, byte_offset: u64, s: []const u8) !void {
|
||||
var scanner = JsonScanner.initCompleteInput(std.testing.allocator, s);
|
||||
var scanner = Scanner.initCompleteInput(std.testing.allocator, s);
|
||||
defer scanner.deinit();
|
||||
try testDiagnosticsFromSource(expected_error, line, col, byte_offset, &scanner);
|
||||
|
||||
var tiny_stream = std.io.fixedBufferStream(s);
|
||||
var tiny_json_reader = JsonReader(1, @TypeOf(tiny_stream.reader())).init(std.testing.allocator, tiny_stream.reader());
|
||||
var tiny_stream: std.Io.Reader = .fixed(s);
|
||||
var tiny_json_reader: Scanner.Reader = .init(std.testing.allocator, &tiny_stream);
|
||||
defer tiny_json_reader.deinit();
|
||||
try testDiagnosticsFromSource(expected_error, line, col, byte_offset, &tiny_json_reader);
|
||||
|
||||
var medium_stream = std.io.fixedBufferStream(s);
|
||||
var medium_json_reader = JsonReader(5, @TypeOf(medium_stream.reader())).init(std.testing.allocator, medium_stream.reader());
|
||||
var medium_stream: std.Io.Reader = .fixed(s);
|
||||
var medium_json_reader: Scanner.Reader = .init(std.testing.allocator, &medium_stream);
|
||||
defer medium_json_reader.deinit();
|
||||
try testDiagnosticsFromSource(expected_error, line, col, byte_offset, &medium_json_reader);
|
||||
}
|
||||
|
||||
@@ -4,11 +4,11 @@ const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
const ArrayList = std.ArrayList;
|
||||
|
||||
const Scanner = @import("./scanner.zig").Scanner;
|
||||
const Token = @import("./scanner.zig").Token;
|
||||
const AllocWhen = @import("./scanner.zig").AllocWhen;
|
||||
const default_max_value_len = @import("./scanner.zig").default_max_value_len;
|
||||
const isNumberFormattedLikeAnInteger = @import("./scanner.zig").isNumberFormattedLikeAnInteger;
|
||||
const Scanner = @import("Scanner.zig");
|
||||
const Token = Scanner.Token;
|
||||
const AllocWhen = Scanner.AllocWhen;
|
||||
const default_max_value_len = Scanner.default_max_value_len;
|
||||
const isNumberFormattedLikeAnInteger = Scanner.isNumberFormattedLikeAnInteger;
|
||||
|
||||
const Value = @import("./dynamic.zig").Value;
|
||||
const Array = @import("./dynamic.zig").Array;
|
||||
|
||||
@@ -12,9 +12,7 @@ const parseFromValue = @import("./static.zig").parseFromValue;
|
||||
const parseFromValueLeaky = @import("./static.zig").parseFromValueLeaky;
|
||||
const ParseOptions = @import("./static.zig").ParseOptions;
|
||||
|
||||
const JsonScanner = @import("./scanner.zig").Scanner;
|
||||
const jsonReader = @import("./scanner.zig").reader;
|
||||
const Diagnostics = @import("./scanner.zig").Diagnostics;
|
||||
const Scanner = @import("Scanner.zig");
|
||||
|
||||
const Value = @import("./dynamic.zig").Value;
|
||||
|
||||
@@ -300,9 +298,9 @@ const subnamespaces_0_doc =
|
||||
fn testAllParseFunctions(comptime T: type, expected: T, doc: []const u8) !void {
|
||||
// First do the one with the debug info in case we get a SyntaxError or something.
|
||||
{
|
||||
var scanner = JsonScanner.initCompleteInput(testing.allocator, doc);
|
||||
var scanner = Scanner.initCompleteInput(testing.allocator, doc);
|
||||
defer scanner.deinit();
|
||||
var diagnostics = Diagnostics{};
|
||||
var diagnostics = Scanner.Diagnostics{};
|
||||
scanner.enableDiagnostics(&diagnostics);
|
||||
var parsed = parseFromTokenSource(T, testing.allocator, &scanner, .{}) catch |e| {
|
||||
std.debug.print("at line,col: {}:{}\n", .{ diagnostics.getLine(), diagnostics.getColumn() });
|
||||
@@ -317,8 +315,8 @@ fn testAllParseFunctions(comptime T: type, expected: T, doc: []const u8) !void {
|
||||
try testing.expectEqualDeep(expected, parsed.value);
|
||||
}
|
||||
{
|
||||
var stream = std.io.fixedBufferStream(doc);
|
||||
var json_reader = jsonReader(std.testing.allocator, stream.reader());
|
||||
var stream: std.Io.Reader = .fixed(doc);
|
||||
var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
|
||||
defer json_reader.deinit();
|
||||
var parsed = try parseFromTokenSource(T, testing.allocator, &json_reader, .{});
|
||||
defer parsed.deinit();
|
||||
@@ -331,13 +329,13 @@ fn testAllParseFunctions(comptime T: type, expected: T, doc: []const u8) !void {
|
||||
try testing.expectEqualDeep(expected, try parseFromSliceLeaky(T, arena.allocator(), doc, .{}));
|
||||
}
|
||||
{
|
||||
var scanner = JsonScanner.initCompleteInput(testing.allocator, doc);
|
||||
var scanner = Scanner.initCompleteInput(testing.allocator, doc);
|
||||
defer scanner.deinit();
|
||||
try testing.expectEqualDeep(expected, try parseFromTokenSourceLeaky(T, arena.allocator(), &scanner, .{}));
|
||||
}
|
||||
{
|
||||
var stream = std.io.fixedBufferStream(doc);
|
||||
var json_reader = jsonReader(std.testing.allocator, stream.reader());
|
||||
var stream: std.Io.Reader = .fixed(doc);
|
||||
var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
|
||||
defer json_reader.deinit();
|
||||
try testing.expectEqualDeep(expected, try parseFromTokenSourceLeaky(T, arena.allocator(), &json_reader, .{}));
|
||||
}
|
||||
@@ -763,7 +761,7 @@ test "parse exponential into int" {
|
||||
|
||||
test "parseFromTokenSource" {
|
||||
{
|
||||
var scanner = JsonScanner.initCompleteInput(testing.allocator, "123");
|
||||
var scanner = Scanner.initCompleteInput(testing.allocator, "123");
|
||||
defer scanner.deinit();
|
||||
var parsed = try parseFromTokenSource(u32, testing.allocator, &scanner, .{});
|
||||
defer parsed.deinit();
|
||||
@@ -771,8 +769,8 @@ test "parseFromTokenSource" {
|
||||
}
|
||||
|
||||
{
|
||||
var stream = std.io.fixedBufferStream("123");
|
||||
var json_reader = jsonReader(std.testing.allocator, stream.reader());
|
||||
var stream: std.Io.Reader = .fixed("123");
|
||||
var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
|
||||
defer json_reader.deinit();
|
||||
var parsed = try parseFromTokenSource(u32, testing.allocator, &json_reader, .{});
|
||||
defer parsed.deinit();
|
||||
@@ -836,7 +834,7 @@ test "json parse partial" {
|
||||
\\}
|
||||
;
|
||||
const allocator = testing.allocator;
|
||||
var scanner = JsonScanner.initCompleteInput(allocator, str);
|
||||
var scanner = Scanner.initCompleteInput(allocator, str);
|
||||
defer scanner.deinit();
|
||||
|
||||
var arena = ArenaAllocator.init(allocator);
|
||||
@@ -886,8 +884,8 @@ test "json parse allocate when streaming" {
|
||||
var arena = ArenaAllocator.init(allocator);
|
||||
defer arena.deinit();
|
||||
|
||||
var stream = std.io.fixedBufferStream(str);
|
||||
var json_reader = jsonReader(std.testing.allocator, stream.reader());
|
||||
var stream: std.Io.Reader = .fixed(str);
|
||||
var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
|
||||
|
||||
const parsed = parseFromTokenSourceLeaky(T, arena.allocator(), &json_reader, .{}) catch |err| {
|
||||
json_reader.deinit();
|
||||
|
||||
@@ -1,772 +0,0 @@
|
||||
const std = @import("std");
|
||||
const assert = std.debug.assert;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArrayList = std.ArrayList;
|
||||
const BitStack = std.BitStack;
|
||||
|
||||
const OBJECT_MODE = 0;
|
||||
const ARRAY_MODE = 1;
|
||||
|
||||
pub const StringifyOptions = struct {
|
||||
/// Controls the whitespace emitted.
|
||||
/// The default `.minified` is a compact encoding with no whitespace between tokens.
|
||||
/// Any setting other than `.minified` will use newlines, indentation, and a space after each ':'.
|
||||
/// `.indent_1` means 1 space for each indentation level, `.indent_2` means 2 spaces, etc.
|
||||
/// `.indent_tab` uses a tab for each indentation level.
|
||||
whitespace: enum {
|
||||
minified,
|
||||
indent_1,
|
||||
indent_2,
|
||||
indent_3,
|
||||
indent_4,
|
||||
indent_8,
|
||||
indent_tab,
|
||||
} = .minified,
|
||||
|
||||
/// Should optional fields with null value be written?
|
||||
emit_null_optional_fields: bool = true,
|
||||
|
||||
/// Arrays/slices of u8 are typically encoded as JSON strings.
|
||||
/// This option emits them as arrays of numbers instead.
|
||||
/// Does not affect calls to `objectField*()`.
|
||||
emit_strings_as_arrays: bool = false,
|
||||
|
||||
/// Should unicode characters be escaped in strings?
|
||||
escape_unicode: bool = false,
|
||||
|
||||
/// When true, renders numbers outside the range `+-1<<53` (the precise integer range of f64) as JSON strings in base 10.
|
||||
emit_nonportable_numbers_as_strings: bool = false,
|
||||
};
|
||||
|
||||
/// Writes the given value to the `std.io.GenericWriter` stream.
|
||||
/// See `WriteStream` for how the given value is serialized into JSON.
|
||||
/// The maximum nesting depth of the output JSON document is 256.
|
||||
/// See also `stringifyMaxDepth` and `stringifyArbitraryDepth`.
|
||||
pub fn stringify(
|
||||
value: anytype,
|
||||
options: StringifyOptions,
|
||||
out_stream: anytype,
|
||||
) @TypeOf(out_stream).Error!void {
|
||||
var jw = writeStream(out_stream, options);
|
||||
defer jw.deinit();
|
||||
try jw.write(value);
|
||||
}
|
||||
|
||||
/// Like `stringify` with configurable nesting depth.
|
||||
/// `max_depth` is rounded up to the nearest multiple of 8.
|
||||
/// Give `null` for `max_depth` to disable some safety checks and allow arbitrary nesting depth.
|
||||
/// See `writeStreamMaxDepth` for more info.
|
||||
pub fn stringifyMaxDepth(
|
||||
value: anytype,
|
||||
options: StringifyOptions,
|
||||
out_stream: anytype,
|
||||
comptime max_depth: ?usize,
|
||||
) @TypeOf(out_stream).Error!void {
|
||||
var jw = writeStreamMaxDepth(out_stream, options, max_depth);
|
||||
try jw.write(value);
|
||||
}
|
||||
|
||||
/// Like `stringify` but takes an allocator to facilitate safety checks while allowing arbitrary nesting depth.
|
||||
/// These safety checks can be helpful when debugging custom `jsonStringify` implementations;
|
||||
/// See `WriteStream`.
|
||||
pub fn stringifyArbitraryDepth(
|
||||
allocator: Allocator,
|
||||
value: anytype,
|
||||
options: StringifyOptions,
|
||||
out_stream: anytype,
|
||||
) WriteStream(@TypeOf(out_stream), .checked_to_arbitrary_depth).Error!void {
|
||||
var jw = writeStreamArbitraryDepth(allocator, out_stream, options);
|
||||
defer jw.deinit();
|
||||
try jw.write(value);
|
||||
}
|
||||
|
||||
/// Calls `stringifyArbitraryDepth` and stores the result in dynamically allocated memory
|
||||
/// instead of taking a `std.io.GenericWriter`.
|
||||
///
|
||||
/// Caller owns returned memory.
|
||||
pub fn stringifyAlloc(
|
||||
allocator: Allocator,
|
||||
value: anytype,
|
||||
options: StringifyOptions,
|
||||
) error{OutOfMemory}![]u8 {
|
||||
var list = std.ArrayList(u8).init(allocator);
|
||||
errdefer list.deinit();
|
||||
try stringifyArbitraryDepth(allocator, value, options, list.writer());
|
||||
return list.toOwnedSlice();
|
||||
}
|
||||
|
||||
/// See `WriteStream` for documentation.
|
||||
/// Equivalent to calling `writeStreamMaxDepth` with a depth of `256`.
|
||||
///
|
||||
/// The caller does *not* need to call `deinit()` on the returned object.
|
||||
pub fn writeStream(
|
||||
out_stream: anytype,
|
||||
options: StringifyOptions,
|
||||
) WriteStream(@TypeOf(out_stream), .{ .checked_to_fixed_depth = 256 }) {
|
||||
return writeStreamMaxDepth(out_stream, options, 256);
|
||||
}
|
||||
|
||||
/// See `WriteStream` for documentation.
|
||||
/// The returned object includes 1 bit of size per `max_depth` to enable safety checks on the order of method calls;
|
||||
/// see the grammar in the `WriteStream` documentation.
|
||||
/// `max_depth` is rounded up to the nearest multiple of 8.
|
||||
/// If the nesting depth exceeds `max_depth`, it is detectable illegal behavior.
|
||||
/// Give `null` for `max_depth` to disable safety checks for the grammar and allow arbitrary nesting depth.
|
||||
/// In `ReleaseFast` and `ReleaseSmall`, `max_depth` is ignored, effectively equivalent to passing `null`.
|
||||
/// Alternatively, see `writeStreamArbitraryDepth` to do safety checks to arbitrary depth.
|
||||
///
|
||||
/// The caller does *not* need to call `deinit()` on the returned object.
|
||||
pub fn writeStreamMaxDepth(
|
||||
out_stream: anytype,
|
||||
options: StringifyOptions,
|
||||
comptime max_depth: ?usize,
|
||||
) WriteStream(
|
||||
@TypeOf(out_stream),
|
||||
if (max_depth) |d| .{ .checked_to_fixed_depth = d } else .assumed_correct,
|
||||
) {
|
||||
return WriteStream(
|
||||
@TypeOf(out_stream),
|
||||
if (max_depth) |d| .{ .checked_to_fixed_depth = d } else .assumed_correct,
|
||||
).init(undefined, out_stream, options);
|
||||
}
|
||||
|
||||
/// See `WriteStream` for documentation.
|
||||
/// This version of the write stream enables safety checks to arbitrarily deep nesting levels
|
||||
/// by using the given allocator.
|
||||
/// The caller should call `deinit()` on the returned object to free allocated memory.
|
||||
///
|
||||
/// In `ReleaseFast` and `ReleaseSmall` mode, this function is effectively equivalent to calling `writeStreamMaxDepth(..., null)`;
|
||||
/// in those build modes, the allocator is *not used*.
|
||||
pub fn writeStreamArbitraryDepth(
|
||||
allocator: Allocator,
|
||||
out_stream: anytype,
|
||||
options: StringifyOptions,
|
||||
) WriteStream(@TypeOf(out_stream), .checked_to_arbitrary_depth) {
|
||||
return WriteStream(@TypeOf(out_stream), .checked_to_arbitrary_depth).init(allocator, out_stream, options);
|
||||
}
|
||||
|
||||
/// Writes JSON ([RFC8259](https://tools.ietf.org/html/rfc8259)) formatted data
|
||||
/// to a stream.
|
||||
///
|
||||
/// The sequence of method calls to write JSON content must follow this grammar:
|
||||
/// ```
|
||||
/// <once> = <value>
|
||||
/// <value> =
|
||||
/// | <object>
|
||||
/// | <array>
|
||||
/// | write
|
||||
/// | print
|
||||
/// | <writeRawStream>
|
||||
/// <object> = beginObject ( <field> <value> )* endObject
|
||||
/// <field> = objectField | objectFieldRaw | <objectFieldRawStream>
|
||||
/// <array> = beginArray ( <value> )* endArray
|
||||
/// <writeRawStream> = beginWriteRaw ( stream.writeAll )* endWriteRaw
|
||||
/// <objectFieldRawStream> = beginObjectFieldRaw ( stream.writeAll )* endObjectFieldRaw
|
||||
/// ```
|
||||
///
|
||||
/// The `safety_checks_hint` parameter determines how much memory is used to enable assertions that the above grammar is being followed,
|
||||
/// e.g. tripping an assertion rather than allowing `endObject` to emit the final `}` in `[[[]]}`.
|
||||
/// "Depth" in this context means the depth of nested `[]` or `{}` expressions
|
||||
/// (or equivalently the amount of recursion on the `<value>` grammar expression above).
|
||||
/// For example, emitting the JSON `[[[]]]` requires a depth of 3.
|
||||
/// If `.checked_to_fixed_depth` is used, there is additionally an assertion that the nesting depth never exceeds the given limit.
|
||||
/// `.checked_to_arbitrary_depth` requires a runtime allocator for the memory.
|
||||
/// `.checked_to_fixed_depth` embeds the storage required in the `WriteStream` struct.
|
||||
/// `.assumed_correct` requires no space and performs none of these assertions.
|
||||
/// In `ReleaseFast` and `ReleaseSmall` mode, the given `safety_checks_hint` is ignored and is always treated as `.assumed_correct`.
|
||||
pub fn WriteStream(
|
||||
comptime OutStream: type,
|
||||
comptime safety_checks_hint: union(enum) {
|
||||
checked_to_arbitrary_depth,
|
||||
checked_to_fixed_depth: usize, // Rounded up to the nearest multiple of 8.
|
||||
assumed_correct,
|
||||
},
|
||||
) type {
|
||||
return struct {
|
||||
const Self = @This();
|
||||
const build_mode_has_safety = switch (@import("builtin").mode) {
|
||||
.Debug, .ReleaseSafe => true,
|
||||
.ReleaseFast, .ReleaseSmall => false,
|
||||
};
|
||||
const safety_checks: @TypeOf(safety_checks_hint) = if (build_mode_has_safety)
|
||||
safety_checks_hint
|
||||
else
|
||||
.assumed_correct;
|
||||
|
||||
pub const Stream = OutStream;
|
||||
pub const Error = switch (safety_checks) {
|
||||
.checked_to_arbitrary_depth => Stream.Error || error{OutOfMemory},
|
||||
.checked_to_fixed_depth, .assumed_correct => Stream.Error,
|
||||
};
|
||||
|
||||
options: StringifyOptions,
|
||||
|
||||
stream: OutStream,
|
||||
indent_level: usize = 0,
|
||||
next_punctuation: enum {
|
||||
the_beginning,
|
||||
none,
|
||||
comma,
|
||||
colon,
|
||||
} = .the_beginning,
|
||||
|
||||
nesting_stack: switch (safety_checks) {
|
||||
.checked_to_arbitrary_depth => BitStack,
|
||||
.checked_to_fixed_depth => |fixed_buffer_size| [(fixed_buffer_size + 7) >> 3]u8,
|
||||
.assumed_correct => void,
|
||||
},
|
||||
|
||||
raw_streaming_mode: if (build_mode_has_safety)
|
||||
enum { none, value, objectField }
|
||||
else
|
||||
void = if (build_mode_has_safety) .none else {},
|
||||
|
||||
pub fn init(safety_allocator: Allocator, stream: OutStream, options: StringifyOptions) Self {
|
||||
return .{
|
||||
.options = options,
|
||||
.stream = stream,
|
||||
.nesting_stack = switch (safety_checks) {
|
||||
.checked_to_arbitrary_depth => BitStack.init(safety_allocator),
|
||||
.checked_to_fixed_depth => |fixed_buffer_size| [_]u8{0} ** ((fixed_buffer_size + 7) >> 3),
|
||||
.assumed_correct => {},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/// Only necessary with .checked_to_arbitrary_depth.
|
||||
pub fn deinit(self: *Self) void {
|
||||
switch (safety_checks) {
|
||||
.checked_to_arbitrary_depth => self.nesting_stack.deinit(),
|
||||
.checked_to_fixed_depth, .assumed_correct => {},
|
||||
}
|
||||
self.* = undefined;
|
||||
}
|
||||
|
||||
pub fn beginArray(self: *Self) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
try self.valueStart();
|
||||
try self.stream.writeByte('[');
|
||||
try self.pushIndentation(ARRAY_MODE);
|
||||
self.next_punctuation = .none;
|
||||
}
|
||||
|
||||
pub fn beginObject(self: *Self) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
try self.valueStart();
|
||||
try self.stream.writeByte('{');
|
||||
try self.pushIndentation(OBJECT_MODE);
|
||||
self.next_punctuation = .none;
|
||||
}
|
||||
|
||||
pub fn endArray(self: *Self) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
self.popIndentation(ARRAY_MODE);
|
||||
switch (self.next_punctuation) {
|
||||
.none => {},
|
||||
.comma => {
|
||||
try self.indent();
|
||||
},
|
||||
.the_beginning, .colon => unreachable,
|
||||
}
|
||||
try self.stream.writeByte(']');
|
||||
self.valueDone();
|
||||
}
|
||||
|
||||
pub fn endObject(self: *Self) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
self.popIndentation(OBJECT_MODE);
|
||||
switch (self.next_punctuation) {
|
||||
.none => {},
|
||||
.comma => {
|
||||
try self.indent();
|
||||
},
|
||||
.the_beginning, .colon => unreachable,
|
||||
}
|
||||
try self.stream.writeByte('}');
|
||||
self.valueDone();
|
||||
}
|
||||
|
||||
fn pushIndentation(self: *Self, mode: u1) !void {
|
||||
switch (safety_checks) {
|
||||
.checked_to_arbitrary_depth => {
|
||||
try self.nesting_stack.push(mode);
|
||||
self.indent_level += 1;
|
||||
},
|
||||
.checked_to_fixed_depth => {
|
||||
BitStack.pushWithStateAssumeCapacity(&self.nesting_stack, &self.indent_level, mode);
|
||||
},
|
||||
.assumed_correct => {
|
||||
self.indent_level += 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
fn popIndentation(self: *Self, assert_its_this_one: u1) void {
|
||||
switch (safety_checks) {
|
||||
.checked_to_arbitrary_depth => {
|
||||
assert(self.nesting_stack.pop() == assert_its_this_one);
|
||||
self.indent_level -= 1;
|
||||
},
|
||||
.checked_to_fixed_depth => {
|
||||
assert(BitStack.popWithState(&self.nesting_stack, &self.indent_level) == assert_its_this_one);
|
||||
},
|
||||
.assumed_correct => {
|
||||
self.indent_level -= 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn indent(self: *Self) !void {
|
||||
var char: u8 = ' ';
|
||||
const n_chars = switch (self.options.whitespace) {
|
||||
.minified => return,
|
||||
.indent_1 => 1 * self.indent_level,
|
||||
.indent_2 => 2 * self.indent_level,
|
||||
.indent_3 => 3 * self.indent_level,
|
||||
.indent_4 => 4 * self.indent_level,
|
||||
.indent_8 => 8 * self.indent_level,
|
||||
.indent_tab => blk: {
|
||||
char = '\t';
|
||||
break :blk self.indent_level;
|
||||
},
|
||||
};
|
||||
try self.stream.writeByte('\n');
|
||||
try self.stream.writeByteNTimes(char, n_chars);
|
||||
}
|
||||
|
||||
fn valueStart(self: *Self) !void {
|
||||
if (self.isObjectKeyExpected()) |is_it| assert(!is_it); // Call objectField*(), not write(), for object keys.
|
||||
return self.valueStartAssumeTypeOk();
|
||||
}
|
||||
fn objectFieldStart(self: *Self) !void {
|
||||
if (self.isObjectKeyExpected()) |is_it| assert(is_it); // Expected write(), not objectField*().
|
||||
return self.valueStartAssumeTypeOk();
|
||||
}
|
||||
fn valueStartAssumeTypeOk(self: *Self) !void {
|
||||
assert(!self.isComplete()); // JSON document already complete.
|
||||
switch (self.next_punctuation) {
|
||||
.the_beginning => {
|
||||
// No indentation for the very beginning.
|
||||
},
|
||||
.none => {
|
||||
// First item in a container.
|
||||
try self.indent();
|
||||
},
|
||||
.comma => {
|
||||
// Subsequent item in a container.
|
||||
try self.stream.writeByte(',');
|
||||
try self.indent();
|
||||
},
|
||||
.colon => {
|
||||
try self.stream.writeByte(':');
|
||||
if (self.options.whitespace != .minified) {
|
||||
try self.stream.writeByte(' ');
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
fn valueDone(self: *Self) void {
|
||||
self.next_punctuation = .comma;
|
||||
}
|
||||
|
||||
// Only when safety is enabled:
|
||||
fn isObjectKeyExpected(self: *const Self) ?bool {
|
||||
switch (safety_checks) {
|
||||
.checked_to_arbitrary_depth => return self.indent_level > 0 and
|
||||
self.nesting_stack.peek() == OBJECT_MODE and
|
||||
self.next_punctuation != .colon,
|
||||
.checked_to_fixed_depth => return self.indent_level > 0 and
|
||||
BitStack.peekWithState(&self.nesting_stack, self.indent_level) == OBJECT_MODE and
|
||||
self.next_punctuation != .colon,
|
||||
.assumed_correct => return null,
|
||||
}
|
||||
}
|
||||
fn isComplete(self: *const Self) bool {
|
||||
return self.indent_level == 0 and self.next_punctuation == .comma;
|
||||
}
|
||||
|
||||
/// An alternative to calling `write` that formats a value with `std.fmt`.
|
||||
/// This function does the usual punctuation and indentation formatting
|
||||
/// assuming the resulting formatted string represents a single complete value;
|
||||
/// e.g. `"1"`, `"[]"`, `"[1,2]"`, not `"1,2"`.
|
||||
/// This function may be useful for doing your own number formatting.
|
||||
pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
try self.valueStart();
|
||||
try self.stream.print(fmt, args);
|
||||
self.valueDone();
|
||||
}
|
||||
|
||||
/// An alternative to calling `write` that allows you to write directly to the `.stream` field, e.g. with `.stream.writeAll()`.
|
||||
/// Call `beginWriteRaw()`, then write a complete value (including any quotes if necessary) directly to the `.stream` field,
|
||||
/// then call `endWriteRaw()`.
|
||||
/// This can be useful for streaming very long strings into the output without needing it all buffered in memory.
|
||||
pub fn beginWriteRaw(self: *Self) !void {
|
||||
if (build_mode_has_safety) {
|
||||
assert(self.raw_streaming_mode == .none);
|
||||
self.raw_streaming_mode = .value;
|
||||
}
|
||||
try self.valueStart();
|
||||
}
|
||||
|
||||
/// See `beginWriteRaw`.
|
||||
pub fn endWriteRaw(self: *Self) void {
|
||||
if (build_mode_has_safety) {
|
||||
assert(self.raw_streaming_mode == .value);
|
||||
self.raw_streaming_mode = .none;
|
||||
}
|
||||
self.valueDone();
|
||||
}
|
||||
|
||||
/// See `WriteStream` for when to call this method.
|
||||
/// `key` is the string content of the property name.
|
||||
/// Surrounding quotes will be added and any special characters will be escaped.
|
||||
/// See also `objectFieldRaw`.
|
||||
pub fn objectField(self: *Self, key: []const u8) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
try self.objectFieldStart();
|
||||
try encodeJsonString(key, self.options, self.stream);
|
||||
self.next_punctuation = .colon;
|
||||
}
|
||||
/// See `WriteStream` for when to call this method.
|
||||
/// `quoted_key` is the complete bytes of the key including quotes and any necessary escape sequences.
|
||||
/// A few assertions are performed on the given value to ensure that the caller of this function understands the API contract.
|
||||
/// See also `objectField`.
|
||||
pub fn objectFieldRaw(self: *Self, quoted_key: []const u8) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
assert(quoted_key.len >= 2 and quoted_key[0] == '"' and quoted_key[quoted_key.len - 1] == '"'); // quoted_key should be "quoted".
|
||||
try self.objectFieldStart();
|
||||
try self.stream.writeAll(quoted_key);
|
||||
self.next_punctuation = .colon;
|
||||
}
|
||||
|
||||
/// In the rare case that you need to write very long object field names,
|
||||
/// this is an alternative to `objectField` and `objectFieldRaw` that allows you to write directly to the `.stream` field
|
||||
/// similar to `beginWriteRaw`.
|
||||
/// Call `endObjectFieldRaw()` when you're done.
|
||||
pub fn beginObjectFieldRaw(self: *Self) !void {
|
||||
if (build_mode_has_safety) {
|
||||
assert(self.raw_streaming_mode == .none);
|
||||
self.raw_streaming_mode = .objectField;
|
||||
}
|
||||
try self.objectFieldStart();
|
||||
}
|
||||
|
||||
/// See `beginObjectFieldRaw`.
|
||||
pub fn endObjectFieldRaw(self: *Self) void {
|
||||
if (build_mode_has_safety) {
|
||||
assert(self.raw_streaming_mode == .objectField);
|
||||
self.raw_streaming_mode = .none;
|
||||
}
|
||||
self.next_punctuation = .colon;
|
||||
}
|
||||
|
||||
/// Renders the given Zig value as JSON.
|
||||
///
|
||||
/// Supported types:
|
||||
/// * Zig `bool` -> JSON `true` or `false`.
|
||||
/// * Zig `?T` -> `null` or the rendering of `T`.
|
||||
/// * Zig `i32`, `u64`, etc. -> JSON number or string.
|
||||
/// * When option `emit_nonportable_numbers_as_strings` is true, if the value is outside the range `+-1<<53` (the precise integer range of f64), it is rendered as a JSON string in base 10. Otherwise, it is rendered as JSON number.
|
||||
/// * Zig floats -> JSON number or string.
|
||||
/// * If the value cannot be precisely represented by an f64, it is rendered as a JSON string. Otherwise, it is rendered as JSON number.
|
||||
/// * Zig `[]const u8`, `[]u8`, `*[N]u8`, `@Vector(N, u8)`, and similar -> JSON string.
|
||||
/// * See `StringifyOptions.emit_strings_as_arrays`.
|
||||
/// * If the content is not valid UTF-8, rendered as an array of numbers instead.
|
||||
/// * Zig `[]T`, `[N]T`, `*[N]T`, `@Vector(N, T)`, and similar -> JSON array of the rendering of each item.
|
||||
/// * Zig tuple -> JSON array of the rendering of each item.
|
||||
/// * Zig `struct` -> JSON object with each field in declaration order.
|
||||
/// * If the struct declares a method `pub fn jsonStringify(self: *@This(), jw: anytype) !void`, it is called to do the serialization instead of the default behavior. The given `jw` is a pointer to this `WriteStream`. See `std.json.Value` for an example.
|
||||
/// * See `StringifyOptions.emit_null_optional_fields`.
|
||||
/// * Zig `union(enum)` -> JSON object with one field named for the active tag and a value representing the payload.
|
||||
/// * If the payload is `void`, then the emitted value is `{}`.
|
||||
/// * If the union declares a method `pub fn jsonStringify(self: *@This(), jw: anytype) !void`, it is called to do the serialization instead of the default behavior. The given `jw` is a pointer to this `WriteStream`.
|
||||
/// * Zig `enum` -> JSON string naming the active tag.
|
||||
/// * If the enum declares a method `pub fn jsonStringify(self: *@This(), jw: anytype) !void`, it is called to do the serialization instead of the default behavior. The given `jw` is a pointer to this `WriteStream`.
|
||||
/// * If the enum is non-exhaustive, unnamed values are rendered as integers.
|
||||
/// * Zig untyped enum literal -> JSON string naming the active tag.
|
||||
/// * Zig error -> JSON string naming the error.
|
||||
/// * Zig `*T` -> the rendering of `T`. Note there is no guard against circular-reference infinite recursion.
|
||||
///
|
||||
/// See also alternative functions `print` and `beginWriteRaw`.
|
||||
/// For writing object field names, use `objectField` instead.
|
||||
pub fn write(self: *Self, value: anytype) Error!void {
|
||||
if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
|
||||
const T = @TypeOf(value);
|
||||
switch (@typeInfo(T)) {
|
||||
.int => {
|
||||
try self.valueStart();
|
||||
if (self.options.emit_nonportable_numbers_as_strings and
|
||||
(value <= -(1 << 53) or value >= (1 << 53)))
|
||||
{
|
||||
try self.stream.print("\"{}\"", .{value});
|
||||
} else {
|
||||
try self.stream.print("{}", .{value});
|
||||
}
|
||||
self.valueDone();
|
||||
return;
|
||||
},
|
||||
.comptime_int => {
|
||||
return self.write(@as(std.math.IntFittingRange(value, value), value));
|
||||
},
|
||||
.float, .comptime_float => {
|
||||
if (@as(f64, @floatCast(value)) == value) {
|
||||
try self.valueStart();
|
||||
try self.stream.print("{}", .{@as(f64, @floatCast(value))});
|
||||
self.valueDone();
|
||||
return;
|
||||
}
|
||||
try self.valueStart();
|
||||
try self.stream.print("\"{}\"", .{value});
|
||||
self.valueDone();
|
||||
return;
|
||||
},
|
||||
|
||||
.bool => {
|
||||
try self.valueStart();
|
||||
try self.stream.writeAll(if (value) "true" else "false");
|
||||
self.valueDone();
|
||||
return;
|
||||
},
|
||||
.null => {
|
||||
try self.valueStart();
|
||||
try self.stream.writeAll("null");
|
||||
self.valueDone();
|
||||
return;
|
||||
},
|
||||
.optional => {
|
||||
if (value) |payload| {
|
||||
return try self.write(payload);
|
||||
} else {
|
||||
return try self.write(null);
|
||||
}
|
||||
},
|
||||
.@"enum" => |enum_info| {
|
||||
if (std.meta.hasFn(T, "jsonStringify")) {
|
||||
return value.jsonStringify(self);
|
||||
}
|
||||
|
||||
if (!enum_info.is_exhaustive) {
|
||||
inline for (enum_info.fields) |field| {
|
||||
if (value == @field(T, field.name)) {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
return self.write(@intFromEnum(value));
|
||||
}
|
||||
}
|
||||
|
||||
return self.stringValue(@tagName(value));
|
||||
},
|
||||
.enum_literal => {
|
||||
return self.stringValue(@tagName(value));
|
||||
},
|
||||
.@"union" => {
|
||||
if (std.meta.hasFn(T, "jsonStringify")) {
|
||||
return value.jsonStringify(self);
|
||||
}
|
||||
|
||||
const info = @typeInfo(T).@"union";
|
||||
if (info.tag_type) |UnionTagType| {
|
||||
try self.beginObject();
|
||||
inline for (info.fields) |u_field| {
|
||||
if (value == @field(UnionTagType, u_field.name)) {
|
||||
try self.objectField(u_field.name);
|
||||
if (u_field.type == void) {
|
||||
// void value is {}
|
||||
try self.beginObject();
|
||||
try self.endObject();
|
||||
} else {
|
||||
try self.write(@field(value, u_field.name));
|
||||
}
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
unreachable; // No active tag?
|
||||
}
|
||||
try self.endObject();
|
||||
return;
|
||||
} else {
|
||||
@compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
|
||||
}
|
||||
},
|
||||
.@"struct" => |S| {
|
||||
if (std.meta.hasFn(T, "jsonStringify")) {
|
||||
return value.jsonStringify(self);
|
||||
}
|
||||
|
||||
if (S.is_tuple) {
|
||||
try self.beginArray();
|
||||
} else {
|
||||
try self.beginObject();
|
||||
}
|
||||
inline for (S.fields) |Field| {
|
||||
// don't include void fields
|
||||
if (Field.type == void) continue;
|
||||
|
||||
var emit_field = true;
|
||||
|
||||
// don't include optional fields that are null when emit_null_optional_fields is set to false
|
||||
if (@typeInfo(Field.type) == .optional) {
|
||||
if (self.options.emit_null_optional_fields == false) {
|
||||
if (@field(value, Field.name) == null) {
|
||||
emit_field = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (emit_field) {
|
||||
if (!S.is_tuple) {
|
||||
try self.objectField(Field.name);
|
||||
}
|
||||
try self.write(@field(value, Field.name));
|
||||
}
|
||||
}
|
||||
if (S.is_tuple) {
|
||||
try self.endArray();
|
||||
} else {
|
||||
try self.endObject();
|
||||
}
|
||||
return;
|
||||
},
|
||||
.error_set => return self.stringValue(@errorName(value)),
|
||||
.pointer => |ptr_info| switch (ptr_info.size) {
|
||||
.one => switch (@typeInfo(ptr_info.child)) {
|
||||
.array => {
|
||||
// Coerce `*[N]T` to `[]const T`.
|
||||
const Slice = []const std.meta.Elem(ptr_info.child);
|
||||
return self.write(@as(Slice, value));
|
||||
},
|
||||
else => {
|
||||
return self.write(value.*);
|
||||
},
|
||||
},
|
||||
.many, .slice => {
|
||||
if (ptr_info.size == .many and ptr_info.sentinel() == null)
|
||||
@compileError("unable to stringify type '" ++ @typeName(T) ++ "' without sentinel");
|
||||
const slice = if (ptr_info.size == .many) std.mem.span(value) else value;
|
||||
|
||||
if (ptr_info.child == u8) {
|
||||
// This is a []const u8, or some similar Zig string.
|
||||
if (!self.options.emit_strings_as_arrays and std.unicode.utf8ValidateSlice(slice)) {
|
||||
return self.stringValue(slice);
|
||||
}
|
||||
}
|
||||
|
||||
try self.beginArray();
|
||||
for (slice) |x| {
|
||||
try self.write(x);
|
||||
}
|
||||
try self.endArray();
|
||||
return;
|
||||
},
|
||||
else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
|
||||
},
|
||||
.array => {
|
||||
// Coerce `[N]T` to `*const [N]T` (and then to `[]const T`).
|
||||
return self.write(&value);
|
||||
},
|
||||
.vector => |info| {
|
||||
const array: [info.len]info.child = value;
|
||||
return self.write(&array);
|
||||
},
|
||||
else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
|
||||
}
|
||||
unreachable;
|
||||
}
|
||||
|
||||
fn stringValue(self: *Self, s: []const u8) !void {
|
||||
try self.valueStart();
|
||||
try encodeJsonString(s, self.options, self.stream);
|
||||
self.valueDone();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn outputUnicodeEscape(codepoint: u21, out_stream: anytype) !void {
|
||||
if (codepoint <= 0xFFFF) {
|
||||
// If the character is in the Basic Multilingual Plane (U+0000 through U+FFFF),
|
||||
// then it may be represented as a six-character sequence: a reverse solidus, followed
|
||||
// by the lowercase letter u, followed by four hexadecimal digits that encode the character's code point.
|
||||
try out_stream.writeAll("\\u");
|
||||
//try w.printInt("x", .{ .width = 4, .fill = '0' }, codepoint);
|
||||
try std.fmt.format(out_stream, "{x:0>4}", .{codepoint});
|
||||
} else {
|
||||
assert(codepoint <= 0x10FFFF);
|
||||
// To escape an extended character that is not in the Basic Multilingual Plane,
|
||||
// the character is represented as a 12-character sequence, encoding the UTF-16 surrogate pair.
|
||||
const high = @as(u16, @intCast((codepoint - 0x10000) >> 10)) + 0xD800;
|
||||
const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00;
|
||||
try out_stream.writeAll("\\u");
|
||||
//try w.printInt("x", .{ .width = 4, .fill = '0' }, high);
|
||||
try std.fmt.format(out_stream, "{x:0>4}", .{high});
|
||||
try out_stream.writeAll("\\u");
|
||||
//try w.printInt("x", .{ .width = 4, .fill = '0' }, low);
|
||||
try std.fmt.format(out_stream, "{x:0>4}", .{low});
|
||||
}
|
||||
}
|
||||
|
||||
fn outputSpecialEscape(c: u8, writer: anytype) !void {
|
||||
switch (c) {
|
||||
'\\' => try writer.writeAll("\\\\"),
|
||||
'\"' => try writer.writeAll("\\\""),
|
||||
0x08 => try writer.writeAll("\\b"),
|
||||
0x0C => try writer.writeAll("\\f"),
|
||||
'\n' => try writer.writeAll("\\n"),
|
||||
'\r' => try writer.writeAll("\\r"),
|
||||
'\t' => try writer.writeAll("\\t"),
|
||||
else => try outputUnicodeEscape(c, writer),
|
||||
}
|
||||
}
|
||||
|
||||
/// Write `string` to `writer` as a JSON encoded string.
|
||||
pub fn encodeJsonString(string: []const u8, options: StringifyOptions, writer: anytype) !void {
|
||||
try writer.writeByte('\"');
|
||||
try encodeJsonStringChars(string, options, writer);
|
||||
try writer.writeByte('\"');
|
||||
}
|
||||
|
||||
/// Write `chars` to `writer` as JSON encoded string characters.
|
||||
pub fn encodeJsonStringChars(chars: []const u8, options: StringifyOptions, writer: anytype) !void {
|
||||
var write_cursor: usize = 0;
|
||||
var i: usize = 0;
|
||||
if (options.escape_unicode) {
|
||||
while (i < chars.len) : (i += 1) {
|
||||
switch (chars[i]) {
|
||||
// normal ascii character
|
||||
0x20...0x21, 0x23...0x5B, 0x5D...0x7E => {},
|
||||
0x00...0x1F, '\\', '\"' => {
|
||||
// Always must escape these.
|
||||
try writer.writeAll(chars[write_cursor..i]);
|
||||
try outputSpecialEscape(chars[i], writer);
|
||||
write_cursor = i + 1;
|
||||
},
|
||||
0x7F...0xFF => {
|
||||
try writer.writeAll(chars[write_cursor..i]);
|
||||
const ulen = std.unicode.utf8ByteSequenceLength(chars[i]) catch unreachable;
|
||||
const codepoint = std.unicode.utf8Decode(chars[i..][0..ulen]) catch unreachable;
|
||||
try outputUnicodeEscape(codepoint, writer);
|
||||
i += ulen - 1;
|
||||
write_cursor = i + 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
} else {
|
||||
while (i < chars.len) : (i += 1) {
|
||||
switch (chars[i]) {
|
||||
// normal bytes
|
||||
0x20...0x21, 0x23...0x5B, 0x5D...0xFF => {},
|
||||
0x00...0x1F, '\\', '\"' => {
|
||||
// Always must escape these.
|
||||
try writer.writeAll(chars[write_cursor..i]);
|
||||
try outputSpecialEscape(chars[i], writer);
|
||||
write_cursor = i + 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
try writer.writeAll(chars[write_cursor..chars.len]);
|
||||
}
|
||||
|
||||
test {
|
||||
_ = @import("./stringify_test.zig");
|
||||
}
|
||||
@@ -1,504 +0,0 @@
|
||||
const std = @import("std");
|
||||
const mem = std.mem;
|
||||
const testing = std.testing;
|
||||
|
||||
const ObjectMap = @import("dynamic.zig").ObjectMap;
|
||||
const Value = @import("dynamic.zig").Value;
|
||||
|
||||
const StringifyOptions = @import("stringify.zig").StringifyOptions;
|
||||
const stringify = @import("stringify.zig").stringify;
|
||||
const stringifyMaxDepth = @import("stringify.zig").stringifyMaxDepth;
|
||||
const stringifyArbitraryDepth = @import("stringify.zig").stringifyArbitraryDepth;
|
||||
const stringifyAlloc = @import("stringify.zig").stringifyAlloc;
|
||||
const writeStream = @import("stringify.zig").writeStream;
|
||||
const writeStreamMaxDepth = @import("stringify.zig").writeStreamMaxDepth;
|
||||
const writeStreamArbitraryDepth = @import("stringify.zig").writeStreamArbitraryDepth;
|
||||
|
||||
test "json write stream" {
|
||||
var out_buf: [1024]u8 = undefined;
|
||||
var slice_stream = std.io.fixedBufferStream(&out_buf);
|
||||
const out = slice_stream.writer();
|
||||
|
||||
{
|
||||
var w = writeStream(out, .{ .whitespace = .indent_2 });
|
||||
try testBasicWriteStream(&w, &slice_stream);
|
||||
}
|
||||
|
||||
{
|
||||
var w = writeStreamMaxDepth(out, .{ .whitespace = .indent_2 }, 8);
|
||||
try testBasicWriteStream(&w, &slice_stream);
|
||||
}
|
||||
|
||||
{
|
||||
var w = writeStreamMaxDepth(out, .{ .whitespace = .indent_2 }, null);
|
||||
try testBasicWriteStream(&w, &slice_stream);
|
||||
}
|
||||
|
||||
{
|
||||
var w = writeStreamArbitraryDepth(testing.allocator, out, .{ .whitespace = .indent_2 });
|
||||
defer w.deinit();
|
||||
try testBasicWriteStream(&w, &slice_stream);
|
||||
}
|
||||
}
|
||||
|
||||
fn testBasicWriteStream(w: anytype, slice_stream: anytype) !void {
|
||||
slice_stream.reset();
|
||||
|
||||
try w.beginObject();
|
||||
|
||||
try w.objectField("object");
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
try w.write(try getJsonObject(arena_allocator.allocator()));
|
||||
|
||||
try w.objectFieldRaw("\"string\"");
|
||||
try w.write("This is a string");
|
||||
|
||||
try w.objectField("array");
|
||||
try w.beginArray();
|
||||
try w.write("Another string");
|
||||
try w.write(@as(i32, 1));
|
||||
try w.write(@as(f32, 3.5));
|
||||
try w.endArray();
|
||||
|
||||
try w.objectField("int");
|
||||
try w.write(@as(i32, 10));
|
||||
|
||||
try w.objectField("float");
|
||||
try w.write(@as(f32, 3.5));
|
||||
|
||||
try w.endObject();
|
||||
|
||||
const result = slice_stream.getWritten();
|
||||
const expected =
|
||||
\\{
|
||||
\\ "object": {
|
||||
\\ "one": 1,
|
||||
\\ "two": 2
|
||||
\\ },
|
||||
\\ "string": "This is a string",
|
||||
\\ "array": [
|
||||
\\ "Another string",
|
||||
\\ 1,
|
||||
\\ 3.5
|
||||
\\ ],
|
||||
\\ "int": 10,
|
||||
\\ "float": 3.5
|
||||
\\}
|
||||
;
|
||||
try std.testing.expectEqualStrings(expected, result);
|
||||
}
|
||||
|
||||
fn getJsonObject(allocator: std.mem.Allocator) !Value {
|
||||
var value = Value{ .object = ObjectMap.init(allocator) };
|
||||
try value.object.put("one", Value{ .integer = @as(i64, @intCast(1)) });
|
||||
try value.object.put("two", Value{ .float = 2.0 });
|
||||
return value;
|
||||
}
|
||||
|
||||
test "stringify null optional fields" {
|
||||
const MyStruct = struct {
|
||||
optional: ?[]const u8 = null,
|
||||
required: []const u8 = "something",
|
||||
another_optional: ?[]const u8 = null,
|
||||
another_required: []const u8 = "something else",
|
||||
};
|
||||
try testStringify(
|
||||
\\{"optional":null,"required":"something","another_optional":null,"another_required":"something else"}
|
||||
,
|
||||
MyStruct{},
|
||||
.{},
|
||||
);
|
||||
try testStringify(
|
||||
\\{"required":"something","another_required":"something else"}
|
||||
,
|
||||
MyStruct{},
|
||||
.{ .emit_null_optional_fields = false },
|
||||
);
|
||||
}
|
||||
|
||||
test "stringify basic types" {
|
||||
try testStringify("false", false, .{});
|
||||
try testStringify("true", true, .{});
|
||||
try testStringify("null", @as(?u8, null), .{});
|
||||
try testStringify("null", @as(?*u32, null), .{});
|
||||
try testStringify("42", 42, .{});
|
||||
try testStringify("42", 42.0, .{});
|
||||
try testStringify("42", @as(u8, 42), .{});
|
||||
try testStringify("42", @as(u128, 42), .{});
|
||||
try testStringify("9999999999999999", 9999999999999999, .{});
|
||||
try testStringify("42", @as(f32, 42), .{});
|
||||
try testStringify("42", @as(f64, 42), .{});
|
||||
try testStringify("\"ItBroke\"", @as(anyerror, error.ItBroke), .{});
|
||||
try testStringify("\"ItBroke\"", error.ItBroke, .{});
|
||||
}
|
||||
|
||||
test "stringify string" {
|
||||
try testStringify("\"hello\"", "hello", .{});
|
||||
try testStringify("\"with\\nescapes\\r\"", "with\nescapes\r", .{});
|
||||
try testStringify("\"with\\nescapes\\r\"", "with\nescapes\r", .{ .escape_unicode = true });
|
||||
try testStringify("\"with unicode\\u0001\"", "with unicode\u{1}", .{});
|
||||
try testStringify("\"with unicode\\u0001\"", "with unicode\u{1}", .{ .escape_unicode = true });
|
||||
try testStringify("\"with unicode\u{80}\"", "with unicode\u{80}", .{});
|
||||
try testStringify("\"with unicode\\u0080\"", "with unicode\u{80}", .{ .escape_unicode = true });
|
||||
try testStringify("\"with unicode\u{FF}\"", "with unicode\u{FF}", .{});
|
||||
try testStringify("\"with unicode\\u00ff\"", "with unicode\u{FF}", .{ .escape_unicode = true });
|
||||
try testStringify("\"with unicode\u{100}\"", "with unicode\u{100}", .{});
|
||||
try testStringify("\"with unicode\\u0100\"", "with unicode\u{100}", .{ .escape_unicode = true });
|
||||
try testStringify("\"with unicode\u{800}\"", "with unicode\u{800}", .{});
|
||||
try testStringify("\"with unicode\\u0800\"", "with unicode\u{800}", .{ .escape_unicode = true });
|
||||
try testStringify("\"with unicode\u{8000}\"", "with unicode\u{8000}", .{});
|
||||
try testStringify("\"with unicode\\u8000\"", "with unicode\u{8000}", .{ .escape_unicode = true });
|
||||
try testStringify("\"with unicode\u{D799}\"", "with unicode\u{D799}", .{});
|
||||
try testStringify("\"with unicode\\ud799\"", "with unicode\u{D799}", .{ .escape_unicode = true });
|
||||
try testStringify("\"with unicode\u{10000}\"", "with unicode\u{10000}", .{});
|
||||
try testStringify("\"with unicode\\ud800\\udc00\"", "with unicode\u{10000}", .{ .escape_unicode = true });
|
||||
try testStringify("\"with unicode\u{10FFFF}\"", "with unicode\u{10FFFF}", .{});
|
||||
try testStringify("\"with unicode\\udbff\\udfff\"", "with unicode\u{10FFFF}", .{ .escape_unicode = true });
|
||||
}
|
||||
|
||||
test "stringify many-item sentinel-terminated string" {
|
||||
try testStringify("\"hello\"", @as([*:0]const u8, "hello"), .{});
|
||||
try testStringify("\"with\\nescapes\\r\"", @as([*:0]const u8, "with\nescapes\r"), .{ .escape_unicode = true });
|
||||
try testStringify("\"with unicode\\u0001\"", @as([*:0]const u8, "with unicode\u{1}"), .{ .escape_unicode = true });
|
||||
}
|
||||
|
||||
test "stringify enums" {
|
||||
const E = enum {
|
||||
foo,
|
||||
bar,
|
||||
};
|
||||
try testStringify("\"foo\"", E.foo, .{});
|
||||
try testStringify("\"bar\"", E.bar, .{});
|
||||
}
|
||||
|
||||
test "stringify non-exhaustive enum" {
|
||||
const E = enum(u8) {
|
||||
foo = 0,
|
||||
_,
|
||||
};
|
||||
try testStringify("\"foo\"", E.foo, .{});
|
||||
try testStringify("1", @as(E, @enumFromInt(1)), .{});
|
||||
}
|
||||
|
||||
test "stringify enum literals" {
|
||||
try testStringify("\"foo\"", .foo, .{});
|
||||
try testStringify("\"bar\"", .bar, .{});
|
||||
}
|
||||
|
||||
test "stringify tagged unions" {
|
||||
const T = union(enum) {
|
||||
nothing,
|
||||
foo: u32,
|
||||
bar: bool,
|
||||
};
|
||||
try testStringify("{\"nothing\":{}}", T{ .nothing = {} }, .{});
|
||||
try testStringify("{\"foo\":42}", T{ .foo = 42 }, .{});
|
||||
try testStringify("{\"bar\":true}", T{ .bar = true }, .{});
|
||||
}
|
||||
|
||||
test "stringify struct" {
|
||||
try testStringify("{\"foo\":42}", struct {
|
||||
foo: u32,
|
||||
}{ .foo = 42 }, .{});
|
||||
}
|
||||
|
||||
test "emit_strings_as_arrays" {
|
||||
// Should only affect string values, not object keys.
|
||||
try testStringify("{\"foo\":\"bar\"}", .{ .foo = "bar" }, .{});
|
||||
try testStringify("{\"foo\":[98,97,114]}", .{ .foo = "bar" }, .{ .emit_strings_as_arrays = true });
|
||||
// Should *not* affect these types:
|
||||
try testStringify("\"foo\"", @as(enum { foo, bar }, .foo), .{ .emit_strings_as_arrays = true });
|
||||
try testStringify("\"ItBroke\"", error.ItBroke, .{ .emit_strings_as_arrays = true });
|
||||
// Should work on these:
|
||||
try testStringify("\"bar\"", @Vector(3, u8){ 'b', 'a', 'r' }, .{});
|
||||
try testStringify("[98,97,114]", @Vector(3, u8){ 'b', 'a', 'r' }, .{ .emit_strings_as_arrays = true });
|
||||
try testStringify("\"bar\"", [3]u8{ 'b', 'a', 'r' }, .{});
|
||||
try testStringify("[98,97,114]", [3]u8{ 'b', 'a', 'r' }, .{ .emit_strings_as_arrays = true });
|
||||
}
|
||||
|
||||
test "stringify struct with indentation" {
|
||||
try testStringify(
|
||||
\\{
|
||||
\\ "foo": 42,
|
||||
\\ "bar": [
|
||||
\\ 1,
|
||||
\\ 2,
|
||||
\\ 3
|
||||
\\ ]
|
||||
\\}
|
||||
,
|
||||
struct {
|
||||
foo: u32,
|
||||
bar: [3]u32,
|
||||
}{
|
||||
.foo = 42,
|
||||
.bar = .{ 1, 2, 3 },
|
||||
},
|
||||
.{ .whitespace = .indent_4 },
|
||||
);
|
||||
try testStringify(
|
||||
"{\n\t\"foo\": 42,\n\t\"bar\": [\n\t\t1,\n\t\t2,\n\t\t3\n\t]\n}",
|
||||
struct {
|
||||
foo: u32,
|
||||
bar: [3]u32,
|
||||
}{
|
||||
.foo = 42,
|
||||
.bar = .{ 1, 2, 3 },
|
||||
},
|
||||
.{ .whitespace = .indent_tab },
|
||||
);
|
||||
try testStringify(
|
||||
\\{"foo":42,"bar":[1,2,3]}
|
||||
,
|
||||
struct {
|
||||
foo: u32,
|
||||
bar: [3]u32,
|
||||
}{
|
||||
.foo = 42,
|
||||
.bar = .{ 1, 2, 3 },
|
||||
},
|
||||
.{ .whitespace = .minified },
|
||||
);
|
||||
}
|
||||
|
||||
test "stringify struct with void field" {
|
||||
try testStringify("{\"foo\":42}", struct {
|
||||
foo: u32,
|
||||
bar: void = {},
|
||||
}{ .foo = 42 }, .{});
|
||||
}
|
||||
|
||||
test "stringify array of structs" {
|
||||
const MyStruct = struct {
|
||||
foo: u32,
|
||||
};
|
||||
try testStringify("[{\"foo\":42},{\"foo\":100},{\"foo\":1000}]", [_]MyStruct{
|
||||
MyStruct{ .foo = 42 },
|
||||
MyStruct{ .foo = 100 },
|
||||
MyStruct{ .foo = 1000 },
|
||||
}, .{});
|
||||
}
|
||||
|
||||
test "stringify struct with custom stringifier" {
|
||||
try testStringify("[\"something special\",42]", struct {
|
||||
foo: u32,
|
||||
const Self = @This();
|
||||
pub fn jsonStringify(value: @This(), jws: anytype) !void {
|
||||
_ = value;
|
||||
try jws.beginArray();
|
||||
try jws.write("something special");
|
||||
try jws.write(42);
|
||||
try jws.endArray();
|
||||
}
|
||||
}{ .foo = 42 }, .{});
|
||||
}
|
||||
|
||||
test "stringify vector" {
|
||||
try testStringify("[1,1]", @as(@Vector(2, u32), @splat(1)), .{});
|
||||
try testStringify("\"AA\"", @as(@Vector(2, u8), @splat('A')), .{});
|
||||
try testStringify("[65,65]", @as(@Vector(2, u8), @splat('A')), .{ .emit_strings_as_arrays = true });
|
||||
}
|
||||
|
||||
test "stringify tuple" {
|
||||
try testStringify("[\"foo\",42]", std.meta.Tuple(&.{ []const u8, usize }){ "foo", 42 }, .{});
|
||||
}
|
||||
|
||||
fn testStringify(expected: []const u8, value: anytype, options: StringifyOptions) !void {
|
||||
const ValidationWriter = struct {
|
||||
const Self = @This();
|
||||
pub const Writer = std.io.GenericWriter(*Self, Error, write);
|
||||
pub const Error = error{
|
||||
TooMuchData,
|
||||
DifferentData,
|
||||
};
|
||||
|
||||
expected_remaining: []const u8,
|
||||
|
||||
fn init(exp: []const u8) Self {
|
||||
return .{ .expected_remaining = exp };
|
||||
}
|
||||
|
||||
pub fn writer(self: *Self) Writer {
|
||||
return .{ .context = self };
|
||||
}
|
||||
|
||||
fn write(self: *Self, bytes: []const u8) Error!usize {
|
||||
if (self.expected_remaining.len < bytes.len) {
|
||||
std.debug.print(
|
||||
\\====== expected this output: =========
|
||||
\\{s}
|
||||
\\======== instead found this: =========
|
||||
\\{s}
|
||||
\\======================================
|
||||
, .{
|
||||
self.expected_remaining,
|
||||
bytes,
|
||||
});
|
||||
return error.TooMuchData;
|
||||
}
|
||||
if (!mem.eql(u8, self.expected_remaining[0..bytes.len], bytes)) {
|
||||
std.debug.print(
|
||||
\\====== expected this output: =========
|
||||
\\{s}
|
||||
\\======== instead found this: =========
|
||||
\\{s}
|
||||
\\======================================
|
||||
, .{
|
||||
self.expected_remaining[0..bytes.len],
|
||||
bytes,
|
||||
});
|
||||
return error.DifferentData;
|
||||
}
|
||||
self.expected_remaining = self.expected_remaining[bytes.len..];
|
||||
return bytes.len;
|
||||
}
|
||||
};
|
||||
|
||||
var vos = ValidationWriter.init(expected);
|
||||
try stringifyArbitraryDepth(testing.allocator, value, options, vos.writer());
|
||||
if (vos.expected_remaining.len > 0) return error.NotEnoughData;
|
||||
|
||||
// Also test with safety disabled.
|
||||
try testStringifyMaxDepth(expected, value, options, null);
|
||||
try testStringifyArbitraryDepth(expected, value, options);
|
||||
}
|
||||
|
||||
fn testStringifyMaxDepth(expected: []const u8, value: anytype, options: StringifyOptions, comptime max_depth: ?usize) !void {
|
||||
var out_buf: [1024]u8 = undefined;
|
||||
var slice_stream = std.io.fixedBufferStream(&out_buf);
|
||||
const out = slice_stream.writer();
|
||||
|
||||
try stringifyMaxDepth(value, options, out, max_depth);
|
||||
const got = slice_stream.getWritten();
|
||||
|
||||
try testing.expectEqualStrings(expected, got);
|
||||
}
|
||||
|
||||
fn testStringifyArbitraryDepth(expected: []const u8, value: anytype, options: StringifyOptions) !void {
|
||||
var out_buf: [1024]u8 = undefined;
|
||||
var slice_stream = std.io.fixedBufferStream(&out_buf);
|
||||
const out = slice_stream.writer();
|
||||
|
||||
try stringifyArbitraryDepth(testing.allocator, value, options, out);
|
||||
const got = slice_stream.getWritten();
|
||||
|
||||
try testing.expectEqualStrings(expected, got);
|
||||
}
|
||||
|
||||
test "stringify alloc" {
|
||||
const allocator = std.testing.allocator;
|
||||
const expected =
|
||||
\\{"foo":"bar","answer":42,"my_friend":"sammy"}
|
||||
;
|
||||
const actual = try stringifyAlloc(allocator, .{ .foo = "bar", .answer = 42, .my_friend = "sammy" }, .{});
|
||||
defer allocator.free(actual);
|
||||
|
||||
try std.testing.expectEqualStrings(expected, actual);
|
||||
}
|
||||
|
||||
test "comptime stringify" {
|
||||
comptime testStringifyMaxDepth("false", false, .{}, null) catch unreachable;
|
||||
comptime testStringifyMaxDepth("false", false, .{}, 0) catch unreachable;
|
||||
comptime testStringifyArbitraryDepth("false", false, .{}) catch unreachable;
|
||||
|
||||
const MyStruct = struct {
|
||||
foo: u32,
|
||||
};
|
||||
comptime testStringifyMaxDepth("[{\"foo\":42},{\"foo\":100},{\"foo\":1000}]", [_]MyStruct{
|
||||
MyStruct{ .foo = 42 },
|
||||
MyStruct{ .foo = 100 },
|
||||
MyStruct{ .foo = 1000 },
|
||||
}, .{}, null) catch unreachable;
|
||||
comptime testStringifyMaxDepth("[{\"foo\":42},{\"foo\":100},{\"foo\":1000}]", [_]MyStruct{
|
||||
MyStruct{ .foo = 42 },
|
||||
MyStruct{ .foo = 100 },
|
||||
MyStruct{ .foo = 1000 },
|
||||
}, .{}, 8) catch unreachable;
|
||||
}
|
||||
|
||||
test "print" {
|
||||
var out_buf: [1024]u8 = undefined;
|
||||
var slice_stream = std.io.fixedBufferStream(&out_buf);
|
||||
const out = slice_stream.writer();
|
||||
|
||||
var w = writeStream(out, .{ .whitespace = .indent_2 });
|
||||
defer w.deinit();
|
||||
|
||||
try w.beginObject();
|
||||
try w.objectField("a");
|
||||
try w.print("[ ]", .{});
|
||||
try w.objectField("b");
|
||||
try w.beginArray();
|
||||
try w.print("[{s}] ", .{"[]"});
|
||||
try w.print(" {}", .{12345});
|
||||
try w.endArray();
|
||||
try w.endObject();
|
||||
|
||||
const result = slice_stream.getWritten();
|
||||
const expected =
|
||||
\\{
|
||||
\\ "a": [ ],
|
||||
\\ "b": [
|
||||
\\ [[]] ,
|
||||
\\ 12345
|
||||
\\ ]
|
||||
\\}
|
||||
;
|
||||
try std.testing.expectEqualStrings(expected, result);
|
||||
}
|
||||
|
||||
test "nonportable numbers" {
|
||||
try testStringify("9999999999999999", 9999999999999999, .{});
|
||||
try testStringify("\"9999999999999999\"", 9999999999999999, .{ .emit_nonportable_numbers_as_strings = true });
|
||||
}
|
||||
|
||||
test "stringify raw streaming" {
|
||||
var out_buf: [1024]u8 = undefined;
|
||||
var slice_stream = std.io.fixedBufferStream(&out_buf);
|
||||
const out = slice_stream.writer();
|
||||
|
||||
{
|
||||
var w = writeStream(out, .{ .whitespace = .indent_2 });
|
||||
try testRawStreaming(&w, &slice_stream);
|
||||
}
|
||||
|
||||
{
|
||||
var w = writeStreamMaxDepth(out, .{ .whitespace = .indent_2 }, 8);
|
||||
try testRawStreaming(&w, &slice_stream);
|
||||
}
|
||||
|
||||
{
|
||||
var w = writeStreamMaxDepth(out, .{ .whitespace = .indent_2 }, null);
|
||||
try testRawStreaming(&w, &slice_stream);
|
||||
}
|
||||
|
||||
{
|
||||
var w = writeStreamArbitraryDepth(testing.allocator, out, .{ .whitespace = .indent_2 });
|
||||
defer w.deinit();
|
||||
try testRawStreaming(&w, &slice_stream);
|
||||
}
|
||||
}
|
||||
|
||||
fn testRawStreaming(w: anytype, slice_stream: anytype) !void {
|
||||
slice_stream.reset();
|
||||
|
||||
try w.beginObject();
|
||||
try w.beginObjectFieldRaw();
|
||||
try w.stream.writeAll("\"long");
|
||||
try w.stream.writeAll(" key\"");
|
||||
w.endObjectFieldRaw();
|
||||
try w.beginWriteRaw();
|
||||
try w.stream.writeAll("\"long");
|
||||
try w.stream.writeAll(" value\"");
|
||||
w.endWriteRaw();
|
||||
try w.endObject();
|
||||
|
||||
const result = slice_stream.getWritten();
|
||||
const expected =
|
||||
\\{
|
||||
\\ "long key": "long value"
|
||||
\\}
|
||||
;
|
||||
try std.testing.expectEqualStrings(expected, result);
|
||||
}
|
||||
@@ -1,10 +1,9 @@
|
||||
const std = @import("std");
|
||||
const json = std.json;
|
||||
const testing = std.testing;
|
||||
const parseFromSlice = @import("./static.zig").parseFromSlice;
|
||||
const validate = @import("./scanner.zig").validate;
|
||||
const JsonScanner = @import("./scanner.zig").Scanner;
|
||||
const Scanner = @import("./Scanner.zig");
|
||||
const Value = @import("./dynamic.zig").Value;
|
||||
const stringifyAlloc = @import("./stringify.zig").stringifyAlloc;
|
||||
|
||||
// Support for JSONTestSuite.zig
|
||||
pub fn ok(s: []const u8) !void {
|
||||
@@ -20,7 +19,7 @@ pub fn any(s: []const u8) !void {
|
||||
testHighLevelDynamicParser(s) catch {};
|
||||
}
|
||||
fn testLowLevelScanner(s: []const u8) !void {
|
||||
var scanner = JsonScanner.initCompleteInput(testing.allocator, s);
|
||||
var scanner = Scanner.initCompleteInput(testing.allocator, s);
|
||||
defer scanner.deinit();
|
||||
while (true) {
|
||||
const token = try scanner.next();
|
||||
@@ -47,12 +46,12 @@ test "n_object_closed_missing_value" {
|
||||
}
|
||||
|
||||
fn roundTrip(s: []const u8) !void {
|
||||
try testing.expect(try validate(testing.allocator, s));
|
||||
try testing.expect(try Scanner.validate(testing.allocator, s));
|
||||
|
||||
var parsed = try parseFromSlice(Value, testing.allocator, s, .{});
|
||||
defer parsed.deinit();
|
||||
|
||||
const rendered = try stringifyAlloc(testing.allocator, parsed.value, .{});
|
||||
const rendered = try json.Stringify.valueAlloc(testing.allocator, parsed.value, .{});
|
||||
defer testing.allocator.free(rendered);
|
||||
|
||||
try testing.expectEqualStrings(s, rendered);
|
||||
|
||||
@@ -446,8 +446,8 @@ pub fn fmtString(bytes: []const u8) std.fmt.Formatter([]const u8, stringEscape)
|
||||
}
|
||||
|
||||
/// Return a formatter for escaping a single quoted Zig string.
|
||||
pub fn fmtChar(bytes: []const u8) std.fmt.Formatter([]const u8, charEscape) {
|
||||
return .{ .data = bytes };
|
||||
pub fn fmtChar(c: u21) std.fmt.Formatter(u21, charEscape) {
|
||||
return .{ .data = c };
|
||||
}
|
||||
|
||||
test fmtString {
|
||||
@@ -458,9 +458,7 @@ test fmtString {
|
||||
}
|
||||
|
||||
test fmtChar {
|
||||
try std.testing.expectFmt(
|
||||
\\" \\ hi \x07 \x11 " derp \'"
|
||||
, "\"{f}\"", .{fmtChar(" \\ hi \x07 \x11 \" derp '")});
|
||||
try std.testing.expectFmt("c \\u{26a1}", "{f} {f}", .{ fmtChar('c'), fmtChar('⚡') });
|
||||
}
|
||||
|
||||
/// Print the string as escaped contents of a double quoted string.
|
||||
@@ -480,21 +478,26 @@ pub fn stringEscape(bytes: []const u8, w: *Writer) Writer.Error!void {
|
||||
};
|
||||
}
|
||||
|
||||
/// Print the string as escaped contents of a single-quoted string.
|
||||
pub fn charEscape(bytes: []const u8, w: *Writer) Writer.Error!void {
|
||||
for (bytes) |byte| switch (byte) {
|
||||
/// Print as escaped contents of a single-quoted string.
|
||||
pub fn charEscape(codepoint: u21, w: *Writer) Writer.Error!void {
|
||||
switch (codepoint) {
|
||||
'\n' => try w.writeAll("\\n"),
|
||||
'\r' => try w.writeAll("\\r"),
|
||||
'\t' => try w.writeAll("\\t"),
|
||||
'\\' => try w.writeAll("\\\\"),
|
||||
'"' => try w.writeByte('"'),
|
||||
'\'' => try w.writeAll("\\'"),
|
||||
' ', '!', '#'...'&', '('...'[', ']'...'~' => try w.writeByte(byte),
|
||||
'"', ' ', '!', '#'...'&', '('...'[', ']'...'~' => try w.writeByte(@intCast(codepoint)),
|
||||
else => {
|
||||
try w.writeAll("\\x");
|
||||
try w.printInt(byte, 16, .lower, .{ .width = 2, .fill = '0' });
|
||||
if (std.math.cast(u8, codepoint)) |byte| {
|
||||
try w.writeAll("\\x");
|
||||
try w.printInt(byte, 16, .lower, .{ .width = 2, .fill = '0' });
|
||||
} else {
|
||||
try w.writeAll("\\u{");
|
||||
try w.printInt(codepoint, 16, .lower, .{});
|
||||
try w.writeByte('}');
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
pub fn isValidId(bytes: []const u8) bool {
|
||||
|
||||
@@ -574,7 +574,7 @@ pub fn renderError(tree: Ast, parse_error: Error, w: *Writer) Writer.Error!void
|
||||
'/' => "comment",
|
||||
else => unreachable,
|
||||
},
|
||||
std.zig.fmtChar(tok_slice[parse_error.extra.offset..][0..1]),
|
||||
std.zig.fmtChar(tok_slice[parse_error.extra.offset]),
|
||||
});
|
||||
},
|
||||
|
||||
|
||||
@@ -38,6 +38,7 @@
|
||||
|
||||
pub const parse = @import("zon/parse.zig");
|
||||
pub const stringify = @import("zon/stringify.zig");
|
||||
pub const Serializer = @import("zon/Serializer.zig");
|
||||
|
||||
test {
|
||||
_ = parse;
|
||||
|
||||
929
lib/std/zon/Serializer.zig
Normal file
929
lib/std/zon/Serializer.zig
Normal file
@@ -0,0 +1,929 @@
|
||||
//! Lower level control over serialization, you can create a new instance with `serializer`.
//!
//! Useful when you want control over which fields are serialized, how they're represented,
//! or want to write a ZON object that does not exist in memory.
//!
//! You can serialize values with `value`. To serialize recursive types, the following are provided:
//! * `valueMaxDepth`
//! * `valueArbitraryDepth`
//!
//! You can also serialize values using specific notations:
//! * `int`
//! * `float`
//! * `codePoint`
//! * `tuple`
//! * `tupleMaxDepth`
//! * `tupleArbitraryDepth`
//! * `string`
//! * `multilineString`
//!
//! For manual serialization of containers, see:
//! * `beginStruct`
//! * `beginTuple`

/// Whitespace configuration for this serializer.
options: Options = .{},
/// Current container nesting depth; updated with saturating arithmetic in `Container`.
indent_level: u8 = 0,
/// Destination for all serialized output.
writer: *Writer,

const Serializer = @This();
const std = @import("std");
const assert = std.debug.assert;
const Writer = std.Io.Writer;

/// Errors that can occur while writing output.
pub const Error = Writer.Error;
/// `Error` plus the failure reported by the depth-checked entry points.
pub const DepthError = Error || error{ExceededMaxDepth};

pub const Options = struct {
    /// If false, only syntactically necessary whitespace is emitted.
    whitespace: bool = true,
};
|
||||
|
||||
/// Options for manual serialization of container types.
pub const ContainerOptions = struct {
    /// The whitespace style that should be used for this container. Ignored if whitespace is off.
    whitespace_style: union(enum) {
        /// If true, wrap every field. If false do not.
        wrap: bool,
        /// Automatically decide whether to wrap or not based on the number of fields. Following
        /// the standard rule of thumb, containers with more than two fields are wrapped.
        fields: usize,
    } = .{ .wrap = true },

    /// Resolve the configured style to a concrete wrap/no-wrap decision.
    fn shouldWrap(self: ContainerOptions) bool {
        switch (self.whitespace_style) {
            .wrap => |wrap| return wrap,
            .fields => |field_count| return field_count > 2,
        }
    }
};
|
||||
|
||||
/// Options for serialization of an individual value.
///
/// See `SerializeOptions` for more information on these options.
pub const ValueOptions = struct {
    /// When to render integer values as Unicode code point literals instead of numbers.
    emit_codepoint_literals: EmitCodepointLiterals = .never,
    /// If true, `[]u8`-like values are serialized as tuples of integers rather than
    /// as string literals.
    emit_strings_as_containers: bool = false,
    /// If false, struct fields whose value equals their declared default are omitted.
    emit_default_optional_fields: bool = true,
};
|
||||
|
||||
/// Determines when to emit Unicode code point literals as opposed to integer literals.
pub const EmitCodepointLiterals = enum {
    /// Never emit Unicode code point literals.
    never,
    /// Emit Unicode code point literals for any `u8` in the printable ASCII range.
    printable_ascii,
    /// Emit Unicode code point literals for any unsigned integer with 21 bits or fewer
    /// whose value is a valid non-surrogate code point.
    always,

    /// If the value should be emitted as a Unicode codepoint, return it as a u21.
    /// Returns null otherwise (wrong type, out of range, or policy says no).
    fn emitAsCodepoint(self: @This(), val: anytype) ?u21 {
        // Rule out incompatible integer types
        switch (@typeInfo(@TypeOf(val))) {
            .int => |int_info| if (int_info.signedness == .signed or int_info.bits > 21) {
                return null;
            },
            .comptime_int => {},
            else => comptime unreachable,
        }

        // Return null if the value shouldn't be printed as a Unicode codepoint, or the value casted
        // to a u21 if it should.
        switch (self) {
            .always => {
                const c = std.math.cast(u21, val) orelse return null;
                // Rejects surrogates and values beyond U+10FFFF.
                if (!std.unicode.utf8ValidCodepoint(c)) return null;
                return c;
            },
            .printable_ascii => {
                const c = std.math.cast(u8, val) orelse return null;
                if (!std.ascii.isPrint(c)) return null;
                return c;
            },
            .never => {
                return null;
            },
        }
    }
};
|
||||
|
||||
/// Serialize a value, similar to `serialize`.
pub fn value(self: *Serializer, val: anytype, options: ValueOptions) Error!void {
    // Recursive types have no static depth bound; use `valueMaxDepth` for those.
    comptime assert(!typeIsRecursive(@TypeOf(val)));
    try self.valueArbitraryDepth(val, options);
}
|
||||
|
||||
/// Serialize a value, similar to `serializeMaxDepth`.
/// Can return `error.ExceededMaxDepth`.
pub fn valueMaxDepth(self: *Serializer, val: anytype, options: ValueOptions, depth: usize) DepthError!void {
    // Validate the nesting depth up front, so nothing is written on failure.
    try checkValueDepth(val, depth);
    try self.valueArbitraryDepth(val, options);
}
|
||||
|
||||
/// Serialize a value, similar to `serializeArbitraryDepth`.
///
/// Dispatches on the comptime type of `val` to the matching notation
/// (`int`, `float`, `ident`, `string`, tuple, struct, ...).
pub fn valueArbitraryDepth(self: *Serializer, val: anytype, options: ValueOptions) Error!void {
    comptime assert(canSerializeType(@TypeOf(val)));
    switch (@typeInfo(@TypeOf(val))) {
        .int, .comptime_int => if (options.emit_codepoint_literals.emitAsCodepoint(val)) |c| {
            self.codePoint(c) catch |err| switch (err) {
                error.InvalidCodepoint => unreachable, // Already validated
                else => |e| return e,
            };
        } else {
            try self.int(val);
        },
        .float, .comptime_float => try self.float(val),
        .bool, .null => try self.writer.print("{}", .{val}),
        .enum_literal => try self.ident(@tagName(val)),
        .@"enum" => try self.ident(@tagName(val)),
        .pointer => |pointer| {
            // Try to serialize as a string
            const item: ?type = switch (@typeInfo(pointer.child)) {
                .array => |array| array.child,
                else => if (pointer.size == .slice) pointer.child else null,
            };
            if (item == u8 and
                (pointer.sentinel() == null or pointer.sentinel() == 0) and
                !options.emit_strings_as_containers)
            {
                return try self.string(val);
            }

            // Serialize as either a tuple or as the child type
            switch (pointer.size) {
                .slice => try self.tupleImpl(val, options),
                .one => try self.valueArbitraryDepth(val.*, options),
                else => comptime unreachable,
            }
        },
        .array => {
            var container = try self.beginTuple(
                .{ .whitespace_style = .{ .fields = val.len } },
            );
            for (val) |item_val| {
                try container.fieldArbitraryDepth(item_val, options);
            }
            try container.end();
        },
        .@"struct" => |@"struct"| if (@"struct".is_tuple) {
            var container = try self.beginTuple(
                .{ .whitespace_style = .{ .fields = @"struct".fields.len } },
            );
            // Tuple fields have heterogeneous types, so iterate at comptime.
            inline for (val) |field_value| {
                try container.fieldArbitraryDepth(field_value, options);
            }
            try container.end();
        } else {
            // Decide which fields to emit
            const fields, const skipped: [@"struct".fields.len]bool = if (options.emit_default_optional_fields) b: {
                break :b .{ @"struct".fields.len, @splat(false) };
            } else b: {
                // Skip fields equal to their declared default value.
                var fields = @"struct".fields.len;
                var skipped: [@"struct".fields.len]bool = @splat(false);
                inline for (@"struct".fields, &skipped) |field_info, *skip| {
                    if (field_info.default_value_ptr) |ptr| {
                        const default: *const field_info.type = @ptrCast(@alignCast(ptr));
                        const field_value = @field(val, field_info.name);
                        if (std.meta.eql(field_value, default.*)) {
                            skip.* = true;
                            fields -= 1;
                        }
                    }
                }
                break :b .{ fields, skipped };
            };

            // Emit those fields
            var container = try self.beginStruct(
                .{ .whitespace_style = .{ .fields = fields } },
            );
            inline for (@"struct".fields, skipped) |field_info, skip| {
                if (!skip) {
                    try container.fieldArbitraryDepth(
                        field_info.name,
                        @field(val, field_info.name),
                        options,
                    );
                }
            }
            try container.end();
        },
        .@"union" => |@"union"| {
            comptime assert(@"union".tag_type != null);
            switch (val) {
                // Void payloads render as a bare enum literal; others as a
                // single-field struct.
                inline else => |pl, tag| if (@TypeOf(pl) == void)
                    try self.writer.print(".{s}", .{@tagName(tag)})
                else {
                    var container = try self.beginStruct(.{ .whitespace_style = .{ .fields = 1 } });

                    try container.fieldArbitraryDepth(
                        @tagName(tag),
                        pl,
                        options,
                    );

                    try container.end();
                },
            }
        },
        .optional => if (val) |inner| {
            try self.valueArbitraryDepth(inner, options);
        } else {
            try self.writer.writeAll("null");
        },
        .vector => |vector| {
            var container = try self.beginTuple(
                .{ .whitespace_style = .{ .fields = vector.len } },
            );
            for (0..vector.len) |i| {
                try container.fieldArbitraryDepth(val[i], options);
            }
            try container.end();
        },

        else => comptime unreachable,
    }
}
|
||||
|
||||
/// Serialize an integer in decimal notation.
pub fn int(self: *Serializer, val: anytype) Error!void {
    // `{d}` formats integers in base 10 with no padding, identical to
    // `printInt(val, 10, .lower, .{})`.
    try self.writer.print("{d}", .{val});
}
|
||||
|
||||
/// Serialize a float.
///
/// Values ZON spells with keywords or that `{d}` would not round-trip
/// (nan, ±inf, negative zero) are special-cased.
pub fn float(self: *Serializer, val: anytype) Error!void {
    switch (@typeInfo(@TypeOf(val))) {
        .float => if (std.math.isNan(val)) {
            return self.writer.writeAll("nan");
        } else if (std.math.isPositiveInf(val)) {
            return self.writer.writeAll("inf");
        } else if (std.math.isNegativeInf(val)) {
            return self.writer.writeAll("-inf");
        } else if (std.math.isNegativeZero(val)) {
            // Must be checked before the general case: `{d}` would print `0`.
            return self.writer.writeAll("-0.0");
        } else {
            try self.writer.print("{d}", .{val});
        },
        // comptime_float cannot be nan/inf; only zero needs special casing.
        .comptime_float => if (val == 0) {
            return self.writer.writeAll("0");
        } else {
            try self.writer.print("{d}", .{val});
        },
        else => comptime unreachable,
    }
}
|
||||
|
||||
/// Serialize `name` as an identifier prefixed with `.`.
///
/// Escapes the identifier if necessary.
pub fn ident(self: *Serializer, name: []const u8) Error!void {
    try self.writer.writeByte('.');
    try self.writer.print("{f}", .{std.zig.fmtIdPU(name)});
}
|
||||
|
||||
pub const CodePointError = Error || error{InvalidCodepoint};

/// Serialize `val` as a Unicode codepoint.
///
/// Returns `error.InvalidCodepoint` if `val` is not a valid Unicode codepoint.
pub fn codePoint(self: *Serializer, val: u21) CodePointError!void {
    // The documented contract promises `error.InvalidCodepoint`, and
    // `valueArbitraryDepth` treats that error as unreachable for pre-validated
    // input — but the original body never actually validated, so invalid
    // codepoints (surrogates, values above U+10FFFF) were passed straight to
    // the formatter. Validate before emitting anything.
    if (!std.unicode.utf8ValidCodepoint(val)) return error.InvalidCodepoint;
    try self.writer.print("'{f}'", .{std.zig.fmtChar(val)});
}
|
||||
|
||||
/// Like `value`, but always serializes `val` as a tuple.
///
/// Will fail at comptime if `val` is not a tuple, array, pointer to an array, or slice.
pub fn tuple(self: *Serializer, val: anytype, options: ValueOptions) Error!void {
    // Recursive types must go through `tupleMaxDepth` instead.
    comptime assert(!typeIsRecursive(@TypeOf(val)));
    return self.tupleArbitraryDepth(val, options);
}
|
||||
|
||||
/// Like `tuple`, but recursive types are allowed.
///
/// Returns `error.ExceededMaxDepth` if `depth` is exceeded.
pub fn tupleMaxDepth(
    self: *Serializer,
    val: anytype,
    options: ValueOptions,
    depth: usize,
) DepthError!void {
    // Reject overly deep values before any output is produced.
    try checkValueDepth(val, depth);
    return self.tupleArbitraryDepth(val, options);
}
|
||||
|
||||
/// Like `tuple`, but recursive types are allowed.
///
/// It is the caller's responsibility to ensure that `val` does not contain cycles.
pub fn tupleArbitraryDepth(
    self: *Serializer,
    val: anytype,
    options: ValueOptions,
) Error!void {
    return self.tupleImpl(val, options);
}
|
||||
|
||||
/// Shared implementation of tuple notation: accepts tuples (anonymous structs),
/// arrays, pointers-to-arrays, and slices.
fn tupleImpl(self: *Serializer, val: anytype, options: ValueOptions) Error!void {
    comptime assert(canSerializeType(@TypeOf(val)));
    switch (@typeInfo(@TypeOf(val))) {
        .@"struct" => {
            var container = try self.beginTuple(.{ .whitespace_style = .{ .fields = val.len } });
            // Tuple fields have heterogeneous types, so iterate at comptime.
            inline for (val) |item_val| {
                try container.fieldArbitraryDepth(item_val, options);
            }
            try container.end();
        },
        .pointer, .array => {
            var container = try self.beginTuple(.{ .whitespace_style = .{ .fields = val.len } });
            for (val) |item_val| {
                try container.fieldArbitraryDepth(item_val, options);
            }
            try container.end();
        },
        else => comptime unreachable,
    }
}
|
||||
|
||||
/// Like `value`, but always serializes `val` as a string.
pub fn string(self: *Serializer, val: []const u8) Error!void {
    try self.writer.writeByte('"');
    try self.writer.print("{f}", .{std.zig.fmtString(val)});
    try self.writer.writeByte('"');
}
|
||||
|
||||
/// Options for formatting multiline strings.
pub const MultilineStringOptions = struct {
    /// If top level is true, whitespace before and after the multiline string is elided.
    /// If it is true, a newline is printed, then the value, followed by a newline, and if
    /// whitespace is true any necessary indentation follows.
    top_level: bool = false,
};

/// `Error`, plus the failure reported when input contains a bare carriage return
/// (one not followed by `\n`), which multiline literals cannot represent.
pub const MultilineStringError = Error || error{InnerCarriageReturn};
|
||||
|
||||
/// Like `value`, but always serializes to a multiline string literal.
///
/// Returns `error.InnerCarriageReturn` if `val` contains a CR not followed by a newline,
/// since multiline strings cannot represent CR without a following newline.
///
/// Validation happens before any output, so on error nothing has been written.
/// CR bytes themselves are dropped from the output (CRLF becomes LF).
pub fn multilineString(
    self: *Serializer,
    val: []const u8,
    options: MultilineStringOptions,
) MultilineStringError!void {
    // Make sure the string does not contain any carriage returns not followed by a newline
    var i: usize = 0;
    while (i < val.len) : (i += 1) {
        if (val[i] == '\r') {
            if (i + 1 < val.len) {
                if (val[i + 1] == '\n') {
                    // CRLF pair: skip past the LF and keep scanning.
                    i += 1;
                    continue;
                }
            }
            return error.InnerCarriageReturn;
        }
    }

    if (!options.top_level) {
        try self.newline();
        try self.indent();
    }

    try self.writer.writeAll("\\\\");
    for (val) |c| {
        if (c != '\r') {
            try self.writer.writeByte(c); // We write newlines here even if whitespace off
            if (c == '\n') {
                // Each line of the literal starts with an (indented) `\\` prefix.
                try self.indent();
                try self.writer.writeAll("\\\\");
            }
        }
    }

    if (!options.top_level) {
        try self.writer.writeByte('\n'); // Even if whitespace off
        try self.indent();
    }
}
|
||||
|
||||
/// Create a `Struct` for writing ZON structs field by field.
pub fn beginStruct(self: *Serializer, options: ContainerOptions) Error!Struct {
    return try Struct.begin(self, options);
}
|
||||
|
||||
/// Creates a `Tuple` for writing ZON tuples field by field.
pub fn beginTuple(self: *Serializer, options: ContainerOptions) Error!Tuple {
    return try Tuple.begin(self, options);
}
|
||||
|
||||
/// Emit indentation for the current nesting level (four spaces per level),
/// unless whitespace is disabled.
fn indent(self: *Serializer) Error!void {
    if (!self.options.whitespace) return;
    try self.writer.splatByteAll(' ', 4 * self.indent_level);
}
|
||||
|
||||
/// Emit a newline, unless whitespace is disabled.
fn newline(self: *Serializer) Error!void {
    if (!self.options.whitespace) return;
    try self.writer.writeByte('\n');
}
|
||||
|
||||
/// Emit either a newline or a space depending on whether a container of `len`
/// fields should wrap.
/// NOTE(review): `containerShouldWrap` is not defined anywhere in this file —
/// confirm this helper is still called, or that the method exists elsewhere;
/// it may be dead code left over from the refactor.
fn newlineOrSpace(self: *Serializer, len: usize) Error!void {
    if (self.containerShouldWrap(len)) {
        try self.newline();
    } else {
        try self.space();
    }
}
|
||||
|
||||
/// Emit a single space, unless whitespace is disabled.
fn space(self: *Serializer) Error!void {
    if (!self.options.whitespace) return;
    try self.writer.writeByte(' ');
}
|
||||
|
||||
/// Writes ZON tuples field by field.
///
/// Thin wrapper over `Container` with anonymous (unnamed) fields.
pub const Tuple = struct {
    container: Container,

    fn begin(parent: *Serializer, options: ContainerOptions) Error!Tuple {
        return .{
            .container = try Container.begin(parent, .anon, options),
        };
    }

    /// Finishes serializing the tuple.
    ///
    /// Prints a trailing comma as configured when appropriate, and the closing bracket.
    pub fn end(self: *Tuple) Error!void {
        try self.container.end();
        // Poison so further use is caught in safe builds.
        self.* = undefined;
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `value`.
    pub fn field(
        self: *Tuple,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        try self.container.field(null, val, options);
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueMaxDepth`.
    /// Returns `error.ExceededMaxDepth` if `depth` is exceeded.
    pub fn fieldMaxDepth(
        self: *Tuple,
        val: anytype,
        options: ValueOptions,
        depth: usize,
    ) DepthError!void {
        try self.container.fieldMaxDepth(null, val, options, depth);
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by
    /// `valueArbitraryDepth`.
    pub fn fieldArbitraryDepth(
        self: *Tuple,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        try self.container.fieldArbitraryDepth(null, val, options);
    }

    /// Starts a field with a struct as a value. Returns the struct.
    pub fn beginStructField(
        self: *Tuple,
        options: ContainerOptions,
    ) Error!Struct {
        try self.fieldPrefix();
        return self.container.serializer.beginStruct(options);
    }

    /// Starts a field with a tuple as a value. Returns the tuple.
    pub fn beginTupleField(
        self: *Tuple,
        options: ContainerOptions,
    ) Error!Tuple {
        try self.fieldPrefix();
        return self.container.serializer.beginTuple(options);
    }

    /// Print a field prefix. This prints any necessary commas, and whitespace as
    /// configured. Useful if you want to serialize the field value yourself.
    pub fn fieldPrefix(self: *Tuple) Error!void {
        try self.container.fieldPrefix(null);
    }
};
|
||||
|
||||
/// Writes ZON structs field by field.
///
/// Thin wrapper over `Container` with named fields.
pub const Struct = struct {
    container: Container,

    fn begin(parent: *Serializer, options: ContainerOptions) Error!Struct {
        return .{
            .container = try Container.begin(parent, .named, options),
        };
    }

    /// Finishes serializing the struct.
    ///
    /// Prints a trailing comma as configured when appropriate, and the closing bracket.
    pub fn end(self: *Struct) Error!void {
        try self.container.end();
        // Poison so further use is caught in safe builds.
        self.* = undefined;
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `value`.
    pub fn field(
        self: *Struct,
        name: []const u8,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        try self.container.field(name, val, options);
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueMaxDepth`.
    /// Returns `error.ExceededMaxDepth` if `depth` is exceeded.
    pub fn fieldMaxDepth(
        self: *Struct,
        name: []const u8,
        val: anytype,
        options: ValueOptions,
        depth: usize,
    ) DepthError!void {
        try self.container.fieldMaxDepth(name, val, options, depth);
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by
    /// `valueArbitraryDepth`.
    pub fn fieldArbitraryDepth(
        self: *Struct,
        name: []const u8,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        try self.container.fieldArbitraryDepth(name, val, options);
    }

    /// Starts a field with a struct as a value. Returns the struct.
    pub fn beginStructField(
        self: *Struct,
        name: []const u8,
        options: ContainerOptions,
    ) Error!Struct {
        try self.fieldPrefix(name);
        return self.container.serializer.beginStruct(options);
    }

    /// Starts a field with a tuple as a value. Returns the tuple.
    pub fn beginTupleField(
        self: *Struct,
        name: []const u8,
        options: ContainerOptions,
    ) Error!Tuple {
        try self.fieldPrefix(name);
        return self.container.serializer.beginTuple(options);
    }

    /// Print a field prefix. This prints any necessary commas, the field name (escaped if
    /// necessary) and whitespace as configured. Useful if you want to serialize the field
    /// value yourself.
    pub fn fieldPrefix(self: *Struct, name: []const u8) Error!void {
        try self.container.fieldPrefix(name);
    }
};
|
||||
|
||||
/// Shared state machine behind `Struct` and `Tuple`: tracks emptiness to place
/// commas, and drives indentation/wrapping through the parent serializer.
const Container = struct {
    const FieldStyle = enum { named, anon };

    serializer: *Serializer,
    field_style: FieldStyle,
    options: ContainerOptions,
    // True until the first field is written; controls comma placement.
    empty: bool,

    fn begin(
        sz: *Serializer,
        field_style: FieldStyle,
        options: ContainerOptions,
    ) Error!Container {
        // Saturating add: deep nesting degrades gracefully instead of overflowing.
        if (options.shouldWrap()) sz.indent_level +|= 1;
        try sz.writer.writeAll(".{");
        return .{
            .serializer = sz,
            .field_style = field_style,
            .options = options,
            .empty = true,
        };
    }

    fn end(self: *Container) Error!void {
        if (self.options.shouldWrap()) self.serializer.indent_level -|= 1;
        if (!self.empty) {
            if (self.options.shouldWrap()) {
                // Trailing comma only when whitespace is on.
                if (self.serializer.options.whitespace) {
                    try self.serializer.writer.writeByte(',');
                }
                try self.serializer.newline();
                try self.serializer.indent();
            } else if (!self.shouldElideSpaces()) {
                try self.serializer.space();
            }
        }
        try self.serializer.writer.writeByte('}');
        // Poison so further use is caught in safe builds.
        self.* = undefined;
    }

    /// Emits the separator/indentation before a field, and `.name = ` when `name`
    /// is non-null.
    fn fieldPrefix(self: *Container, name: ?[]const u8) Error!void {
        if (!self.empty) {
            try self.serializer.writer.writeByte(',');
        }
        self.empty = false;
        if (self.options.shouldWrap()) {
            try self.serializer.newline();
        } else if (!self.shouldElideSpaces()) {
            try self.serializer.space();
        }
        if (self.options.shouldWrap()) try self.serializer.indent();
        if (name) |n| {
            try self.serializer.ident(n);
            try self.serializer.space();
            try self.serializer.writer.writeByte('=');
            try self.serializer.space();
        }
    }

    fn field(
        self: *Container,
        name: ?[]const u8,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        comptime assert(!typeIsRecursive(@TypeOf(val)));
        try self.fieldArbitraryDepth(name, val, options);
    }

    /// Returns `error.ExceededMaxDepth` if `depth` is exceeded.
    fn fieldMaxDepth(
        self: *Container,
        name: ?[]const u8,
        val: anytype,
        options: ValueOptions,
        depth: usize,
    ) DepthError!void {
        try checkValueDepth(val, depth);
        try self.fieldArbitraryDepth(name, val, options);
    }

    fn fieldArbitraryDepth(
        self: *Container,
        name: ?[]const u8,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        try self.fieldPrefix(name);
        try self.serializer.valueArbitraryDepth(val, options);
    }

    // Single-field anonymous containers print with no inner padding: `.{x}`.
    fn shouldElideSpaces(self: *const Container) bool {
        return switch (self.options.whitespace_style) {
            .fields => |fields| self.field_style != .named and fields == 1,
            else => false,
        };
    }
};
|
||||
|
||||
// Smoke test: exercises struct fields, a manual field prefix, and a direct
// value write against a discarding writer (output content is not asserted here).
test Serializer {
    var discarding: Writer.Discarding = .init(&.{});
    var s: Serializer = .{ .writer = &discarding.writer };
    var vec2 = try s.beginStruct(.{});
    try vec2.field("x", 1.5, .{});
    try vec2.fieldPrefix("prefix");
    try s.value(2.5, .{});
    try vec2.end();
}
|
||||
|
||||
/// True if `T` (transitively) contains itself, making unbounded serialization impossible.
inline fn typeIsRecursive(comptime T: type) bool {
    comptime return typeIsRecursiveInner(T, &.{});
}
|
||||
|
||||
/// Depth-first walk of `T`'s child/field types; `prev_visited` is the ancestor
/// chain, so revisiting a type means a cycle was found.
fn typeIsRecursiveInner(comptime T: type, comptime prev_visited: []const type) bool {
    for (prev_visited) |V| {
        if (V == T) return true;
    }
    const visited = prev_visited ++ .{T};

    return switch (@typeInfo(T)) {
        .pointer => |pointer| typeIsRecursiveInner(pointer.child, visited),
        .optional => |optional| typeIsRecursiveInner(optional.child, visited),
        .array => |array| typeIsRecursiveInner(array.child, visited),
        .vector => |vector| typeIsRecursiveInner(vector.child, visited),
        .@"struct" => |@"struct"| for (@"struct".fields) |field| {
            if (typeIsRecursiveInner(field.type, visited)) break true;
        } else false,
        .@"union" => |@"union"| inline for (@"union".fields) |field| {
            if (typeIsRecursiveInner(field.type, visited)) break true;
        } else false,
        // Leaf types (ints, floats, enums, ...) cannot recurse.
        else => false,
    };
}
|
||||
|
||||
// Covers direct self-pointers, cycles through nested structs/optionals/arrays,
// and cycles through untagged unions.
test typeIsRecursive {
    try std.testing.expect(!typeIsRecursive(bool));
    try std.testing.expect(!typeIsRecursive(struct { x: i32, y: i32 }));
    try std.testing.expect(!typeIsRecursive(struct { i32, i32 }));
    try std.testing.expect(typeIsRecursive(struct { x: i32, y: i32, z: *@This() }));
    try std.testing.expect(typeIsRecursive(struct {
        a: struct {
            const A = @This();
            b: struct {
                c: *struct {
                    a: ?A,
                },
            },
        },
    }));
    try std.testing.expect(typeIsRecursive(struct {
        a: [3]*@This(),
    }));
    try std.testing.expect(typeIsRecursive(struct {
        a: union { a: i32, b: *@This() },
    }));
}
|
||||
|
||||
/// Fails with `error.ExceededMaxDepth` if serializing `val` would nest more than
/// `depth` levels. Each pointer dereference, container element, and non-null
/// optional consumes one level.
fn checkValueDepth(val: anytype, depth: usize) error{ExceededMaxDepth}!void {
    if (depth == 0) return error.ExceededMaxDepth;
    const child_depth = depth - 1;

    switch (@typeInfo(@TypeOf(val))) {
        .pointer => |pointer| switch (pointer.size) {
            .one => try checkValueDepth(val.*, child_depth),
            .slice => for (val) |item| {
                try checkValueDepth(item, child_depth);
            },
            // C/many pointers have no known length; nothing to traverse.
            .c, .many => {},
        },
        .array => for (val) |item| {
            try checkValueDepth(item, child_depth);
        },
        .@"struct" => |@"struct"| inline for (@"struct".fields) |field_info| {
            try checkValueDepth(@field(val, field_info.name), child_depth);
        },
        .@"union" => |@"union"| if (@"union".tag_type == null) {
            // Untagged unions cannot be inspected safely.
            return;
        } else switch (val) {
            inline else => |payload| {
                return checkValueDepth(payload, child_depth);
            },
        },
        .optional => if (val) |inner| try checkValueDepth(inner, child_depth),
        // Scalars terminate the recursion.
        else => {},
    }
}
|
||||
|
||||
/// Test helper: asserts that `v` passes the depth check at exactly `expected`
/// and fails at `expected - 1`.
fn expectValueDepthEquals(expected: usize, v: anytype) !void {
    try checkValueDepth(v, expected);
    try std.testing.expectError(error.ExceededMaxDepth, checkValueDepth(v, expected - 1));
}
|
||||
|
||||
// Verifies depth accounting for scalars, structs/tuples, optionals, pointers,
// tagged unions, hand-built recursive chains, and slices.
test checkValueDepth {
    try expectValueDepthEquals(1, 10);
    try expectValueDepthEquals(2, .{ .x = 1, .y = 2 });
    try expectValueDepthEquals(2, .{ 1, 2 });
    try expectValueDepthEquals(3, .{ 1, .{ 2, 3 } });
    try expectValueDepthEquals(3, .{ .{ 1, 2 }, 3 });
    try expectValueDepthEquals(3, .{ .x = 0, .y = 1, .z = .{ .x = 3 } });
    try expectValueDepthEquals(3, .{ .x = 0, .y = .{ .x = 1 }, .z = 2 });
    try expectValueDepthEquals(3, .{ .x = .{ .x = 0 }, .y = 1, .z = 2 });
    try expectValueDepthEquals(2, @as(?u32, 1));
    try expectValueDepthEquals(1, @as(?u32, null));
    try expectValueDepthEquals(1, null);
    try expectValueDepthEquals(2, &1);
    try expectValueDepthEquals(3, &@as(?u32, 1));

    const Union = union(enum) {
        x: u32,
        y: struct { x: u32 },
    };
    try expectValueDepthEquals(2, Union{ .x = 1 });
    try expectValueDepthEquals(3, Union{ .y = .{ .x = 1 } });

    const Recurse = struct { r: ?*const @This() };
    try expectValueDepthEquals(2, Recurse{ .r = null });
    try expectValueDepthEquals(5, Recurse{ .r = &Recurse{ .r = null } });
    try expectValueDepthEquals(8, Recurse{ .r = &Recurse{ .r = &Recurse{ .r = null } } });

    try expectValueDepthEquals(2, @as([]const u8, &.{ 1, 2, 3 }));
    try expectValueDepthEquals(3, @as([]const []const u8, &.{&.{ 1, 2, 3 }}));
}
|
||||
|
||||
/// True if values of type `T` can be represented in ZON notation at all.
inline fn canSerializeType(T: type) bool {
    return comptime canSerializeTypeInner(T, &.{}, false);
}
|
||||
|
||||
/// Recursive implementation of `canSerializeType`.
///
/// `parent_is_optional` rejects directly-nested optionals (`??T`), which ZON's
/// `null` literal cannot distinguish.
fn canSerializeTypeInner(
    T: type,
    /// Visited structs and unions, to avoid infinite recursion.
    /// Tracking more types is unnecessary, and a little complex due to optional nesting.
    visited: []const type,
    parent_is_optional: bool,
) bool {
    return switch (@typeInfo(T)) {
        .bool,
        .int,
        .float,
        .comptime_float,
        .comptime_int,
        .null,
        .enum_literal,
        => true,

        .noreturn,
        .void,
        .type,
        .undefined,
        .error_union,
        .error_set,
        .@"fn",
        .frame,
        .@"anyframe",
        .@"opaque",
        => false,

        // Non-exhaustive enums may hold unnamed values, which have no literal form.
        .@"enum" => |@"enum"| @"enum".is_exhaustive,

        .pointer => |pointer| switch (pointer.size) {
            // Single pointers are transparent, so optional-ness passes through them.
            .one => canSerializeTypeInner(pointer.child, visited, parent_is_optional),
            .slice => canSerializeTypeInner(pointer.child, visited, false),
            // Unknown length: cannot be serialized.
            .many, .c => false,
        },

        .optional => |optional| if (parent_is_optional)
            false
        else
            canSerializeTypeInner(optional.child, visited, true),

        .array => |array| canSerializeTypeInner(array.child, visited, false),
        .vector => |vector| canSerializeTypeInner(vector.child, visited, false),

        .@"struct" => |@"struct"| {
            // Already on the stack: assume serializable; another branch validates it.
            for (visited) |V| if (T == V) return true;
            const new_visited = visited ++ .{T};
            for (@"struct".fields) |field| {
                if (!canSerializeTypeInner(field.type, new_visited, false)) return false;
            }
            return true;
        },
        .@"union" => |@"union"| {
            for (visited) |V| if (T == V) return true;
            const new_visited = visited ++ .{T};
            // Only tagged unions can be serialized.
            if (@"union".tag_type == null) return false;
            for (@"union".fields) |field| {
                if (field.type != void and !canSerializeTypeInner(field.type, new_visited, false)) {
                    return false;
                }
            }
            return true;
        },
    };
}
|
||||
|
||||
// Covers unserializable leaves, pointer sizes, nested optionals, untagged vs
// tagged unions, comptime types, and recursion/visited-set interactions.
test canSerializeType {
    try std.testing.expect(!comptime canSerializeType(void));
    try std.testing.expect(!comptime canSerializeType(struct { f: [*]u8 }));
    try std.testing.expect(!comptime canSerializeType(struct { error{foo} }));
    try std.testing.expect(!comptime canSerializeType(union(enum) { a: void, f: [*c]u8 }));
    try std.testing.expect(!comptime canSerializeType(@Vector(0, [*c]u8)));
    try std.testing.expect(!comptime canSerializeType(*?[*c]u8));
    try std.testing.expect(!comptime canSerializeType(enum(u8) { _ }));
    try std.testing.expect(!comptime canSerializeType(union { foo: void }));
    try std.testing.expect(comptime canSerializeType(union(enum) { foo: void }));
    try std.testing.expect(comptime canSerializeType(comptime_float));
    try std.testing.expect(comptime canSerializeType(comptime_int));
    try std.testing.expect(!comptime canSerializeType(struct { comptime foo: ??u8 = null }));
    try std.testing.expect(comptime canSerializeType(@TypeOf(.foo)));
    try std.testing.expect(comptime canSerializeType(?u8));
    try std.testing.expect(comptime canSerializeType(*?*u8));
    try std.testing.expect(comptime canSerializeType(?struct {
        foo: ?struct {
            ?union(enum) {
                a: ?@Vector(0, ?*u8),
            },
            ?struct {
                f: ?[]?u8,
            },
        },
    }));
    try std.testing.expect(!comptime canSerializeType(??u8));
    try std.testing.expect(!comptime canSerializeType(?*?u8));
    try std.testing.expect(!comptime canSerializeType(*?*?*u8));
    try std.testing.expect(comptime canSerializeType(struct { x: comptime_int = 2 }));
    try std.testing.expect(comptime canSerializeType(struct { x: comptime_float = 2 }));
    try std.testing.expect(comptime canSerializeType(struct { comptime_int }));
    try std.testing.expect(comptime canSerializeType(struct { comptime x: @TypeOf(.foo) = .foo }));
    const Recursive = struct { foo: ?*@This() };
    try std.testing.expect(comptime canSerializeType(Recursive));

    // Make sure we validate nested optional before we early out due to already having seen
    // a type recursion!
    try std.testing.expect(!comptime canSerializeType(struct {
        add_to_visited: ?u8,
        retrieve_from_visited: ??u8,
    }));
}
|
||||
@@ -64,14 +64,14 @@ pub const Error = union(enum) {
|
||||
}
|
||||
};
|
||||
|
||||
fn formatMessage(self: []const u8, w: *std.io.Writer) std.io.Writer.Error!void {
|
||||
fn formatMessage(self: []const u8, w: *std.Io.Writer) std.Io.Writer.Error!void {
|
||||
// Just writes the string for now, but we're keeping this behind a formatter so we have
|
||||
// the option to extend it in the future to print more advanced messages (like `Error`
|
||||
// does) without breaking the API.
|
||||
try w.writeAll(self);
|
||||
}
|
||||
|
||||
pub fn fmtMessage(self: Note, diag: *const Diagnostics) std.fmt.Formatter([]const u8, Note.formatMessage) {
|
||||
pub fn fmtMessage(self: Note, diag: *const Diagnostics) std.fmt.Alt([]const u8, Note.formatMessage) {
|
||||
return .{ .data = switch (self) {
|
||||
.zoir => |note| note.msg.get(diag.zoir),
|
||||
.type_check => |note| note.msg,
|
||||
@@ -147,14 +147,14 @@ pub const Error = union(enum) {
|
||||
diag: *const Diagnostics,
|
||||
};
|
||||
|
||||
fn formatMessage(self: FormatMessage, w: *std.io.Writer) std.io.Writer.Error!void {
|
||||
fn formatMessage(self: FormatMessage, w: *std.Io.Writer) std.Io.Writer.Error!void {
|
||||
switch (self.err) {
|
||||
.zoir => |err| try w.writeAll(err.msg.get(self.diag.zoir)),
|
||||
.type_check => |tc| try w.writeAll(tc.message),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fmtMessage(self: @This(), diag: *const Diagnostics) std.fmt.Formatter(FormatMessage, formatMessage) {
|
||||
pub fn fmtMessage(self: @This(), diag: *const Diagnostics) std.fmt.Alt(FormatMessage, formatMessage) {
|
||||
return .{ .data = .{
|
||||
.err = self,
|
||||
.diag = diag,
|
||||
@@ -226,7 +226,7 @@ pub const Diagnostics = struct {
|
||||
return .{ .diag = self };
|
||||
}
|
||||
|
||||
pub fn format(self: *const @This(), w: *std.io.Writer) std.io.Writer.Error!void {
|
||||
pub fn format(self: *const @This(), w: *std.Io.Writer) std.Io.Writer.Error!void {
|
||||
var errors = self.iterateErrors();
|
||||
while (errors.next()) |err| {
|
||||
const loc = err.getLocation(self);
|
||||
@@ -606,7 +606,7 @@ const Parser = struct {
|
||||
}
|
||||
}
|
||||
|
||||
fn parseSlicePointer(self: *@This(), T: type, node: Zoir.Node.Index) !T {
|
||||
fn parseSlicePointer(self: *@This(), T: type, node: Zoir.Node.Index) ParseExprInnerError!T {
|
||||
switch (node.get(self.zoir)) {
|
||||
.string_literal => return self.parseString(T, node),
|
||||
.array_literal => |nodes| return self.parseSlice(T, nodes),
|
||||
@@ -1048,6 +1048,7 @@ const Parser = struct {
|
||||
name: []const u8,
|
||||
) error{ OutOfMemory, ParseZon } {
|
||||
@branchHint(.cold);
|
||||
const gpa = self.gpa;
|
||||
const token = if (field) |f| b: {
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
const struct_init = self.ast.fullStructInit(&buf, node.getAstNode(self.zoir)).?;
|
||||
@@ -1065,13 +1066,12 @@ const Parser = struct {
|
||||
};
|
||||
} else b: {
|
||||
const msg = "supported: ";
|
||||
var buf: std.ArrayListUnmanaged(u8) = try .initCapacity(self.gpa, 64);
|
||||
defer buf.deinit(self.gpa);
|
||||
const writer = buf.writer(self.gpa);
|
||||
try writer.writeAll(msg);
|
||||
var buf: std.ArrayListUnmanaged(u8) = try .initCapacity(gpa, 64);
|
||||
defer buf.deinit(gpa);
|
||||
try buf.appendSlice(gpa, msg);
|
||||
inline for (info.fields, 0..) |field_info, i| {
|
||||
if (i != 0) try writer.writeAll(", ");
|
||||
try writer.print("'{f}'", .{std.zig.fmtIdFlags(field_info.name, .{
|
||||
if (i != 0) try buf.appendSlice(gpa, ", ");
|
||||
try buf.print(gpa, "'{f}'", .{std.zig.fmtIdFlags(field_info.name, .{
|
||||
.allow_primitive = true,
|
||||
.allow_underscore = true,
|
||||
})});
|
||||
@@ -1079,7 +1079,7 @@ const Parser = struct {
|
||||
break :b .{
|
||||
.token = token,
|
||||
.offset = 0,
|
||||
.msg = try buf.toOwnedSlice(self.gpa),
|
||||
.msg = try buf.toOwnedSlice(gpa),
|
||||
.owned = true,
|
||||
};
|
||||
};
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -344,8 +344,9 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
|
||||
} else if (mem.eql(u8, cmd, "targets")) {
|
||||
dev.check(.targets_command);
|
||||
const host = std.zig.resolveTargetQueryOrFatal(.{});
|
||||
const stdout = fs.File.stdout().deprecatedWriter();
|
||||
return @import("print_targets.zig").cmdTargets(arena, cmd_args, stdout, &host);
|
||||
var stdout_writer = fs.File.stdout().writer(&stdio_buffer);
|
||||
try @import("print_targets.zig").cmdTargets(arena, cmd_args, &stdout_writer.interface, &host);
|
||||
return stdout_writer.interface.flush();
|
||||
} else if (mem.eql(u8, cmd, "version")) {
|
||||
dev.check(.version_command);
|
||||
try fs.File.stdout().writeAll(build_options.version ++ "\n");
|
||||
@@ -356,7 +357,9 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
|
||||
} else if (mem.eql(u8, cmd, "env")) {
|
||||
dev.check(.env_command);
|
||||
verifyLibcxxCorrectlyLinked();
|
||||
return @import("print_env.zig").cmdEnv(arena, cmd_args);
|
||||
var stdout_writer = fs.File.stdout().writer(&stdio_buffer);
|
||||
try @import("print_env.zig").cmdEnv(arena, &stdout_writer.interface);
|
||||
return stdout_writer.interface.flush();
|
||||
} else if (mem.eql(u8, cmd, "reduce")) {
|
||||
return jitCmd(gpa, arena, cmd_args, .{
|
||||
.cmd_name = "reduce",
|
||||
|
||||
@@ -4,8 +4,7 @@ const introspect = @import("introspect.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const fatal = std.process.fatal;
|
||||
|
||||
pub fn cmdEnv(arena: Allocator, args: []const []const u8) !void {
|
||||
_ = args;
|
||||
pub fn cmdEnv(arena: Allocator, out: *std.Io.Writer) !void {
|
||||
const cwd_path = try introspect.getResolvedCwd(arena);
|
||||
const self_exe_path = try std.fs.selfExePathAlloc(arena);
|
||||
|
||||
@@ -21,41 +20,21 @@ pub fn cmdEnv(arena: Allocator, args: []const []const u8) !void {
|
||||
const host = try std.zig.system.resolveTargetQuery(.{});
|
||||
const triple = try host.zigTriple(arena);
|
||||
|
||||
var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
|
||||
const w = bw.writer();
|
||||
var serializer: std.zon.Serializer = .{ .writer = out };
|
||||
var root = try serializer.beginStruct(.{});
|
||||
|
||||
var jws = std.json.writeStream(w, .{ .whitespace = .indent_1 });
|
||||
|
||||
try jws.beginObject();
|
||||
|
||||
try jws.objectField("zig_exe");
|
||||
try jws.write(self_exe_path);
|
||||
|
||||
try jws.objectField("lib_dir");
|
||||
try jws.write(zig_lib_directory.path.?);
|
||||
|
||||
try jws.objectField("std_dir");
|
||||
try jws.write(zig_std_dir);
|
||||
|
||||
try jws.objectField("global_cache_dir");
|
||||
try jws.write(global_cache_dir);
|
||||
|
||||
try jws.objectField("version");
|
||||
try jws.write(build_options.version);
|
||||
|
||||
try jws.objectField("target");
|
||||
try jws.write(triple);
|
||||
|
||||
try jws.objectField("env");
|
||||
try jws.beginObject();
|
||||
try root.field("zig_exe", self_exe_path, .{});
|
||||
try root.field("lib_dir", zig_lib_directory.path.?, .{});
|
||||
try root.field("std_dir", zig_std_dir, .{});
|
||||
try root.field("global_cache_dir", global_cache_dir, .{});
|
||||
try root.field("version", build_options.version, .{});
|
||||
try root.field("target", triple, .{});
|
||||
var env = try root.beginStructField("env", .{});
|
||||
inline for (@typeInfo(std.zig.EnvVar).@"enum".fields) |field| {
|
||||
try jws.objectField(field.name);
|
||||
try jws.write(try @field(std.zig.EnvVar, field.name).get(arena));
|
||||
try env.field(field.name, try @field(std.zig.EnvVar, field.name).get(arena), .{});
|
||||
}
|
||||
try jws.endObject();
|
||||
try env.end();
|
||||
try root.end();
|
||||
|
||||
try jws.endObject();
|
||||
try w.writeByte('\n');
|
||||
|
||||
try bw.flush();
|
||||
try out.writeByte('\n');
|
||||
}
|
||||
|
||||
@@ -14,8 +14,7 @@ const introspect = @import("introspect.zig");
|
||||
pub fn cmdTargets(
|
||||
allocator: Allocator,
|
||||
args: []const []const u8,
|
||||
/// Output stream
|
||||
stdout: anytype,
|
||||
out: *std.Io.Writer,
|
||||
native_target: *const Target,
|
||||
) !void {
|
||||
_ = args;
|
||||
@@ -38,12 +37,10 @@ pub fn cmdTargets(
|
||||
const glibc_abi = try glibc.loadMetaData(allocator, abilists_contents);
|
||||
defer glibc_abi.destroy(allocator);
|
||||
|
||||
var bw = io.bufferedWriter(stdout);
|
||||
const w = bw.writer();
|
||||
var sz = std.zon.stringify.serializer(w, .{});
|
||||
var serializer: std.zon.Serializer = .{ .writer = out };
|
||||
|
||||
{
|
||||
var root_obj = try sz.beginStruct(.{});
|
||||
var root_obj = try serializer.beginStruct(.{});
|
||||
|
||||
try root_obj.field("arch", meta.fieldNames(Target.Cpu.Arch), .{});
|
||||
try root_obj.field("os", meta.fieldNames(Target.Os.Tag), .{});
|
||||
@@ -136,6 +133,5 @@ pub fn cmdTargets(
|
||||
try root_obj.end();
|
||||
}
|
||||
|
||||
try w.writeByte('\n');
|
||||
return bw.flush();
|
||||
try out.writeByte('\n');
|
||||
}
|
||||
|
||||
@@ -3338,7 +3338,7 @@ fn transPredefinedExpr(c: *Context, scope: *Scope, expr: *const clang.Predefined
|
||||
|
||||
fn transCreateCharLitNode(c: *Context, narrow: bool, val: u32) TransError!Node {
|
||||
return Tag.char_literal.create(c.arena, if (narrow)
|
||||
try std.fmt.allocPrint(c.arena, "'{f}'", .{std.zig.fmtChar(&.{@as(u8, @intCast(val))})})
|
||||
try std.fmt.allocPrint(c.arena, "'{f}'", .{std.zig.fmtChar(@intCast(val))})
|
||||
else
|
||||
try std.fmt.allocPrint(c.arena, "'\\u{{{x}}}'", .{val}));
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user