diff --git a/lib/std/Build.zig b/lib/std/Build.zig index 8a11e9848f..1ebd26a59f 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -2766,8 +2766,9 @@ fn dumpBadDirnameHelp( comptime msg: []const u8, args: anytype, ) anyerror!void { - var w = debug.lockStdErr2(); + var buffered_writer = debug.lockStdErr2(); defer debug.unlockStdErr(); + const w = &buffered_writer; const stderr = io.getStdErr(); try w.print(msg, args); @@ -2784,7 +2785,7 @@ fn dumpBadDirnameHelp( if (asking_step) |as| { tty_config.setColor(w, .red) catch {}; - try stderr.writer().print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name}); + try w.print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name}); tty_config.setColor(w, .reset) catch {}; as.dump(stderr); @@ -2802,7 +2803,8 @@ pub fn dumpBadGetPathHelp( src_builder: *Build, asking_step: ?*Step, ) anyerror!void { - var w = stderr.unbufferedWriter(); + var buffered_writer = stderr.unbufferedWriter(); + const w = &buffered_writer; try w.print( \\getPath() was called on a GeneratedFile that wasn't built yet. \\ source package path: {s} @@ -2821,7 +2823,7 @@ pub fn dumpBadGetPathHelp( s.dump(stderr); if (asking_step) |as| { tty_config.setColor(w, .red) catch {}; - try stderr.writer().print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name}); + try w.print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name}); tty_config.setColor(w, .reset) catch {}; as.dump(stderr); diff --git a/lib/std/Build/Cache.zig b/lib/std/Build/Cache.zig index 9628423504..8f59c0e6e8 100644 --- a/lib/std/Build/Cache.zig +++ b/lib/std/Build/Cache.zig @@ -1061,14 +1061,17 @@ pub const Manifest = struct { } fn addDepFileMaybePost(self: *Manifest, dir: fs.Dir, dep_file_basename: []const u8) !void { - const dep_file_contents = try dir.readFileAlloc(self.cache.gpa, dep_file_basename, manifest_file_size_max); - defer self.cache.gpa.free(dep_file_contents); + const gpa = self.cache.gpa; + const dep_file_contents = try dir.readFileAlloc(gpa, dep_file_basename, manifest_file_size_max); + defer gpa.free(dep_file_contents); - var error_buf = std.ArrayList(u8).init(self.cache.gpa); - defer error_buf.deinit(); + var error_buf: std.ArrayListUnmanaged(u8) = .empty; + defer error_buf.deinit(gpa); + + var resolve_buf: std.ArrayListUnmanaged(u8) = .empty; + defer resolve_buf.deinit(gpa); var it: DepTokenizer = .{ .bytes = dep_file_contents }; - while (it.next()) |token| { switch (token) { // We don't care about targets, we only want the prereqs @@ -1078,16 +1081,14 @@ pub const Manifest = struct { _ = try self.addFile(file_path, null); } else try self.addFilePost(file_path), .prereq_must_resolve => { - var resolve_buf = std.ArrayList(u8).init(self.cache.gpa); - defer resolve_buf.deinit(); - - try token.resolve(resolve_buf.writer()); + resolve_buf.clearRetainingCapacity(); + try token.resolve(gpa, &resolve_buf); if (self.manifest_file == null) { _ = try self.addFile(resolve_buf.items, null); } else try self.addFilePost(resolve_buf.items); }, else => |err| { - try err.printError(error_buf.writer()); + try err.printError(gpa, &error_buf); log.err("failed parsing {s}: {s}", .{ dep_file_basename, error_buf.items }); return error.InvalidDepFile; }, @@ -1125,13 +1126,13 @@ pub const Manifest = struct { if (self.manifest_dirty) { self.manifest_dirty = false; - var contents = 
std.ArrayList(u8).init(self.cache.gpa); - defer contents.deinit(); + const gpa = self.cache.gpa; + var contents: std.ArrayListUnmanaged(u8) = .empty; + defer contents.deinit(gpa); - const writer = contents.writer(); - try writer.writeAll(manifest_header ++ "\n"); + try contents.appendSlice(gpa, manifest_header ++ "\n"); for (self.files.keys()) |file| { - try writer.print("{d} {d} {d} {x} {d} {s}\n", .{ + try contents.print(gpa, "{d} {d} {d} {x} {d} {s}\n", .{ file.stat.size, file.stat.inode, file.stat.mtime, diff --git a/lib/std/Build/Cache/DepTokenizer.zig b/lib/std/Build/Cache/DepTokenizer.zig index a1e64c006d..bc0823fa6c 100644 --- a/lib/std/Build/Cache/DepTokenizer.zig +++ b/lib/std/Build/Cache/DepTokenizer.zig @@ -7,6 +7,7 @@ state: State = .lhs, const std = @import("std"); const testing = std.testing; const assert = std.debug.assert; +const Allocator = std.mem.Allocator; pub fn next(self: *Tokenizer) ?Token { var start = self.index; @@ -362,7 +363,7 @@ pub const Token = union(enum) { }; /// Resolve escapes in target or prereq. Only valid with .target_must_resolve or .prereq_must_resolve. - pub fn resolve(self: Token, writer: anytype) @TypeOf(writer).Error!void { + pub fn resolve(self: Token, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) error{OutOfMemory}!void { switch (self) { .target_must_resolve => |bytes| { var state: enum { start, escape, dollar } = .start; @@ -372,27 +373,27 @@ pub const Token = union(enum) { switch (c) { '\\' => state = .escape, '$' => state = .dollar, - else => try writer.writeByte(c), + else => try list.append(gpa, c), } }, .escape => { switch (c) { ' ', '#', '\\' => {}, '$' => { - try writer.writeByte('\\'); + try list.append(gpa, '\\'); state = .dollar; continue; }, - else => try writer.writeByte('\\'), + else => try list.append(gpa, '\\'), } - try writer.writeByte(c); + try list.append(gpa, c); state = .start; }, .dollar => { - try writer.writeByte('$'); + try list.append(gpa, '$'); switch (c) { '$' => {}, - else => try writer.writeByte(c), + else => try list.append(gpa, c), } state = .start; }, @@ -406,19 +407,19 @@ pub const Token = union(enum) { .start => { switch (c) { '\\' => state = .escape, - else => try writer.writeByte(c), + else => try list.append(gpa, c), } }, .escape => { switch (c) { ' ' => {}, '\\' => { - try writer.writeByte(c); + try list.append(gpa, c); continue; }, - else => try writer.writeByte('\\'), + else => try list.append(gpa, '\\'), } - try writer.writeByte(c); + try list.append(gpa, c); state = .start; }, } @@ -428,20 +429,20 @@ pub const Token = union(enum) { } } - pub fn printError(self: Token, writer: anytype) @TypeOf(writer).Error!void { + pub fn printError(self: Token, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) error{OutOfMemory}!void { switch (self) { .target, .target_must_resolve, .prereq, .prereq_must_resolve => unreachable, // not an error .incomplete_quoted_prerequisite, .incomplete_target, => |index_and_bytes| { - try writer.print("{s} '", .{self.errStr()}); + try list.print(gpa, "{s} '", .{self.errStr()}); if (self == .incomplete_target) { const tmp = Token{ .target_must_resolve = index_and_bytes.bytes }; - try tmp.resolve(writer); + try tmp.resolve(gpa, list); } else { - try printCharValues(writer, index_and_bytes.bytes); + try printCharValues(gpa, list, index_and_bytes.bytes); } - try writer.print("' at position {d}", .{index_and_bytes.index}); + try list.print(gpa, "' at position {d}", .{index_and_bytes.index}); }, .invalid_target, .bad_target_escape,
.incomplete_escape, .expected_colon, => |index_and_char| { - try writer.writeAll("illegal char "); - try printUnderstandableChar(writer, index_and_char.char); - try writer.print(" at position {d}: {s}", .{ index_and_char.index, self.errStr() }); + try list.appendSlice(gpa, "illegal char "); + try printUnderstandableChar(gpa, list, index_and_char.char); + try list.print(gpa, " at position {d}: {s}", .{ index_and_char.index, self.errStr() }); }, } } @@ -1026,41 +1027,41 @@ fn depTokenizer(input: []const u8, expect: []const u8) !void { defer arena_allocator.deinit(); var it: Tokenizer = .{ .bytes = input }; - var buffer = std.ArrayList(u8).init(arena); - var resolve_buf = std.ArrayList(u8).init(arena); + var buffer: std.ArrayListUnmanaged(u8) = .empty; + var resolve_buf: std.ArrayListUnmanaged(u8) = .empty; var i: usize = 0; while (it.next()) |token| { - if (i != 0) try buffer.appendSlice("\n"); + if (i != 0) try buffer.appendSlice(arena, "\n"); switch (token) { .target, .prereq => |bytes| { - try buffer.appendSlice(@tagName(token)); - try buffer.appendSlice(" = {"); + try buffer.appendSlice(arena, @tagName(token)); + try buffer.appendSlice(arena, " = {"); for (bytes) |b| { - try buffer.append(printable_char_tab[b]); + try buffer.append(arena, printable_char_tab[b]); } - try buffer.appendSlice("}"); + try buffer.appendSlice(arena, "}"); }, .target_must_resolve => { - try buffer.appendSlice("target = {"); - try token.resolve(resolve_buf.writer()); + try buffer.appendSlice(arena, "target = {"); + try token.resolve(arena, &resolve_buf); for (resolve_buf.items) |b| { - try buffer.append(printable_char_tab[b]); + try buffer.append(arena, printable_char_tab[b]); } resolve_buf.items.len = 0; - try buffer.appendSlice("}"); + try buffer.appendSlice(arena, "}"); }, .prereq_must_resolve => { - try buffer.appendSlice("prereq = {"); - try token.resolve(resolve_buf.writer()); + try buffer.appendSlice(arena, "prereq = {"); + try token.resolve(arena, &resolve_buf); for (resolve_buf.items) |b| { - try buffer.append(printable_char_tab[b]); + try buffer.append(arena, printable_char_tab[b]); } resolve_buf.items.len = 0; - try buffer.appendSlice("}"); + try buffer.appendSlice(arena, "}"); }, else => { - try buffer.appendSlice("ERROR: "); - try token.printError(buffer.writer()); + try buffer.appendSlice(arena, "ERROR: "); + try token.printError(arena, &buffer); break; }, } @@ -1072,121 +1073,7 @@ fn depTokenizer(input: []const u8, expect: []const u8) !void { return; } - const out = std.io.getStdErr().writer(); - - try out.writeAll("\n"); - try printSection(out, "<<<< input", input); - try printSection(out, "==== expect", expect); - try printSection(out, ">>>> got", buffer.items); - try printRuler(out); - - try testing.expect(false); -} - -fn printSection(out: anytype, label: []const u8, bytes: []const u8) !void { - try printLabel(out, label, bytes); - try hexDump(out, bytes); - try printRuler(out); - try out.writeAll(bytes); - try out.writeAll("\n"); -} - -fn printLabel(out: anytype, label: []const u8, bytes: []const u8) !void { - var buf: [80]u8 = undefined; - const text = try std.fmt.bufPrint(buf[0..], "{s} {d} bytes ", .{ label, bytes.len }); - try out.writeAll(text); - var i: usize = text.len; - const end = 79; - while (i < end) : (i += 1) { - try out.writeAll(&[_]u8{label[0]}); - } - try out.writeAll("\n"); -} - -fn printRuler(out: anytype) !void { - var i: usize = 0; - const end = 79; - while (i < end) : (i += 1) { - try out.writeAll("-"); - } - try out.writeAll("\n"); -} - -fn hexDump(out: anytype, bytes: 
[]const u8) !void { - const n16 = bytes.len >> 4; - var line: usize = 0; - var offset: usize = 0; - while (line < n16) : (line += 1) { - try hexDump16(out, offset, bytes[offset..][0..16]); - offset += 16; - } - - const n = bytes.len & 0x0f; - if (n > 0) { - try printDecValue(out, offset, 8); - try out.writeAll(":"); - try out.writeAll(" "); - const end1 = @min(offset + n, offset + 8); - for (bytes[offset..end1]) |b| { - try out.writeAll(" "); - try printHexValue(out, b, 2); - } - const end2 = offset + n; - if (end2 > end1) { - try out.writeAll(" "); - for (bytes[end1..end2]) |b| { - try out.writeAll(" "); - try printHexValue(out, b, 2); - } - } - const short = 16 - n; - var i: usize = 0; - while (i < short) : (i += 1) { - try out.writeAll(" "); - } - if (end2 > end1) { - try out.writeAll(" |"); - } else { - try out.writeAll(" |"); - } - try printCharValues(out, bytes[offset..end2]); - try out.writeAll("|\n"); - offset += n; - } - - try printDecValue(out, offset, 8); - try out.writeAll(":"); - try out.writeAll("\n"); -} - -fn hexDump16(out: anytype, offset: usize, bytes: []const u8) !void { - try printDecValue(out, offset, 8); - try out.writeAll(":"); - try out.writeAll(" "); - for (bytes[0..8]) |b| { - try out.writeAll(" "); - try printHexValue(out, b, 2); - } - try out.writeAll(" "); - for (bytes[8..16]) |b| { - try out.writeAll(" "); - try printHexValue(out, b, 2); - } - try out.writeAll(" |"); - try printCharValues(out, bytes); - try out.writeAll("|\n"); -} - -fn printDecValue(out: anytype, value: u64, width: u8) !void { - var buffer: [20]u8 = undefined; - const len = std.fmt.formatIntBuf(buffer[0..], value, 10, .lower, .{ .width = width, .fill = '0' }); - try out.writeAll(buffer[0..len]); -} - -fn printHexValue(out: anytype, value: u64, width: u8) !void { - var buffer: [16]u8 = undefined; - const len = std.fmt.formatIntBuf(buffer[0..], value, 16, .lower, .{ .width = width, .fill = '0' }); - try out.writeAll(buffer[0..len]); + try testing.expectEqualStrings(expect, buffer.items); } fn printCharValues(out: anytype, bytes: []const u8) !void { diff --git a/lib/std/array_list.zig b/lib/std/array_list.zig index febf3596c2..6d05996c1a 100644 --- a/lib/std/array_list.zig +++ b/lib/std/array_list.zig @@ -976,37 +976,12 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?mem.Alig @memcpy(self.items[old_len..][0..items.len], items); } - pub const WriterContext = struct { - self: *Self, - allocator: Allocator, - }; - - pub const Writer = if (T != u8) - @compileError("The Writer interface is only defined for ArrayList(u8) " ++ - "but the given type is ArrayList(" ++ @typeName(T) ++ ")") - else - std.io.Writer(WriterContext, Allocator.Error, appendWrite); - - /// Initializes a Writer which will append to the list. - pub fn writer(self: *Self, gpa: Allocator) Writer { - return .{ .context = .{ .self = self, .allocator = gpa } }; - } - - /// Same as `append` except it returns the number of bytes written, - /// which is always the same as `m.len`. The purpose of this function - /// existing is to match `std.io.Writer` API. - /// Invalidates element pointers if additional memory is needed. 
- fn appendWrite(context: WriterContext, m: []const u8) Allocator.Error!usize { - try context.self.appendSlice(context.allocator, m); - return m.len; - } - pub fn print(self: *Self, gpa: Allocator, comptime fmt: []const u8, args: anytype) error{OutOfMemory}!void { comptime assert(T == u8); try self.ensureUnusedCapacity(gpa, fmt.len); - var alw: std.io.ArrayListWriter = undefined; - const bw = alw.fromOwned(gpa, self); - defer self.* = alw.toOwned(); + var aw: std.io.AllocatingWriter = undefined; + const bw = aw.fromArrayList(gpa, self); + defer self.* = aw.toArrayList(); bw.print(fmt, args) catch return error.OutOfMemory; } diff --git a/lib/std/io.zig b/lib/std/io.zig index 266688ba71..3f38e1fc93 100644 --- a/lib/std/io.zig +++ b/lib/std/io.zig @@ -301,7 +301,7 @@ pub const AnyWriter = Writer; pub const SeekableStream = @import("io/seekable_stream.zig").SeekableStream; pub const BufferedWriter = @import("io/BufferedWriter.zig"); -pub const ArrayListWriter = @import("io/ArrayListWriter.zig"); +pub const AllocatingWriter = @import("io/AllocatingWriter.zig"); pub const BufferedReader = @import("io/buffered_reader.zig").BufferedReader; pub const bufferedReader = @import("io/buffered_reader.zig").bufferedReader; @@ -784,7 +784,7 @@ test { _ = Writer; _ = CountingWriter; _ = FixedBufferStream; - _ = ArrayListWriter; + _ = AllocatingWriter; _ = @import("io/bit_reader.zig"); _ = @import("io/bit_writer.zig"); _ = @import("io/buffered_atomic_file.zig"); diff --git a/lib/std/io/ArrayListWriter.zig b/lib/std/io/AllocatingWriter.zig similarity index 53% rename from lib/std/io/ArrayListWriter.zig rename to lib/std/io/AllocatingWriter.zig index 76f6a4e6f6..8aaf028ad0 100644 --- a/lib/std/io/ArrayListWriter.zig +++ b/lib/std/io/AllocatingWriter.zig @@ -1,73 +1,119 @@ -//! The straightforward way to use `std.ArrayList` as the underlying writer -//! when using `std.io.BufferedWriter` is to populate the `std.io.Writer` -//! interface and then use an empty buffer. However, this means that every use -//! of `std.io.BufferedWriter` will go through the vtable, including for +//! TODO rename to AllocatingWriter. +//! While it is possible to use `std.ArrayList` as the underlying writer when +//! using `std.io.BufferedWriter` by populating the `std.io.Writer` interface +//! and then using an empty buffer, it means that every use of +//! `std.io.BufferedWriter` will go through the vtable, including for //! functions such as `writeByte`. This API instead maintains //! `std.io.BufferedWriter` state such that it writes to the unused capacity of -//! the array list, filling it up completely before making a call through the +//! an array list, filling it up completely before making a call through the //! vtable, causing a resize. Consequently, the same, optimized, non-generic //! machine code that uses `std.io.BufferedReader`, such as formatted printing, -//! is also used when the underlying writer is backed by `std.ArrayList`. +//! takes the hot paths when using this API. const std = @import("../std.zig"); -const ArrayListWriter = @This(); +const AllocatingWriter = @This(); const assert = std.debug.assert; -items: []u8, +/// This is missing the data stored in `buffered_writer`. See `getWritten` for +/// returning a slice that includes both. +written: []u8, allocator: std.mem.Allocator, buffered_writer: std.io.BufferedWriter, -/// Replaces `array_list` with empty, taking ownership of the memory. 
-pub fn fromOwned( - alw: *ArrayListWriter, - allocator: std.mem.Allocator, - array_list: *std.ArrayListUnmanaged(u8), -) *std.io.BufferedWriter { - alw.* = .{ - .allocated_slice = array_list.items, +const vtable: std.io.Writer.VTable = .{ + .writev = writev, + .writeFile = writeFile, +}; + +/// Sets the `AllocatingWriter` to an empty state. +pub fn init(aw: *AllocatingWriter, allocator: std.mem.Allocator) *std.io.BufferedWriter { + aw.* = .{ + .written = &.{}, .allocator = allocator, .buffered_writer = .{ .unbuffered_writer = .{ - .context = alw, - .vtable = &.{ - .writev = writev, - .writeFile = writeFile, - }, + .context = aw, + .vtable = &vtable, + }, + .buffer = &.{}, + }, + }; + return &aw.buffered_writer; +} + +/// Replaces `array_list` with empty, taking ownership of the memory. +pub fn fromArrayList( + aw: *AllocatingWriter, + allocator: std.mem.Allocator, + array_list: *std.ArrayListUnmanaged(u8), +) *std.io.BufferedWriter { + aw.* = .{ + .written = array_list.items, + .allocator = allocator, + .buffered_writer = .{ + .unbuffered_writer = .{ + .context = aw, + .vtable = &vtable, }, .buffer = array_list.unusedCapacitySlice(), }, }; array_list.* = .empty; - return &alw.buffered_writer; + return &aw.buffered_writer; } -/// Returns the memory back that was borrowed with `fromOwned`. -pub fn toOwned(alw: *ArrayListWriter) std.ArrayListUnmanaged(u8) { - const end = alw.buffered_writer.end; +/// Returns an array list that takes ownership of the allocated memory. +/// Resets the `AllocatingWriter` to an empty state. +pub fn toArrayList(aw: *AllocatingWriter) std.ArrayListUnmanaged(u8) { + const bw = &aw.buffered_writer; + const written = aw.written; const result: std.ArrayListUnmanaged(u8) = .{ - .items = alw.items.ptr[0 .. alw.items.len + end], - .capacity = alw.buffered_writer.buffer.len - end, + .items = written.ptr[0 .. written.len + bw.end], + .capacity = written.len + bw.buffer.len, }; - alw.* = undefined; + aw.written = &.{}; + bw.buffer = &.{}; + bw.end = 0; return result; } +fn setArrayList(aw: *AllocatingWriter, list: std.ArrayListUnmanaged(u8)) void { + aw.written = list.items; + aw.buffered_writer.buffer = list.unusedCapacitySlice(); +} + +pub fn getWritten(aw: *AllocatingWriter) []u8 { + const bw = &aw.buffered_writer; + const end = aw.buffered_writer.end; + const result = aw.written.ptr[0 .. aw.written.len + end]; + bw.buffer = bw.buffer[end..]; + bw.end = 0; + return result; +} + +pub fn clearRetainingCapacity(aw: *AllocatingWriter) void { + const bw = &aw.buffered_writer; + bw.buffer = aw.written.ptr[0 .. aw.written.len + bw.buffer.len]; + bw.end = 0; + aw.written.len = 0; +} + fn writev(context: *anyopaque, data: []const []const u8) anyerror!usize { - const alw: *ArrayListWriter = @alignCast(@ptrCast(context)); - const start_len = alw.items.len; - const bw = &alw.buffered_writer; - assert(data[0].ptr == alw.items.ptr + start_len); - const bw_end = data[0].len; + const aw: *AllocatingWriter = @alignCast(@ptrCast(context)); + const start_len = aw.written.len; + const bw = &aw.buffered_writer; + assert(data[0].ptr == aw.written.ptr + start_len); var list: std.ArrayListUnmanaged(u8) = .{ - .items = alw.items.ptr[0 .. start_len + bw_end], - .capacity = bw.buffer.len - bw_end, + .items = aw.written.ptr[0 .. 
start_len + data[0].len], + .capacity = start_len + bw.buffer.len, }; + defer setArrayList(aw, list); const rest = data[1..]; var new_capacity: usize = list.capacity; for (rest) |bytes| new_capacity += bytes.len; - try list.ensureTotalCapacity(alw.allocator, new_capacity + 1); + try list.ensureTotalCapacity(aw.allocator, new_capacity + 1); for (rest) |bytes| list.appendSliceAssumeCapacity(bytes); - alw.items = list.items; + aw.written = list.items; bw.buffer = list.unusedCapacitySlice(); return list.items.len - start_len; } @@ -80,16 +126,16 @@ fn writeFile( headers_and_trailers_full: []const []const u8, headers_len_full: usize, ) anyerror!usize { - const alw: *ArrayListWriter = @alignCast(@ptrCast(context)); - const list = alw.array_list; - const bw = &alw.buffered_writer; + const aw: *AllocatingWriter = @alignCast(@ptrCast(context)); + const gpa = aw.allocator; + var list = aw.toArrayList(); + defer setArrayList(aw, list); const start_len = list.items.len; const headers_and_trailers, const headers_len = if (headers_len_full >= 1) b: { assert(headers_and_trailers_full[0].ptr == list.items.ptr + start_len); list.items.len += headers_and_trailers_full[0].len; break :b .{ headers_and_trailers_full[1..], headers_len_full - 1 }; } else .{ headers_and_trailers_full, headers_len_full }; - const gpa = alw.allocator; const trailers = headers_and_trailers[headers_len..]; if (len == .entire_file) { var new_capacity: usize = list.capacity + std.atomic.cache_line; @@ -103,11 +149,9 @@ fn writeFile( for (trailers) |bytes| new_capacity += bytes.len; try list.ensureTotalCapacity(gpa, new_capacity); for (trailers) |bytes| list.appendSliceAssumeCapacity(bytes); - bw.buffer = list.unusedCapacitySlice(); return list.items.len - start_len; } list.items.len += n; - bw.buffer = list.unusedCapacitySlice(); return list.items.len - start_len; } var new_capacity: usize = list.capacity + len.int(); @@ -118,10 +162,8 @@ fn writeFile( const n = try file.pread(dest, offset); list.items.len += n; if (n < dest.len) { - bw.buffer = list.unusedCapacitySlice(); return list.items.len - start_len; } for (trailers) |bytes| list.appendSliceAssumeCapacity(bytes); - bw.buffer = list.unusedCapacitySlice(); return list.items.len - start_len; } diff --git a/lib/std/io/BufferedWriter.zig b/lib/std/io/BufferedWriter.zig index 3bd431eb13..679fc82ae1 100644 --- a/lib/std/io/BufferedWriter.zig +++ b/lib/std/io/BufferedWriter.zig @@ -19,6 +19,9 @@ end: usize = 0, /// vectors through the underlying write calls as possible. pub const max_buffers_len = 8; +/// Although `BufferedWriter` can easily satisfy the `Writer` interface, it's +/// generally more practical to pass a `BufferedWriter` instance itself around, +/// since it will result in fewer calls across vtable boundaries. 
pub fn writer(bw: *BufferedWriter) Writer { return .{ .context = bw, @@ -212,6 +215,7 @@ pub fn splatByte(bw: *BufferedWriter, byte: u8, n: usize) anyerror!usize { const new_end = end + n; if (new_end <= buffer.len) { + @branchHint(.likely); @memset(buffer[end..][0..n], byte); bw.end = new_end; return n; @@ -226,6 +230,7 @@ pub fn splatByte(bw: *BufferedWriter, byte: u8, n: usize) anyerror!usize { bw.end = remainder.len; return 0; } + assert(bw.buffer.ptr == buffer.ptr); // TODO this is not a valid assertion @memset(buffer[0..n], byte); bw.end = n; return n; diff --git a/lib/std/io/tty.zig b/lib/std/io/tty.zig index c220e87880..8381f3910b 100644 --- a/lib/std/io/tty.zig +++ b/lib/std/io/tty.zig @@ -71,12 +71,7 @@ pub const Config = union(enum) { reset_attributes: u16, }; - pub fn setColor( - conf: Config, - writer: anytype, - color: Color, - ) (@typeInfo(@TypeOf(writer.writeAll(""))).error_union.error_set || - windows.SetConsoleTextAttributeError)!void { + pub fn setColor(conf: Config, bw: *std.io.BufferedWriter, color: Color) anyerror!void { nosuspend switch (conf) { .no_color => return, .escape_codes => { @@ -101,7 +96,7 @@ pub const Config = union(enum) { .dim => "\x1b[2m", .reset => "\x1b[0m", }; - try writer.writeAll(color_string); + try bw.writeAll(color_string); }, .windows_api => |ctx| if (native_os == .windows) { const attributes = switch (color) { diff --git a/lib/std/process/Child.zig b/lib/std/process/Child.zig index f9dc28aaaf..ef00319948 100644 --- a/lib/std/process/Child.zig +++ b/lib/std/process/Child.zig @@ -1004,7 +1004,8 @@ fn forkChildErrReport(fd: i32, err: ChildProcess.SpawnError) noreturn { fn writeIntFd(fd: i32, value: ErrInt) !void { const file: File = .{ .handle = fd }; - file.writer().writeInt(u64, @intCast(value), .little) catch return error.SystemResources; + var bw = file.unbufferedWriter(); + bw.writeInt(u64, @intCast(value), .little) catch return error.SystemResources; } fn readIntFd(fd: i32) !ErrInt { diff --git a/lib/std/zig.zig b/lib/std/zig.zig index cf7992baed..8cf49b9e7b 100644 --- a/lib/std/zig.zig +++ b/lib/std/zig.zig @@ -475,37 +475,37 @@ pub fn stringEscape( bytes: []const u8, comptime f: []const u8, options: std.fmt.FormatOptions, - writer: anytype, + bw: *std.io.BufferedWriter, ) !void { _ = options; for (bytes) |byte| switch (byte) { - '\n' => try writer.writeAll("\\n"), - '\r' => try writer.writeAll("\\r"), - '\t' => try writer.writeAll("\\t"), - '\\' => try writer.writeAll("\\\\"), + '\n' => try bw.writeAll("\\n"), + '\r' => try bw.writeAll("\\r"), + '\t' => try bw.writeAll("\\t"), + '\\' => try bw.writeAll("\\\\"), '"' => { if (f.len == 1 and f[0] == '\'') { - try writer.writeByte('"'); + try bw.writeByte('"'); } else if (f.len == 0) { - try writer.writeAll("\\\""); + try bw.writeAll("\\\""); } else { @compileError("expected {} or {'}, found {" ++ f ++ "}"); } }, '\'' => { if (f.len == 1 and f[0] == '\'') { - try writer.writeAll("\\'"); + try bw.writeAll("\\'"); } else if (f.len == 0) { - try writer.writeByte('\''); + try bw.writeByte('\''); } else { @compileError("expected {} or {'}, found {" ++ f ++ "}"); } }, - ' ', '!', '#'...'&', '('...'[', ']'...'~' => try writer.writeByte(byte), + ' ', '!', '#'...'&', '('...'[', ']'...'~' => try bw.writeByte(byte), // Use hex escapes for rest any unprintable characters. 
else => { - try writer.writeAll("\\x"); - try std.fmt.formatInt(byte, 16, .lower, .{ .width = 2, .fill = '0' }, writer); + try bw.writeAll("\\x"); + try bw.printIntOptions(byte, 16, .lower, .{ .width = 2, .fill = '0' }); }, }; } diff --git a/lib/std/zon/stringify.zig b/lib/std/zon/stringify.zig index 8682fdc5f4..ea1e298f9a 100644 --- a/lib/std/zon/stringify.zig +++ b/lib/std/zon/stringify.zig @@ -40,15 +40,12 @@ pub const SerializeOptions = struct { /// Serialize the given value as ZON. /// /// It is asserted at comptime that `@TypeOf(val)` is not a recursive type. -pub fn serialize( - val: anytype, - options: SerializeOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - var sz = serializer(writer, .{ - .whitespace = options.whitespace, - }); - try sz.value(val, .{ +pub fn serialize(val: anytype, options: SerializeOptions, writer: *std.io.BufferedWriter) anyerror!void { + var s: Serializer = .{ + .writer = writer, + .options = .{ .whitespace = options.whitespace }, + }; + try s.value(val, .{ .emit_codepoint_literals = options.emit_codepoint_literals, .emit_strings_as_containers = options.emit_strings_as_containers, .emit_default_optional_fields = options.emit_default_optional_fields, @@ -62,13 +59,14 @@ pub fn serialize( pub fn serializeMaxDepth( val: anytype, options: SerializeOptions, - writer: anytype, + writer: *std.io.BufferedWriter, depth: usize, -) (@TypeOf(writer).Error || error{ExceededMaxDepth})!void { - var sz = serializer(writer, .{ - .whitespace = options.whitespace, - }); - try sz.valueMaxDepth(val, .{ +) anyerror!void { + var s: Serializer = .{ + .writer = writer, + .options = .{ .whitespace = options.whitespace }, + }; + try s.valueMaxDepth(val, .{ .emit_codepoint_literals = options.emit_codepoint_literals, .emit_strings_as_containers = options.emit_strings_as_containers, .emit_default_optional_fields = options.emit_default_optional_fields, @@ -81,44 +79,45 @@ pub fn serializeMaxDepth( pub fn serializeArbitraryDepth( val: anytype, options: SerializeOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - var sz = serializer(writer, .{ - .whitespace = options.whitespace, - }); - try sz.valueArbitraryDepth(val, .{ + writer: *std.io.BufferedWriter, +) anyerror!void { + var s: Serializer = .{ + .writer = writer, + .options = .{ .whitespace = options.whitespace }, + }; + try s.valueArbitraryDepth(val, .{ .emit_codepoint_literals = options.emit_codepoint_literals, .emit_strings_as_containers = options.emit_strings_as_containers, .emit_default_optional_fields = options.emit_default_optional_fields, }); } -fn typeIsRecursive(comptime T: type) bool { - return comptime typeIsRecursiveImpl(T, &.{}); +inline fn typeIsRecursive(comptime T: type) bool { + return comptime typeIsRecursiveInner(T, &.{}); } -fn typeIsRecursiveImpl(comptime T: type, comptime prev_visited: []const type) bool { +fn typeIsRecursiveInner(comptime T: type, comptime prev_visited: []const type) bool { for (prev_visited) |V| { if (V == T) return true; } const visited = prev_visited ++ .{T}; return switch (@typeInfo(T)) { - .pointer => |pointer| typeIsRecursiveImpl(pointer.child, visited), - .optional => |optional| typeIsRecursiveImpl(optional.child, visited), - .array => |array| typeIsRecursiveImpl(array.child, visited), - .vector => |vector| typeIsRecursiveImpl(vector.child, visited), + .pointer => |pointer| typeIsRecursiveInner(pointer.child, visited), + .optional => |optional| typeIsRecursiveInner(optional.child, visited), + .array => |array| typeIsRecursiveInner(array.child, visited), + .vector => 
|vector| typeIsRecursiveInner(vector.child, visited), .@"struct" => |@"struct"| for (@"struct".fields) |field| { - if (typeIsRecursiveImpl(field.type, visited)) break true; + if (typeIsRecursiveInner(field.type, visited)) break true; } else false, .@"union" => |@"union"| inline for (@"union".fields) |field| { - if (typeIsRecursiveImpl(field.type, visited)) break true; + if (typeIsRecursiveInner(field.type, visited)) break true; } else false, else => false, }; } -fn canSerializeType(T: type) bool { +inline fn canSerializeType(T: type) bool { comptime return canSerializeTypeInner(T, &.{}, false); } @@ -343,12 +342,6 @@ test "std.zon checkValueDepth" { try expectValueDepthEquals(3, @as([]const []const u8, &.{&.{ 1, 2, 3 }})); } -/// Options for `Serializer`. -pub const SerializerOptions = struct { - /// If false, only syntactically necessary whitespace is emitted. - whitespace: bool = true, -}; - /// Determines when to emit Unicode code point literals as opposed to integer literals. pub const EmitCodepointLiterals = enum { /// Never emit Unicode code point literals. @@ -440,633 +433,610 @@ pub const SerializeContainerOptions = struct { /// For manual serialization of containers, see: /// * `beginStruct` /// * `beginTuple` -/// -/// # Example -/// ```zig -/// var sz = serializer(writer, .{}); -/// var vec2 = try sz.beginStruct(.{}); -/// try vec2.field("x", 1.5, .{}); -/// try vec2.fieldPrefix(); -/// try sz.value(2.5); -/// try vec2.end(); -/// ``` -pub fn Serializer(Writer: type) type { - return struct { - const Self = @This(); +pub const Serializer = struct { + options: Options, + indent_level: u8 = 0, + writer: *std.io.BufferedWriter, - options: SerializerOptions, - indent_level: u8, - writer: Writer, + pub const Options = struct { + /// If false, only syntactically necessary whitespace is emitted. + whitespace: bool = true, + }; - /// Initialize a serializer. - fn init(writer: Writer, options: SerializerOptions) Self { + /// Serialize a value, similar to `serialize`. + pub fn value(self: *Serializer, val: anytype, options: ValueOptions) anyerror!void { + comptime assert(!typeIsRecursive(@TypeOf(val))); + return self.valueArbitraryDepth(val, options); + } + + /// Serialize a value, similar to `serializeMaxDepth`. + /// Can return `error.ExceededMaxDepth`. + pub fn valueMaxDepth(self: *Serializer, val: anytype, options: ValueOptions, depth: usize) anyerror!void { + try checkValueDepth(val, depth); + return self.valueArbitraryDepth(val, options); + } + + /// Serialize a value, similar to `serializeArbitraryDepth`. 
+ pub fn valueArbitraryDepth(self: *Serializer, val: anytype, options: ValueOptions) anyerror!void { + comptime assert(canSerializeType(@TypeOf(val))); + switch (@typeInfo(@TypeOf(val))) { + .int, .comptime_int => if (options.emit_codepoint_literals.emitAsCodepoint(val)) |c| { + self.codePoint(c) catch |err| switch (err) { + error.InvalidCodepoint => unreachable, // Already validated + else => |e| return e, + }; + } else { + try self.int(val); + }, + .float, .comptime_float => try self.float(val), + .bool, .null => try std.fmt.format(self.writer, "{}", .{val}), + .enum_literal => try self.ident(@tagName(val)), + .@"enum" => try self.ident(@tagName(val)), + .pointer => |pointer| { + // Try to serialize as a string + const item: ?type = switch (@typeInfo(pointer.child)) { + .array => |array| array.child, + else => if (pointer.size == .slice) pointer.child else null, + }; + if (item == u8 and + (pointer.sentinel() == null or pointer.sentinel() == 0) and + !options.emit_strings_as_containers) + { + return try self.string(val); + } + + // Serialize as either a tuple or as the child type + switch (pointer.size) { + .slice => try self.tupleImpl(val, options), + .one => try self.valueArbitraryDepth(val.*, options), + else => comptime unreachable, + } + }, + .array => { + var container = try self.beginTuple( + .{ .whitespace_style = .{ .fields = val.len } }, + ); + for (val) |item_val| { + try container.fieldArbitraryDepth(item_val, options); + } + try container.end(); + }, + .@"struct" => |@"struct"| if (@"struct".is_tuple) { + var container = try self.beginTuple( + .{ .whitespace_style = .{ .fields = @"struct".fields.len } }, + ); + inline for (val) |field_value| { + try container.fieldArbitraryDepth(field_value, options); + } + try container.end(); + } else { + // Decide which fields to emit + const fields, const skipped: [@"struct".fields.len]bool = if (options.emit_default_optional_fields) b: { + break :b .{ @"struct".fields.len, @splat(false) }; + } else b: { + var fields = @"struct".fields.len; + var skipped: [@"struct".fields.len]bool = @splat(false); + inline for (@"struct".fields, &skipped) |field_info, *skip| { + if (field_info.default_value_ptr) |ptr| { + const default: *const field_info.type = @ptrCast(@alignCast(ptr)); + const field_value = @field(val, field_info.name); + if (std.meta.eql(field_value, default.*)) { + skip.* = true; + fields -= 1; + } + } + } + break :b .{ fields, skipped }; + }; + + // Emit those fields + var container = try self.beginStruct( + .{ .whitespace_style = .{ .fields = fields } }, + ); + inline for (@"struct".fields, skipped) |field_info, skip| { + if (!skip) { + try container.fieldArbitraryDepth( + field_info.name, + @field(val, field_info.name), + options, + ); + } + } + try container.end(); + }, + .@"union" => |@"union"| { + comptime assert(@"union".tag_type != null); + switch (val) { + inline else => |pl, tag| if (@TypeOf(pl) == void) + try self.writer.print(".{s}", .{@tagName(tag)}) + else { + var container = try self.beginStruct(.{ .whitespace_style = .{ .fields = 1 } }); + + try container.fieldArbitraryDepth( + @tagName(tag), + pl, + options, + ); + + try container.end(); + }, + } + }, + .optional => if (val) |inner| { + try self.valueArbitraryDepth(inner, options); + } else { + try self.writer.writeAll("null"); + }, + .vector => |vector| { + var container = try self.beginTuple( + .{ .whitespace_style = .{ .fields = vector.len } }, + ); + for (0..vector.len) |i| { + try container.fieldArbitraryDepth(val[i], options); + } + try container.end(); + }, + 
+ else => comptime unreachable, + } + } + + /// Serialize an integer. + pub fn int(self: *Serializer, val: anytype) anyerror!void { + try std.fmt.formatInt(val, 10, .lower, .{}, self.writer); + } + + /// Serialize a float. + pub fn float(self: *Serializer, val: anytype) anyerror!void { + switch (@typeInfo(@TypeOf(val))) { + .float => if (std.math.isNan(val)) { + return self.writer.writeAll("nan"); + } else if (std.math.isPositiveInf(val)) { + return self.writer.writeAll("inf"); + } else if (std.math.isNegativeInf(val)) { + return self.writer.writeAll("-inf"); + } else { + try std.fmt.format(self.writer, "{d}", .{val}); + }, + .comptime_float => try std.fmt.format(self.writer, "{d}", .{val}), + else => comptime unreachable, + } + } + + /// Serialize `name` as an identifier prefixed with `.`. + /// + /// Escapes the identifier if necessary. + pub fn ident(self: *Serializer, name: []const u8) anyerror!void { + try self.writer.print(".{p_}", .{std.zig.fmtId(name)}); + } + + /// Serialize `val` as a Unicode codepoint. + /// + /// Returns `error.InvalidCodepoint` if `val` is not a valid Unicode codepoint. + pub fn codePoint( + self: *Serializer, + val: u21, + ) anyerror!void { + var buf: [8]u8 = undefined; + const len = std.unicode.utf8Encode(val, &buf) catch return error.InvalidCodepoint; + const str = buf[0..len]; + try std.fmt.format(self.writer, "'{'}'", .{std.zig.fmtEscapes(str)}); + } + + /// Like `value`, but always serializes `val` as a tuple. + /// + /// Will fail at comptime if `val` is not a tuple, array, pointer to an array, or slice. + pub fn tuple(self: *Serializer, val: anytype, options: ValueOptions) anyerror!void { + comptime assert(!typeIsRecursive(@TypeOf(val))); + try self.tupleArbitraryDepth(val, options); + } + + /// Like `tuple`, but recursive types are allowed. + /// + /// Returns `error.ExceededMaxDepth` if `depth` is exceeded. + pub fn tupleMaxDepth( + self: *Serializer, + val: anytype, + options: ValueOptions, + depth: usize, + ) anyerror!void { + try checkValueDepth(val, depth); + try self.tupleArbitraryDepth(val, options); + } + + /// Like `tuple`, but recursive types are allowed. + /// + /// It is the caller's responsibility to ensure that `val` does not contain cycles. + pub fn tupleArbitraryDepth( + self: *Serializer, + val: anytype, + options: ValueOptions, + ) anyerror!void { + try self.tupleImpl(val, options); + } + + fn tupleImpl(self: *Serializer, val: anytype, options: ValueOptions) anyerror!void { + comptime assert(canSerializeType(@TypeOf(val))); + switch (@typeInfo(@TypeOf(val))) { + .@"struct" => { + var container = try self.beginTuple(.{ .whitespace_style = .{ .fields = val.len } }); + inline for (val) |item_val| { + try container.fieldArbitraryDepth(item_val, options); + } + try container.end(); + }, + .pointer, .array => { + var container = try self.beginTuple(.{ .whitespace_style = .{ .fields = val.len } }); + for (val) |item_val| { + try container.fieldArbitraryDepth(item_val, options); + } + try container.end(); + }, + else => comptime unreachable, + } + } + + /// Like `value`, but always serializes `val` as a string. + pub fn string(self: *Serializer, val: []const u8) anyerror!void { + try std.fmt.format(self.writer, "\"{}\"", .{std.zig.fmtEscapes(val)}); + } + + /// Options for formatting multiline strings. + pub const MultilineStringOptions = struct { + /// If top level is true, whitespace before and after the multiline string is elided. 
+ /// If it is true, a newline is printed, then the value, followed by a newline, and if + /// whitespace is true any necessary indentation follows. + top_level: bool = false, + }; + + /// Like `value`, but always serializes to a multiline string literal. + /// + /// Returns `error.InnerCarriageReturn` if `val` contains a CR not followed by a newline, + /// since multiline strings cannot represent CR without a following newline. + pub fn multilineString( + self: *Serializer, + val: []const u8, + options: MultilineStringOptions, + ) anyerror!void { + // Make sure the string does not contain any carriage returns not followed by a newline + var i: usize = 0; + while (i < val.len) : (i += 1) { + if (val[i] == '\r') { + if (i + 1 < val.len) { + if (val[i + 1] == '\n') { + i += 1; + continue; + } + } + return error.InnerCarriageReturn; + } + } + + if (!options.top_level) { + try self.newline(); + try self.indent(); + } + + try self.writer.writeAll("\\\\"); + for (val) |c| { + if (c != '\r') { + try self.writer.writeByte(c); // We write newlines here even if whitespace off + if (c == '\n') { + try self.indent(); + try self.writer.writeAll("\\\\"); + } + } + } + + if (!options.top_level) { + try self.writer.writeByte('\n'); // Even if whitespace off + try self.indent(); + } + } + + /// Create a `Struct` for writing ZON structs field by field. + pub fn beginStruct( + self: *Serializer, + options: SerializeContainerOptions, + ) anyerror!Struct { + return Struct.begin(self, options); + } + + /// Creates a `Tuple` for writing ZON tuples field by field. + pub fn beginTuple( + self: *Serializer, + options: SerializeContainerOptions, + ) anyerror!Tuple { + return Tuple.begin(self, options); + } + + fn indent(self: *Serializer) anyerror!void { + if (self.options.whitespace) { + try self.writer.writeByteNTimes(' ', 4 * self.indent_level); + } + } + + fn newline(self: *Serializer) anyerror!void { + if (self.options.whitespace) { + try self.writer.writeByte('\n'); + } + } + + fn newlineOrSpace(self: *Serializer, len: usize) anyerror!void { + if (self.containerShouldWrap(len)) { + try self.newline(); + } else { + try self.space(); + } + } + + fn space(self: *Serializer) anyerror!void { + if (self.options.whitespace) { + try self.writer.writeByte(' '); + } + } + + /// Writes ZON tuples field by field. + pub const Tuple = struct { + container: Container, + + fn begin(parent: *Serializer, options: SerializeContainerOptions) anyerror!Tuple { return .{ - .options = options, - .writer = writer, - .indent_level = 0, + .container = try Container.begin(parent, .anon, options), }; } - /// Serialize a value, similar to `serialize`. - pub fn value(self: *Self, val: anytype, options: ValueOptions) Writer.Error!void { - comptime assert(!typeIsRecursive(@TypeOf(val))); - return self.valueArbitraryDepth(val, options); + /// Finishes serializing the tuple. + /// + /// Prints a trailing comma as configured when appropriate, and the closing bracket. + pub fn end(self: *Tuple) anyerror!void { + try self.container.end(); + self.* = undefined; } - /// Serialize a value, similar to `serializeMaxDepth`. - pub fn valueMaxDepth( - self: *Self, + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `value`. 
+ pub fn field( + self: *Tuple, val: anytype, options: ValueOptions, - depth: usize, - ) (Writer.Error || error{ExceededMaxDepth})!void { - try checkValueDepth(val, depth); - return self.valueArbitraryDepth(val, options); + ) anyerror!void { + try self.container.field(null, val, options); } - /// Serialize a value, similar to `serializeArbitraryDepth`. - pub fn valueArbitraryDepth( - self: *Self, - val: anytype, - options: ValueOptions, - ) Writer.Error!void { - comptime assert(canSerializeType(@TypeOf(val))); - switch (@typeInfo(@TypeOf(val))) { - .int, .comptime_int => if (options.emit_codepoint_literals.emitAsCodepoint(val)) |c| { - self.codePoint(c) catch |err| switch (err) { - error.InvalidCodepoint => unreachable, // Already validated - else => |e| return e, - }; - } else { - try self.int(val); - }, - .float, .comptime_float => try self.float(val), - .bool, .null => try std.fmt.format(self.writer, "{}", .{val}), - .enum_literal => try self.ident(@tagName(val)), - .@"enum" => try self.ident(@tagName(val)), - .pointer => |pointer| { - // Try to serialize as a string - const item: ?type = switch (@typeInfo(pointer.child)) { - .array => |array| array.child, - else => if (pointer.size == .slice) pointer.child else null, - }; - if (item == u8 and - (pointer.sentinel() == null or pointer.sentinel() == 0) and - !options.emit_strings_as_containers) - { - return try self.string(val); - } - - // Serialize as either a tuple or as the child type - switch (pointer.size) { - .slice => try self.tupleImpl(val, options), - .one => try self.valueArbitraryDepth(val.*, options), - else => comptime unreachable, - } - }, - .array => { - var container = try self.beginTuple( - .{ .whitespace_style = .{ .fields = val.len } }, - ); - for (val) |item_val| { - try container.fieldArbitraryDepth(item_val, options); - } - try container.end(); - }, - .@"struct" => |@"struct"| if (@"struct".is_tuple) { - var container = try self.beginTuple( - .{ .whitespace_style = .{ .fields = @"struct".fields.len } }, - ); - inline for (val) |field_value| { - try container.fieldArbitraryDepth(field_value, options); - } - try container.end(); - } else { - // Decide which fields to emit - const fields, const skipped: [@"struct".fields.len]bool = if (options.emit_default_optional_fields) b: { - break :b .{ @"struct".fields.len, @splat(false) }; - } else b: { - var fields = @"struct".fields.len; - var skipped: [@"struct".fields.len]bool = @splat(false); - inline for (@"struct".fields, &skipped) |field_info, *skip| { - if (field_info.default_value_ptr) |ptr| { - const default: *const field_info.type = @ptrCast(@alignCast(ptr)); - const field_value = @field(val, field_info.name); - if (std.meta.eql(field_value, default.*)) { - skip.* = true; - fields -= 1; - } - } - } - break :b .{ fields, skipped }; - }; - - // Emit those fields - var container = try self.beginStruct( - .{ .whitespace_style = .{ .fields = fields } }, - ); - inline for (@"struct".fields, skipped) |field_info, skip| { - if (!skip) { - try container.fieldArbitraryDepth( - field_info.name, - @field(val, field_info.name), - options, - ); - } - } - try container.end(); - }, - .@"union" => |@"union"| { - comptime assert(@"union".tag_type != null); - switch (val) { - inline else => |pl, tag| if (@TypeOf(pl) == void) - try self.writer.print(".{s}", .{@tagName(tag)}) - else { - var container = try self.beginStruct(.{ .whitespace_style = .{ .fields = 1 } }); - - try container.fieldArbitraryDepth( - @tagName(tag), - pl, - options, - ); - - try container.end(); - }, - } - }, - 
.optional => if (val) |inner| { - try self.valueArbitraryDepth(inner, options); - } else { - try self.writer.writeAll("null"); - }, - .vector => |vector| { - var container = try self.beginTuple( - .{ .whitespace_style = .{ .fields = vector.len } }, - ); - for (0..vector.len) |i| { - try container.fieldArbitraryDepth(val[i], options); - } - try container.end(); - }, - - else => comptime unreachable, - } - } - - /// Serialize an integer. - pub fn int(self: *Self, val: anytype) Writer.Error!void { - try std.fmt.formatInt(val, 10, .lower, .{}, self.writer); - } - - /// Serialize a float. - pub fn float(self: *Self, val: anytype) Writer.Error!void { - switch (@typeInfo(@TypeOf(val))) { - .float => if (std.math.isNan(val)) { - return self.writer.writeAll("nan"); - } else if (std.math.isPositiveInf(val)) { - return self.writer.writeAll("inf"); - } else if (std.math.isNegativeInf(val)) { - return self.writer.writeAll("-inf"); - } else if (std.math.isNegativeZero(val)) { - return self.writer.writeAll("-0.0"); - } else { - try std.fmt.format(self.writer, "{d}", .{val}); - }, - .comptime_float => if (val == 0) { - return self.writer.writeAll("0"); - } else { - try std.fmt.format(self.writer, "{d}", .{val}); - }, - else => comptime unreachable, - } - } - - /// Serialize `name` as an identifier prefixed with `.`. - /// - /// Escapes the identifier if necessary. - pub fn ident(self: *Self, name: []const u8) Writer.Error!void { - try self.writer.print(".{p_}", .{std.zig.fmtId(name)}); - } - - /// Serialize `val` as a Unicode codepoint. - /// - /// Returns `error.InvalidCodepoint` if `val` is not a valid Unicode codepoint. - pub fn codePoint( - self: *Self, - val: u21, - ) (Writer.Error || error{InvalidCodepoint})!void { - var buf: [8]u8 = undefined; - const len = std.unicode.utf8Encode(val, &buf) catch return error.InvalidCodepoint; - const str = buf[0..len]; - try std.fmt.format(self.writer, "'{'}'", .{std.zig.fmtEscapes(str)}); - } - - /// Like `value`, but always serializes `val` as a tuple. - /// - /// Will fail at comptime if `val` is not a tuple, array, pointer to an array, or slice. - pub fn tuple(self: *Self, val: anytype, options: ValueOptions) Writer.Error!void { - comptime assert(!typeIsRecursive(@TypeOf(val))); - try self.tupleArbitraryDepth(val, options); - } - - /// Like `tuple`, but recursive types are allowed. - /// + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueMaxDepth`. /// Returns `error.ExceededMaxDepth` if `depth` is exceeded. - pub fn tupleMaxDepth( - self: *Self, + pub fn fieldMaxDepth( + self: *Tuple, val: anytype, options: ValueOptions, depth: usize, - ) (Writer.Error || error{ExceededMaxDepth})!void { - try checkValueDepth(val, depth); - try self.tupleArbitraryDepth(val, options); + ) anyerror!void { + try self.container.fieldMaxDepth(null, val, options, depth); } - /// Like `tuple`, but recursive types are allowed. - /// - /// It is the caller's responsibility to ensure that `val` does not contain cycles. - pub fn tupleArbitraryDepth( - self: *Self, + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by + /// `valueArbitraryDepth`. 
+ pub fn fieldArbitraryDepth( + self: *Tuple, val: anytype, options: ValueOptions, - ) Writer.Error!void { - try self.tupleImpl(val, options); + ) anyerror!void { + try self.container.fieldArbitraryDepth(null, val, options); } - fn tupleImpl(self: *Self, val: anytype, options: ValueOptions) Writer.Error!void { - comptime assert(canSerializeType(@TypeOf(val))); - switch (@typeInfo(@TypeOf(val))) { - .@"struct" => { - var container = try self.beginTuple(.{ .whitespace_style = .{ .fields = val.len } }); - inline for (val) |item_val| { - try container.fieldArbitraryDepth(item_val, options); - } - try container.end(); - }, - .pointer, .array => { - var container = try self.beginTuple(.{ .whitespace_style = .{ .fields = val.len } }); - for (val) |item_val| { - try container.fieldArbitraryDepth(item_val, options); - } - try container.end(); - }, - else => comptime unreachable, - } + /// Starts a field with a struct as a value. Returns the struct. + pub fn beginStructField( + self: *Tuple, + options: SerializeContainerOptions, + ) anyerror!Struct { + try self.fieldPrefix(); + return self.container.serializer.beginStruct(options); } - /// Like `value`, but always serializes `val` as a string. - pub fn string(self: *Self, val: []const u8) Writer.Error!void { - try std.fmt.format(self.writer, "\"{}\"", .{std.zig.fmtEscapes(val)}); + /// Starts a field with a tuple as a value. Returns the tuple. + pub fn beginTupleField( + self: *Tuple, + options: SerializeContainerOptions, + ) anyerror!Tuple { + try self.fieldPrefix(); + return self.container.serializer.beginTuple(options); } - /// Options for formatting multiline strings. - pub const MultilineStringOptions = struct { - /// If top level is true, whitespace before and after the multiline string is elided. - /// If it is true, a newline is printed, then the value, followed by a newline, and if - /// whitespace is true any necessary indentation follows. - top_level: bool = false, - }; + /// Print a field prefix. This prints any necessary commas, and whitespace as + /// configured. Useful if you want to serialize the field value yourself. + pub fn fieldPrefix(self: *Tuple) anyerror!void { + try self.container.fieldPrefix(null); + } + }; - /// Like `value`, but always serializes to a multiline string literal. + /// Writes ZON structs field by field. + pub const Struct = struct { + container: Container, + + fn begin(parent: *Serializer, options: SerializeContainerOptions) anyerror!Struct { + return .{ + .container = try Container.begin(parent, .named, options), + }; + } + + /// Finishes serializing the struct. /// - /// Returns `error.InnerCarriageReturn` if `val` contains a CR not followed by a newline, - /// since multiline strings cannot represent CR without a following newline. 
- pub fn multilineString( - self: *Self, - val: []const u8, - options: MultilineStringOptions, - ) (Writer.Error || error{InnerCarriageReturn})!void { - // Make sure the string does not contain any carriage returns not followed by a newline - var i: usize = 0; - while (i < val.len) : (i += 1) { - if (val[i] == '\r') { - if (i + 1 < val.len) { - if (val[i + 1] == '\n') { - i += 1; - continue; - } - } - return error.InnerCarriageReturn; - } - } - - if (!options.top_level) { - try self.newline(); - try self.indent(); - } - - try self.writer.writeAll("\\\\"); - for (val) |c| { - if (c != '\r') { - try self.writer.writeByte(c); // We write newlines here even if whitespace off - if (c == '\n') { - try self.indent(); - try self.writer.writeAll("\\\\"); - } - } - } - - if (!options.top_level) { - try self.writer.writeByte('\n'); // Even if whitespace off - try self.indent(); - } + /// Prints a trailing comma as configured when appropriate, and the closing bracket. + pub fn end(self: *Struct) anyerror!void { + try self.container.end(); + self.* = undefined; } - /// Create a `Struct` for writing ZON structs field by field. - pub fn beginStruct( - self: *Self, + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `value`. + pub fn field( + self: *Struct, + name: []const u8, + val: anytype, + options: ValueOptions, + ) anyerror!void { + try self.container.field(name, val, options); + } + + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueMaxDepth`. + /// Returns `error.ExceededMaxDepth` if `depth` is exceeded. + pub fn fieldMaxDepth( + self: *Struct, + name: []const u8, + val: anytype, + options: ValueOptions, + depth: usize, + ) anyerror!void { + try self.container.fieldMaxDepth(name, val, options, depth); + } + + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by + /// `valueArbitraryDepth`. + pub fn fieldArbitraryDepth( + self: *Struct, + name: []const u8, + val: anytype, + options: ValueOptions, + ) anyerror!void { + try self.container.fieldArbitraryDepth(name, val, options); + } + + /// Starts a field with a struct as a value. Returns the struct. + pub fn beginStructField( + self: *Struct, + name: []const u8, options: SerializeContainerOptions, - ) Writer.Error!Struct { - return Struct.begin(self, options); + ) anyerror!Struct { + try self.fieldPrefix(name); + return self.container.serializer.beginStruct(options); } - /// Creates a `Tuple` for writing ZON tuples field by field. - pub fn beginTuple( - self: *Self, + /// Starts a field with a tuple as a value. Returns the tuple. + pub fn beginTupleField( + self: *Struct, + name: []const u8, options: SerializeContainerOptions, - ) Writer.Error!Tuple { - return Tuple.begin(self, options); + ) anyerror!Tuple { + try self.fieldPrefix(name); + return self.container.serializer.beginTuple(options); } - fn indent(self: *Self) Writer.Error!void { - if (self.options.whitespace) { - try self.writer.writeByteNTimes(' ', 4 * self.indent_level); - } + /// Print a field prefix. This prints any necessary commas, the field name (escaped if + /// necessary) and whitespace as configured. Useful if you want to serialize the field + /// value yourself. 
+ pub fn fieldPrefix(self: *Struct, name: []const u8) anyerror!void { + try self.container.fieldPrefix(name); } + }; - fn newline(self: *Self) Writer.Error!void { - if (self.options.whitespace) { - try self.writer.writeByte('\n'); - } - } + const Container = struct { + const FieldStyle = enum { named, anon }; - fn newlineOrSpace(self: *Self, len: usize) Writer.Error!void { - if (self.containerShouldWrap(len)) { - try self.newline(); - } else { - try self.space(); - } - } + serializer: *Serializer, + field_style: FieldStyle, + options: SerializeContainerOptions, + empty: bool, - fn space(self: *Self) Writer.Error!void { - if (self.options.whitespace) { - try self.writer.writeByte(' '); - } - } - - /// Writes ZON tuples field by field. - pub const Tuple = struct { - container: Container, - - fn begin(parent: *Self, options: SerializeContainerOptions) Writer.Error!Tuple { - return .{ - .container = try Container.begin(parent, .anon, options), - }; - } - - /// Finishes serializing the tuple. - /// - /// Prints a trailing comma as configured when appropriate, and the closing bracket. - pub fn end(self: *Tuple) Writer.Error!void { - try self.container.end(); - self.* = undefined; - } - - /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `value`. - pub fn field( - self: *Tuple, - val: anytype, - options: ValueOptions, - ) Writer.Error!void { - try self.container.field(null, val, options); - } - - /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueMaxDepth`. - pub fn fieldMaxDepth( - self: *Tuple, - val: anytype, - options: ValueOptions, - depth: usize, - ) (Writer.Error || error{ExceededMaxDepth})!void { - try self.container.fieldMaxDepth(null, val, options, depth); - } - - /// Serialize a field. Equivalent to calling `fieldPrefix` followed by - /// `valueArbitraryDepth`. - pub fn fieldArbitraryDepth( - self: *Tuple, - val: anytype, - options: ValueOptions, - ) Writer.Error!void { - try self.container.fieldArbitraryDepth(null, val, options); - } - - /// Starts a field with a struct as a value. Returns the struct. - pub fn beginStructField( - self: *Tuple, - options: SerializeContainerOptions, - ) Writer.Error!Struct { - try self.fieldPrefix(); - return self.container.serializer.beginStruct(options); - } - - /// Starts a field with a tuple as a value. Returns the tuple. - pub fn beginTupleField( - self: *Tuple, - options: SerializeContainerOptions, - ) Writer.Error!Tuple { - try self.fieldPrefix(); - return self.container.serializer.beginTuple(options); - } - - /// Print a field prefix. This prints any necessary commas, and whitespace as - /// configured. Useful if you want to serialize the field value yourself. - pub fn fieldPrefix(self: *Tuple) Writer.Error!void { - try self.container.fieldPrefix(null); - } - }; - - /// Writes ZON structs field by field. - pub const Struct = struct { - container: Container, - - fn begin(parent: *Self, options: SerializeContainerOptions) Writer.Error!Struct { - return .{ - .container = try Container.begin(parent, .named, options), - }; - } - - /// Finishes serializing the struct. - /// - /// Prints a trailing comma as configured when appropriate, and the closing bracket. - pub fn end(self: *Struct) Writer.Error!void { - try self.container.end(); - self.* = undefined; - } - - /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `value`. 
-            pub fn field(
-                self: *Struct,
-                name: []const u8,
-                val: anytype,
-                options: ValueOptions,
-            ) Writer.Error!void {
-                try self.container.field(name, val, options);
-            }
-
-            /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueMaxDepth`.
-            pub fn fieldMaxDepth(
-                self: *Struct,
-                name: []const u8,
-                val: anytype,
-                options: ValueOptions,
-                depth: usize,
-            ) (Writer.Error || error{ExceededMaxDepth})!void {
-                try self.container.fieldMaxDepth(name, val, options, depth);
-            }
-
-            /// Serialize a field. Equivalent to calling `fieldPrefix` followed by
-            /// `valueArbitraryDepth`.
-            pub fn fieldArbitraryDepth(
-                self: *Struct,
-                name: []const u8,
-                val: anytype,
-                options: ValueOptions,
-            ) Writer.Error!void {
-                try self.container.fieldArbitraryDepth(name, val, options);
-            }
-
-            /// Starts a field with a struct as a value. Returns the struct.
-            pub fn beginStructField(
-                self: *Struct,
-                name: []const u8,
-                options: SerializeContainerOptions,
-            ) Writer.Error!Struct {
-                try self.fieldPrefix(name);
-                return self.container.serializer.beginStruct(options);
-            }
-
-            /// Starts a field with a tuple as a value. Returns the tuple.
-            pub fn beginTupleField(
-                self: *Struct,
-                name: []const u8,
-                options: SerializeContainerOptions,
-            ) Writer.Error!Tuple {
-                try self.fieldPrefix(name);
-                return self.container.serializer.beginTuple(options);
-            }
-
-            /// Print a field prefix. This prints any necessary commas, the field name (escaped if
-            /// necessary) and whitespace as configured. Useful if you want to serialize the field
-            /// value yourself.
-            pub fn fieldPrefix(self: *Struct, name: []const u8) Writer.Error!void {
-                try self.container.fieldPrefix(name);
-            }
-        };
-
-        const Container = struct {
-            const FieldStyle = enum { named, anon };
-
-            serializer: *Self,
+        fn begin(
+            sz: *Serializer,
             field_style: FieldStyle,
             options: SerializeContainerOptions,
-            empty: bool,
+        ) anyerror!Container {
+            if (options.shouldWrap()) sz.indent_level +|= 1;
+            try sz.writer.writeAll(".{");
+            return .{
+                .serializer = sz,
+                .field_style = field_style,
+                .options = options,
+                .empty = true,
+            };
+        }
 
-            fn begin(
-                sz: *Self,
-                field_style: FieldStyle,
-                options: SerializeContainerOptions,
-            ) Writer.Error!Container {
-                if (options.shouldWrap()) sz.indent_level +|= 1;
-                try sz.writer.writeAll(".{");
-                return .{
-                    .serializer = sz,
-                    .field_style = field_style,
-                    .options = options,
-                    .empty = true,
-                };
-            }
-
-            fn end(self: *Container) Writer.Error!void {
-                if (self.options.shouldWrap()) self.serializer.indent_level -|= 1;
-                if (!self.empty) {
-                    if (self.options.shouldWrap()) {
-                        if (self.serializer.options.whitespace) {
-                            try self.serializer.writer.writeByte(',');
-                        }
-                        try self.serializer.newline();
-                        try self.serializer.indent();
-                    } else if (!self.shouldElideSpaces()) {
-                        try self.serializer.space();
-                    }
-                }
-                try self.serializer.writer.writeByte('}');
-                self.* = undefined;
-            }
-
-            fn fieldPrefix(self: *Container, name: ?[]const u8) Writer.Error!void {
-                if (!self.empty) {
-                    try self.serializer.writer.writeByte(',');
-                }
-                self.empty = false;
+        fn end(self: *Container) anyerror!void {
+            if (self.options.shouldWrap()) self.serializer.indent_level -|= 1;
+            if (!self.empty) {
                 if (self.options.shouldWrap()) {
+                    if (self.serializer.options.whitespace) {
+                        try self.serializer.writer.writeByte(',');
+                    }
                     try self.serializer.newline();
+                    try self.serializer.indent();
                 } else if (!self.shouldElideSpaces()) {
                     try self.serializer.space();
                 }
-                if (self.options.shouldWrap()) try self.serializer.indent();
-                if (name) |n| {
-                    try self.serializer.ident(n);
-                    try self.serializer.space();
-                    try self.serializer.writer.writeByte('=');
-                    try self.serializer.space();
-                }
             }
+            try self.serializer.writer.writeByte('}');
+            self.* = undefined;
+        }
 
-            fn field(
-                self: *Container,
-                name: ?[]const u8,
-                val: anytype,
-                options: ValueOptions,
-            ) Writer.Error!void {
-                comptime assert(!typeIsRecursive(@TypeOf(val)));
-                try self.fieldArbitraryDepth(name, val, options);
+        fn fieldPrefix(self: *Container, name: ?[]const u8) anyerror!void {
+            if (!self.empty) {
+                try self.serializer.writer.writeByte(',');
             }
+            self.empty = false;
+            if (self.options.shouldWrap()) {
+                try self.serializer.newline();
+            } else if (!self.shouldElideSpaces()) {
+                try self.serializer.space();
+            }
+            if (self.options.shouldWrap()) try self.serializer.indent();
+            if (name) |n| {
+                try self.serializer.ident(n);
+                try self.serializer.space();
+                try self.serializer.writer.writeByte('=');
+                try self.serializer.space();
+            }
+        }
 
-            fn fieldMaxDepth(
-                self: *Container,
-                name: ?[]const u8,
-                val: anytype,
-                options: ValueOptions,
-                depth: usize,
-            ) (Writer.Error || error{ExceededMaxDepth})!void {
-                try checkValueDepth(val, depth);
-                try self.fieldArbitraryDepth(name, val, options);
-            }
+        fn field(
+            self: *Container,
+            name: ?[]const u8,
+            val: anytype,
+            options: ValueOptions,
+        ) anyerror!void {
+            comptime assert(!typeIsRecursive(@TypeOf(val)));
+            try self.fieldArbitraryDepth(name, val, options);
+        }
 
-            fn fieldArbitraryDepth(
-                self: *Container,
-                name: ?[]const u8,
-                val: anytype,
-                options: ValueOptions,
-            ) Writer.Error!void {
-                try self.fieldPrefix(name);
-                try self.serializer.valueArbitraryDepth(val, options);
-            }
+        /// Returns `error.ExceededMaxDepth` if `depth` is exceeded.
+        fn fieldMaxDepth(
+            self: *Container,
+            name: ?[]const u8,
+            val: anytype,
+            options: ValueOptions,
+            depth: usize,
+        ) anyerror!void {
+            try checkValueDepth(val, depth);
+            try self.fieldArbitraryDepth(name, val, options);
+        }
 
-            fn shouldElideSpaces(self: *const Container) bool {
-                return switch (self.options.whitespace_style) {
-                    .fields => |fields| self.field_style != .named and fields == 1,
-                    else => false,
-                };
-            }
-        };
+        fn fieldArbitraryDepth(
+            self: *Container,
+            name: ?[]const u8,
+            val: anytype,
+            options: ValueOptions,
+        ) anyerror!void {
+            try self.fieldPrefix(name);
+            try self.serializer.valueArbitraryDepth(val, options);
+        }
+
+        fn shouldElideSpaces(self: *const Container) bool {
+            return switch (self.options.whitespace_style) {
+                .fields => |fields| self.field_style != .named and fields == 1,
+                else => false,
+            };
+        }
     };
-}
+};
 
-/// Creates a new `Serializer` with the given writer and options.
-pub fn serializer(writer: anytype, options: SerializerOptions) Serializer(@TypeOf(writer)) {
-    return .init(writer, options);
+test Serializer {
+    var s: Serializer = .{
+        .writer = std.io.null_writer,
+    };
+    var vec2 = try s.beginStruct(.{});
+    try vec2.field("x", 1.5, .{});
+    try vec2.fieldPrefix();
+    try s.value(2.5);
+    try vec2.end();
 }
 
 fn expectSerializeEqual(
@@ -1074,10 +1044,12 @@ fn expectSerializeEqual(
     value: anytype,
     options: SerializeOptions,
 ) !void {
-    var buf = std.ArrayList(u8).init(std.testing.allocator);
-    defer buf.deinit();
-    try serialize(value, options, buf.writer());
-    try std.testing.expectEqualStrings(expected, buf.items);
+    var aw: std.io.AllocatingWriter = undefined;
+    defer aw.deinit();
+    const bw = aw.init(std.testing.allocator);
+
+    try serialize(value, options, bw);
+    try std.testing.expectEqualStrings(expected, aw.getWritten());
 }
 
 test "std.zon stringify whitespace, high level API" {
@@ -1174,59 +1146,59 @@ test "std.zon stringify whitespace, high level API" {
 }
 
 test "std.zon stringify whitespace, low level API" {
-    var buf = std.ArrayList(u8).init(std.testing.allocator);
-    defer buf.deinit();
-    var sz = serializer(buf.writer(), .{});
+    var aw: std.io.AllocatingWriter = undefined;
+    defer aw.deinit();
+    var s: Serializer = .{ .writer = aw.init(std.testing.allocator) };
 
-    inline for (.{ true, false }) |whitespace| {
-        sz.options = .{ .whitespace = whitespace };
+    for ([2]bool{ true, false }) |whitespace| {
+        s.options = .{ .whitespace = whitespace };
 
         // Empty containers
         {
-            var container = try sz.beginStruct(.{});
+            var container = try s.beginStruct(.{});
             try container.end();
-            try std.testing.expectEqualStrings(".{}", buf.items);
-            buf.clearRetainingCapacity();
+            try std.testing.expectEqualStrings(".{}", aw.getWritten());
+            aw.clearRetainingCapacity();
         }
         {
-            var container = try sz.beginTuple(.{});
+            var container = try s.beginTuple(.{});
             try container.end();
-            try std.testing.expectEqualStrings(".{}", buf.items);
-            buf.clearRetainingCapacity();
+            try std.testing.expectEqualStrings(".{}", aw.getWritten());
+            aw.clearRetainingCapacity();
        }
         {
-            var container = try sz.beginStruct(.{ .whitespace_style = .{ .wrap = false } });
+            var container = try s.beginStruct(.{ .whitespace_style = .{ .wrap = false } });
             try container.end();
-            try std.testing.expectEqualStrings(".{}", buf.items);
-            buf.clearRetainingCapacity();
+            try std.testing.expectEqualStrings(".{}", aw.getWritten());
+            aw.clearRetainingCapacity();
        }
         {
-            var container = try sz.beginTuple(.{ .whitespace_style = .{ .wrap = false } });
+            var container = try s.beginTuple(.{ .whitespace_style = .{ .wrap = false } });
             try container.end();
-            try std.testing.expectEqualStrings(".{}", buf.items);
-            buf.clearRetainingCapacity();
+            try std.testing.expectEqualStrings(".{}", aw.getWritten());
+            aw.clearRetainingCapacity();
        }
         {
-            var container = try sz.beginStruct(.{ .whitespace_style = .{ .fields = 0 } });
+            var container = try s.beginStruct(.{ .whitespace_style = .{ .fields = 0 } });
            try container.end();
-            try std.testing.expectEqualStrings(".{}", buf.items);
-            buf.clearRetainingCapacity();
+            try std.testing.expectEqualStrings(".{}", aw.getWritten());
+            aw.clearRetainingCapacity();
        }
         {
-            var container = try sz.beginTuple(.{ .whitespace_style = .{ .fields = 0 } });
+            var container = try s.beginTuple(.{ .whitespace_style = .{ .fields = 0 } });
             try container.end();
-            try std.testing.expectEqualStrings(".{}", buf.items);
-            buf.clearRetainingCapacity();
+            try std.testing.expectEqualStrings(".{}", aw.getWritten());
+            aw.clearRetainingCapacity();
        }
 
        // Size 1
        {
-            var container = try sz.beginStruct(.{});
+            var container = try s.beginStruct(.{});
            try container.field("a", 1, .{});
            try container.end();
            if (whitespace) {
@@ -1234,15 +1206,15 @@
                    \\.{
                    \\    .a = 1,
                    \\}
-                , buf.items);
+                , aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{.a=1}", buf.items);
+                try std.testing.expectEqualStrings(".{.a=1}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginTuple(.{});
+            var container = try s.beginTuple(.{});
            try container.field(1, .{});
            try container.end();
            if (whitespace) {
@@ -1250,62 +1222,62 @@
                    \\.{
                    \\    1,
                    \\}
-                , buf.items);
+                , aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{1}", buf.items);
+                try std.testing.expectEqualStrings(".{1}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginStruct(.{ .whitespace_style = .{ .wrap = false } });
+            var container = try s.beginStruct(.{ .whitespace_style = .{ .wrap = false } });
            try container.field("a", 1, .{});
            try container.end();
            if (whitespace) {
-                try std.testing.expectEqualStrings(".{ .a = 1 }", buf.items);
+                try std.testing.expectEqualStrings(".{ .a = 1 }", aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{.a=1}", buf.items);
+                try std.testing.expectEqualStrings(".{.a=1}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
            // We get extra spaces here, since we didn't know up front that there would only be one
            // field.
-            var container = try sz.beginTuple(.{ .whitespace_style = .{ .wrap = false } });
+            var container = try s.beginTuple(.{ .whitespace_style = .{ .wrap = false } });
            try container.field(1, .{});
            try container.end();
            if (whitespace) {
-                try std.testing.expectEqualStrings(".{ 1 }", buf.items);
+                try std.testing.expectEqualStrings(".{ 1 }", aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{1}", buf.items);
+                try std.testing.expectEqualStrings(".{1}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginStruct(.{ .whitespace_style = .{ .fields = 1 } });
+            var container = try s.beginStruct(.{ .whitespace_style = .{ .fields = 1 } });
            try container.field("a", 1, .{});
            try container.end();
            if (whitespace) {
-                try std.testing.expectEqualStrings(".{ .a = 1 }", buf.items);
+                try std.testing.expectEqualStrings(".{ .a = 1 }", aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{.a=1}", buf.items);
+                try std.testing.expectEqualStrings(".{.a=1}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginTuple(.{ .whitespace_style = .{ .fields = 1 } });
+            var container = try s.beginTuple(.{ .whitespace_style = .{ .fields = 1 } });
            try container.field(1, .{});
            try container.end();
-            try std.testing.expectEqualStrings(".{1}", buf.items);
-            buf.clearRetainingCapacity();
+            try std.testing.expectEqualStrings(".{1}", aw.getWritten());
+            aw.clearRetainingCapacity();
        }
 
        // Size 2
        {
-            var container = try sz.beginStruct(.{});
+            var container = try s.beginStruct(.{});
            try container.field("a", 1, .{});
            try container.field("b", 2, .{});
            try container.end();
@@ -1315,15 +1287,15 @@
                    \\    .a = 1,
                    \\    .b = 2,
                    \\}
-                , buf.items);
+                , aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{.a=1,.b=2}", buf.items);
+                try std.testing.expectEqualStrings(".{.a=1,.b=2}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginTuple(.{});
+            var container = try s.beginTuple(.{});
            try container.field(1, .{});
            try container.field(2, .{});
            try container.end();
@@ -1333,68 +1305,68 @@
                    \\    1,
                    \\    2,
                    \\}
-                , buf.items);
+                , aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{1,2}", buf.items);
+                try std.testing.expectEqualStrings(".{1,2}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginStruct(.{ .whitespace_style = .{ .wrap = false } });
+            var container = try s.beginStruct(.{ .whitespace_style = .{ .wrap = false } });
            try container.field("a", 1, .{});
            try container.field("b", 2, .{});
            try container.end();
            if (whitespace) {
-                try std.testing.expectEqualStrings(".{ .a = 1, .b = 2 }", buf.items);
+                try std.testing.expectEqualStrings(".{ .a = 1, .b = 2 }", aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{.a=1,.b=2}", buf.items);
+                try std.testing.expectEqualStrings(".{.a=1,.b=2}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginTuple(.{ .whitespace_style = .{ .wrap = false } });
+            var container = try s.beginTuple(.{ .whitespace_style = .{ .wrap = false } });
            try container.field(1, .{});
            try container.field(2, .{});
            try container.end();
            if (whitespace) {
-                try std.testing.expectEqualStrings(".{ 1, 2 }", buf.items);
+                try std.testing.expectEqualStrings(".{ 1, 2 }", aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{1,2}", buf.items);
+                try std.testing.expectEqualStrings(".{1,2}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginStruct(.{ .whitespace_style = .{ .fields = 2 } });
+            var container = try s.beginStruct(.{ .whitespace_style = .{ .fields = 2 } });
            try container.field("a", 1, .{});
            try container.field("b", 2, .{});
            try container.end();
            if (whitespace) {
-                try std.testing.expectEqualStrings(".{ .a = 1, .b = 2 }", buf.items);
+                try std.testing.expectEqualStrings(".{ .a = 1, .b = 2 }", aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{.a=1,.b=2}", buf.items);
+                try std.testing.expectEqualStrings(".{.a=1,.b=2}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginTuple(.{ .whitespace_style = .{ .fields = 2 } });
+            var container = try s.beginTuple(.{ .whitespace_style = .{ .fields = 2 } });
            try container.field(1, .{});
            try container.field(2, .{});
            try container.end();
            if (whitespace) {
-                try std.testing.expectEqualStrings(".{ 1, 2 }", buf.items);
+                try std.testing.expectEqualStrings(".{ 1, 2 }", aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{1,2}", buf.items);
+                try std.testing.expectEqualStrings(".{1,2}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
 
        // Size 3
        {
-            var container = try sz.beginStruct(.{});
+            var container = try s.beginStruct(.{});
            try container.field("a", 1, .{});
            try container.field("b", 2, .{});
            try container.field("c", 3, .{});
            try container.end();
@@ -1406,15 +1378,15 @@
                    \\    .b = 2,
                    \\    .c = 3,
                    \\}
-                , buf.items);
+                , aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{.a=1,.b=2,.c=3}", buf.items);
+                try std.testing.expectEqualStrings(".{.a=1,.b=2,.c=3}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginTuple(.{});
+            var container = try s.beginTuple(.{});
            try container.field(1, .{});
            try container.field(2, .{});
            try container.field(3, .{});
            try container.end();
@@ -1426,43 +1398,43 @@
                    \\    2,
                    \\    3,
                    \\}
-                , buf.items);
+                , aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{1,2,3}", buf.items);
+                try std.testing.expectEqualStrings(".{1,2,3}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginStruct(.{ .whitespace_style = .{ .wrap = false } });
+            var container = try s.beginStruct(.{ .whitespace_style = .{ .wrap = false } });
            try container.field("a", 1, .{});
            try container.field("b", 2, .{});
            try container.field("c", 3, .{});
            try container.end();
            if (whitespace) {
-                try std.testing.expectEqualStrings(".{ .a = 1, .b = 2, .c = 3 }", buf.items);
+                try std.testing.expectEqualStrings(".{ .a = 1, .b = 2, .c = 3 }", aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{.a=1,.b=2,.c=3}", buf.items);
+                try std.testing.expectEqualStrings(".{.a=1,.b=2,.c=3}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginTuple(.{ .whitespace_style = .{ .wrap = false } });
+            var container = try s.beginTuple(.{ .whitespace_style = .{ .wrap = false } });
            try container.field(1, .{});
            try container.field(2, .{});
            try container.field(3, .{});
            try container.end();
            if (whitespace) {
-                try std.testing.expectEqualStrings(".{ 1, 2, 3 }", buf.items);
+                try std.testing.expectEqualStrings(".{ 1, 2, 3 }", aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{1,2,3}", buf.items);
+                try std.testing.expectEqualStrings(".{1,2,3}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginStruct(.{ .whitespace_style = .{ .fields = 3 } });
+            var container = try s.beginStruct(.{ .whitespace_style = .{ .fields = 3 } });
            try container.field("a", 1, .{});
            try container.field("b", 2, .{});
            try container.field("c", 3, .{});
            try container.end();
@@ -1474,15 +1446,15 @@
                    \\    .b = 2,
                    \\    .c = 3,
                    \\}
-                , buf.items);
+                , aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{.a=1,.b=2,.c=3}", buf.items);
+                try std.testing.expectEqualStrings(".{.a=1,.b=2,.c=3}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
        {
-            var container = try sz.beginTuple(.{ .whitespace_style = .{ .fields = 3 } });
+            var container = try s.beginTuple(.{ .whitespace_style = .{ .fields = 3 } });
            try container.field(1, .{});
            try container.field(2, .{});
            try container.field(3, .{});
            try container.end();
@@ -1494,16 +1466,16 @@
                    \\    2,
                    \\    3,
                    \\}
-                , buf.items);
+                , aw.getWritten());
            } else {
-                try std.testing.expectEqualStrings(".{1,2,3}", buf.items);
+                try std.testing.expectEqualStrings(".{1,2,3}", aw.getWritten());
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
 
        // Nested objects where the outer container doesn't wrap but the inner containers do
        {
-            var container = try sz.beginStruct(.{ .whitespace_style = .{ .wrap = false } });
+            var container = try s.beginStruct(.{ .whitespace_style = .{ .wrap = false } });
            try container.field("first", .{ 1, 2, 3 }, .{});
            try container.field("second", .{ 4, 5, 6 }, .{});
            try container.end();
@@ -1518,139 +1490,139 @@
                    \\    5,
                    \\    6,
                    \\} }
-                , buf.items);
+                , aw.getWritten());
            } else {
                try std.testing.expectEqualStrings(
                    ".{.first=.{1,2,3},.second=.{4,5,6}}",
-                    buf.items,
+                    aw.getWritten(),
                );
            }
-            buf.clearRetainingCapacity();
+            aw.clearRetainingCapacity();
        }
    }
 }
 
 test "std.zon stringify utf8 codepoints" {
-    var buf = std.ArrayList(u8).init(std.testing.allocator);
-    defer buf.deinit();
-    var sz = serializer(buf.writer(), .{});
+    var aw: std.io.AllocatingWriter = undefined;
+    defer aw.deinit();
+    var s: Serializer = .{ .writer = aw.init(std.testing.allocator) };
 
    // Printable ASCII
-    try sz.int('a');
-    try std.testing.expectEqualStrings("97", buf.items);
-    buf.clearRetainingCapacity();
+    try s.int('a');
+    try std.testing.expectEqualStrings("97", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.codePoint('a');
-    try std.testing.expectEqualStrings("'a'", buf.items);
-    buf.clearRetainingCapacity();
+    try s.codePoint('a');
+    try std.testing.expectEqualStrings("'a'", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value('a', .{ .emit_codepoint_literals = .always });
-    try std.testing.expectEqualStrings("'a'", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value('a', .{ .emit_codepoint_literals = .always });
+    try std.testing.expectEqualStrings("'a'", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value('a', .{ .emit_codepoint_literals = .printable_ascii });
-    try std.testing.expectEqualStrings("'a'", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value('a', .{ .emit_codepoint_literals = .printable_ascii });
+    try std.testing.expectEqualStrings("'a'", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value('a', .{ .emit_codepoint_literals = .never });
-    try std.testing.expectEqualStrings("97", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value('a', .{ .emit_codepoint_literals = .never });
+    try std.testing.expectEqualStrings("97", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Short escaped codepoint
-    try sz.int('\n');
-    try std.testing.expectEqualStrings("10", buf.items);
-    buf.clearRetainingCapacity();
+    try s.int('\n');
+    try std.testing.expectEqualStrings("10", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.codePoint('\n');
-    try std.testing.expectEqualStrings("'\\n'", buf.items);
-    buf.clearRetainingCapacity();
+    try s.codePoint('\n');
+    try std.testing.expectEqualStrings("'\\n'", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value('\n', .{ .emit_codepoint_literals = .always });
-    try std.testing.expectEqualStrings("'\\n'", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value('\n', .{ .emit_codepoint_literals = .always });
+    try std.testing.expectEqualStrings("'\\n'", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value('\n', .{ .emit_codepoint_literals = .printable_ascii });
-    try std.testing.expectEqualStrings("10", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value('\n', .{ .emit_codepoint_literals = .printable_ascii });
-    try std.testing.expectEqualStrings("10", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value('\n', .{ .emit_codepoint_literals = .never });
-    try std.testing.expectEqualStrings("10", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value('\n', .{ .emit_codepoint_literals = .never });
+    try std.testing.expectEqualStrings("10", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Large codepoint
-    try sz.int('⚡');
-    try std.testing.expectEqualStrings("9889", buf.items);
-    buf.clearRetainingCapacity();
+    try s.int('⚡');
+    try std.testing.expectEqualStrings("9889", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.codePoint('⚡');
-    try std.testing.expectEqualStrings("'\\xe2\\x9a\\xa1'", buf.items);
-    buf.clearRetainingCapacity();
+    try s.codePoint('⚡');
+    try std.testing.expectEqualStrings("'\\xe2\\x9a\\xa1'", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value('⚡', .{ .emit_codepoint_literals = .always });
-    try std.testing.expectEqualStrings("'\\xe2\\x9a\\xa1'", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value('⚡', .{ .emit_codepoint_literals = .always });
+    try std.testing.expectEqualStrings("'\\xe2\\x9a\\xa1'", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value('⚡', .{ .emit_codepoint_literals = .printable_ascii });
-    try std.testing.expectEqualStrings("9889", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value('⚡', .{ .emit_codepoint_literals = .printable_ascii });
+    try std.testing.expectEqualStrings("9889", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value('⚡', .{ .emit_codepoint_literals = .never });
-    try std.testing.expectEqualStrings("9889", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value('⚡', .{ .emit_codepoint_literals = .never });
+    try std.testing.expectEqualStrings("9889", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Invalid codepoint
-    try std.testing.expectError(error.InvalidCodepoint, sz.codePoint(0x110000 + 1));
+    try std.testing.expectError(error.InvalidCodepoint, s.codePoint(0x110000 + 1));
 
-    try sz.int(0x110000 + 1);
-    try std.testing.expectEqualStrings("1114113", buf.items);
-    buf.clearRetainingCapacity();
+    try s.int(0x110000 + 1);
+    try std.testing.expectEqualStrings("1114113", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value(0x110000 + 1, .{ .emit_codepoint_literals = .always });
-    try std.testing.expectEqualStrings("1114113", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value(0x110000 + 1, .{ .emit_codepoint_literals = .always });
+    try std.testing.expectEqualStrings("1114113", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value(0x110000 + 1, .{ .emit_codepoint_literals = .printable_ascii });
-    try std.testing.expectEqualStrings("1114113", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value(0x110000 + 1, .{ .emit_codepoint_literals = .printable_ascii });
+    try std.testing.expectEqualStrings("1114113", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value(0x110000 + 1, .{ .emit_codepoint_literals = .never });
-    try std.testing.expectEqualStrings("1114113", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value(0x110000 + 1, .{ .emit_codepoint_literals = .never });
+    try std.testing.expectEqualStrings("1114113", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Valid codepoint, not a codepoint type
-    try sz.value(@as(u22, 'a'), .{ .emit_codepoint_literals = .always });
-    try std.testing.expectEqualStrings("97", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value(@as(u22, 'a'), .{ .emit_codepoint_literals = .always });
+    try std.testing.expectEqualStrings("97", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value(@as(u22, 'a'), .{ .emit_codepoint_literals = .printable_ascii });
-    try std.testing.expectEqualStrings("97", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value(@as(u22, 'a'), .{ .emit_codepoint_literals = .printable_ascii });
+    try std.testing.expectEqualStrings("97", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value(@as(i32, 'a'), .{ .emit_codepoint_literals = .never });
-    try std.testing.expectEqualStrings("97", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value(@as(i32, 'a'), .{ .emit_codepoint_literals = .never });
+    try std.testing.expectEqualStrings("97", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Make sure value options are passed to children
-    try sz.value(.{ .c = '⚡' }, .{ .emit_codepoint_literals = .always });
-    try std.testing.expectEqualStrings(".{ .c = '\\xe2\\x9a\\xa1' }", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value(.{ .c = '⚡' }, .{ .emit_codepoint_literals = .always });
+    try std.testing.expectEqualStrings(".{ .c = '\\xe2\\x9a\\xa1' }", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value(.{ .c = '⚡' }, .{ .emit_codepoint_literals = .never });
-    try std.testing.expectEqualStrings(".{ .c = 9889 }", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value(.{ .c = '⚡' }, .{ .emit_codepoint_literals = .never });
+    try std.testing.expectEqualStrings(".{ .c = 9889 }", aw.getWritten());
+    aw.clearRetainingCapacity();
 }
 
 test "std.zon stringify strings" {
-    var buf = std.ArrayList(u8).init(std.testing.allocator);
-    defer buf.deinit();
-    var sz = serializer(buf.writer(), .{});
+    var aw: std.io.AllocatingWriter = undefined;
+    defer aw.deinit();
+    var s: Serializer = .{ .writer = aw.init(std.testing.allocator) };
 
    // Minimal case
-    try sz.string("abc⚡\n");
-    try std.testing.expectEqualStrings("\"abc\\xe2\\x9a\\xa1\\n\"", buf.items);
-    buf.clearRetainingCapacity();
+    try s.string("abc⚡\n");
+    try std.testing.expectEqualStrings("\"abc\\xe2\\x9a\\xa1\\n\"", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.tuple("abc⚡\n", .{});
+    try s.tuple("abc⚡\n", .{});
    try std.testing.expectEqualStrings(
        \\.{
        \\    97,
@@ -1661,14 +1633,14 @@ test "std.zon stringify strings" {
        \\    161,
        \\    10,
        \\}
-    , buf.items);
-    buf.clearRetainingCapacity();
+    , aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value("abc⚡\n", .{});
-    try std.testing.expectEqualStrings("\"abc\\xe2\\x9a\\xa1\\n\"", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value("abc⚡\n", .{});
+    try std.testing.expectEqualStrings("\"abc\\xe2\\x9a\\xa1\\n\"", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value("abc⚡\n", .{ .emit_strings_as_containers = true });
+    try s.value("abc⚡\n", .{ .emit_strings_as_containers = true });
    try std.testing.expectEqualStrings(
        \\.{
        \\    97,
@@ -1679,113 +1651,113 @@ test "std.zon stringify strings" {
        \\    161,
        \\    10,
        \\}
-    , buf.items);
-    buf.clearRetainingCapacity();
+    , aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Value options are inherited by children
-    try sz.value(.{ .str = "abc" }, .{});
-    try std.testing.expectEqualStrings(".{ .str = \"abc\" }", buf.items);
-    buf.clearRetainingCapacity();
+    try s.value(.{ .str = "abc" }, .{});
+    try std.testing.expectEqualStrings(".{ .str = \"abc\" }", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.value(.{ .str = "abc" }, .{ .emit_strings_as_containers = true });
+    try s.value(.{ .str = "abc" }, .{ .emit_strings_as_containers = true });
    try std.testing.expectEqualStrings(
        \\.{ .str = .{
        \\    97,
        \\    98,
        \\    99,
        \\} }
-    , buf.items);
-    buf.clearRetainingCapacity();
+    , aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Arrays (rather than pointers to arrays) of u8s are not considered strings, so that data can
    // round trip correctly.
- try sz.value("abc".*, .{}); + try s.value("abc".*, .{}); try std.testing.expectEqualStrings( \\.{ \\ 97, \\ 98, \\ 99, \\} - , buf.items); - buf.clearRetainingCapacity(); + , aw.getWritten()); + aw.clearRetainingCapacity(); } test "std.zon stringify multiline strings" { - var buf = std.ArrayList(u8).init(std.testing.allocator); - defer buf.deinit(); - var sz = serializer(buf.writer(), .{}); + var aw: std.io.AllocatingWriter = undefined; + defer aw.deinit(); + var s: Serializer = .{ .writer = aw.init(std.testing.allocator) }; inline for (.{ true, false }) |whitespace| { - sz.options.whitespace = whitespace; + s.options.whitespace = whitespace; { - try sz.multilineString("", .{ .top_level = true }); - try std.testing.expectEqualStrings("\\\\", buf.items); - buf.clearRetainingCapacity(); + try s.multilineString("", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\", aw.getWritten()); + aw.clearRetainingCapacity(); } { - try sz.multilineString("abc⚡", .{ .top_level = true }); - try std.testing.expectEqualStrings("\\\\abc⚡", buf.items); - buf.clearRetainingCapacity(); + try s.multilineString("abc⚡", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\abc⚡", aw.getWritten()); + aw.clearRetainingCapacity(); } { - try sz.multilineString("abc⚡\ndef", .{ .top_level = true }); - try std.testing.expectEqualStrings("\\\\abc⚡\n\\\\def", buf.items); - buf.clearRetainingCapacity(); + try s.multilineString("abc⚡\ndef", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\abc⚡\n\\\\def", aw.getWritten()); + aw.clearRetainingCapacity(); } { - try sz.multilineString("abc⚡\r\ndef", .{ .top_level = true }); - try std.testing.expectEqualStrings("\\\\abc⚡\n\\\\def", buf.items); - buf.clearRetainingCapacity(); + try s.multilineString("abc⚡\r\ndef", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\abc⚡\n\\\\def", aw.getWritten()); + aw.clearRetainingCapacity(); } { - try sz.multilineString("\nabc⚡", .{ .top_level = true }); - try std.testing.expectEqualStrings("\\\\\n\\\\abc⚡", buf.items); - buf.clearRetainingCapacity(); + try s.multilineString("\nabc⚡", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\\n\\\\abc⚡", aw.getWritten()); + aw.clearRetainingCapacity(); } { - try sz.multilineString("\r\nabc⚡", .{ .top_level = true }); - try std.testing.expectEqualStrings("\\\\\n\\\\abc⚡", buf.items); - buf.clearRetainingCapacity(); + try s.multilineString("\r\nabc⚡", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\\n\\\\abc⚡", aw.getWritten()); + aw.clearRetainingCapacity(); } { - try sz.multilineString("abc\ndef", .{}); + try s.multilineString("abc\ndef", .{}); if (whitespace) { - try std.testing.expectEqualStrings("\n\\\\abc\n\\\\def\n", buf.items); + try std.testing.expectEqualStrings("\n\\\\abc\n\\\\def\n", aw.getWritten()); } else { - try std.testing.expectEqualStrings("\\\\abc\n\\\\def\n", buf.items); + try std.testing.expectEqualStrings("\\\\abc\n\\\\def\n", aw.getWritten()); } - buf.clearRetainingCapacity(); + aw.clearRetainingCapacity(); } { const str: []const u8 = &.{ 'a', '\r', 'c' }; - try sz.string(str); - try std.testing.expectEqualStrings("\"a\\rc\"", buf.items); - buf.clearRetainingCapacity(); + try s.string(str); + try std.testing.expectEqualStrings("\"a\\rc\"", aw.getWritten()); + aw.clearRetainingCapacity(); } { try std.testing.expectError( error.InnerCarriageReturn, - sz.multilineString(@as([]const u8, &.{ 'a', '\r', 'c' }), .{}), + s.multilineString(@as([]const u8, &.{ 'a', '\r', 'c' }), .{}), ); 
            try std.testing.expectError(
                error.InnerCarriageReturn,
-                sz.multilineString(@as([]const u8, &.{ 'a', '\r', 'c', '\n' }), .{}),
+                s.multilineString(@as([]const u8, &.{ 'a', '\r', 'c', '\n' }), .{}),
            );
            try std.testing.expectError(
                error.InnerCarriageReturn,
-                sz.multilineString(@as([]const u8, &.{ 'a', '\r', 'c', '\r', '\n' }), .{}),
+                s.multilineString(@as([]const u8, &.{ 'a', '\r', 'c', '\r', '\n' }), .{}),
            );
-            try std.testing.expectEqualStrings("", buf.items);
-            buf.clearRetainingCapacity();
+            try std.testing.expectEqualStrings("", aw.getWritten());
+            aw.clearRetainingCapacity();
        }
    }
 }
@@ -1931,42 +1903,43 @@ test "std.zon stringify skip default fields" {
 }
 
 test "std.zon depth limits" {
-    var buf = std.ArrayList(u8).init(std.testing.allocator);
-    defer buf.deinit();
+    var aw: std.io.AllocatingWriter = undefined;
+    defer aw.deinit();
+    const bw = aw.init(std.testing.allocator);
 
    const Recurse = struct { r: []const @This() };
 
    // Normal operation
-    try serializeMaxDepth(.{ 1, .{ 2, 3 } }, .{}, buf.writer(), 16);
-    try std.testing.expectEqualStrings(".{ 1, .{ 2, 3 } }", buf.items);
-    buf.clearRetainingCapacity();
+    try serializeMaxDepth(.{ 1, .{ 2, 3 } }, .{}, bw, 16);
+    try std.testing.expectEqualStrings(".{ 1, .{ 2, 3 } }", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try serializeArbitraryDepth(.{ 1, .{ 2, 3 } }, .{}, buf.writer());
-    try std.testing.expectEqualStrings(".{ 1, .{ 2, 3 } }", buf.items);
-    buf.clearRetainingCapacity();
+    try serializeArbitraryDepth(.{ 1, .{ 2, 3 } }, .{}, bw);
+    try std.testing.expectEqualStrings(".{ 1, .{ 2, 3 } }", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Max depth failing on non recursive type
    try std.testing.expectError(
        error.ExceededMaxDepth,
-        serializeMaxDepth(.{ 1, .{ 2, .{ 3, 4 } } }, .{}, buf.writer(), 3),
+        serializeMaxDepth(.{ 1, .{ 2, .{ 3, 4 } } }, .{}, bw, 3),
    );
-    try std.testing.expectEqualStrings("", buf.items);
-    buf.clearRetainingCapacity();
+    try std.testing.expectEqualStrings("", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Max depth passing on recursive type
    {
        const maybe_recurse = Recurse{ .r = &.{} };
-        try serializeMaxDepth(maybe_recurse, .{}, buf.writer(), 2);
-        try std.testing.expectEqualStrings(".{ .r = .{} }", buf.items);
-        buf.clearRetainingCapacity();
+        try serializeMaxDepth(maybe_recurse, .{}, bw, 2);
+        try std.testing.expectEqualStrings(".{ .r = .{} }", aw.getWritten());
+        aw.clearRetainingCapacity();
    }
 
    // Unchecked passing on recursive type
    {
        const maybe_recurse = Recurse{ .r = &.{} };
-        try serializeArbitraryDepth(maybe_recurse, .{}, buf.writer());
-        try std.testing.expectEqualStrings(".{ .r = .{} }", buf.items);
-        buf.clearRetainingCapacity();
+        try serializeArbitraryDepth(maybe_recurse, .{}, bw);
+        try std.testing.expectEqualStrings(".{ .r = .{} }", aw.getWritten());
+        aw.clearRetainingCapacity();
    }
 
    // Max depth failing on recursive type due to depth
@@ -1975,10 +1948,10 @@
        maybe_recurse.r = &.{.{ .r = &.{} }};
        try std.testing.expectError(
            error.ExceededMaxDepth,
-            serializeMaxDepth(maybe_recurse, .{}, buf.writer(), 2),
+            serializeMaxDepth(maybe_recurse, .{}, bw, 2),
        );
-        try std.testing.expectEqualStrings("", buf.items);
-        buf.clearRetainingCapacity();
+        try std.testing.expectEqualStrings("", aw.getWritten());
+        aw.clearRetainingCapacity();
    }
 
    // Same but for a slice
@@ -1988,23 +1961,23 @@
        try std.testing.expectError(
            error.ExceededMaxDepth,
-            serializeMaxDepth(maybe_recurse, .{}, buf.writer(), 2),
+            serializeMaxDepth(maybe_recurse, .{}, bw, 2),
        );
-        try std.testing.expectEqualStrings("", buf.items);
-        buf.clearRetainingCapacity();
+        try std.testing.expectEqualStrings("", aw.getWritten());
+        aw.clearRetainingCapacity();
 
-        var sz = serializer(buf.writer(), .{});
+        var s: Serializer = .{ .writer = bw };
        try std.testing.expectError(
            error.ExceededMaxDepth,
-            sz.tupleMaxDepth(maybe_recurse, .{}, 2),
+            s.tupleMaxDepth(maybe_recurse, .{}, 2),
        );
-        try std.testing.expectEqualStrings("", buf.items);
-        buf.clearRetainingCapacity();
+        try std.testing.expectEqualStrings("", aw.getWritten());
+        aw.clearRetainingCapacity();
 
-        try sz.tupleArbitraryDepth(maybe_recurse, .{});
-        try std.testing.expectEqualStrings(".{.{ .r = .{} }}", buf.items);
-        buf.clearRetainingCapacity();
+        try s.tupleArbitraryDepth(maybe_recurse, .{});
+        try std.testing.expectEqualStrings(".{.{ .r = .{} }}", aw.getWritten());
+        aw.clearRetainingCapacity();
    }
 
    // A slice succeeding
@@ -2012,19 +1985,19 @@
        var temp: [1]Recurse = .{.{ .r = &.{} }};
        const maybe_recurse: []const Recurse = &temp;
 
-        try serializeMaxDepth(maybe_recurse, .{}, buf.writer(), 3);
-        try std.testing.expectEqualStrings(".{.{ .r = .{} }}", buf.items);
-        buf.clearRetainingCapacity();
+        try serializeMaxDepth(maybe_recurse, .{}, bw, 3);
+        try std.testing.expectEqualStrings(".{.{ .r = .{} }}", aw.getWritten());
+        aw.clearRetainingCapacity();
 
-        var sz = serializer(buf.writer(), .{});
+        var s: Serializer = .{ .writer = bw };
 
-        try sz.tupleMaxDepth(maybe_recurse, .{}, 3);
-        try std.testing.expectEqualStrings(".{.{ .r = .{} }}", buf.items);
-        buf.clearRetainingCapacity();
+        try s.tupleMaxDepth(maybe_recurse, .{}, 3);
+        try std.testing.expectEqualStrings(".{.{ .r = .{} }}", aw.getWritten());
+        aw.clearRetainingCapacity();
 
-        try sz.tupleArbitraryDepth(maybe_recurse, .{});
-        try std.testing.expectEqualStrings(".{.{ .r = .{} }}", buf.items);
-        buf.clearRetainingCapacity();
+        try s.tupleArbitraryDepth(maybe_recurse, .{});
+        try std.testing.expectEqualStrings(".{.{ .r = .{} }}", aw.getWritten());
+        aw.clearRetainingCapacity();
    }
 
    // Max depth failing on recursive type due to recursion
@@ -2035,46 +2008,46 @@
        try std.testing.expectError(
            error.ExceededMaxDepth,
-            serializeMaxDepth(maybe_recurse, .{}, buf.writer(), 128),
+            serializeMaxDepth(maybe_recurse, .{}, bw, 128),
        );
-        try std.testing.expectEqualStrings("", buf.items);
-        buf.clearRetainingCapacity();
+        try std.testing.expectEqualStrings("", aw.getWritten());
+        aw.clearRetainingCapacity();
 
-        var sz = serializer(buf.writer(), .{});
+        var s: Serializer = .{ .writer = bw };
        try std.testing.expectError(
            error.ExceededMaxDepth,
-            sz.tupleMaxDepth(maybe_recurse, .{}, 128),
+            s.tupleMaxDepth(maybe_recurse, .{}, 128),
        );
-        try std.testing.expectEqualStrings("", buf.items);
-        buf.clearRetainingCapacity();
+        try std.testing.expectEqualStrings("", aw.getWritten());
+        aw.clearRetainingCapacity();
    }
 
    // Max depth on other parts of the lower level API
    {
-        var sz = serializer(buf.writer(), .{});
+        var s: Serializer = .{ .writer = bw };
 
        const maybe_recurse: []const Recurse = &.{};
 
-        try std.testing.expectError(error.ExceededMaxDepth, sz.valueMaxDepth(1, .{}, 0));
-        try sz.valueMaxDepth(2, .{}, 1);
-        try sz.value(3, .{});
-        try sz.valueArbitraryDepth(maybe_recurse, .{});
+        try std.testing.expectError(error.ExceededMaxDepth, s.valueMaxDepth(1, .{}, 0));
+        try s.valueMaxDepth(2, .{}, 1);
+        try s.value(3, .{});
+        try s.valueArbitraryDepth(maybe_recurse, .{});
 
-        var s = try sz.beginStruct(.{});
-        try std.testing.expectError(error.ExceededMaxDepth, s.fieldMaxDepth("a", 1, .{}, 0));
-        try s.fieldMaxDepth("b", 4, .{}, 1);
-        try s.field("c", 5, .{});
-        try s.fieldArbitraryDepth("d", maybe_recurse, .{});
-        try s.end();
+        var wip_struct = try s.beginStruct(.{});
+        try std.testing.expectError(error.ExceededMaxDepth, wip_struct.fieldMaxDepth("a", 1, .{}, 0));
+        try wip_struct.fieldMaxDepth("b", 4, .{}, 1);
+        try wip_struct.field("c", 5, .{});
+        try wip_struct.fieldArbitraryDepth("d", maybe_recurse, .{});
+        try wip_struct.end();
 
-        var t = try sz.beginTuple(.{});
+        var t = try s.beginTuple(.{});
        try std.testing.expectError(error.ExceededMaxDepth, t.fieldMaxDepth(1, .{}, 0));
        try t.fieldMaxDepth(6, .{}, 1);
        try t.field(7, .{});
        try t.fieldArbitraryDepth(maybe_recurse, .{});
        try t.end();
 
-        var a = try sz.beginTuple(.{});
+        var a = try s.beginTuple(.{});
        try std.testing.expectError(error.ExceededMaxDepth, a.fieldMaxDepth(1, .{}, 0));
        try a.fieldMaxDepth(8, .{}, 1);
        try a.field(9, .{});
@@ -2095,7 +2068,7 @@ test "std.zon depth limits" {
            \\    9,
            \\    .{},
            \\}
-        , buf.items);
+        , aw.getWritten());
    }
 }
 
@@ -2191,42 +2164,42 @@ test "std.zon stringify primitives" {
 }
 
 test "std.zon stringify ident" {
-    var buf = std.ArrayList(u8).init(std.testing.allocator);
-    defer buf.deinit();
-    var sz = serializer(buf.writer(), .{});
+    var aw: std.io.AllocatingWriter = undefined;
+    defer aw.deinit();
+    var s: Serializer = .{ .writer = aw.init(std.testing.allocator) };
 
    try expectSerializeEqual(".{ .a = 0 }", .{ .a = 0 }, .{});
 
-    try sz.ident("a");
-    try std.testing.expectEqualStrings(".a", buf.items);
-    buf.clearRetainingCapacity();
+    try s.ident("a");
+    try std.testing.expectEqualStrings(".a", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.ident("foo_1");
-    try std.testing.expectEqualStrings(".foo_1", buf.items);
-    buf.clearRetainingCapacity();
+    try s.ident("foo_1");
+    try std.testing.expectEqualStrings(".foo_1", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.ident("_foo_1");
-    try std.testing.expectEqualStrings("._foo_1", buf.items);
-    buf.clearRetainingCapacity();
+    try s.ident("_foo_1");
+    try std.testing.expectEqualStrings("._foo_1", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.ident("foo bar");
-    try std.testing.expectEqualStrings(".@\"foo bar\"", buf.items);
-    buf.clearRetainingCapacity();
+    try s.ident("foo bar");
+    try std.testing.expectEqualStrings(".@\"foo bar\"", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.ident("1foo");
-    try std.testing.expectEqualStrings(".@\"1foo\"", buf.items);
-    buf.clearRetainingCapacity();
+    try s.ident("1foo");
+    try std.testing.expectEqualStrings(".@\"1foo\"", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.ident("var");
-    try std.testing.expectEqualStrings(".@\"var\"", buf.items);
-    buf.clearRetainingCapacity();
+    try s.ident("var");
-    try std.testing.expectEqualStrings(".@\"var\"", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.ident("true");
-    try std.testing.expectEqualStrings(".true", buf.items);
-    buf.clearRetainingCapacity();
+    try s.ident("true");
+    try std.testing.expectEqualStrings(".true", aw.getWritten());
+    aw.clearRetainingCapacity();
 
-    try sz.ident("_");
-    try std.testing.expectEqualStrings("._", buf.items);
-    buf.clearRetainingCapacity();
+    try s.ident("_");
+    try std.testing.expectEqualStrings("._", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    const Enum = enum {
        @"foo bar",
@@ -2238,40 +2211,40 @@ test "std.zon stringify ident" {
 }
 
 test "std.zon stringify as tuple" {
-    var buf = std.ArrayList(u8).init(std.testing.allocator);
-    defer buf.deinit();
-    var sz = serializer(buf.writer(), .{});
+    var aw: std.io.AllocatingWriter = undefined;
+    defer aw.deinit();
+    var s: Serializer = .{ .writer = aw.init(std.testing.allocator) };
 
    // Tuples
-    try sz.tuple(.{ 1, 2 }, .{});
-    try std.testing.expectEqualStrings(".{ 1, 2 }", buf.items);
-    buf.clearRetainingCapacity();
+    try s.tuple(.{ 1, 2 }, .{});
+    try std.testing.expectEqualStrings(".{ 1, 2 }", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Slice
-    try sz.tuple(@as([]const u8, &.{ 1, 2 }), .{});
-    try std.testing.expectEqualStrings(".{ 1, 2 }", buf.items);
-    buf.clearRetainingCapacity();
+    try s.tuple(@as([]const u8, &.{ 1, 2 }), .{});
+    try std.testing.expectEqualStrings(".{ 1, 2 }", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Array
-    try sz.tuple([2]u8{ 1, 2 }, .{});
-    try std.testing.expectEqualStrings(".{ 1, 2 }", buf.items);
-    buf.clearRetainingCapacity();
+    try s.tuple([2]u8{ 1, 2 }, .{});
+    try std.testing.expectEqualStrings(".{ 1, 2 }", aw.getWritten());
+    aw.clearRetainingCapacity();
 }
 
 test "std.zon stringify as float" {
-    var buf = std.ArrayList(u8).init(std.testing.allocator);
-    defer buf.deinit();
-    var sz = serializer(buf.writer(), .{});
+    var aw: std.io.AllocatingWriter = undefined;
+    defer aw.deinit();
+    var s: Serializer = .{ .writer = aw.init(std.testing.allocator) };
 
    // Comptime float
-    try sz.float(2.5);
-    try std.testing.expectEqualStrings("2.5", buf.items);
-    buf.clearRetainingCapacity();
+    try s.float(2.5);
+    try std.testing.expectEqualStrings("2.5", aw.getWritten());
+    aw.clearRetainingCapacity();
 
    // Sized float
-    try sz.float(@as(f32, 2.5));
-    try std.testing.expectEqualStrings("2.5", buf.items);
-    buf.clearRetainingCapacity();
+    try s.float(@as(f32, 2.5));
+    try std.testing.expectEqualStrings("2.5", aw.getWritten());
+    aw.clearRetainingCapacity();
 }
 
 test "std.zon stringify vector" {
@@ -2363,13 +2336,13 @@ test "std.zon pointers" {
 }
 
 test "std.zon tuple/struct field" {
-    var buf = std.ArrayList(u8).init(std.testing.allocator);
-    defer buf.deinit();
-    var sz = serializer(buf.writer(), .{});
+    var aw: std.io.AllocatingWriter = undefined;
+    defer aw.deinit();
+    var s: Serializer = .{ .writer = aw.init(std.testing.allocator) };
 
    // Test on structs
    {
-        var root = try sz.beginStruct(.{});
+        var root = try s.beginStruct(.{});
        {
            var tuple = try root.beginTupleField("foo", .{});
            try tuple.field(0, .{});
@@ -2395,13 +2368,13 @@
            \\        .b = 1,
            \\    },
            \\}
-        , buf.items);
-        buf.clearRetainingCapacity();
+        , aw.getWritten());
+        aw.clearRetainingCapacity();
    }
 
    // Test on tuples
    {
-        var root = try sz.beginTuple(.{});
+        var root = try s.beginTuple(.{});
        {
            var tuple = try root.beginTupleField(.{});
            try tuple.field(0, .{});
@@ -2427,7 +2400,7 @@
            \\        .b = 1,
            \\    },
            \\}
-        , buf.items);
-        buf.clearRetainingCapacity();
+        , aw.getWritten());
+        aw.clearRetainingCapacity();
    }
 }