Mirror of https://github.com/ziglang/zig.git (synced 2025-12-30 18:13:19 +00:00)

std.zig.Render: update it and references

commit 2d5d2ba4f5 (parent 0389b4c7b9)
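For orientation, the calling pattern this commit migrates to can be summarized with a small sketch based on the renderAlloc and render changes in the diff below; treat it as an illustration rather than the exact upstream code. Rendering now targets a *std.io.Writer, and allocating callers translate error.WriteFailed back into error.OutOfMemory.

const std = @import("std");

// Hypothetical helper mirroring the new renderAlloc shown in the diff:
// render into an allocating writer and hand the formatted bytes to the caller.
fn renderToOwnedSlice(gpa: std.mem.Allocator, tree: std.zig.Ast) error{OutOfMemory}![]u8 {
    var aw: std.io.Writer.Allocating = .init(gpa);
    defer aw.deinit();
    tree.render(gpa, &aw.writer, .{}) catch |err| switch (err) {
        // The render error set is {OutOfMemory, WriteFailed}; for an
        // allocating writer a write failure can only mean allocation failure.
        error.WriteFailed, error.OutOfMemory => return error.OutOfMemory,
    };
    return aw.toOwnedSlice();
}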
@@ -2475,6 +2475,12 @@ pub const Allocating = struct {
return result;
}

pub fn ensureUnusedCapacity(a: *Allocating, additional_count: usize) Allocator.Error!void {
var list = a.toArrayList();
defer a.setArrayList(list);
return list.ensureUnusedCapacity(a.allocator, additional_count);
}

pub fn toOwnedSlice(a: *Allocating) error{OutOfMemory}![]u8 {
var list = a.toArrayList();
defer a.setArrayList(list);
@@ -207,7 +207,7 @@ pub fn renderAlloc(tree: Ast, gpa: Allocator) error{OutOfMemory}![]u8 {
var aw: std.io.Writer.Allocating = .init(gpa);
defer aw.deinit();
render(tree, gpa, &aw.writer, .{}) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
error.WriteFailed, error.OutOfMemory => return error.OutOfMemory,
};
return aw.toOwnedSlice();
}
@@ -215,7 +215,7 @@ pub fn renderAlloc(tree: Ast, gpa: Allocator) error{OutOfMemory}![]u8 {
pub const Render = @import("Ast/Render.zig");

pub fn render(tree: Ast, gpa: Allocator, w: *Writer, fixups: Render.Fixups) Render.Error!void {
return Render.tree(gpa, w, tree, fixups);
return Render.renderTree(gpa, w, tree, fixups);
}

/// Returns an extra offset for column and byte offset of errors that
@@ -1,4 +1,4 @@
const std = @import("../std.zig");
const std = @import("../../std.zig");
const assert = std.debug.assert;
const mem = std.mem;
const Allocator = std.mem.Allocator;
@@ -6,13 +6,24 @@ const meta = std.meta;
const Ast = std.zig.Ast;
const Token = std.zig.Token;
const primitives = std.zig.primitives;
const Writer = std.io.Writer;

const Render = @This();

gpa: Allocator,
ais: *AutoIndentingStream,
tree: Ast,
fixups: Fixups,

const indent_delta = 4;
const asm_indent_delta = 2;

pub const Error = Ast.RenderError;

const Ais = AutoIndentingStream(std.ArrayList(u8).Writer);
pub const Error = error{
/// Ran out of memory allocating call stack frames to complete rendering.
OutOfMemory,
/// Transitive failure from
WriteFailed,
};

pub const Fixups = struct {
/// The key is the mut token (`var`/`const`) of the variable declaration
@@ -72,19 +83,12 @@ pub const Fixups = struct {
}
};

const Render = struct {
gpa: Allocator,
ais: *Ais,
tree: Ast,
fixups: Fixups,
};

pub fn renderTree(buffer: *std.ArrayList(u8), tree: Ast, fixups: Fixups) Error!void {
pub fn renderTree(gpa: Allocator, w: *Writer, tree: Ast, fixups: Fixups) Error!void {
assert(tree.errors.len == 0); // Cannot render an invalid tree.
var auto_indenting_stream = Ais.init(buffer, indent_delta);
var auto_indenting_stream: AutoIndentingStream = .init(gpa, w, indent_delta);
defer auto_indenting_stream.deinit();
var r: Render = .{
.gpa = buffer.allocator,
.gpa = gpa,
.ais = &auto_indenting_stream,
.tree = tree,
.fixups = fixups,
@@ -186,7 +190,7 @@ fn renderMember(
if (opt_callconv_expr.unwrap()) |callconv_expr| {
if (tree.nodeTag(callconv_expr) == .enum_literal) {
if (mem.eql(u8, "@\"inline\"", tree.tokenSlice(tree.nodeMainToken(callconv_expr)))) {
try ais.writer().writeAll("inline ");
try ais.underlying_writer.writeAll("inline ");
}
}
}
@@ -200,7 +204,7 @@ fn renderMember(
const lbrace = tree.nodeMainToken(body_node);
try renderToken(r, lbrace, .newline);
try discardAllParams(r, fn_proto);
try ais.writer().writeAll("@trap();");
try ais.writeAll("@trap();");
ais.popIndent();
try ais.insertNewline();
try renderToken(r, tree.lastToken(body_node), space); // rbrace
@@ -216,10 +220,9 @@ fn renderMember(
const name_ident = param.name_token.?;
assert(tree.tokenTag(name_ident) == .identifier);
if (r.fixups.unused_var_decls.contains(name_ident)) {
const w = ais.writer();
try w.writeAll("_ = ");
try w.writeAll(tokenSliceForRender(r.tree, name_ident));
try w.writeAll(";\n");
try ais.writeAll("_ = ");
try ais.writeAll(tokenSliceForRender(r.tree, name_ident));
try ais.writeAll(";\n");
}
}
var statements_buf: [2]Ast.Node.Index = undefined;
@@ -312,7 +315,7 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
const tree = r.tree;
const ais = r.ais;
if (r.fixups.replace_nodes_with_string.get(node)) |replacement| {
try ais.writer().writeAll(replacement);
try ais.writeAll(replacement);
try renderOnlySpace(r, space);
return;
} else if (r.fixups.replace_nodes_with_node.get(node)) |replacement| {
@@ -881,7 +884,7 @@ fn renderExpressionFixup(r: *Render, node: Ast.Node.Index, space: Space) Error!v
const ais = r.ais;
try renderExpression(r, node, space);
if (r.fixups.append_string_after_node.get(node)) |bytes| {
try ais.writer().writeAll(bytes);
try ais.writeAll(bytes);
}
}

@@ -1086,10 +1089,10 @@ fn renderVarDecl(
try renderVarDeclWithoutFixups(r, var_decl, ignore_comptime_token, space);
if (r.fixups.unused_var_decls.contains(var_decl.ast.mut_token + 1)) {
// Discard the variable like this: `_ = foo;`
const w = r.ais.writer();
try w.writeAll("_ = ");
try w.writeAll(tokenSliceForRender(r.tree, var_decl.ast.mut_token + 1));
try w.writeAll(";\n");
const ais = r.ais;
try ais.writeAll("_ = ");
try ais.writeAll(tokenSliceForRender(r.tree, var_decl.ast.mut_token + 1));
try ais.writeAll(";\n");
}
}

@@ -1567,7 +1570,7 @@ fn renderBuiltinCall(
defer r.gpa.free(new_string);

try renderToken(r, builtin_token + 1, .none); // (
try ais.writer().print("\"{f}\"", .{std.zig.fmtString(new_string)});
try ais.print("\"{f}\"", .{std.zig.fmtString(new_string)});
return renderToken(r, str_lit_token + 1, space); // )
}
}
@@ -2125,13 +2128,13 @@ fn renderArrayInit(

const section_exprs = row_exprs[0..section_end];

var sub_expr_buffer = std.ArrayList(u8).init(gpa);
var sub_expr_buffer: std.io.Writer.Allocating = .init(gpa);
defer sub_expr_buffer.deinit();

const sub_expr_buffer_starts = try gpa.alloc(usize, section_exprs.len + 1);
defer gpa.free(sub_expr_buffer_starts);

var auto_indenting_stream = Ais.init(&sub_expr_buffer, indent_delta);
var auto_indenting_stream: AutoIndentingStream = .init(gpa, &sub_expr_buffer.writer, indent_delta);
defer auto_indenting_stream.deinit();
var sub_render: Render = .{
.gpa = r.gpa,
@@ -2145,13 +2148,14 @@ fn renderArrayInit(
var single_line = true;
var contains_newline = false;
for (section_exprs, 0..) |expr, i| {
const start = sub_expr_buffer.items.len;
const start = sub_expr_buffer.getWritten().len;
sub_expr_buffer_starts[i] = start;

if (i + 1 < section_exprs.len) {
try renderExpression(&sub_render, expr, .none);
const width = sub_expr_buffer.items.len - start;
const this_contains_newline = mem.indexOfScalar(u8, sub_expr_buffer.items[start..], '\n') != null;
const written = sub_expr_buffer.getWritten();
const width = written.len - start;
const this_contains_newline = mem.indexOfScalar(u8, written[start..], '\n') != null;
contains_newline = contains_newline or this_contains_newline;
expr_widths[i] = width;
expr_newlines[i] = this_contains_newline;
@@ -2173,8 +2177,9 @@ fn renderArrayInit(
try renderExpression(&sub_render, expr, .comma);
ais.popSpace();

const width = sub_expr_buffer.items.len - start - 2;
const this_contains_newline = mem.indexOfScalar(u8, sub_expr_buffer.items[start .. sub_expr_buffer.items.len - 1], '\n') != null;
const written = sub_expr_buffer.getWritten();
const width = written.len - start - 2;
const this_contains_newline = mem.indexOfScalar(u8, written[start .. written.len - 1], '\n') != null;
contains_newline = contains_newline or this_contains_newline;
expr_widths[i] = width;
expr_newlines[i] = contains_newline;
@@ -2185,20 +2190,20 @@ fn renderArrayInit(
}
}
}
sub_expr_buffer_starts[section_exprs.len] = sub_expr_buffer.items.len;
sub_expr_buffer_starts[section_exprs.len] = sub_expr_buffer.getWritten().len;

// Render exprs in current section.
column_counter = 0;
for (section_exprs, 0..) |expr, i| {
const start = sub_expr_buffer_starts[i];
const end = sub_expr_buffer_starts[i + 1];
const expr_text = sub_expr_buffer.items[start..end];
const expr_text = sub_expr_buffer.getWritten()[start..end];
if (!expr_newlines[i]) {
try ais.writer().writeAll(expr_text);
try ais.writeAll(expr_text);
} else {
var by_line = std.mem.splitScalar(u8, expr_text, '\n');
var last_line_was_empty = false;
try ais.writer().writeAll(by_line.first());
try ais.writeAll(by_line.first());
while (by_line.next()) |line| {
if (std.mem.startsWith(u8, line, "//") and last_line_was_empty) {
try ais.insertNewline();
@@ -2206,7 +2211,7 @@ fn renderArrayInit(
try ais.maybeInsertNewline();
}
last_line_was_empty = (line.len == 0);
try ais.writer().writeAll(line);
try ais.writeAll(line);
}
}

@@ -2220,7 +2225,7 @@ fn renderArrayInit(
try renderToken(r, comma, .space); // ,
assert(column_widths[column_counter % row_size] >= expr_widths[i]);
const padding = column_widths[column_counter % row_size] - expr_widths[i];
try ais.writer().writeByteNTimes(' ', padding);
try ais.splatByteAll(' ', padding);

column_counter += 1;
continue;
@@ -2799,7 +2804,7 @@ fn renderToken(r: *Render, token_index: Ast.TokenIndex, space: Space) Error!void
const tree = r.tree;
const ais = r.ais;
const lexeme = tokenSliceForRender(tree, token_index);
try ais.writer().writeAll(lexeme);
try ais.writeAll(lexeme);
try renderSpace(r, token_index, lexeme.len, space);
}

@@ -2807,7 +2812,7 @@ fn renderTokenOverrideSpaceMode(r: *Render, token_index: Ast.TokenIndex, space:
const tree = r.tree;
const ais = r.ais;
const lexeme = tokenSliceForRender(tree, token_index);
try ais.writer().writeAll(lexeme);
try ais.writeAll(lexeme);
ais.enableSpaceMode(override_space);
defer ais.disableSpaceMode();
try renderSpace(r, token_index, lexeme.len, space);
@@ -2822,7 +2827,7 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space
if (space == .skip) return;

if (space == .comma and next_token_tag != .comma) {
try ais.writer().writeByte(',');
try ais.writeByte(',');
}
if (space == .semicolon or space == .comma) ais.enableSpaceMode(space);
defer ais.disableSpaceMode();
@@ -2833,7 +2838,7 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space
);
switch (space) {
.none => {},
.space => if (!comment) try ais.writer().writeByte(' '),
.space => if (!comment) try ais.writeByte(' '),
.newline => if (!comment) try ais.insertNewline(),

.comma => if (next_token_tag == .comma) {
@@ -2845,7 +2850,7 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space
.comma_space => if (next_token_tag == .comma) {
try renderToken(r, token_index + 1, .space);
} else if (!comment) {
try ais.writer().writeByte(' ');
try ais.writeByte(' ');
},

.semicolon => if (next_token_tag == .semicolon) {
@@ -2862,11 +2867,11 @@ fn renderOnlySpace(r: *Render, space: Space) Error!void {
const ais = r.ais;
switch (space) {
.none => {},
.space => try ais.writer().writeByte(' '),
.space => try ais.writeByte(' '),
.newline => try ais.insertNewline(),
.comma => try ais.writer().writeAll(",\n"),
.comma_space => try ais.writer().writeAll(", "),
.semicolon => try ais.writer().writeAll(";\n"),
.comma => try ais.writeAll(",\n"),
.comma_space => try ais.writeAll(", "),
.semicolon => try ais.writeAll(";\n"),
.skip => unreachable,
}
}
@@ -2883,7 +2888,7 @@ fn renderIdentifier(r: *Render, token_index: Ast.TokenIndex, space: Space, quote
const lexeme = tokenSliceForRender(tree, token_index);

if (r.fixups.rename_identifiers.get(lexeme)) |mangled| {
try r.ais.writer().writeAll(mangled);
try r.ais.writeAll(mangled);
try renderSpace(r, token_index, lexeme.len, space);
return;
}
@@ -2992,15 +2997,15 @@ fn renderQuotedIdentifier(r: *Render, token_index: Ast.TokenIndex, space: Space,
const lexeme = tokenSliceForRender(tree, token_index);
assert(lexeme.len >= 3 and lexeme[0] == '@');

if (!unquote) try ais.writer().writeAll("@\"");
if (!unquote) try ais.writeAll("@\"");
const contents = lexeme[2 .. lexeme.len - 1];
try renderIdentifierContents(ais.writer(), contents);
if (!unquote) try ais.writer().writeByte('\"');
try renderIdentifierContents(ais, contents);
if (!unquote) try ais.writeByte('\"');

try renderSpace(r, token_index, lexeme.len, space);
}

fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void {
fn renderIdentifierContents(ais: *AutoIndentingStream, bytes: []const u8) !void {
var pos: usize = 0;
while (pos < bytes.len) {
const byte = bytes[pos];
@@ -3013,23 +3018,23 @@ fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void {
.success => |codepoint| {
if (codepoint <= 0x7f) {
const buf = [1]u8{@as(u8, @intCast(codepoint))};
try std.fmt.format(writer, "{f}", .{std.zig.fmtString(&buf)});
try ais.print("{f}", .{std.zig.fmtString(&buf)});
} else {
try writer.writeAll(escape_sequence);
try ais.writeAll(escape_sequence);
}
},
.failure => {
try writer.writeAll(escape_sequence);
try ais.writeAll(escape_sequence);
},
}
},
0x00...('\\' - 1), ('\\' + 1)...0x7f => {
const buf = [1]u8{byte};
try std.fmt.format(writer, "{f}", .{std.zig.fmtString(&buf)});
try ais.print("{f}", .{std.zig.fmtString(&buf)});
pos += 1;
},
0x80...0xff => {
try writer.writeByte(byte);
try ais.writeByte(byte);
pos += 1;
},
}
@@ -3091,7 +3096,7 @@ fn renderComments(r: *Render, start: usize, end: usize) Error!bool {
} else if (index == start) {
// Otherwise if the first comment is on the same line as
// the token before it, prefix it with a single space.
try ais.writer().writeByte(' ');
try ais.writeByte(' ');
}
}

@@ -3108,11 +3113,11 @@ fn renderComments(r: *Render, start: usize, end: usize) Error!bool {
ais.disabled_offset = null;
} else if (ais.disabled_offset == null and mem.eql(u8, comment_content, "zig fmt: off")) {
// Write with the canonical single space.
try ais.writer().writeAll("// zig fmt: off\n");
try ais.writeAll("// zig fmt: off\n");
ais.disabled_offset = index;
} else {
// Write the comment minus trailing whitespace.
try ais.writer().print("{s}\n", .{trimmed_comment});
try ais.print("{s}\n", .{trimmed_comment});
}
}

@@ -3213,10 +3218,9 @@ fn discardAllParams(r: *Render, fn_proto_node: Ast.Node.Index) Error!void {
while (it.next()) |param| {
const name_ident = param.name_token.?;
assert(tree.tokenTag(name_ident) == .identifier);
const w = ais.writer();
try w.writeAll("_ = ");
try w.writeAll(tokenSliceForRender(r.tree, name_ident));
try w.writeAll(";\n");
try ais.writeAll("_ = ");
try ais.writeAll(tokenSliceForRender(r.tree, name_ident));
try ais.writeAll(";\n");
}
}

@@ -3269,11 +3273,11 @@ fn anythingBetween(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.TokenI
return false;
}

fn writeFixingWhitespace(writer: std.ArrayList(u8).Writer, slice: []const u8) Error!void {
fn writeFixingWhitespace(w: *Writer, slice: []const u8) Error!void {
for (slice) |byte| switch (byte) {
'\t' => try writer.writeAll(" " ** indent_delta),
'\t' => try w.splatByteAll(' ', indent_delta),
'\r' => {},
else => try writer.writeByte(byte),
else => try w.writeByte(byte),
};
}

@@ -3398,224 +3402,244 @@ fn rowSize(tree: Ast, exprs: []const Ast.Node.Index, rtoken: Ast.TokenIndex) usi
/// of the appropriate indentation level for them with pushSpace/popSpace.
/// This should be done whenever a scope that ends in a .semicolon or a
/// .comma is introduced.
fn AutoIndentingStream(comptime UnderlyingWriter: type) type {
return struct {
const Self = @This();
pub const WriteError = UnderlyingWriter.Error;
pub const Writer = std.io.GenericWriter(*Self, WriteError, write);
const AutoIndentingStream = struct {
underlying_writer: *Writer,

pub const IndentType = enum {
normal,
after_equals,
binop,
field_access,
};
const StackElem = struct {
indent_type: IndentType,
realized: bool,
};
const SpaceElem = struct {
space: Space,
indent_count: usize,
};
/// Offset into the source at which formatting has been disabled with
/// a `zig fmt: off` comment.
///
/// If non-null, the AutoIndentingStream will not write any bytes
/// to the underlying writer. It will however continue to track the
/// indentation level.
disabled_offset: ?usize = null,

underlying_writer: UnderlyingWriter,
indent_count: usize = 0,
indent_delta: usize,
indent_stack: std.ArrayList(StackElem),
space_stack: std.ArrayList(SpaceElem),
space_mode: ?usize = null,
disable_indent_committing: usize = 0,
current_line_empty: bool = true,
/// the most recently applied indent
applied_indent: usize = 0,

/// Offset into the source at which formatting has been disabled with
/// a `zig fmt: off` comment.
///
/// If non-null, the AutoIndentingStream will not write any bytes
/// to the underlying writer. It will however continue to track the
/// indentation level.
disabled_offset: ?usize = null,

indent_count: usize = 0,
indent_delta: usize,
indent_stack: std.ArrayList(StackElem),
space_stack: std.ArrayList(SpaceElem),
space_mode: ?usize = null,
disable_indent_committing: usize = 0,
current_line_empty: bool = true,
/// the most recently applied indent
applied_indent: usize = 0,

pub fn init(buffer: *std.ArrayList(u8), indent_delta_: usize) Self {
return .{
.underlying_writer = buffer.writer(),
.indent_delta = indent_delta_,
.indent_stack = std.ArrayList(StackElem).init(buffer.allocator),
.space_stack = std.ArrayList(SpaceElem).init(buffer.allocator),
};
}

pub fn deinit(self: *Self) void {
self.indent_stack.deinit();
self.space_stack.deinit();
}

pub fn writer(self: *Self) Writer {
return .{ .context = self };
}

pub fn write(self: *Self, bytes: []const u8) WriteError!usize {
if (bytes.len == 0)
return @as(usize, 0);

try self.applyIndent();
return self.writeNoIndent(bytes);
}

// Change the indent delta without changing the final indentation level
pub fn setIndentDelta(self: *Self, new_indent_delta: usize) void {
if (self.indent_delta == new_indent_delta) {
return;
} else if (self.indent_delta > new_indent_delta) {
assert(self.indent_delta % new_indent_delta == 0);
self.indent_count = self.indent_count * (self.indent_delta / new_indent_delta);
} else {
// assert that the current indentation (in spaces) in a multiple of the new delta
assert((self.indent_count * self.indent_delta) % new_indent_delta == 0);
self.indent_count = self.indent_count / (new_indent_delta / self.indent_delta);
}
self.indent_delta = new_indent_delta;
}

fn writeNoIndent(self: *Self, bytes: []const u8) WriteError!usize {
if (bytes.len == 0)
return @as(usize, 0);

if (self.disabled_offset == null) try self.underlying_writer.writeAll(bytes);
if (bytes[bytes.len - 1] == '\n')
self.resetLine();
return bytes.len;
}

pub fn insertNewline(self: *Self) WriteError!void {
_ = try self.writeNoIndent("\n");
}

fn resetLine(self: *Self) void {
self.current_line_empty = true;

if (self.disable_indent_committing > 0) return;

if (self.indent_stack.items.len > 0) {
// By default, we realize the most recent indentation scope.
var to_realize = self.indent_stack.items.len - 1;

if (self.indent_stack.items.len >= 2 and
self.indent_stack.items[to_realize - 1].indent_type == .after_equals and
self.indent_stack.items[to_realize - 1].realized and
self.indent_stack.items[to_realize].indent_type == .binop)
{
// If we are in a .binop scope and our direct parent is .after_equals, don't indent.
// This ensures correct indentation in the below example:
//
// const foo =
// (x >= 'a' and x <= 'z') or //<-- we are here
// (x >= 'A' and x <= 'Z');
//
return;
}

if (self.indent_stack.items[to_realize].indent_type == .field_access) {
// Only realize the top-most field_access in a chain.
while (to_realize > 0 and self.indent_stack.items[to_realize - 1].indent_type == .field_access)
to_realize -= 1;
}

if (self.indent_stack.items[to_realize].realized) return;
self.indent_stack.items[to_realize].realized = true;
self.indent_count += 1;
}
}

/// Disables indentation level changes during the next newlines until re-enabled.
pub fn disableIndentCommitting(self: *Self) void {
self.disable_indent_committing += 1;
}

pub fn enableIndentCommitting(self: *Self) void {
assert(self.disable_indent_committing > 0);
self.disable_indent_committing -= 1;
}

pub fn pushSpace(self: *Self, space: Space) !void {
try self.space_stack.append(.{ .space = space, .indent_count = self.indent_count });
}

pub fn popSpace(self: *Self) void {
_ = self.space_stack.pop();
}

/// Sets current indentation level to be the same as that of the last pushSpace.
pub fn enableSpaceMode(self: *Self, space: Space) void {
if (self.space_stack.items.len == 0) return;
const curr = self.space_stack.getLast();
if (curr.space != space) return;
self.space_mode = curr.indent_count;
}

pub fn disableSpaceMode(self: *Self) void {
self.space_mode = null;
}

pub fn lastSpaceModeIndent(self: *Self) usize {
if (self.space_stack.items.len == 0) return 0;
return self.space_stack.getLast().indent_count * self.indent_delta;
}

/// Insert a newline unless the current line is blank
pub fn maybeInsertNewline(self: *Self) WriteError!void {
if (!self.current_line_empty)
try self.insertNewline();
}

/// Push default indentation
/// Doesn't actually write any indentation.
/// Just primes the stream to be able to write the correct indentation if it needs to.
pub fn pushIndent(self: *Self, indent_type: IndentType) !void {
try self.indent_stack.append(.{ .indent_type = indent_type, .realized = false });
}

/// Forces an indentation level to be realized.
pub fn forcePushIndent(self: *Self, indent_type: IndentType) !void {
try self.indent_stack.append(.{ .indent_type = indent_type, .realized = true });
self.indent_count += 1;
}

pub fn popIndent(self: *Self) void {
if (self.indent_stack.pop().?.realized) {
assert(self.indent_count > 0);
self.indent_count -= 1;
}
}

pub fn indentStackEmpty(self: *Self) bool {
return self.indent_stack.items.len == 0;
}

/// Writes ' ' bytes if the current line is empty
fn applyIndent(self: *Self) WriteError!void {
const current_indent = self.currentIndent();
if (self.current_line_empty and current_indent > 0) {
if (self.disabled_offset == null) {
try self.underlying_writer.writeByteNTimes(' ', current_indent);
}
self.applied_indent = current_indent;
}
self.current_line_empty = false;
}

/// Checks to see if the most recent indentation exceeds the currently pushed indents
pub fn isLineOverIndented(self: *Self) bool {
if (self.current_line_empty) return false;
return self.applied_indent > self.currentIndent();
}

fn currentIndent(self: *Self) usize {
const indent_count = self.space_mode orelse self.indent_count;
return indent_count * self.indent_delta;
}
pub const IndentType = enum {
normal,
after_equals,
binop,
field_access,
};
}
const StackElem = struct {
indent_type: IndentType,
realized: bool,
};
const SpaceElem = struct {
space: Space,
indent_count: usize,
};

pub fn init(gpa: Allocator, w: *Writer, starting_indent_delta: usize) AutoIndentingStream {
return .{
.underlying_writer = w,
.indent_delta = starting_indent_delta,
.indent_stack = .init(gpa),
.space_stack = .init(gpa),
};
}

pub fn deinit(self: *AutoIndentingStream) void {
self.indent_stack.deinit();
self.space_stack.deinit();
}

pub fn writeAll(ais: *AutoIndentingStream, bytes: []const u8) Error!void {
if (bytes.len == 0) return;
try ais.applyIndent();
if (ais.disabled_offset == null) try ais.underlying_writer.writeAll(bytes);
if (bytes[bytes.len - 1] == '\n') ais.resetLine();
}

/// Assumes that if the printed data ends with a newline, it is directly
/// contained in the format string.
pub fn print(ais: *AutoIndentingStream, comptime format: []const u8, args: anytype) Error!void {
try ais.applyIndent();
if (ais.disabled_offset == null) try ais.underlying_writer.print(format, args);
if (format[format.len - 1] == '\n') ais.resetLine();
}

pub fn writeByte(ais: *AutoIndentingStream, byte: u8) Error!void {
try ais.applyIndent();
if (ais.disabled_offset == null) try ais.underlying_writer.writeByte(byte);
assert(byte != '\n');
}

pub fn splatByteAll(ais: *AutoIndentingStream, byte: u8, n: usize) Error!void {
assert(byte != '\n');
try ais.applyIndent();
if (ais.disabled_offset == null) try ais.underlying_writer.splatByteAll(byte, n);
}

// Change the indent delta without changing the final indentation level
pub fn setIndentDelta(ais: *AutoIndentingStream, new_indent_delta: usize) void {
if (ais.indent_delta == new_indent_delta) {
return;
} else if (ais.indent_delta > new_indent_delta) {
assert(ais.indent_delta % new_indent_delta == 0);
ais.indent_count = ais.indent_count * (ais.indent_delta / new_indent_delta);
} else {
// assert that the current indentation (in spaces) in a multiple of the new delta
assert((ais.indent_count * ais.indent_delta) % new_indent_delta == 0);
ais.indent_count = ais.indent_count / (new_indent_delta / ais.indent_delta);
}
ais.indent_delta = new_indent_delta;
}

pub fn insertNewline(ais: *AutoIndentingStream) Error!void {
if (ais.disabled_offset == null) try ais.underlying_writer.writeByte('\n');
ais.resetLine();
}

/// Insert a newline unless the current line is blank
pub fn maybeInsertNewline(ais: *AutoIndentingStream) Error!void {
if (!ais.current_line_empty)
try ais.insertNewline();
}

/// Push an indent that is automatically popped after being applied
pub fn pushIndentOneShot(ais: *AutoIndentingStream) void {
ais.indent_one_shot_count += 1;
ais.pushIndent();
}

/// Turns all one-shot indents into regular indents
/// Returns number of indents that must now be manually popped
pub fn lockOneShotIndent(ais: *AutoIndentingStream) usize {
const locked_count = ais.indent_one_shot_count;
ais.indent_one_shot_count = 0;
return locked_count;
}

/// Push an indent that should not take effect until the next line
pub fn pushIndentNextLine(ais: *AutoIndentingStream) void {
ais.indent_next_line += 1;
ais.pushIndent();
}

/// Checks to see if the most recent indentation exceeds the currently pushed indents
pub fn isLineOverIndented(ais: *AutoIndentingStream) bool {
if (ais.current_line_empty) return false;
return ais.applied_indent > ais.currentIndent();
}

fn resetLine(ais: *AutoIndentingStream) void {
ais.current_line_empty = true;

if (ais.disable_indent_committing > 0) return;

if (ais.indent_stack.items.len > 0) {
// By default, we realize the most recent indentation scope.
var to_realize = ais.indent_stack.items.len - 1;

if (ais.indent_stack.items.len >= 2 and
ais.indent_stack.items[to_realize - 1].indent_type == .after_equals and
ais.indent_stack.items[to_realize - 1].realized and
ais.indent_stack.items[to_realize].indent_type == .binop)
{
// If we are in a .binop scope and our direct parent is .after_equals, don't indent.
// This ensures correct indentation in the below example:
//
// const foo =
// (x >= 'a' and x <= 'z') or //<-- we are here
// (x >= 'A' and x <= 'Z');
//
return;
}

if (ais.indent_stack.items[to_realize].indent_type == .field_access) {
// Only realize the top-most field_access in a chain.
while (to_realize > 0 and ais.indent_stack.items[to_realize - 1].indent_type == .field_access)
to_realize -= 1;
}

if (ais.indent_stack.items[to_realize].realized) return;
ais.indent_stack.items[to_realize].realized = true;
ais.indent_count += 1;
}
}

/// Disables indentation level changes during the next newlines until re-enabled.
pub fn disableIndentCommitting(ais: *AutoIndentingStream) void {
ais.disable_indent_committing += 1;
}

pub fn enableIndentCommitting(ais: *AutoIndentingStream) void {
assert(ais.disable_indent_committing > 0);
ais.disable_indent_committing -= 1;
}

pub fn pushSpace(ais: *AutoIndentingStream, space: Space) !void {
try ais.space_stack.append(.{ .space = space, .indent_count = ais.indent_count });
}

pub fn popSpace(ais: *AutoIndentingStream) void {
_ = ais.space_stack.pop();
}

/// Sets current indentation level to be the same as that of the last pushSpace.
pub fn enableSpaceMode(ais: *AutoIndentingStream, space: Space) void {
if (ais.space_stack.items.len == 0) return;
const curr = ais.space_stack.getLast();
if (curr.space != space) return;
ais.space_mode = curr.indent_count;
}

pub fn disableSpaceMode(ais: *AutoIndentingStream) void {
ais.space_mode = null;
}

pub fn lastSpaceModeIndent(ais: *AutoIndentingStream) usize {
if (ais.space_stack.items.len == 0) return 0;
return ais.space_stack.getLast().indent_count * ais.indent_delta;
}

/// Push default indentation
/// Doesn't actually write any indentation.
/// Just primes the stream to be able to write the correct indentation if it needs to.
pub fn pushIndent(ais: *AutoIndentingStream, indent_type: IndentType) !void {
try ais.indent_stack.append(.{ .indent_type = indent_type, .realized = false });
}

/// Forces an indentation level to be realized.
pub fn forcePushIndent(ais: *AutoIndentingStream, indent_type: IndentType) !void {
try ais.indent_stack.append(.{ .indent_type = indent_type, .realized = true });
ais.indent_count += 1;
}

pub fn popIndent(ais: *AutoIndentingStream) void {
if (ais.indent_stack.pop().?.realized) {
assert(ais.indent_count > 0);
ais.indent_count -= 1;
}
}

pub fn indentStackEmpty(ais: *AutoIndentingStream) bool {
return ais.indent_stack.items.len == 0;
}

/// Writes ' ' bytes if the current line is empty
fn applyIndent(ais: *AutoIndentingStream) Error!void {
const current_indent = ais.currentIndent();
if (ais.current_line_empty and current_indent > 0) {
if (ais.disabled_offset == null) {
try ais.underlying_writer.splatByteAll(' ', current_indent);
}
ais.applied_indent = current_indent;
}
ais.current_line_empty = false;
}

fn currentIndent(ais: *AutoIndentingStream) usize {
const indent_count = ais.space_mode orelse ais.indent_count;
return indent_count * ais.indent_delta;
}
};

@@ -1,5 +1,16 @@
//! Ingests an `Ast` and produces a `Zoir`.

const std = @import("std");
const assert = std.debug.assert;
const mem = std.mem;
const Allocator = mem.Allocator;
const StringIndexAdapter = std.hash_map.StringIndexAdapter;
const StringIndexContext = std.hash_map.StringIndexContext;
const ZonGen = @This();
const Zoir = @import("Zoir.zig");
const Ast = @import("Ast.zig");
const Writer = std.io.Writer;

gpa: Allocator,
tree: Ast,

@@ -446,37 +457,44 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
}
}

fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 {
fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) error{ OutOfMemory, BadString }!u32 {
const gpa = zg.gpa;
const tree = zg.tree;
assert(tree.tokenTag(ident_token) == .identifier);
const ident_name = tree.tokenSlice(ident_token);
if (!mem.startsWith(u8, ident_name, "@")) {
const start = zg.string_bytes.items.len;
try zg.string_bytes.appendSlice(zg.gpa, ident_name);
try zg.string_bytes.appendSlice(gpa, ident_name);
return @intCast(start);
} else {
const offset = 1;
const start: u32 = @intCast(zg.string_bytes.items.len);
const raw_string = zg.tree.tokenSlice(ident_token)[offset..];
try zg.string_bytes.ensureUnusedCapacity(zg.gpa, raw_string.len);
switch (try std.zig.string_literal.parseWrite(zg.string_bytes.writer(zg.gpa), raw_string)) {
.success => {},
.failure => |err| {
try zg.lowerStrLitError(err, ident_token, raw_string, offset);
return error.BadString;
},
}

const slice = zg.string_bytes.items[start..];
if (mem.indexOfScalar(u8, slice, 0) != null) {
try zg.addErrorTok(ident_token, "identifier cannot contain null bytes", .{});
return error.BadString;
} else if (slice.len == 0) {
try zg.addErrorTok(ident_token, "identifier cannot be empty", .{});
return error.BadString;
}
return start;
}
const offset = 1;
const start: u32 = @intCast(zg.string_bytes.items.len);
const raw_string = zg.tree.tokenSlice(ident_token)[offset..];
try zg.string_bytes.ensureUnusedCapacity(gpa, raw_string.len);
const result = r: {
var aw: std.io.Writer.Allocating = .fromArrayList(gpa, &zg.string_bytes);
defer zg.string_bytes = aw.toArrayList();
break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
};
};
switch (result) {
.success => {},
.failure => |err| {
try zg.lowerStrLitError(err, ident_token, raw_string, offset);
return error.BadString;
},
}

const slice = zg.string_bytes.items[start..];
if (mem.indexOfScalar(u8, slice, 0) != null) {
try zg.addErrorTok(ident_token, "identifier cannot contain null bytes", .{});
return error.BadString;
} else if (slice.len == 0) {
try zg.addErrorTok(ident_token, "identifier cannot be empty", .{});
return error.BadString;
}
return start;
}

/// Estimates the size of a string node without parsing it.
@@ -507,8 +525,8 @@ pub fn strLitSizeHint(tree: Ast, node: Ast.Node.Index) usize {
pub fn parseStrLit(
tree: Ast,
node: Ast.Node.Index,
writer: anytype,
) error{OutOfMemory}!std.zig.string_literal.Result {
writer: *Writer,
) Writer.Error!std.zig.string_literal.Result {
switch (tree.nodeTag(node)) {
.string_literal => {
const token = tree.nodeMainToken(node);
@@ -543,15 +561,22 @@ const StringLiteralResult = union(enum) {
slice: struct { start: u32, len: u32 },
};

fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) !StringLiteralResult {
fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) error{ OutOfMemory, BadString }!StringLiteralResult {
if (!zg.options.parse_str_lits) return .{ .slice = .{ .start = 0, .len = 0 } };

const gpa = zg.gpa;
const string_bytes = &zg.string_bytes;
const str_index: u32 = @intCast(zg.string_bytes.items.len);
const size_hint = strLitSizeHint(zg.tree, str_node);
try string_bytes.ensureUnusedCapacity(zg.gpa, size_hint);
switch (try parseStrLit(zg.tree, str_node, zg.string_bytes.writer(zg.gpa))) {
try string_bytes.ensureUnusedCapacity(gpa, size_hint);
const result = r: {
var aw: std.io.Writer.Allocating = .fromArrayList(gpa, &zg.string_bytes);
defer zg.string_bytes = aw.toArrayList();
break :r parseStrLit(zg.tree, str_node, &aw.writer) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
};
};
switch (result) {
.success => {},
.failure => |err| {
const token = zg.tree.nodeMainToken(str_node);
@@ -793,10 +818,7 @@ fn lowerNumberError(zg: *ZonGen, err: std.zig.number_literal.Error, token: Ast.T

fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, args: anytype) Allocator.Error!Zoir.CompileError.Note {
const message_idx: u32 = @intCast(zg.string_bytes.items.len);
const writer = zg.string_bytes.writer(zg.gpa);
try writer.print(format, args);
try writer.writeByte(0);

try zg.string_bytes.print(zg.gpa, format ++ "\x00", args);
return .{
.msg = @enumFromInt(message_idx),
.token = .none,
@@ -806,10 +828,7 @@ fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, a

fn errNoteTok(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, args: anytype) Allocator.Error!Zoir.CompileError.Note {
const message_idx: u32 = @intCast(zg.string_bytes.items.len);
const writer = zg.string_bytes.writer(zg.gpa);
try writer.print(format, args);
try writer.writeByte(0);

try zg.string_bytes.print(zg.gpa, format ++ "\x00", args);
return .{
.msg = @enumFromInt(message_idx),
.token = .fromToken(tok),
@@ -850,9 +869,7 @@ fn addErrorInner(
try zg.error_notes.appendSlice(gpa, notes);

const message_idx: u32 = @intCast(zg.string_bytes.items.len);
const writer = zg.string_bytes.writer(zg.gpa);
try writer.print(format, args);
try writer.writeByte(0);
try zg.string_bytes.print(gpa, format ++ "\x00", args);

try zg.compile_errors.append(gpa, .{
.msg = @enumFromInt(message_idx),
@@ -868,8 +885,9 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void {
const tree = zg.tree;
assert(tree.errors.len > 0);

var msg: std.ArrayListUnmanaged(u8) = .empty;
defer msg.deinit(gpa);
var msg: std.io.Writer.Allocating = .init(gpa);
defer msg.deinit();
const msg_bw = &msg.writer;

var notes: std.ArrayListUnmanaged(Zoir.CompileError.Note) = .empty;
defer notes.deinit(gpa);
@@ -877,18 +895,20 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void {
var cur_err = tree.errors[0];
for (tree.errors[1..]) |err| {
if (err.is_note) {
try tree.renderError(err, msg.writer(gpa));
try notes.append(gpa, try zg.errNoteTok(err.token, "{s}", .{msg.items}));
tree.renderError(err, msg_bw) catch return error.OutOfMemory;
try notes.append(gpa, try zg.errNoteTok(err.token, "{s}", .{msg.getWritten()}));
} else {
// Flush error
try tree.renderError(cur_err, msg.writer(gpa));
tree.renderError(cur_err, msg_bw) catch return error.OutOfMemory;
const extra_offset = tree.errorOffset(cur_err);
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items);
notes.clearRetainingCapacity();
cur_err = err;

// TODO: `Parse` currently does not have good error recovery mechanisms, so the remaining errors could be bogus.
// As such, we'll ignore all remaining errors for now. We should improve `Parse` so that we can report all the errors.
// TODO: `Parse` currently does not have good error recovery
// mechanisms, so the remaining errors could be bogus. As such,
// we'll ignore all remaining errors for now. We should improve
// `Parse` so that we can report all the errors.
return;
}
msg.clearRetainingCapacity();
@@ -896,16 +916,6 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void {

// Flush error
const extra_offset = tree.errorOffset(cur_err);
try tree.renderError(cur_err, msg.writer(gpa));
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
tree.renderError(cur_err, msg_bw) catch return error.OutOfMemory;
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items);
}

const std = @import("std");
const assert = std.debug.assert;
const mem = std.mem;
const Allocator = mem.Allocator;
const StringIndexAdapter = std.hash_map.StringIndexAdapter;
const StringIndexContext = std.hash_map.StringIndexContext;
const ZonGen = @This();
const Zoir = @import("Zoir.zig");
const Ast = @import("Ast.zig");

@@ -6367,7 +6367,9 @@ test "ampersand" {
var fixed_buffer_mem: [100 * 1024]u8 = undefined;

fn testParse(source: [:0]const u8, allocator: mem.Allocator, anything_changed: *bool) ![]u8 {
const stderr = std.fs.File.stderr().deprecatedWriter();
var buffer: [64]u8 = undefined;
const stderr = std.debug.lockStderrWriter(&buffer);
defer std.debug.unlockStderrWriter();

var tree = try std.zig.Ast.parse(allocator, source, .zig);
defer tree.deinit(allocator);
@@ -6390,7 +6392,7 @@ fn testParse(source: [:0]const u8, allocator: mem.Allocator, anything_changed: *
return error.ParseError;
}

const formatted = try tree.render(allocator);
const formatted = try tree.renderAlloc(allocator);
anything_changed.* = !mem.eql(u8, formatted, source);
return formatted;
}

@@ -1,6 +1,7 @@
const std = @import("../std.zig");
const assert = std.debug.assert;
const utf8Encode = std.unicode.utf8Encode;
const Writer = std.io.Writer;

pub const ParseError = error{
OutOfMemory,
@@ -315,9 +316,10 @@ test parseCharLiteral {
);
}

/// Parses `bytes` as a Zig string literal and writes the result to the `std.io.GenericWriter` type.
/// Parses `bytes` as a Zig string literal and writes the result to the `Writer` type.
///
/// Asserts `bytes` has '"' at beginning and end.
pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result {
pub fn parseWrite(writer: *Writer, bytes: []const u8) Writer.Error!Result {
assert(bytes.len >= 2 and bytes[0] == '"' and bytes[bytes.len - 1] == '"');

var index: usize = 1;
@@ -333,18 +335,18 @@ pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result
if (bytes[escape_char_index] == 'u') {
var buf: [4]u8 = undefined;
const len = utf8Encode(codepoint, &buf) catch {
return Result{ .failure = .{ .invalid_unicode_codepoint = escape_char_index + 1 } };
return .{ .failure = .{ .invalid_unicode_codepoint = escape_char_index + 1 } };
};
try writer.writeAll(buf[0..len]);
} else {
try writer.writeByte(@as(u8, @intCast(codepoint)));
}
},
.failure => |err| return Result{ .failure = err },
.failure => |err| return .{ .failure = err },
}
},
'\n' => return Result{ .failure = .{ .invalid_character = index } },
'"' => return Result.success,
'\n' => return .{ .failure = .{ .invalid_character = index } },
'"' => return .success,
else => {
try writer.writeByte(b);
index += 1;
@@ -356,11 +358,13 @@ pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result
/// Higher level API. Does not return extra info about parse errors.
/// Caller owns returned memory.
pub fn parseAlloc(allocator: std.mem.Allocator, bytes: []const u8) ParseError![]u8 {
var buf = std.ArrayList(u8).init(allocator);
defer buf.deinit();

switch (try parseWrite(buf.writer(), bytes)) {
.success => return buf.toOwnedSlice(),
var aw: std.io.Writer.Allocating = .init(allocator);
defer aw.deinit();
const result = parseWrite(&aw.writer, bytes) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
};
switch (result) {
.success => return aw.toOwnedSlice(),
.failure => return error.InvalidLiteral,
}
}

@@ -411,18 +411,22 @@ const Parser = struct {
diag: ?*Diagnostics,
options: Options,

fn parseExpr(self: *@This(), T: type, node: Zoir.Node.Index) error{ ParseZon, OutOfMemory }!T {
const ParseExprError = error{ ParseZon, OutOfMemory };

fn parseExpr(self: *@This(), T: type, node: Zoir.Node.Index) ParseExprError!T {
return self.parseExprInner(T, node) catch |err| switch (err) {
error.WrongType => return self.failExpectedType(T, node),
else => |e| return e,
};
}

const ParseExprInnerError = error{ ParseZon, OutOfMemory, WrongType };

fn parseExprInner(
self: *@This(),
T: type,
node: Zoir.Node.Index,
) error{ ParseZon, OutOfMemory, WrongType }!T {
) ParseExprInnerError!T {
if (T == Zoir.Node.Index) {
return node;
}
@@ -611,15 +615,17 @@ const Parser = struct {
}
}

fn parseString(self: *@This(), T: type, node: Zoir.Node.Index) !T {
fn parseString(self: *@This(), T: type, node: Zoir.Node.Index) ParseExprInnerError!T {
const ast_node = node.getAstNode(self.zoir);
const pointer = @typeInfo(T).pointer;
var size_hint = ZonGen.strLitSizeHint(self.ast, ast_node);
if (pointer.sentinel() != null) size_hint += 1;

var buf: std.ArrayListUnmanaged(u8) = try .initCapacity(self.gpa, size_hint);
defer buf.deinit(self.gpa);
switch (try ZonGen.parseStrLit(self.ast, ast_node, buf.writer(self.gpa))) {
var aw: std.Io.Writer.Allocating = .init(self.gpa);
try aw.ensureUnusedCapacity(size_hint);
defer aw.deinit();
const result = ZonGen.parseStrLit(self.ast, ast_node, &aw.writer) catch return error.OutOfMemory;
switch (result) {
.success => {},
.failure => |err| {
const token = self.ast.nodeMainToken(ast_node);
@@ -638,9 +644,9 @@ const Parser = struct {
}

if (pointer.sentinel() != null) {
return buf.toOwnedSliceSentinel(self.gpa, 0);
return aw.toOwnedSliceSentinel(0);
} else {
return buf.toOwnedSlice(self.gpa);
return aw.toOwnedSlice();
}
}
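The same mechanical replacement recurs throughout the diff: an std.ArrayList(u8) plus buf.writer() becomes a std.io.Writer.Allocating, and error.WriteFailed is mapped back to error.OutOfMemory at allocating call sites. A minimal, illustrative sketch of that pattern (identifiers below are placeholders, not upstream names):

const std = @import("std");

fn collectGreeting(gpa: std.mem.Allocator) error{OutOfMemory}![]u8 {
    // Before: var buf = std.ArrayList(u8).init(gpa); const w = buf.writer();
    var aw: std.io.Writer.Allocating = .init(gpa);
    defer aw.deinit();
    // For an allocating writer, a write failure can only be an allocation failure.
    aw.writer.writeAll("hello, ") catch return error.OutOfMemory;
    aw.writer.print("{s}!\n", .{"zig"}) catch return error.OutOfMemory;
    // Ownership of the accumulated bytes transfers to the caller.
    return aw.toOwnedSlice();
}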