stage2 parser performance/API improvements

* Extract the Call AST node tag out of SuffixOp; call parameters are
  stored in memory directly after the Call node (see the layout sketch
  after this list).
* Demote AsmInput and AsmOutput from AST nodes to structs inside the
  Asm node.
* The following AST nodes get their sub-node lists directly following
  them in memory:
  - ErrorSetDecl
  - Switch
  - BuiltinCall
* ast.Node.Asm gets slices for inputs, outputs, and clobbers instead of
  singly linked lists.
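
All of these changes lean on the same trailing-allocation layout: a node is allocated with extra bytes after it, and its child `*Node` pointers live in those bytes, so one allocation serves both. Below is a minimal sketch of the pattern using a hypothetical `Demo` node, written against the same 2020-era allocator and builtin signatures that appear in the diff (the real nodes are Call, Switch, ErrorSetDecl, and BuiltinCall):

```zig
const std = @import("std");
const mem = std.mem;

/// Stand-in for ast.Node; only here to keep the sketch self-contained.
const Node = struct { id: usize };

/// Hypothetical node type. Call, Switch, ErrorSetDecl, and BuiltinCall in the
/// diff below follow this shape: child `*Node` pointers are stored directly
/// after the struct, so one allocation holds both.
const Demo = struct {
    children_len: usize,

    /// Allocate the struct plus room for `children_len` trailing pointers.
    /// After this the caller must fill in children().
    pub fn alloc(allocator: *mem.Allocator, children_len: usize) !*Demo {
        const bytes = try allocator.alignedAlloc(u8, @alignOf(Demo), sizeInBytes(children_len));
        return @ptrCast(*Demo, bytes.ptr);
    }

    /// Free the struct and its trailing pointers in one call.
    pub fn free(self: *Demo, allocator: *mem.Allocator) void {
        allocator.free(@ptrCast([*]u8, self)[0..sizeInBytes(self.children_len)]);
    }

    /// Reinterpret the bytes just past the struct as a slice of child pointers.
    pub fn children(self: *Demo) []*Node {
        const start = @ptrCast([*]u8, self) + @sizeOf(Demo);
        return @ptrCast([*]*Node, start)[0..self.children_len];
    }

    fn sizeInBytes(children_len: usize) usize {
        return @sizeOf(Demo) + @sizeOf(*Node) * children_len;
    }
};
```

Compared with the old `LinkedList(*Node)` fields, each child now costs a single pointer and no separate list node.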

Performance changes:

throughput: 72.7 MiB/s => 74.0 MiB/s
maxrss: 72 KB => 69 KB (nice)
Andrew Kelley 2020-05-21 21:48:01 -04:00
parent 32ecb416f3
commit d37b81d43b
3 changed files with 516 additions and 433 deletions
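
The caller-facing half of the change is the switch from `LinkedList` fields to slices (and, for Call, accessor methods over the trailing storage). The rewritten render code in the third file shows it in context; as a standalone illustration of the two shapes, here is a sketch with toy stand-ins rather than the real `ast` types:

```zig
const std = @import("std");

/// Toy stand-ins, not the real ast types.
const Node = struct { name: []const u8 };

const LinkedParams = struct {
    pub const Item = struct { data: *Node, next: ?*Item };
    first: ?*Item = null,
};

pub fn main() void {
    var a = Node{ .name = "a" };
    var b = Node{ .name = "b" };

    // Before: parameters were a singly linked list, walked via .first / .next.
    var item_b = LinkedParams.Item{ .data = &b, .next = null };
    var item_a = LinkedParams.Item{ .data = &a, .next = &item_b };
    const old_params = LinkedParams{ .first = &item_a };
    var it = old_params.first;
    while (it) |item| : (it = item.next) {
        std.debug.warn("old: {}\n", .{item.data.name});
    }

    // After: parameters are a plain slice (in the real nodes, stored in the
    // memory directly after the node and exposed via params()/paramsConst()).
    const new_params = [_]*Node{ &a, &b };
    for (new_params) |param| {
        std.debug.warn("new: {}\n", .{param.name});
    }
}
```

The slice form also lets the renderer look ahead with `params[i + 1]` instead of chasing a `next` pointer, which the rewritten trailing-comma logic below relies on.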


@ -323,7 +323,7 @@ pub const Error = union(enum) {
node: *Node,
pub fn render(self: *const ExpectedCall, tokens: []const Token, stream: var) !void {
return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ ", found {}", .{
return stream.print("expected " ++ @tagName(Node.Id.Call) ++ ", found {}", .{
@tagName(self.node.id),
});
}
@ -333,7 +333,7 @@ pub const Error = union(enum) {
node: *Node,
pub fn render(self: *const ExpectedCallOrFnProto, tokens: []const Token, stream: var) !void {
return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ " or " ++
return stream.print("expected " ++ @tagName(Node.Id.Call) ++ " or " ++
@tagName(Node.Id.FnProto) ++ ", found {}", .{@tagName(self.node.id)});
}
};
@ -428,15 +428,19 @@ pub const Node = struct {
// Operators
InfixOp,
PrefixOp,
/// Not all suffix operations are under this tag. To save memory, some
/// suffix operations have dedicated Node tags.
SuffixOp,
/// This is a suffix operation but to save memory we have a dedicated Node id for it.
/// `T{a, b}`
ArrayInitializer,
/// ArrayInitializer but with `.` instead of a left-hand-side operand.
ArrayInitializerDot,
/// This is a suffix operation but to save memory we have a dedicated Node id for it.
/// `T{.a = b}`
StructInitializer,
/// StructInitializer but with `.` instead of a left-hand-side operand.
StructInitializerDot,
/// `foo()`
Call,
// Control flow
Switch,
@ -483,8 +487,6 @@ pub const Node = struct {
PointerIndexPayload,
ContainerField,
ErrorTag,
AsmInput,
AsmOutput,
FieldInitializer,
};
@ -780,13 +782,22 @@ pub const Node = struct {
pub const ErrorSetDecl = struct {
base: Node = Node{ .id = .ErrorSetDecl },
error_token: TokenIndex,
decls: DeclList,
rbrace_token: TokenIndex,
decls_len: NodeIndex,
pub const DeclList = LinkedList(*Node);
/// After this the caller must initialize the decls list.
pub fn alloc(allocator: *mem.Allocator, decls_len: NodeIndex) !*ErrorSetDecl {
const bytes = try allocator.alignedAlloc(u8, @alignOf(ErrorSetDecl), sizeInBytes(decls_len));
return @ptrCast(*ErrorSetDecl, bytes.ptr);
}
pub fn free(self: *ErrorSetDecl, allocator: *mem.Allocator) void {
const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.decls_len)];
allocator.free(bytes);
}
pub fn iterate(self: *const ErrorSetDecl) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.decls.first };
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const ErrorSetDecl, it: *Node.Iterator) ?*Node {
@ -802,6 +813,20 @@ pub const Node = struct {
pub fn lastToken(self: *const ErrorSetDecl) TokenIndex {
return self.rbrace_token;
}
pub fn decls(self: *ErrorSetDecl) []*Node {
const decls_start = @ptrCast([*]u8, self) + @sizeOf(ErrorSetDecl);
return @ptrCast([*]*Node, decls_start)[0..self.decls_len];
}
pub fn declsConst(self: *const ErrorSetDecl) []const *Node {
const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ErrorSetDecl);
return @ptrCast([*]const *Node, decls_start)[0..self.decls_len];
}
fn sizeInBytes(decls_len: NodeIndex) usize {
return @sizeOf(ErrorSetDecl) + @sizeOf(*Node) * @as(usize, decls_len);
}
};
/// The fields and decls Node pointers directly follow this struct in memory.
@ -1464,19 +1489,28 @@ pub const Node = struct {
}
};
/// The cases node pointers are found in memory after Switch.
/// They must be SwitchCase or SwitchElse nodes.
pub const Switch = struct {
base: Node = Node{ .id = .Switch },
switch_token: TokenIndex,
rbrace: TokenIndex,
cases_len: NodeIndex,
expr: *Node,
/// these must be SwitchCase nodes
cases: CaseList,
rbrace: TokenIndex,
/// After this the caller must initialize the cases list.
pub fn alloc(allocator: *mem.Allocator, cases_len: NodeIndex) !*Switch {
const bytes = try allocator.alignedAlloc(u8, @alignOf(Switch), sizeInBytes(cases_len));
return @ptrCast(*Switch, bytes.ptr);
}
pub const CaseList = LinkedList(*Node);
pub fn free(self: *Switch, allocator: *mem.Allocator) void {
const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.cases_len)];
allocator.free(bytes);
}
pub fn iterate(self: *const Switch) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.cases.first };
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Switch, it: *Node.Iterator) ?*Node {
@ -1502,6 +1536,20 @@ pub const Node = struct {
pub fn lastToken(self: *const Switch) TokenIndex {
return self.rbrace;
}
pub fn cases(self: *Switch) []*Node {
const decls_start = @ptrCast([*]u8, self) + @sizeOf(Switch);
return @ptrCast([*]*Node, decls_start)[0..self.cases_len];
}
pub fn casesConst(self: *const Switch) []const *Node {
const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Switch);
return @ptrCast([*]const *Node, decls_start)[0..self.cases_len];
}
fn sizeInBytes(cases_len: NodeIndex) usize {
return @sizeOf(Switch) + @sizeOf(*Node) * @as(usize, cases_len);
}
};
pub const SwitchCase = struct {
@ -2120,6 +2168,66 @@ pub const Node = struct {
}
};
/// Parameter nodes directly follow Call in memory.
pub const Call = struct {
base: Node = Node{ .id = .Call },
lhs: *Node,
rtoken: TokenIndex,
params_len: NodeIndex,
async_token: ?TokenIndex,
/// After this the caller must initialize the params list.
pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*Call {
const bytes = try allocator.alignedAlloc(u8, @alignOf(Call), sizeInBytes(params_len));
return @ptrCast(*Call, bytes.ptr);
}
pub fn free(self: *Call, allocator: *mem.Allocator) void {
const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)];
allocator.free(bytes);
}
pub fn iterate(self: *const Call) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null};
}
pub fn iterateNext(self: *const Call, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.lhs;
i -= 1;
if (i < self.params_len) return self.paramsConst()[i];
i -= self.params_len;
return null;
}
pub fn firstToken(self: *const Call) TokenIndex {
if (self.async_token) |async_token| return async_token;
return self.lhs.firstToken();
}
pub fn lastToken(self: *const Call) TokenIndex {
return self.rtoken;
}
pub fn params(self: *Call) []*Node {
const decls_start = @ptrCast([*]u8, self) + @sizeOf(Call);
return @ptrCast([*]*Node, decls_start)[0..self.params_len];
}
pub fn paramsConst(self: *const Call) []const *Node {
const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Call);
return @ptrCast([*]const *Node, decls_start)[0..self.params_len];
}
fn sizeInBytes(params_len: NodeIndex) usize {
return @sizeOf(Call) + @sizeOf(*Node) * @as(usize, params_len);
}
};
pub const SuffixOp = struct {
base: Node = Node{ .id = .SuffixOp },
op: Op,
@ -2127,19 +2235,11 @@ pub const Node = struct {
rtoken: TokenIndex,
pub const Op = union(enum) {
Call: Call,
ArrayAccess: *Node,
Slice: Slice,
Deref,
UnwrapOptional,
pub const Call = struct {
params: ParamList,
async_token: ?TokenIndex,
pub const ParamList = LinkedList(*Node);
};
pub const Slice = struct {
start: *Node,
end: ?*Node,
@ -2148,12 +2248,7 @@ pub const Node = struct {
};
pub fn iterate(self: *const SuffixOp) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0,
.node = switch(self.op) {
.Call => |call| call.params.first,
else => null,
},
};
return .{ .parent_node = &self.base, .index = 0, .node = null};
}
pub fn iterateNext(self: *const SuffixOp, it: *Node.Iterator) ?*Node {
@ -2164,13 +2259,6 @@ pub const Node = struct {
i -= 1;
switch (self.op) {
.Call => |call_info| {
if (it.node) |child| {
it.index -= 1;
it.node = child.next;
return child.data;
}
},
.ArrayAccess => |index_expr| {
if (i < 1) return index_expr;
i -= 1;
@ -2197,10 +2285,6 @@ pub const Node = struct {
}
pub fn firstToken(self: *const SuffixOp) TokenIndex {
switch (self.op) {
.Call => |*call_info| if (call_info.async_token) |async_token| return async_token,
else => {},
}
return self.lhs.firstToken();
}
@ -2396,22 +2480,36 @@ pub const Node = struct {
}
};
/// Parameters are in memory following BuiltinCall.
pub const BuiltinCall = struct {
base: Node = Node{ .id = .BuiltinCall },
params_len: NodeIndex,
builtin_token: TokenIndex,
params: ParamList,
rparen_token: TokenIndex,
pub const ParamList = LinkedList(*Node);
/// After this the caller must initialize the params list.
pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*BuiltinCall {
const bytes = try allocator.alignedAlloc(u8, @alignOf(BuiltinCall), sizeInBytes(params_len));
return @ptrCast(*BuiltinCall, bytes.ptr);
}
pub fn free(self: *BuiltinCall, allocator: *mem.Allocator) void {
const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)];
allocator.free(bytes);
}
pub fn iterate(self: *const BuiltinCall) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.params.first };
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const BuiltinCall, it: *Node.Iterator) ?*Node {
const param = it.node orelse return null;
it.node = param.next;
return param.data;
var i = it.index;
it.index += 1;
if (i < self.params_len) return self.paramsConst()[i];
i -= self.params_len;
return null;
}
pub fn firstToken(self: *const BuiltinCall) TokenIndex {
@ -2421,6 +2519,20 @@ pub const Node = struct {
pub fn lastToken(self: *const BuiltinCall) TokenIndex {
return self.rparen_token;
}
pub fn params(self: *BuiltinCall) []*Node {
const decls_start = @ptrCast([*]u8, self) + @sizeOf(BuiltinCall);
return @ptrCast([*]*Node, decls_start)[0..self.params_len];
}
pub fn paramsConst(self: *const BuiltinCall) []const *Node {
const decls_start = @ptrCast([*]const u8, self) + @sizeOf(BuiltinCall);
return @ptrCast([*]const *Node, decls_start)[0..self.params_len];
}
fn sizeInBytes(params_len: NodeIndex) usize {
return @sizeOf(BuiltinCall) + @sizeOf(*Node) * @as(usize, params_len);
}
};
pub const StringLiteral = struct {
@ -2554,106 +2666,102 @@ pub const Node = struct {
}
};
pub const AsmOutput = struct {
base: Node = Node{ .id = .AsmOutput },
lbracket: TokenIndex,
symbolic_name: *Node,
constraint: *Node,
kind: Kind,
rparen: TokenIndex,
pub const Kind = union(enum) {
Variable: *Identifier,
Return: *Node,
};
pub fn iterate(self: *const AsmOutput) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const AsmOutput, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.symbolic_name;
i -= 1;
if (i < 1) return self.constraint;
i -= 1;
switch (self.kind) {
.Variable => |variable_name| {
if (i < 1) return &variable_name.base;
i -= 1;
},
.Return => |return_type| {
if (i < 1) return return_type;
i -= 1;
},
}
return null;
}
pub fn firstToken(self: *const AsmOutput) TokenIndex {
return self.lbracket;
}
pub fn lastToken(self: *const AsmOutput) TokenIndex {
return self.rparen;
}
};
pub const AsmInput = struct {
base: Node = Node{ .id = .AsmInput },
lbracket: TokenIndex,
symbolic_name: *Node,
constraint: *Node,
expr: *Node,
rparen: TokenIndex,
pub fn iterate(self: *const AsmInput) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const AsmInput, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.symbolic_name;
i -= 1;
if (i < 1) return self.constraint;
i -= 1;
if (i < 1) return self.expr;
i -= 1;
return null;
}
pub fn firstToken(self: *const AsmInput) TokenIndex {
return self.lbracket;
}
pub fn lastToken(self: *const AsmInput) TokenIndex {
return self.rparen;
}
};
pub const Asm = struct {
base: Node = Node{ .id = .Asm },
asm_token: TokenIndex,
rparen: TokenIndex,
volatile_token: ?TokenIndex,
template: *Node,
outputs: OutputList,
inputs: InputList,
clobbers: ClobberList,
rparen: TokenIndex,
outputs: []Output,
inputs: []Input,
/// A clobber node must be a StringLiteral or MultilineStringLiteral.
clobbers: []*Node,
pub const Output = struct {
lbracket: TokenIndex,
symbolic_name: *Node,
constraint: *Node,
kind: Kind,
rparen: TokenIndex,
pub const Kind = union(enum) {
Variable: *Identifier,
Return: *Node,
};
pub fn iterate(self: *const Output) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Output, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.symbolic_name;
i -= 1;
if (i < 1) return self.constraint;
i -= 1;
switch (self.kind) {
.Variable => |variable_name| {
if (i < 1) return &variable_name.base;
i -= 1;
},
.Return => |return_type| {
if (i < 1) return return_type;
i -= 1;
},
}
return null;
}
pub fn firstToken(self: *const Output) TokenIndex {
return self.lbracket;
}
pub fn lastToken(self: *const Output) TokenIndex {
return self.rparen;
}
};
pub const Input = struct {
lbracket: TokenIndex,
symbolic_name: *Node,
constraint: *Node,
expr: *Node,
rparen: TokenIndex,
pub fn iterate(self: *const Input) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Input, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.symbolic_name;
i -= 1;
if (i < 1) return self.constraint;
i -= 1;
if (i < 1) return self.expr;
i -= 1;
return null;
}
pub fn firstToken(self: *const Input) TokenIndex {
return self.lbracket;
}
pub fn lastToken(self: *const Input) TokenIndex {
return self.rparen;
}
};
pub const OutputList = LinkedList(*AsmOutput);
pub const InputList = LinkedList(*AsmInput);
pub const ClobberList = LinkedList(*Node);
pub fn iterate(self: *const Asm) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null};
@ -2663,19 +2771,24 @@ pub const Node = struct {
var i = it.index;
it.index += 1;
var output: ?*LinkedList(*AsmOutput).Node = self.outputs.first;
while (output) |o| {
if (i < 1) return &o.data.base;
i -= 1;
output = o.next;
}
if (i < self.outputs.len * 3) switch (i % 3) {
0 => return self.outputs[i / 3].symbolic_name,
1 => return self.outputs[i / 3].constraint,
2 => switch (self.outputs[i / 3].kind) {
.Variable => |variable_name| return &variable_name.base,
.Return => |return_type| return return_type,
},
else => unreachable,
};
i -= self.outputs.len * 3;
var input: ?*LinkedList(*AsmInput).Node = self.inputs.first;
while (input) |o| {
if (i < 1) return &o.data.base;
i -= 1;
input = o.next;
}
if (i < self.inputs.len * 3) switch (i % 3) {
0 => return self.inputs[i / 3].symbolic_name,
1 => return self.inputs[i / 3].constraint,
2 => return self.inputs[i / 3].expr,
else => unreachable,
};
i -= self.inputs.len * 3;
return null;
}


@ -520,10 +520,9 @@ const Parser = struct {
p.putBackToken(token);
return null;
};
var var_args_token: ?TokenIndex = null;
const name_token = p.eatToken(.Identifier);
const lparen = try p.expectToken(.LParen);
const params = try p.parseParamDeclList(&var_args_token);
const params = try p.parseParamDeclList();
defer p.gpa.free(params);
const rparen = try p.expectToken(.RParen);
const align_expr = try p.parseByteAlign();
@ -547,15 +546,19 @@ const Parser = struct {
else
R{ .Explicit = return_type_expr.? };
const params_len = @intCast(NodeIndex, params.len);
const var_args_token = if (params.len > 0) blk: {
const param_type = params[params.len - 1].param_type;
break :blk if (param_type == .var_args) param_type.var_args else null;
} else
null;
const fn_proto_node = try Node.FnProto.alloc(&p.arena.allocator, params_len);
const fn_proto_node = try Node.FnProto.alloc(&p.arena.allocator, params.len);
fn_proto_node.* = .{
.doc_comments = null,
.visib_token = null,
.fn_token = fn_token,
.name_token = name_token,
.params_len = params_len,
.params_len = params.len,
.return_type = return_type,
.var_args_token = var_args_token,
.extern_export_inline_token = null,
@ -1455,17 +1458,15 @@ const Parser = struct {
// ignore this, continue parsing
return res;
};
const node = try p.arena.allocator.create(Node.SuffixOp);
defer p.gpa.free(params.list);
const node = try Node.Call.alloc(&p.arena.allocator, params.list.len);
node.* = .{
.lhs = res,
.op = .{
.Call = .{
.params = params.list,
.async_token = async_token,
},
},
.params_len = params.list.len,
.async_token = async_token,
.rtoken = params.rparen,
};
std.mem.copy(*Node, node.params(), params.list);
return &node.base;
}
if (try p.parsePrimaryTypeExpr()) |expr| {
@ -1482,17 +1483,15 @@ const Parser = struct {
continue;
}
if (try p.parseFnCallArguments()) |params| {
const call = try p.arena.allocator.create(Node.SuffixOp);
defer p.gpa.free(params.list);
const call = try Node.Call.alloc(&p.arena.allocator, params.list.len);
call.* = .{
.lhs = res,
.op = .{
.Call = .{
.params = params.list,
.async_token = null,
},
},
.params_len = params.list.len,
.async_token = null,
.rtoken = params.rparen,
};
std.mem.copy(*Node, call.params(), params.list);
res = &call.base;
continue;
}
@ -1615,14 +1614,16 @@ const Parser = struct {
return null;
}
const decls = try p.parseErrorTagList();
defer p.gpa.free(decls);
const rbrace = try p.expectToken(.RBrace);
const node = try p.arena.allocator.create(Node.ErrorSetDecl);
const node = try Node.ErrorSetDecl.alloc(&p.arena.allocator, decls.len);
node.* = .{
.error_token = error_token,
.decls = decls,
.decls_len = decls.len,
.rbrace_token = rbrace,
};
std.mem.copy(*Node, node.decls(), decls);
return &node.base;
}
@ -1769,19 +1770,25 @@ const Parser = struct {
_ = try p.expectToken(.RParen);
_ = try p.expectToken(.LBrace);
const cases = try p.parseSwitchProngList();
defer p.gpa.free(cases);
const rbrace = try p.expectToken(.RBrace);
const node = try p.arena.allocator.create(Node.Switch);
const node = try Node.Switch.alloc(&p.arena.allocator, cases.len);
node.* = .{
.switch_token = switch_token,
.expr = expr_node,
.cases = cases,
.cases_len = cases.len,
.rbrace = rbrace,
};
std.mem.copy(*Node, node.cases(), cases);
return &node.base;
}
/// AsmExpr <- KEYWORD_asm KEYWORD_volatile? LPAREN Expr AsmOutput? RPAREN
/// AsmOutput <- COLON AsmOutputList AsmInput?
/// AsmInput <- COLON AsmInputList AsmClobbers?
/// AsmClobbers <- COLON StringList
/// StringList <- (STRINGLITERAL COMMA)* STRINGLITERAL?
fn parseAsmExpr(p: *Parser) !?*Node {
const asm_token = p.eatToken(.Keyword_asm) orelse return null;
const volatile_token = p.eatToken(.Keyword_volatile);
@ -1790,19 +1797,39 @@ const Parser = struct {
.ExpectedExpr = .{ .token = p.tok_i },
});
var arena_outputs: []Node.Asm.Output = &[0]Node.Asm.Output{};
var arena_inputs: []Node.Asm.Input = &[0]Node.Asm.Input{};
var arena_clobbers: []*Node = &[0]*Node{};
if (p.eatToken(.Colon) != null) {
const outputs = try p.parseAsmOutputList();
defer p.gpa.free(outputs);
arena_outputs = try p.arena.allocator.dupe(Node.Asm.Output, outputs);
if (p.eatToken(.Colon) != null) {
const inputs = try p.parseAsmInputList();
defer p.gpa.free(inputs);
arena_inputs = try p.arena.allocator.dupe(Node.Asm.Input, inputs);
if (p.eatToken(.Colon) != null) {
const clobbers = try ListParseFn(*Node, parseStringLiteral)(p);
defer p.gpa.free(clobbers);
arena_clobbers = try p.arena.allocator.dupe(*Node, clobbers);
}
}
}
const node = try p.arena.allocator.create(Node.Asm);
node.* = .{
.asm_token = asm_token,
.volatile_token = volatile_token,
.template = template,
.outputs = Node.Asm.OutputList{},
.inputs = Node.Asm.InputList{},
.clobbers = Node.Asm.ClobberList{},
.rparen = undefined,
.outputs = arena_outputs,
.inputs = arena_inputs,
.clobbers = arena_clobbers,
.rparen = try p.expectToken(.RParen),
};
try p.parseAsmOutput(node);
node.rparen = try p.expectToken(.RParen);
return &node.base;
}
@ -1828,15 +1855,8 @@ const Parser = struct {
return null;
}
/// AsmOutput <- COLON AsmOutputList AsmInput?
fn parseAsmOutput(p: *Parser, asm_node: *Node.Asm) !void {
if (p.eatToken(.Colon) == null) return;
asm_node.outputs = try p.parseAsmOutputList();
try p.parseAsmInput(asm_node);
}
/// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN
fn parseAsmOutputItem(p: *Parser) !?*Node.AsmOutput {
fn parseAsmOutputItem(p: *Parser) !?Node.Asm.Output {
const lbracket = p.eatToken(.LBracket) orelse return null;
const name = try p.expectNode(parseIdentifier, .{
.ExpectedIdentifier = .{ .token = p.tok_i },
@ -1848,7 +1868,7 @@ const Parser = struct {
});
_ = try p.expectToken(.LParen);
const kind: Node.AsmOutput.Kind = blk: {
const kind: Node.Asm.Output.Kind = blk: {
if (p.eatToken(.Arrow) != null) {
const return_ident = try p.expectNode(parseTypeExpr, .{
.ExpectedTypeExpr = .{ .token = p.tok_i },
@ -1862,26 +1882,17 @@ const Parser = struct {
};
const rparen = try p.expectToken(.RParen);
const node = try p.arena.allocator.create(Node.AsmOutput);
node.* = .{
return Node.Asm.Output{
.lbracket = lbracket,
.symbolic_name = name,
.constraint = constraint,
.kind = kind,
.rparen = rparen,
};
return node;
}
/// AsmInput <- COLON AsmInputList AsmClobbers?
fn parseAsmInput(p: *Parser, asm_node: *Node.Asm) !void {
if (p.eatToken(.Colon) == null) return;
asm_node.inputs = try p.parseAsmInputList();
try p.parseAsmClobbers(asm_node);
}
/// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
fn parseAsmInputItem(p: *Parser) !?*Node.AsmInput {
fn parseAsmInputItem(p: *Parser) !?Node.Asm.Input {
const lbracket = p.eatToken(.LBracket) orelse return null;
const name = try p.expectNode(parseIdentifier, .{
.ExpectedIdentifier = .{ .token = p.tok_i },
@ -1898,25 +1909,13 @@ const Parser = struct {
});
const rparen = try p.expectToken(.RParen);
const node = try p.arena.allocator.create(Node.AsmInput);
node.* = .{
return Node.Asm.Input{
.lbracket = lbracket,
.symbolic_name = name,
.constraint = constraint,
.expr = expr,
.rparen = rparen,
};
return node;
}
/// AsmClobbers <- COLON StringList
/// StringList <- (STRINGLITERAL COMMA)* STRINGLITERAL?
fn parseAsmClobbers(p: *Parser, asm_node: *Node.Asm) !void {
if (p.eatToken(.Colon) == null) return;
asm_node.clobbers = try ListParseFn(
Node.Asm.ClobberList,
parseStringLiteral,
)(p);
}
/// BreakLabel <- COLON IDENTIFIER
@ -1999,7 +1998,7 @@ const Parser = struct {
}
/// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
fn parseParamDecl(p: *Parser, list: *std.ArrayList(Node.FnProto.ParamDecl)) !bool {
fn parseParamDecl(p: *Parser) !?Node.FnProto.ParamDecl {
const doc_comments = try p.parseDocComment();
const noalias_token = p.eatToken(.Keyword_noalias);
const comptime_token = if (noalias_token == null) p.eatToken(.Keyword_comptime) else null;
@ -2014,21 +2013,23 @@ const Parser = struct {
if (noalias_token == null and
comptime_token == null and
name_token == null and
doc_comments == null) return false;
doc_comments == null)
{
return null;
}
try p.errors.append(p.gpa, .{
.ExpectedParamType = .{ .token = p.tok_i },
});
return error.ParseError;
};
(try list.addOne()).* = .{
return Node.FnProto.ParamDecl{
.doc_comments = doc_comments,
.comptime_token = comptime_token,
.noalias_token = noalias_token,
.name_token = name_token,
.param_type = param_type,
};
return true;
}
/// ParamType
@ -2714,13 +2715,14 @@ const Parser = struct {
/// ExprList <- (Expr COMMA)* Expr?
fn parseFnCallArguments(p: *Parser) !?AnnotatedParamList {
if (p.eatToken(.LParen) == null) return null;
const list = try ListParseFn(std.SinglyLinkedList(*Node), parseExpr)(p);
const list = try ListParseFn(*Node, parseExpr)(p);
errdefer p.gpa.free(list);
const rparen = try p.expectToken(.RParen);
return AnnotatedParamList{ .list = list, .rparen = rparen };
}
const AnnotatedParamList = struct {
list: std.SinglyLinkedList(*Node),
list: []*Node,
rparen: TokenIndex,
};
@ -2936,62 +2938,40 @@ const Parser = struct {
/// IdentifierList <- (IDENTIFIER COMMA)* IDENTIFIER?
/// Only ErrorSetDecl parses an IdentifierList
fn parseErrorTagList(p: *Parser) !Node.ErrorSetDecl.DeclList {
return ListParseFn(Node.ErrorSetDecl.DeclList, parseErrorTag)(p);
fn parseErrorTagList(p: *Parser) ![]*Node {
return ListParseFn(*Node, parseErrorTag)(p);
}
/// SwitchProngList <- (SwitchProng COMMA)* SwitchProng?
fn parseSwitchProngList(p: *Parser) !Node.Switch.CaseList {
return ListParseFn(Node.Switch.CaseList, parseSwitchProng)(p);
fn parseSwitchProngList(p: *Parser) ![]*Node {
return ListParseFn(*Node, parseSwitchProng)(p);
}
/// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem?
fn parseAsmOutputList(p: *Parser) Error!Node.Asm.OutputList {
return ListParseFn(Node.Asm.OutputList, parseAsmOutputItem)(p);
fn parseAsmOutputList(p: *Parser) Error![]Node.Asm.Output {
return ListParseFn(Node.Asm.Output, parseAsmOutputItem)(p);
}
/// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem?
fn parseAsmInputList(p: *Parser) Error!Node.Asm.InputList {
return ListParseFn(Node.Asm.InputList, parseAsmInputItem)(p);
fn parseAsmInputList(p: *Parser) Error![]Node.Asm.Input {
return ListParseFn(Node.Asm.Input, parseAsmInputItem)(p);
}
/// ParamDeclList <- (ParamDecl COMMA)* ParamDecl?
fn parseParamDeclList(p: *Parser, var_args_token: *?TokenIndex) ![]Node.FnProto.ParamDecl {
var list = std.ArrayList(Node.FnProto.ParamDecl).init(p.gpa);
defer list.deinit();
while (try p.parseParamDecl(&list)) {
switch (p.tokens[p.tok_i].id) {
.Comma => _ = p.nextToken(),
// all possible delimiters
.Colon, .RParen, .RBrace, .RBracket => break,
else => {
// this is likely just a missing comma,
// continue parsing this list and give an error
try p.errors.append(p.gpa, .{
.ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
});
},
}
}
if (list.items.len != 0) {
const param_type = list.items[list.items.len - 1].param_type;
if (param_type == .var_args) {
var_args_token.* = param_type.var_args;
}
}
return list.toOwnedSlice();
fn parseParamDeclList(p: *Parser) ![]Node.FnProto.ParamDecl {
return ListParseFn(Node.FnProto.ParamDecl, parseParamDecl)(p);
}
const NodeParseFn = fn (p: *Parser) Error!?*Node;
fn ListParseFn(comptime L: type, comptime nodeParseFn: var) ParseFn(L) {
fn ListParseFn(comptime E: type, comptime nodeParseFn: var) ParseFn([]E) {
return struct {
pub fn parse(p: *Parser) !L {
var list = L{};
var list_it = &list.first;
while (try nodeParseFn(p)) |node| {
list_it = try p.llpush(L.Node.Data, list_it, node);
pub fn parse(p: *Parser) ![]E {
var list = std.ArrayList(E).init(p.gpa);
defer list.deinit();
while (try nodeParseFn(p)) |item| {
try list.append(item);
switch (p.tokens[p.tok_i].id) {
.Comma => _ = p.nextToken(),
@ -3006,7 +2986,7 @@ const Parser = struct {
},
}
}
return list;
return list.toOwnedSlice();
}
}.parse;
}
@ -3053,12 +3033,15 @@ const Parser = struct {
};
return &node.base;
};
const node = try p.arena.allocator.create(Node.BuiltinCall);
defer p.gpa.free(params.list);
const node = try Node.BuiltinCall.alloc(&p.arena.allocator, params.list.len);
node.* = .{
.builtin_token = token,
.params = params.list,
.params_len = params.list.len,
.rparen_token = params.rparen,
};
std.mem.copy(*Node, node.params(), params.list);
return &node.base;
}


@ -187,7 +187,10 @@ fn renderRoot(
}
fn renderExtraNewline(tree: *ast.Tree, stream: var, start_col: *usize, node: *ast.Node) @TypeOf(stream).Error!void {
const first_token = node.firstToken();
return renderExtraNewlineToken(tree, stream, start_col, node.firstToken());
}
fn renderExtraNewlineToken(tree: *ast.Tree, stream: var, start_col: *usize, first_token: ast.TokenIndex,) @TypeOf(stream).Error!void {
var prev_token = first_token;
if (prev_token == 0) return;
var newline_threshold: usize = 2;
@ -902,74 +905,70 @@ fn renderExpression(
return renderToken(tree, stream, rtoken, indent, start_col, space);
},
.Call => {
const call = @fieldParentPtr(ast.Node.Call, "base", base);
if (call.async_token) |async_token| {
try renderToken(tree, stream, async_token, indent, start_col, Space.Space);
}
try renderExpression(allocator, stream, tree, indent, start_col, call.lhs, Space.None);
const lparen = tree.nextToken(call.lhs.lastToken());
if (call.params_len == 0) {
try renderToken(tree, stream, lparen, indent, start_col, Space.None);
return renderToken(tree, stream, call.rtoken, indent, start_col, space);
}
const src_has_trailing_comma = blk: {
const maybe_comma = tree.prevToken(call.rtoken);
break :blk tree.tokens[maybe_comma].id == .Comma;
};
if (src_has_trailing_comma) {
const new_indent = indent + indent_delta;
try renderToken(tree, stream, lparen, new_indent, start_col, Space.Newline);
const params = call.params();
for (params) |param_node, i| {
const param_node_new_indent = if (param_node.id == .MultilineStringLiteral) blk: {
break :blk indent;
} else blk: {
try stream.writeByteNTimes(' ', new_indent);
break :blk new_indent;
};
if (i + 1 < params.len) {
try renderExpression(allocator, stream, tree, param_node_new_indent, start_col, param_node, Space.None);
const comma = tree.nextToken(param_node.lastToken());
try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline); // ,
try renderExtraNewline(tree, stream, start_col, params[i + 1]);
} else {
try renderExpression(allocator, stream, tree, param_node_new_indent, start_col, param_node, Space.Comma);
try stream.writeByteNTimes(' ', indent);
return renderToken(tree, stream, call.rtoken, indent, start_col, space);
}
}
}
try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
const params = call.params();
for (params) |param_node, i| {
try renderExpression(allocator, stream, tree, indent, start_col, param_node, Space.None);
if (i + 1 < params.len) {
const comma = tree.nextToken(param_node.lastToken());
try renderToken(tree, stream, comma, indent, start_col, Space.Space);
}
}
return renderToken(tree, stream, call.rtoken, indent, start_col, space);
},
.SuffixOp => {
const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", base);
switch (suffix_op.op) {
.Call => |*call_info| {
if (call_info.async_token) |async_token| {
try renderToken(tree, stream, async_token, indent, start_col, Space.Space);
}
try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None);
const lparen = tree.nextToken(suffix_op.lhs.lastToken());
if (call_info.params.first == null) {
try renderToken(tree, stream, lparen, indent, start_col, Space.None);
return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
}
const src_has_trailing_comma = blk: {
const maybe_comma = tree.prevToken(suffix_op.rtoken);
break :blk tree.tokens[maybe_comma].id == .Comma;
};
if (src_has_trailing_comma) {
const new_indent = indent + indent_delta;
try renderToken(tree, stream, lparen, new_indent, start_col, Space.Newline);
var it = call_info.params.first;
while (true) {
const param_node_node = it.?;
it = param_node_node.next;
const param_node = param_node_node.data;
const param_node_new_indent = if (param_node.id == .MultilineStringLiteral) blk: {
break :blk indent;
} else blk: {
try stream.writeByteNTimes(' ', new_indent);
break :blk new_indent;
};
if (it) |next_node| {
try renderExpression(allocator, stream, tree, param_node_new_indent, start_col, param_node, Space.None);
const comma = tree.nextToken(param_node.lastToken());
try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline); // ,
try renderExtraNewline(tree, stream, start_col, next_node.data);
} else {
try renderExpression(allocator, stream, tree, param_node_new_indent, start_col, param_node, Space.Comma);
try stream.writeByteNTimes(' ', indent);
return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
}
}
}
try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
var it = call_info.params.first;
while (it) |param_node_node| : (it = param_node_node.next) {
const param_node = param_node_node.data;
try renderExpression(allocator, stream, tree, indent, start_col, param_node, Space.None);
if (param_node_node.next != null) {
const comma = tree.nextToken(param_node.lastToken());
try renderToken(tree, stream, comma, indent, start_col, Space.Space);
}
}
return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
},
.ArrayAccess => |index_expr| {
const lbracket = tree.nextToken(suffix_op.lhs.lastToken());
const rbracket = tree.nextToken(index_expr.lastToken());
@ -1288,14 +1287,14 @@ fn renderExpression(
const lbrace = tree.nextToken(err_set_decl.error_token);
if (err_set_decl.decls.first == null) {
if (err_set_decl.decls_len == 0) {
try renderToken(tree, stream, err_set_decl.error_token, indent, start_col, Space.None);
try renderToken(tree, stream, lbrace, indent, start_col, Space.None);
return renderToken(tree, stream, err_set_decl.rbrace_token, indent, start_col, space);
}
if (err_set_decl.decls.first.?.next == null) blk: {
const node = err_set_decl.decls.first.?.data;
if (err_set_decl.decls_len == 1) blk: {
const node = err_set_decl.decls()[0];
// if there are any doc comments or same line comments
// don't try to put it all on one line
@ -1322,16 +1321,15 @@ fn renderExpression(
try renderToken(tree, stream, lbrace, indent, start_col, Space.Newline); // {
const new_indent = indent + indent_delta;
var it = err_set_decl.decls.first;
while (it) |node_node| : (it = node_node.next) {
const node = node_node.data;
const decls = err_set_decl.decls();
for (decls) |node, i| {
try stream.writeByteNTimes(' ', new_indent);
if (node_node.next) |next_node| {
if (i + 1 < decls.len) {
try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.None);
try renderToken(tree, stream, tree.nextToken(node.lastToken()), new_indent, start_col, Space.Newline); // ,
try renderExtraNewline(tree, stream, start_col, next_node.data);
try renderExtraNewline(tree, stream, start_col, decls[i + 1]);
} else {
try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.Comma);
}
@ -1342,16 +1340,15 @@ fn renderExpression(
} else {
try renderToken(tree, stream, lbrace, indent, start_col, Space.Space); // {
var it = err_set_decl.decls.first;
while (it) |node_node| : (it = node_node.next) {
const node = node_node.data;
if (node_node.next) |next_node| {
const decls = err_set_decl.decls();
for (decls) |node, i| {
if (i + 1 < decls.len) {
try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None);
const comma_token = tree.nextToken(node.lastToken());
assert(tree.tokens[comma_token].id == .Comma);
try renderToken(tree, stream, comma_token, indent, start_col, Space.Space); // ,
try renderExtraNewline(tree, stream, start_col, next_node.data);
try renderExtraNewline(tree, stream, start_col, decls[i + 1]);
} else {
try renderExpression(allocator, stream, tree, indent, start_col, node, Space.Space);
}
@ -1401,12 +1398,8 @@ fn renderExpression(
try renderToken(tree, stream, builtin_call.builtin_token, indent, start_col, Space.None); // @name
const src_params_trailing_comma = blk: {
if (builtin_call.params.first == null or
builtin_call.params.first.?.next == null)
{
break :blk false;
}
const last_node = builtin_call.params.first.?.findLast().data;
if (builtin_call.params_len < 2) break :blk false;
const last_node = builtin_call.params()[builtin_call.params_len - 1];
const maybe_comma = tree.nextToken(last_node.lastToken());
break :blk tree.tokens[maybe_comma].id == .Comma;
};
@ -1417,12 +1410,11 @@ fn renderExpression(
try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
// render all on one line, no trailing comma
var it = builtin_call.params.first;
while (it) |param_node_node| : (it = param_node_node.next) {
const param_node = param_node_node.data;
const params = builtin_call.params();
for (params) |param_node, i| {
try renderExpression(allocator, stream, tree, indent, start_col, param_node, Space.None);
if (param_node_node.next != null) {
if (i + 1 < params.len) {
const comma_token = tree.nextToken(param_node.lastToken());
try renderToken(tree, stream, comma_token, indent, start_col, Space.Space); // ,
}
@ -1432,9 +1424,7 @@ fn renderExpression(
const new_indent = indent + indent_delta;
try renderToken(tree, stream, lparen, new_indent, start_col, Space.Newline); // (
var it = builtin_call.params.first;
while (it) |param_node_node| : (it = param_node_node.next) {
const param_node = param_node_node.data;
for (builtin_call.params()) |param_node| {
try stream.writeByteNTimes(' ', new_indent);
try renderExpression(allocator, stream, tree, indent, start_col, param_node, Space.Comma);
}
@ -1592,7 +1582,7 @@ fn renderExpression(
const rparen = tree.nextToken(switch_node.expr.lastToken());
const lbrace = tree.nextToken(rparen);
if (switch_node.cases.first == null) {
if (switch_node.cases_len == 0) {
try renderExpression(allocator, stream, tree, indent, start_col, switch_node.expr, Space.None);
try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // )
try renderToken(tree, stream, lbrace, indent, start_col, Space.None); // {
@ -1606,14 +1596,13 @@ fn renderExpression(
try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // )
try renderToken(tree, stream, lbrace, new_indent, start_col, Space.Newline); // {
var it = switch_node.cases.first;
while (it) |node_node| : (it = node_node.next) {
const node = node_node.data;
const cases = switch_node.cases();
for (cases) |node, i| {
try stream.writeByteNTimes(' ', new_indent);
try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.Comma);
if (node_node.next) |next_node| {
try renderExtraNewline(tree, stream, start_col, next_node.data);
if (i + 1 < cases.len) {
try renderExtraNewline(tree, stream, start_col, cases[i + 1]);
}
}
@ -1929,7 +1918,7 @@ fn renderExpression(
try renderToken(tree, stream, tree.nextToken(asm_node.asm_token), indent, start_col, Space.None); // (
}
if (asm_node.outputs.first == null and asm_node.inputs.first == null and asm_node.clobbers.first == null) {
if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) {
try renderExpression(allocator, stream, tree, indent, start_col, asm_node.template, Space.None);
return renderToken(tree, stream, asm_node.rparen, indent, start_col, space);
}
@ -1949,7 +1938,7 @@ fn renderExpression(
const colon1 = tree.nextToken(asm_node.template.lastToken());
const indent_extra = indent_once + 2;
const colon2 = if (asm_node.outputs.first == null) blk: {
const colon2 = if (asm_node.outputs.len == 0) blk: {
try renderToken(tree, stream, colon1, indent, start_col, Space.Newline); // :
try stream.writeByteNTimes(' ', indent_once);
@ -1957,39 +1946,34 @@ fn renderExpression(
} else blk: {
try renderToken(tree, stream, colon1, indent, start_col, Space.Space); // :
var it = asm_node.outputs.first;
while (true) {
const asm_output_node = it.?;
it = asm_output_node.next;
const asm_output = asm_output_node.data;
const node = &asm_output.base;
for (asm_node.outputs) |*asm_output, i| {
if (i + 1 < asm_node.outputs.len) {
const next_asm_output = asm_node.outputs[i + 1];
try renderAsmOutput(allocator, stream, tree, indent_extra, start_col, asm_output, Space.None);
if (asm_output_node.next) |next_asm_output| {
try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.None);
const next_node = &next_asm_output.data.base;
const comma = tree.prevToken(next_asm_output.data.firstToken());
const comma = tree.prevToken(next_asm_output.firstToken());
try renderToken(tree, stream, comma, indent_extra, start_col, Space.Newline); // ,
try renderExtraNewline(tree, stream, start_col, next_node);
try renderExtraNewlineToken(tree, stream, start_col, next_asm_output.firstToken());
try stream.writeByteNTimes(' ', indent_extra);
} else if (asm_node.inputs.first == null and asm_node.clobbers.first == null) {
try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.Newline);
} else if (asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) {
try renderAsmOutput(allocator, stream, tree, indent_extra, start_col, asm_output, Space.Newline);
try stream.writeByteNTimes(' ', indent);
return renderToken(tree, stream, asm_node.rparen, indent, start_col, space);
} else {
try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.Newline);
try renderAsmOutput(allocator, stream, tree, indent_extra, start_col, asm_output, Space.Newline);
try stream.writeByteNTimes(' ', indent_once);
const comma_or_colon = tree.nextToken(node.lastToken());
const comma_or_colon = tree.nextToken(asm_output.lastToken());
break :blk switch (tree.tokens[comma_or_colon].id) {
.Comma => tree.nextToken(comma_or_colon),
else => comma_or_colon,
};
}
}
unreachable;
};
const colon3 = if (asm_node.inputs.first == null) blk: {
const colon3 = if (asm_node.inputs.len == 0) blk: {
try renderToken(tree, stream, colon2, indent, start_col, Space.Newline); // :
try stream.writeByteNTimes(' ', indent_once);
@ -1997,46 +1981,37 @@ fn renderExpression(
} else blk: {
try renderToken(tree, stream, colon2, indent, start_col, Space.Space); // :
var it = asm_node.inputs.first;
while (true) {
const asm_input_node = it.?;
it = asm_input_node.next;
const node = &asm_input_node.data.base;
for (asm_node.inputs) |*asm_input, i| {
if (i + 1 < asm_node.inputs.len) {
const next_asm_input = &asm_node.inputs[i + 1];
try renderAsmInput(allocator, stream, tree, indent_extra, start_col, asm_input, Space.None);
if (it) |next_asm_input| {
try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.None);
const next_node = &next_asm_input.data.base;
const comma = tree.prevToken(next_asm_input.data.firstToken());
const comma = tree.prevToken(next_asm_input.firstToken());
try renderToken(tree, stream, comma, indent_extra, start_col, Space.Newline); // ,
try renderExtraNewline(tree, stream, start_col, next_node);
try renderExtraNewlineToken(tree, stream, start_col, next_asm_input.firstToken());
try stream.writeByteNTimes(' ', indent_extra);
} else if (asm_node.clobbers.first == null) {
try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.Newline);
} else if (asm_node.clobbers.len == 0) {
try renderAsmInput(allocator, stream, tree, indent_extra, start_col, asm_input, Space.Newline);
try stream.writeByteNTimes(' ', indent);
return renderToken(tree, stream, asm_node.rparen, indent, start_col, space); // )
} else {
try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.Newline);
try renderAsmInput(allocator, stream, tree, indent_extra, start_col, asm_input, Space.Newline);
try stream.writeByteNTimes(' ', indent_once);
const comma_or_colon = tree.nextToken(node.lastToken());
const comma_or_colon = tree.nextToken(asm_input.lastToken());
break :blk switch (tree.tokens[comma_or_colon].id) {
.Comma => tree.nextToken(comma_or_colon),
else => comma_or_colon,
};
}
}
unreachable;
};
try renderToken(tree, stream, colon3, indent, start_col, Space.Space); // :
var it = asm_node.clobbers.first;
while (true) {
const clobber_node_node = it.?;
it = clobber_node_node.next;
const clobber_node = clobber_node_node.data;
if (it == null) {
for (asm_node.clobbers) |clobber_node, i| {
if (i + 1 >= asm_node.clobbers.len) {
try renderExpression(allocator, stream, tree, indent_extra, start_col, clobber_node, Space.Newline);
try stream.writeByteNTimes(' ', indent);
return renderToken(tree, stream, asm_node.rparen, indent, start_col, space);
@ -2048,40 +2023,6 @@ fn renderExpression(
}
},
.AsmInput => {
const asm_input = @fieldParentPtr(ast.Node.AsmInput, "base", base);
try stream.writeAll("[");
try renderExpression(allocator, stream, tree, indent, start_col, asm_input.symbolic_name, Space.None);
try stream.writeAll("] ");
try renderExpression(allocator, stream, tree, indent, start_col, asm_input.constraint, Space.None);
try stream.writeAll(" (");
try renderExpression(allocator, stream, tree, indent, start_col, asm_input.expr, Space.None);
return renderToken(tree, stream, asm_input.lastToken(), indent, start_col, space); // )
},
.AsmOutput => {
const asm_output = @fieldParentPtr(ast.Node.AsmOutput, "base", base);
try stream.writeAll("[");
try renderExpression(allocator, stream, tree, indent, start_col, asm_output.symbolic_name, Space.None);
try stream.writeAll("] ");
try renderExpression(allocator, stream, tree, indent, start_col, asm_output.constraint, Space.None);
try stream.writeAll(" (");
switch (asm_output.kind) {
ast.Node.AsmOutput.Kind.Variable => |variable_name| {
try renderExpression(allocator, stream, tree, indent, start_col, &variable_name.base, Space.None);
},
ast.Node.AsmOutput.Kind.Return => |return_type| {
try stream.writeAll("-> ");
try renderExpression(allocator, stream, tree, indent, start_col, return_type, Space.None);
},
}
return renderToken(tree, stream, asm_output.lastToken(), indent, start_col, space); // )
},
.EnumLiteral => {
const enum_literal = @fieldParentPtr(ast.Node.EnumLiteral, "base", base);
@ -2098,6 +2039,52 @@ fn renderExpression(
}
}
fn renderAsmOutput(
allocator: *mem.Allocator,
stream: var,
tree: *ast.Tree,
indent: usize,
start_col: *usize,
asm_output: *const ast.Node.Asm.Output,
space: Space,
) (@TypeOf(stream).Error || Error)!void {
try stream.writeAll("[");
try renderExpression(allocator, stream, tree, indent, start_col, asm_output.symbolic_name, Space.None);
try stream.writeAll("] ");
try renderExpression(allocator, stream, tree, indent, start_col, asm_output.constraint, Space.None);
try stream.writeAll(" (");
switch (asm_output.kind) {
ast.Node.Asm.Output.Kind.Variable => |variable_name| {
try renderExpression(allocator, stream, tree, indent, start_col, &variable_name.base, Space.None);
},
ast.Node.Asm.Output.Kind.Return => |return_type| {
try stream.writeAll("-> ");
try renderExpression(allocator, stream, tree, indent, start_col, return_type, Space.None);
},
}
return renderToken(tree, stream, asm_output.lastToken(), indent, start_col, space); // )
}
fn renderAsmInput(
allocator: *mem.Allocator,
stream: var,
tree: *ast.Tree,
indent: usize,
start_col: *usize,
asm_input: *const ast.Node.Asm.Input,
space: Space,
) (@TypeOf(stream).Error || Error)!void {
try stream.writeAll("[");
try renderExpression(allocator, stream, tree, indent, start_col, asm_input.symbolic_name, Space.None);
try stream.writeAll("] ");
try renderExpression(allocator, stream, tree, indent, start_col, asm_input.constraint, Space.None);
try stream.writeAll(" (");
try renderExpression(allocator, stream, tree, indent, start_col, asm_input.expr, Space.None);
return renderToken(tree, stream, asm_input.lastToken(), indent, start_col, space); // )
}
fn renderVarDecl(
allocator: *mem.Allocator,
stream: var,