Mirror of https://github.com/ziglang/zig.git

zig fmt: anytype, fn calls with one param, trailing commas
and extra newlines between top level declarations

commit 36eee7bc6c (parent 25bcf4eb99)
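
To illustrate the call formatting this change targets (an illustrative sketch based on the renderCall logic further down, with made-up identifiers; the output is not quoted from the commit's tests):

    // No trailing comma in the source: the call stays on one line.
    const x = add(a, b);

    // Trailing comma in the source: one argument per line,
    // with the closing paren on its own line.
    const y = add(
        a,
        b,
    );

The same renderer handles async calls, emitting the async keyword before the callee when present.
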
@@ -196,6 +196,7 @@ pub const Tree = struct {
const datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
const token_tags = tree.tokens.items(.tag);
var end_offset: TokenIndex = 0;
var n = node;
while (true) switch (tags[n]) {
.Root => return 0,
@@ -251,7 +252,7 @@ pub const Tree = struct {
.ArrayType,
.ArrayTypeSentinel,
.ErrorValue,
=> return main_tokens[n],
=> return main_tokens[n] - end_offset,

.ArrayInitDot,
.ArrayInitDotTwo,
@@ -260,7 +261,7 @@ pub const Tree = struct {
.StructInitDotTwo,
.StructInitDotTwoComma,
.EnumLiteral,
=> return main_tokens[n] - 1,
=> return main_tokens[n] - 1 - end_offset,

.Catch,
.FieldAccess,
@@ -314,24 +315,32 @@ pub const Tree = struct {
.StructInitOne,
.StructInit,
.CallOne,
.CallOneComma,
.Call,
.CallComma,
.SwitchRange,
.FnDecl,
.ErrorUnion,
=> n = datas[n].lhs,

.AsyncCallOne,
.AsyncCallOneComma,
.AsyncCall,
.AsyncCallComma,
=> {
end_offset += 1; // async token
n = datas[n].lhs;
},

.ContainerFieldInit,
.ContainerFieldAlign,
.ContainerField,
=> {
const name_token = main_tokens[n];
if (name_token > 0 and
token_tags[name_token - 1] == .Keyword_comptime)
{
return name_token - 1;
} else {
return name_token;
if (name_token > 0 and token_tags[name_token - 1] == .Keyword_comptime) {
end_offset += 1;
}
return name_token - end_offset;
},

.GlobalVarDecl,
@@ -351,10 +360,10 @@ pub const Tree = struct {
.StringLiteral,
=> continue,

else => return i + 1,
else => return i + 1 - end_offset,
}
}
return i;
return i - end_offset;
},

.Block,
@@ -365,10 +374,9 @@ pub const Tree = struct {
// Look for a label.
const lbrace = main_tokens[n];
if (token_tags[lbrace - 1] == .Colon) {
return lbrace - 2;
} else {
return lbrace;
end_offset += 2;
}
return lbrace - end_offset;
},

.ContainerDecl,
@@ -386,9 +394,10 @@ pub const Tree = struct {
=> {
const main_token = main_tokens[n];
switch (token_tags[main_token - 1]) {
.Keyword_packed, .Keyword_extern => return main_token - 1,
else => return main_token,
.Keyword_packed, .Keyword_extern => end_offset += 1,
else => {},
}
return main_token - end_offset;
},

.PtrTypeAligned,
@@ -404,12 +413,12 @@ pub const Tree = struct {
},
.LBrace => main_token,
else => unreachable,
};
} - end_offset;
},

.SwitchCaseOne => {
if (datas[n].lhs == 0) {
return main_tokens[n] - 1; // else token
return main_tokens[n] - 1 - end_offset; // else token
} else {
n = datas[n].lhs;
}
@@ -422,7 +431,7 @@ pub const Tree = struct {

.AsmOutput, .AsmInput => {
assert(token_tags[main_tokens[n] - 1] == .LBracket);
return main_tokens[n] - 1;
return main_tokens[n] - 1 - end_offset;
},

.WhileSimple,
@@ -435,7 +444,7 @@ pub const Tree = struct {
return switch (token_tags[main_token - 1]) {
.Keyword_inline => main_token - 1,
else => main_token,
};
} - end_offset;
},
};
}
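
A note on the pattern above (my reading of the diff, not text from the commit): instead of each case in firstToken returning its answer immediately, prefix tokens such as async, comptime, a block label, inline, or packed/extern now bump end_offset and let the loop keep descending, and the final return subtracts end_offset from the main token. For example, given

    const f = async foo.bar(x);

firstToken on the AsyncCallOne node adds 1 to end_offset for the async keyword and follows lhs through the FieldAccess down to the foo identifier; assuming the identifier case returns main_tokens[n] - end_offset like the cases shown above, the result is the async token itself.
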
@@ -555,7 +564,7 @@ pub const Tree = struct {
return main_tokens[n] + end_offset;
},

.Call => {
.Call, .AsyncCall => {
end_offset += 1; // for the rparen
const params = tree.extraData(datas[n].rhs, Node.SubRange);
if (params.end - params.start == 0) {
@@ -563,6 +572,12 @@ pub const Tree = struct {
}
n = tree.extra_data[params.end - 1]; // last parameter
},
.CallComma, .AsyncCallComma => {
end_offset += 2; // for the comma+rparen
const params = tree.extraData(datas[n].rhs, Node.SubRange);
assert(params.end > params.start);
n = tree.extra_data[params.end - 1]; // last parameter
},
.Switch => {
const cases = tree.extraData(datas[n].rhs, Node.SubRange);
if (cases.end - cases.start == 0) {
@@ -614,6 +629,7 @@ pub const Tree = struct {
n = tree.extra_data[datas[n].rhs - 1]; // last member
},
.CallOne,
.AsyncCallOne,
.ArrayAccess,
=> {
end_offset += 1; // for the rparen/rbracket
@@ -622,7 +638,6 @@ pub const Tree = struct {
}
n = datas[n].rhs;
},

.ArrayInitDotTwo,
.BlockTwo,
.StructInitDotTwo,
@@ -755,9 +770,10 @@ pub const Tree = struct {
}
},

.SliceOpen => {
end_offset += 2; // ellipsis2 and rbracket
.SliceOpen, .CallOneComma, .AsyncCallOneComma => {
end_offset += 2; // ellipsis2 + rbracket, or comma + rparen
n = datas[n].rhs;
assert(n != 0);
},
.Slice => {
const extra = tree.extraData(datas[n].rhs, Node.Slice);
@@ -1496,6 +1512,27 @@ pub const Tree = struct {
});
}

pub fn callOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.Call {
const data = tree.nodes.items(.data)[node];
buffer.* = .{data.rhs};
const params = if (data.rhs != 0) buffer[0..1] else buffer[0..0];
return tree.fullCall(.{
.lparen = tree.nodes.items(.main_token)[node],
.fn_expr = data.lhs,
.params = params,
});
}

pub fn callFull(tree: Tree, node: Node.Index) full.Call {
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.rhs, Node.SubRange);
return tree.fullCall(.{
.lparen = tree.nodes.items(.main_token)[node],
.fn_expr = data.lhs,
.params = tree.extra_data[extra.start..extra.end],
});
}

fn fullVarDecl(tree: Tree, info: full.VarDecl.Ast) full.VarDecl {
const token_tags = tree.tokens.items(.tag);
var result: full.VarDecl = .{
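
A sketch of how callOne and callFull are meant to be consumed, mirroring the renderExpression dispatch later in this diff (the switch below is illustrative, written from a caller's point of view such as the renderer):

    // Normalize any call node into an ast.full.Call view.
    var buf: [1]ast.Node.Index = undefined;
    const call: ast.full.Call = switch (tree.nodes.items(.tag)[node]) {
        // Zero or one parameter, stored directly in data.rhs.
        .CallOne, .CallOneComma, .AsyncCallOne, .AsyncCallOneComma => tree.callOne(&buf, node),
        // Parameter list stored as a SubRange in extra_data.
        .Call, .CallComma, .AsyncCall, .AsyncCallComma => tree.callFull(node),
        else => unreachable,
    };
    // Either way, call.ast.params is a slice of parameter nodes, and
    // call.async_token is non-null when the callee is preceded by `async`.
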
@@ -1750,6 +1787,19 @@ pub const Tree = struct {
}
return result;
}

fn fullCall(tree: Tree, info: full.Call.Ast) full.Call {
const token_tags = tree.tokens.items(.tag);
var result: full.Call = .{
.ast = info,
.async_token = null,
};
const maybe_async_token = tree.firstToken(info.fn_expr) - 1;
if (token_tags[maybe_async_token] == .Keyword_async) {
result.async_token = maybe_async_token;
}
return result;
}
};

/// Fully assembled AST node information.
@@ -1942,6 +1992,17 @@ pub const full = struct {
rparen: TokenIndex,
};
};

pub const Call = struct {
ast: Ast,
async_token: ?TokenIndex,

pub const Ast = struct {
lparen: TokenIndex,
fn_expr: Node.Index,
params: []const Node.Index,
};
};
};

pub const Error = union(enum) {
@@ -2383,9 +2444,24 @@ pub const Node = struct {
StructInit,
/// `lhs(rhs)`. rhs can be omitted.
CallOne,
/// `lhs(a, b, c)`. `sub_range_list[rhs]`.
/// `lhs(rhs,)`. rhs can be omitted.
CallOneComma,
/// `async lhs(rhs)`. rhs can be omitted.
AsyncCallOne,
/// `async lhs(rhs,)`.
AsyncCallOneComma,
/// `lhs(a, b, c)`. `SubRange[rhs]`.
/// main_token is the `(`.
Call,
/// `lhs(a, b, c,)`. `SubRange[rhs]`.
/// main_token is the `(`.
CallComma,
/// `async lhs(a, b, c)`. `SubRange[rhs]`.
/// main_token is the `(`.
AsyncCall,
/// `async lhs(a, b, c,)`. `SubRange[rhs]`.
/// main_token is the `(`.
AsyncCallComma,
/// `switch(lhs) {}`. `SubRange[rhs]`.
Switch,
/// Same as Switch except there is known to be a trailing comma
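
Taken together, these doc comments give the mapping from source form to call tag (the identifiers below are illustrative):

    f()             // CallOne, rhs == 0
    f(a)            // CallOne
    f(a,)           // CallOneComma
    f(a, b)         // Call, params stored as SubRange[rhs]
    f(a, b,)        // CallComma
    async f(a)      // AsyncCallOne
    async f(a,)     // AsyncCallOneComma
    async f(a, b)   // AsyncCall
    async f(a, b,)  // AsyncCallComma
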
@@ -2254,8 +2254,6 @@ const Parser = struct {
/// / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
/// FnCallArguments <- LPAREN ExprList RPAREN
/// ExprList <- (Expr COMMA)* Expr?
/// TODO detect when there is 1 or less parameter to the call and emit
/// CallOne instead of Call.
fn parseSuffixExpr(p: *Parser) !Node.Index {
if (p.eatToken(.Keyword_async)) |async_token| {
var res = try p.expectPrimaryTypeExpr();
@@ -2269,20 +2267,95 @@ const Parser = struct {
try p.warn(.{ .ExpectedParamList = .{ .token = p.tok_i } });
return res;
};
const params = try ListParseFn(parseExpr)(p);
_ = try p.expectToken(.RParen);
if (p.eatToken(.RParen)) |_| {
return p.addNode(.{
.tag = .AsyncCallOne,
.main_token = lparen,
.data = .{
.lhs = res,
.rhs = 0,
},
});
}
const param_one = try p.expectExpr();
const comma_one = p.eatToken(.Comma);
if (p.eatToken(.RParen)) |_| {
return p.addNode(.{
.tag = if (comma_one == null) .AsyncCallOne else .AsyncCallOneComma,
.main_token = lparen,
.data = .{
.lhs = res,
.rhs = param_one,
},
});
}
if (comma_one == null) {
try p.warn(.{
.ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
});
}

return p.addNode(.{
.tag = .Call,
.main_token = lparen,
.data = .{
.lhs = res,
.rhs = try p.addExtra(Node.SubRange{
.start = params.start,
.end = params.end,
}),
},
});
var param_list = std.ArrayList(Node.Index).init(p.gpa);
defer param_list.deinit();

try param_list.append(param_one);

while (true) {
const next = try p.expectExpr();
try param_list.append(next);
switch (p.token_tags[p.nextToken()]) {
.Comma => {
if (p.eatToken(.RParen)) |_| {
const span = try p.listToSpan(param_list.items);
return p.addNode(.{
.tag = .AsyncCallComma,
.main_token = lparen,
.data = .{
.lhs = res,
.rhs = try p.addExtra(Node.SubRange{
.start = span.start,
.end = span.end,
}),
},
});
} else {
continue;
}
},
.RParen => {
const span = try p.listToSpan(param_list.items);
return p.addNode(.{
.tag = .AsyncCall,
.main_token = lparen,
.data = .{
.lhs = res,
.rhs = try p.addExtra(Node.SubRange{
.start = span.start,
.end = span.end,
}),
},
});
},
.Colon, .RBrace, .RBracket => {
p.tok_i -= 1;
return p.fail(.{
.ExpectedToken = .{
.token = p.tok_i,
.expected_id = .RParen,
},
});
},
else => {
p.tok_i -= 1;
try p.warn(.{
.ExpectedToken = .{
.token = p.tok_i,
.expected_id = .Comma,
},
});
},
}
}
}
var res = try p.parsePrimaryTypeExpr();
if (res == 0) return res;
@@ -2293,21 +2366,98 @@ const Parser = struct {
res = suffix_op;
continue;
}
const lparen = p.eatToken(.LParen) orelse return res;
const params = try ListParseFn(parseExpr)(p);
_ = try p.expectToken(.RParen);
res = res: {
const lparen = p.eatToken(.LParen) orelse return res;
if (p.eatToken(.RParen)) |_| {
break :res try p.addNode(.{
.tag = .CallOne,
.main_token = lparen,
.data = .{
.lhs = res,
.rhs = 0,
},
});
}
const param_one = try p.expectExpr();
const comma_one = p.eatToken(.Comma);
if (p.eatToken(.RParen)) |_| {
break :res try p.addNode(.{
.tag = if (comma_one == null) .CallOne else .CallOneComma,
.main_token = lparen,
.data = .{
.lhs = res,
.rhs = param_one,
},
});
}
if (comma_one == null) {
try p.warn(.{
.ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
});
}

res = try p.addNode(.{
.tag = .Call,
.main_token = lparen,
.data = .{
.lhs = res,
.rhs = try p.addExtra(Node.SubRange{
.start = params.start,
.end = params.end,
}),
},
});
var param_list = std.ArrayList(Node.Index).init(p.gpa);
defer param_list.deinit();

try param_list.append(param_one);

while (true) {
const next = try p.expectExpr();
try param_list.append(next);
switch (p.token_tags[p.nextToken()]) {
.Comma => {
if (p.eatToken(.RParen)) |_| {
const span = try p.listToSpan(param_list.items);
break :res try p.addNode(.{
.tag = .CallComma,
.main_token = lparen,
.data = .{
.lhs = res,
.rhs = try p.addExtra(Node.SubRange{
.start = span.start,
.end = span.end,
}),
},
});
} else {
continue;
}
},
.RParen => {
const span = try p.listToSpan(param_list.items);
break :res try p.addNode(.{
.tag = .Call,
.main_token = lparen,
.data = .{
.lhs = res,
.rhs = try p.addExtra(Node.SubRange{
.start = span.start,
.end = span.end,
}),
},
});
},
.Colon, .RBrace, .RBracket => {
p.tok_i -= 1;
return p.fail(.{
.ExpectedToken = .{
.token = p.tok_i,
.expected_id = .RParen,
},
});
},
else => {
p.tok_i -= 1;
try p.warn(.{
.ExpectedToken = .{
.token = p.tok_i,
.expected_id = .Comma,
},
});
},
}
}
};
}
}

@@ -2588,7 +2738,7 @@ const Parser = struct {

while (true) {
const next = try p.expectFieldInit();
if (next == 0) break;
assert(next != 0);
try init_list.append(next);
switch (p.token_tags[p.nextToken()]) {
.Comma => {

@@ -3046,35 +3046,35 @@ test "zig fmt: for" {
// \\
// );
//}
//
//test "zig fmt: async functions" {
// try testCanonical(
// \\fn simpleAsyncFn() void {
// \\ const a = async a.b();
// \\ x += 1;
// \\ suspend;
// \\ x += 1;
// \\ suspend;
// \\ const p: anyframe->void = async simpleAsyncFn() catch unreachable;
// \\ await p;
// \\}
// \\
// \\test "suspend, resume, await" {
// \\ const p: anyframe = async testAsyncSeq();
// \\ resume p;
// \\ await p;
// \\}
// \\
// );
//}
//
//test "zig fmt: nosuspend" {
// try testCanonical(
// \\const a = nosuspend foo();
// \\
// );
//}
//

test "zig fmt: async functions" {
try testCanonical(
\\fn simpleAsyncFn() void {
\\ const a = async a.b();
\\ x += 1;
\\ suspend;
\\ x += 1;
\\ suspend;
\\ const p: anyframe->void = async simpleAsyncFn() catch unreachable;
\\ await p;
\\}
\\
\\test "suspend, resume, await" {
\\ const p: anyframe = async testAsyncSeq();
\\ resume p;
\\ await p;
\\}
\\
);
}

test "zig fmt: nosuspend" {
try testCanonical(
\\const a = nosuspend foo();
\\
);
}

//test "zig fmt: Block after if" {
// try testCanonical(
// \\test "Block after if" {

@@ -73,8 +73,18 @@ fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void {
const nodes_data = tree.nodes.items(.data);
const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs];

for (root_decls) |decl| {
try renderMember(ais, tree, decl, .Newline);
return renderAllMembers(ais, tree, root_decls);
}

fn renderAllMembers(ais: *Ais, tree: ast.Tree, members: []const ast.Node.Index) Error!void {
if (members.len == 0) return;

const first_member = members[0];
try renderMember(ais, tree, first_member, .Newline);

for (members[1..]) |member| {
try renderExtraNewline(ais, tree, member);
try renderMember(ais, tree, member, .Newline);
}
}
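
The practical effect of renderAllMembers (a sketch of the intended zig fmt behavior, assuming renderExtraNewline reproduces a blank line only where the source already had one):

    const a = 1;

    const b = 2; // the single blank line above is kept
    const c = 3; // no blank line is invented where the source had none

renderContainerDecl further down is switched over to the same helper, so struct, enum, and union members get the same treatment as top-level declarations.
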
@@ -391,65 +401,17 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac
.StructInitDot => return renderStructInit(ais, tree, tree.structInitDot(node), space),
.StructInit => return renderStructInit(ais, tree, tree.structInit(node), space),

.CallOne => unreachable, // TODO
.Call => {
const call = datas[node];
const params_range = tree.extraData(call.rhs, ast.Node.SubRange);
const params = tree.extra_data[params_range.start..params_range.end];
const async_token = tree.firstToken(call.lhs) - 1;
if (token_tags[async_token] == .Keyword_async) {
try renderToken(ais, tree, async_token, .Space);
}
try renderExpression(ais, tree, call.lhs, .None);

const lparen = main_tokens[node];

if (params.len == 0) {
try renderToken(ais, tree, lparen, .None);
return renderToken(ais, tree, lparen + 1, space); // )
}

const last_param = params[params.len - 1];
const after_last_param_tok = tree.lastToken(last_param) + 1;
if (token_tags[after_last_param_tok] == .Comma) {
ais.pushIndent();
try renderToken(ais, tree, lparen, Space.Newline); // (
for (params) |param_node, i| {
if (i + 1 < params.len) {
try renderExpression(ais, tree, param_node, Space.None);

// Unindent the comma for multiline string literals
const is_multiline_string = node_tags[param_node] == .StringLiteral and
token_tags[main_tokens[param_node]] == .MultilineStringLiteralLine;
if (is_multiline_string) ais.popIndent();

const comma = tree.lastToken(param_node) + 1;
try renderToken(ais, tree, comma, Space.Newline); // ,

if (is_multiline_string) ais.pushIndent();

try renderExtraNewline(ais, tree, params[i + 1]);
} else {
try renderExpression(ais, tree, param_node, Space.Comma);
}
}
ais.popIndent();
return renderToken(ais, tree, after_last_param_tok + 1, space); // )
}

try renderToken(ais, tree, lparen, Space.None); // (

for (params) |param_node, i| {
try renderExpression(ais, tree, param_node, Space.None);

if (i + 1 < params.len) {
const comma = tree.lastToken(param_node) + 1;
try renderToken(ais, tree, comma, Space.Space);
}
}
return renderToken(ais, tree, after_last_param_tok, space); // )
.CallOne, .CallOneComma, .AsyncCallOne, .AsyncCallOneComma => {
var params: [1]ast.Node.Index = undefined;
return renderCall(ais, tree, tree.callOne(&params, node), space);
},

.Call,
.CallComma,
.AsyncCall,
.AsyncCallComma,
=> return renderCall(ais, tree, tree.callFull(node), space),

.ArrayAccess => {
const suffix = datas[node];
const lbracket = tree.firstToken(suffix.rhs) - 1;
@@ -625,18 +587,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac
},
.FnProto => return renderFnProto(ais, tree, tree.fnProto(node), space),

.AnyFrameType => unreachable, // TODO
//.AnyFrameType => {
// const anyframe_type = @fieldParentPtr(ast.Node.AnyFrameType, "base", base);

// if (anyframe_type.result) |result| {
// try renderToken(ais, tree, anyframe_type.anyframe_token, Space.None); // anyframe
// try renderToken(ais, tree, result.arrow_token, Space.None); // ->
// return renderExpression(ais, tree, result.return_type, space);
// } else {
// return renderToken(ais, tree, anyframe_type.anyframe_token, space); // anyframe
// }
//},
.AnyFrameType => {
const main_token = main_tokens[node];
if (datas[node].rhs != 0) {
try renderToken(ais, tree, main_token, .None); // anyframe
try renderToken(ais, tree, main_token + 1, .None); // ->
return renderExpression(ais, tree, datas[node].rhs, space);
} else {
return renderToken(ais, tree, main_token, space); // anyframe
}
},

.Switch,
.SwitchComma,
@@ -1730,13 +1690,7 @@ fn renderContainerDecl(
// One member per line.
ais.pushIndent();
try renderToken(ais, tree, lbrace, .Newline); // lbrace
for (container_decl.ast.members) |member, i| {
try renderMember(ais, tree, member, .Newline);

if (i + 1 < container_decl.ast.members.len) {
try renderExtraNewline(ais, tree, container_decl.ast.members[i + 1]);
}
}
try renderAllMembers(ais, tree, container_decl.ast.members);
ais.popIndent();

return renderToken(ais, tree, rbrace, space); // rbrace
@@ -1871,6 +1825,69 @@ fn renderAsm(
} else unreachable; // TODO shouldn't need this on while(true)
}

fn renderCall(
ais: *Ais,
tree: ast.Tree,
call: ast.full.Call,
space: Space,
) Error!void {
const token_tags = tree.tokens.items(.tag);
const node_tags = tree.nodes.items(.tag);
const main_tokens = tree.nodes.items(.main_token);

if (call.async_token) |async_token| {
try renderToken(ais, tree, async_token, .Space);
}
try renderExpression(ais, tree, call.ast.fn_expr, .None);

const lparen = call.ast.lparen;
const params = call.ast.params;
if (params.len == 0) {
try renderToken(ais, tree, lparen, .None);
return renderToken(ais, tree, lparen + 1, space); // )
}

const last_param = params[params.len - 1];
const after_last_param_tok = tree.lastToken(last_param) + 1;
if (token_tags[after_last_param_tok] == .Comma) {
ais.pushIndent();
try renderToken(ais, tree, lparen, Space.Newline); // (
for (params) |param_node, i| {
if (i + 1 < params.len) {
try renderExpression(ais, tree, param_node, Space.None);

// Unindent the comma for multiline string literals
const is_multiline_string = node_tags[param_node] == .StringLiteral and
token_tags[main_tokens[param_node]] == .MultilineStringLiteralLine;
if (is_multiline_string) ais.popIndent();

const comma = tree.lastToken(param_node) + 1;
try renderToken(ais, tree, comma, Space.Newline); // ,

if (is_multiline_string) ais.pushIndent();

try renderExtraNewline(ais, tree, params[i + 1]);
} else {
try renderExpression(ais, tree, param_node, Space.Comma);
}
}
ais.popIndent();
return renderToken(ais, tree, after_last_param_tok + 1, space); // )
}

try renderToken(ais, tree, lparen, Space.None); // (

for (params) |param_node, i| {
try renderExpression(ais, tree, param_node, Space.None);

if (i + 1 < params.len) {
const comma = tree.lastToken(param_node) + 1;
try renderToken(ais, tree, comma, Space.Space);
}
}
return renderToken(ais, tree, after_last_param_tok, space); // )
}
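
One detail in renderCall worth noting: when a parameter is a multiline string literal, the indent is popped before rendering the comma that follows it, so the comma is not pushed out to the string's indentation. Roughly, for a call written with a trailing comma (a sketch of the expected canonical form with made-up identifiers, not quoted from this commit's tests):

    foo(
        \\first line
        \\second line
    ,
        other_arg,
    );
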
/// Render an expression, and the comma that follows it, if it is present in the source.
fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void {
const token_tags = tree.tokens.items(.tag);