parse and render new for loop syntax

Veikka Tuominen 2023-01-30 19:10:03 +02:00 committed by Andrew Kelley
parent 5e7b09ce9f
commit 1b7055b514
4 changed files with 410 additions and 105 deletions
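For orientation (not part of the diff itself): a rough sketch of the surface syntax this commit teaches the parser and zig fmt to accept, mirroring the test cases further down. The identifiers are illustrative, and only parsing and rendering are covered by this commit. A for loop now takes a comma-separated list of inputs, each a plain expression or a range `a..b` (possibly unbounded, `0..`), with exactly one capture per input.

    const std = @import("std");

    test "new for loop syntax (sketch)" {
        const a = [_]u8{ 1, 2, 3 };
        const b = [_]u8{ 4, 5, 6 };

        // Several inputs iterated in lockstep, one capture per input.
        var sum: usize = 0;
        for (a, b) |x, y| sum += x + y;

        // An unbounded range replaces the old implicit index capture.
        for (a, 0..) |elem, i| {
            _ = elem;
            _ = i;
        }

        // A bounded range can be the only input.
        for (0..3) |i| {
            _ = i;
        }

        try std.testing.expect(sum == 21);
    }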

View File

@ -386,6 +386,12 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
.expected_comma_after_switch_prong => {
return stream.writeAll("expected ',' after switch prong");
},
.expected_comma_after_for_operand => {
return stream.writeAll("expected ',' after for operand");
},
.expected_comma_after_capture => {
return stream.writeAll("expected ',' after for capture");
},
.expected_initializer => {
return stream.writeAll("expected field initializer");
},
@ -420,6 +426,12 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
.var_const_decl => {
return stream.writeAll("use 'var' or 'const' to declare variable");
},
.extra_for_capture => {
return stream.writeAll("excess for captures");
},
.for_input_not_captured => {
return stream.writeAll("for input is not captured");
},
.expected_token => {
const found_tag = token_tags[parse_error.token + @boolToInt(parse_error.token_is_prev)];
@ -568,6 +580,7 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.call,
.call_comma,
.switch_range,
.for_range,
.error_union,
=> n = datas[n].lhs,
@ -845,6 +858,12 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.switch_range,
=> n = datas[n].rhs,
.for_range => if (datas[n].rhs != 0) {
n = datas[n].rhs;
} else {
return main_tokens[n] + end_offset;
},
.field_access,
.unwrap_optional,
.grouped_expression,
@ -1263,11 +1282,15 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
assert(extra.else_expr != 0);
n = extra.else_expr;
},
.@"if", .@"for" => {
.@"if" => {
const extra = tree.extraData(datas[n].rhs, Node.If);
assert(extra.else_expr != 0);
n = extra.else_expr;
},
.@"for" => {
const extra = @bitCast(Node.For, datas[n].rhs);
n = tree.extra_data[datas[n].lhs + extra.inputs + @boolToInt(extra.has_else)];
},
.@"suspend" => {
if (datas[n].lhs != 0) {
n = datas[n].lhs;
@ -1916,26 +1939,28 @@ pub fn whileFull(tree: Ast, node: Node.Index) full.While {
});
}
pub fn forSimple(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
return tree.fullWhileComponents(.{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = 0,
pub fn forSimple(tree: Ast, node: Node.Index) full.For {
const data = &tree.nodes.items(.data)[node];
const inputs: *[1]Node.Index = &data.lhs;
return tree.fullForComponents(.{
.for_token = tree.nodes.items(.main_token)[node],
.inputs = inputs[0..1],
.then_expr = data.rhs,
.else_expr = 0,
});
}
pub fn forFull(tree: Ast, node: Node.Index) full.While {
pub fn forFull(tree: Ast, node: Node.Index) full.For {
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.rhs, Node.If);
return tree.fullWhileComponents(.{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = 0,
.then_expr = extra.then_expr,
.else_expr = extra.else_expr,
const extra = @bitCast(Node.For, data.rhs);
const inputs = tree.extra_data[data.lhs..][0..extra.inputs];
const then_expr = tree.extra_data[data.lhs + extra.inputs];
const else_expr = if (extra.has_else) tree.extra_data[data.lhs + extra.inputs + 1] else 0;
return tree.fullForComponents(.{
.for_token = tree.nodes.items(.main_token)[node],
.inputs = inputs,
.then_expr = then_expr,
.else_expr = else_expr,
});
}
@ -2243,6 +2268,33 @@ fn fullWhileComponents(tree: Ast, info: full.While.Components) full.While {
return result;
}
fn fullForComponents(tree: Ast, info: full.For.Components) full.For {
const token_tags = tree.tokens.items(.tag);
var result: full.For = .{
.ast = info,
.inline_token = null,
.label_token = null,
.payload_token = undefined,
.else_token = undefined,
};
var tok_i = info.for_token - 1;
if (token_tags[tok_i] == .keyword_inline) {
result.inline_token = tok_i;
tok_i -= 1;
}
if (token_tags[tok_i] == .colon and
token_tags[tok_i - 1] == .identifier)
{
result.label_token = tok_i - 1;
}
const last_cond_token = tree.lastToken(info.inputs[info.inputs.len - 1]);
result.payload_token = last_cond_token + 3 + @boolToInt(token_tags[last_cond_token + 1] == .comma);
if (info.else_expr != 0) {
result.else_token = tree.lastToken(info.then_expr) + 1;
}
return result;
}
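A worked token-count example for the payload_token computation above (the loop text is illustrative):

    // for (a, 0..) |v, i| { ... }
    //
    //   last_cond_token      -> `..`   (last token of the final input, `0..`)
    //   last_cond_token + 1  -> `)`
    //   last_cond_token + 2  -> `|`
    //   last_cond_token + 3  -> `v`    = payload_token (it may also land on a `*`)
    //
    // With a trailing comma, `for (a, 0..,) |v, i|`, the `,` occupies +1, so the
    // `@boolToInt(token_tags[last_cond_token + 1] == .comma)` term bumps the
    // payload offset to +4.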
fn fullCallComponents(tree: Ast, info: full.Call.Components) full.Call {
const token_tags = tree.tokens.items(.tag);
var result: full.Call = .{
@ -2279,6 +2331,12 @@ pub fn fullWhile(tree: Ast, node: Node.Index) ?full.While {
.while_simple => tree.whileSimple(node),
.while_cont => tree.whileCont(node),
.@"while" => tree.whileFull(node),
else => null,
};
}
pub fn fullFor(tree: Ast, node: Node.Index) ?full.For {
return switch (tree.nodes.items(.tag)[node]) {
.for_simple => tree.forSimple(node),
.@"for" => tree.forFull(node),
else => null,
@ -2453,6 +2511,22 @@ pub const full = struct {
};
};
pub const For = struct {
ast: Components,
inline_token: ?TokenIndex,
label_token: ?TokenIndex,
payload_token: TokenIndex,
/// Populated only if else_expr != 0.
else_token: TokenIndex,
pub const Components = struct {
for_token: TokenIndex,
inputs: []const Node.Index,
then_expr: Node.Index,
else_expr: Node.Index,
};
};
pub const ContainerField = struct {
comptime_token: ?TokenIndex,
ast: Components,
@ -2795,6 +2869,8 @@ pub const Error = struct {
expected_comma_after_param,
expected_comma_after_initializer,
expected_comma_after_switch_prong,
expected_comma_after_for_operand,
expected_comma_after_capture,
expected_initializer,
mismatched_binary_op_whitespace,
invalid_ampersand_ampersand,
@ -2802,6 +2878,8 @@ pub const Error = struct {
expected_var_const,
wrong_equal_var_decl,
var_const_decl,
extra_for_capture,
for_input_not_captured,
zig_style_container,
previous_field,
@ -3112,8 +3190,10 @@ pub const Node = struct {
@"while",
/// `for (lhs) rhs`.
for_simple,
/// `for (lhs) a else b`. `if_list[rhs]`.
/// `for (lhs[0..inputs]) lhs[inputs + 1] else lhs[inputs + 2]`. `For[rhs]`.
@"for",
/// `lhs..rhs`.
for_range,
/// `if (lhs) rhs`.
/// `if (lhs) |a| rhs`.
if_simple,
@ -3369,6 +3449,11 @@ pub const Node = struct {
then_expr: Index,
};
pub const For = packed struct(u32) {
inputs: u31,
has_else: bool,
};
pub const FnProtoOne = struct {
/// Populated if there is exactly 1 parameter. Otherwise there are 0 parameters.
param: Index,

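To summarize the encoding introduced in this file (restating what forSimple, forFull, and lastToken above decode; nothing here goes beyond the diff):

    // .for_simple keeps the old two-field shape:
    //   data.lhs = the single input, data.rhs = the body.
    //
    // .@"for" packs everything else:
    //   data.rhs : Node.For  (packed u32: .inputs = N, .has_else)
    //   data.lhs : start index into tree.extra_data of
    //     extra_data[lhs + 0 .. lhs + N]  the N inputs (plain exprs or .for_range)
    //     extra_data[lhs + N]             then_expr
    //     extra_data[lhs + N + 1]         else_expr, present only when has_else
    //
    // Consumers normalize both shapes through tree.fullFor(node), which yields a
    // full.For carrying the input slice, payload_token and (optional) else_token.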
View File

@ -104,6 +104,8 @@ fn warnMsg(p: *Parse, msg: Ast.Error) error{OutOfMemory}!void {
.expected_comma_after_param,
.expected_comma_after_initializer,
.expected_comma_after_switch_prong,
.expected_comma_after_for_operand,
.expected_comma_after_capture,
.expected_semi_or_else,
.expected_semi_or_lbrace,
.expected_token,
@ -1149,22 +1151,18 @@ fn parseLoopStatement(p: *Parse) !Node.Index {
return p.fail(.expected_inlinable);
}
/// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
///
/// ForStatement
/// <- ForPrefix BlockExpr ( KEYWORD_else Statement )?
/// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement )
fn parseForStatement(p: *Parse) !Node.Index {
const for_token = p.eatToken(.keyword_for) orelse return null_node;
_ = try p.expectToken(.l_paren);
const array_expr = try p.expectExpr();
_ = try p.expectToken(.r_paren);
const found_payload = try p.parsePtrIndexPayload();
if (found_payload == 0) try p.warn(.expected_loop_payload);
// TODO propose to change the syntax so that semicolons are always required
// inside while statements, even if there is an `else`.
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
const inputs = try p.forPrefix();
var else_required = false;
var seen_semicolon = false;
const then_expr = blk: {
const block_expr = try p.parseBlockExpr();
if (block_expr != 0) break :blk block_expr;
@ -1173,39 +1171,40 @@ fn parseForStatement(p: *Parse) !Node.Index {
return p.fail(.expected_block_or_assignment);
}
if (p.eatToken(.semicolon)) |_| {
return p.addNode(.{
.tag = .for_simple,
.main_token = for_token,
.data = .{
.lhs = array_expr,
.rhs = assign_expr,
},
});
seen_semicolon = true;
break :blk assign_expr;
}
else_required = true;
break :blk assign_expr;
};
_ = p.eatToken(.keyword_else) orelse {
if (else_required) {
try p.warn(.expected_semi_or_else);
}
var has_else = false;
if (!seen_semicolon and p.eatToken(.keyword_else) != null) {
try p.scratch.append(p.gpa, then_expr);
const else_stmt = try p.expectStatement(false);
try p.scratch.append(p.gpa, else_stmt);
has_else = true;
} else if (inputs == 1) {
if (else_required) try p.warn(.expected_semi_or_else);
return p.addNode(.{
.tag = .for_simple,
.main_token = for_token,
.data = .{
.lhs = array_expr,
.lhs = p.scratch.items[scratch_top],
.rhs = then_expr,
},
});
};
} else {
if (else_required) try p.warn(.expected_semi_or_else);
try p.scratch.append(p.gpa, then_expr);
}
return p.addNode(.{
.tag = .@"for",
.main_token = for_token,
.data = .{
.lhs = array_expr,
.rhs = try p.addExtra(Node.If{
.then_expr = then_expr,
.else_expr = try p.expectStatement(false),
.lhs = (try p.listToSpan(p.scratch.items[scratch_top..])).start,
.rhs = @bitCast(u32, Node.For{
.inputs = @intCast(u31, inputs),
.has_else = has_else,
}),
},
});
@ -2056,42 +2055,118 @@ fn parseBlock(p: *Parse) !Node.Index {
}
}
/// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
///
/// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)?
fn parseForExpr(p: *Parse) !Node.Index {
const for_token = p.eatToken(.keyword_for) orelse return null_node;
_ = try p.expectToken(.l_paren);
const array_expr = try p.expectExpr();
_ = try p.expectToken(.r_paren);
const found_payload = try p.parsePtrIndexPayload();
if (found_payload == 0) try p.warn(.expected_loop_payload);
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
const inputs = try p.forPrefix();
const then_expr = try p.expectExpr();
_ = p.eatToken(.keyword_else) orelse {
var has_else = false;
if (p.eatToken(.keyword_else)) |_| {
try p.scratch.append(p.gpa, then_expr);
const else_expr = try p.expectExpr();
try p.scratch.append(p.gpa, else_expr);
has_else = true;
} else if (inputs == 1) {
return p.addNode(.{
.tag = .for_simple,
.main_token = for_token,
.data = .{
.lhs = array_expr,
.lhs = p.scratch.items[scratch_top],
.rhs = then_expr,
},
});
};
const else_expr = try p.expectExpr();
} else {
try p.scratch.append(p.gpa, then_expr);
}
return p.addNode(.{
.tag = .@"for",
.main_token = for_token,
.data = .{
.lhs = array_expr,
.rhs = try p.addExtra(Node.If{
.then_expr = then_expr,
.else_expr = else_expr,
.lhs = (try p.listToSpan(p.scratch.items[scratch_top..])).start,
.rhs = @bitCast(u32, Node.For{
.inputs = @intCast(u31, inputs),
.has_else = has_else,
}),
},
});
}
/// ForPrefix <- KEYWORD_for LPAREN ForInput (COMMA ForInput)* COMMA? RPAREN ForPayload
///
/// ForInput <- Expr (DOT2 Expr?)?
///
/// ForPayload <- PIPE ASTERISK? IDENTIFIER (COMMA ASTERISK? IDENTIFIER)* PIPE
fn forPrefix(p: *Parse) Error!usize {
const start = p.scratch.items.len;
_ = try p.expectToken(.l_paren);
while (true) {
var input = try p.expectExpr();
if (p.eatToken(.ellipsis2)) |ellipsis| {
input = try p.addNode(.{
.tag = .for_range,
.main_token = ellipsis,
.data = .{
.lhs = input,
.rhs = try p.parseExpr(),
},
});
}
try p.scratch.append(p.gpa, input);
switch (p.token_tags[p.tok_i]) {
.comma => p.tok_i += 1,
.r_paren => {
p.tok_i += 1;
break;
},
.colon, .r_brace, .r_bracket => return p.failExpected(.r_paren),
// Likely just a missing comma; give error but continue parsing.
else => try p.warn(.expected_comma_after_for_operand),
}
if (p.eatToken(.r_paren)) |_| break;
}
const inputs = p.scratch.items.len - start;
_ = p.eatToken(.pipe) orelse {
try p.warn(.expected_loop_payload);
return inputs;
};
var warned_excess = false;
var captures: u32 = 0;
while (true) {
_ = p.eatToken(.asterisk);
const identifier = try p.expectToken(.identifier);
captures += 1;
if (captures > inputs and !warned_excess) {
try p.warnMsg(.{ .tag = .extra_for_capture, .token = identifier });
warned_excess = true;
}
switch (p.token_tags[p.tok_i]) {
.comma => p.tok_i += 1,
.pipe => {
p.tok_i += 1;
break;
},
// Likely just a missing comma; give error but continue parsing.
else => try p.warn(.expected_comma_after_capture),
}
if (p.eatToken(.pipe)) |_| break;
}
if (captures < inputs) {
const index = p.scratch.items.len - captures;
const input = p.nodes.items(.main_token)[p.scratch.items[index]];
try p.warnMsg(.{ .tag = .for_input_not_captured, .token = input });
}
return inputs;
}
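Concretely, the prefixes this grammar accepts and the new diagnostics can be illustrated with a small test in the new syntax (identifiers are made up; only the happy paths actually compile):

    test "for prefixes accepted by forPrefix" {
        const xs = [_]u8{ 1, 2 };
        const ys = [_]u8{ 3, 4 };

        // One capture per input; a range such as `10..12` is just another input.
        for (xs) |x| _ = x;
        for (xs, 0..) |x, i| {
            _ = x;
            _ = i;
        }
        for (xs, ys, 10..12) |x, y, n| {
            _ = x;
            _ = y;
            _ = n;
        }

        // Count mismatches are warned about, and parsing continues:
        //   for (xs, ys) |x| {}   -> for_input_not_captured
        //   for (xs) |x, y| {}    -> extra_for_capture
        //   for (xs ys) |x, y| {} -> expected_comma_after_for_operand
    }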
/// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
///
/// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
@ -2752,37 +2827,41 @@ fn expectPrimaryTypeExpr(p: *Parse) !Node.Index {
return node;
}
/// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
///
/// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)?
fn parseForTypeExpr(p: *Parse) !Node.Index {
const for_token = p.eatToken(.keyword_for) orelse return null_node;
_ = try p.expectToken(.l_paren);
const array_expr = try p.expectExpr();
_ = try p.expectToken(.r_paren);
const found_payload = try p.parsePtrIndexPayload();
if (found_payload == 0) try p.warn(.expected_loop_payload);
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
const inputs = try p.forPrefix();
const then_expr = try p.expectTypeExpr();
_ = p.eatToken(.keyword_else) orelse {
var has_else = false;
if (p.eatToken(.keyword_else)) |_| {
try p.scratch.append(p.gpa, then_expr);
const else_expr = try p.expectTypeExpr();
try p.scratch.append(p.gpa, else_expr);
has_else = true;
} else if (inputs == 1) {
return p.addNode(.{
.tag = .for_simple,
.main_token = for_token,
.data = .{
.lhs = array_expr,
.lhs = p.scratch.items[scratch_top],
.rhs = then_expr,
},
});
};
const else_expr = try p.expectTypeExpr();
} else {
try p.scratch.append(p.gpa, then_expr);
}
return p.addNode(.{
.tag = .@"for",
.main_token = for_token,
.data = .{
.lhs = array_expr,
.rhs = try p.addExtra(Node.If{
.then_expr = then_expr,
.else_expr = else_expr,
.lhs = (try p.listToSpan(p.scratch.items[scratch_top..])).start,
.rhs = @bitCast(u32, Node.For{
.inputs = @intCast(u31, inputs),
.has_else = has_else,
}),
},
});

View File

@ -3457,11 +3457,11 @@ test "zig fmt: for" {
\\ for (a) |*v|
\\ continue;
\\
\\ for (a) |v, i| {
\\ for (a, 0..) |v, i| {
\\ continue;
\\ }
\\
\\ for (a) |v, i|
\\ for (a, 0..) |v, i|
\\ continue;
\\
\\ for (a) |b| switch (b) {
@ -3469,17 +3469,24 @@ test "zig fmt: for" {
\\ d => {},
\\ };
\\
\\ const res = for (a) |v, i| {
\\ const res = for (a, 0..) |v, i| {
\\ break v;
\\ } else {
\\ unreachable;
\\ };
\\
\\ var num: usize = 0;
\\ inline for (a) |v, i| {
\\ inline for (a, 0..1) |v, i| {
\\ num += v;
\\ num += i;
\\ }
\\
\\ for (a, b) |
\\ long_name,
\\ another_long_name,
\\ | {
\\ continue;
\\ }
\\}
\\
);
@ -3499,6 +3506,26 @@ test "zig fmt: for" {
\\}
\\
);
try testTransform(
\\test "fix for" {
\\ for (a, b, c,) |long, another, third,| {}
\\}
\\
,
\\test "fix for" {
\\ for (
\\ a,
\\ b,
\\ c,
\\ ) |
\\ long,
\\ another,
\\ third,
\\ | {}
\\}
\\
);
}
test "zig fmt: for if" {
@ -4358,7 +4385,7 @@ test "zig fmt: hex literals with underscore separators" {
try testTransform(
\\pub fn orMask(a: [ 1_000 ]u64, b: [ 1_000] u64) [1_000]u64 {
\\ var c: [1_000]u64 = [1]u64{ 0xFFFF_FFFF_FFFF_FFFF}**1_000;
\\ for (c [ 1_0 .. ]) |_, i| {
\\ for (c [ 1_0 .. ], 0..) |_, i| {
\\ c[i] = (a[i] | b[i]) & 0xCCAA_CCAA_CCAA_CCAA;
\\ }
\\ return c;
@ -4368,7 +4395,7 @@ test "zig fmt: hex literals with underscore separators" {
,
\\pub fn orMask(a: [1_000]u64, b: [1_000]u64) [1_000]u64 {
\\ var c: [1_000]u64 = [1]u64{0xFFFF_FFFF_FFFF_FFFF} ** 1_000;
\\ for (c[1_0..]) |_, i| {
\\ for (c[1_0..], 0..) |_, i| {
\\ c[i] = (a[i] | b[i]) & 0xCCAA_CCAA_CCAA_CCAA;
\\ }
\\ return c;
@ -4880,10 +4907,10 @@ test "zig fmt: remove trailing whitespace after doc comment" {
test "zig fmt: for loop with ptr payload and index" {
try testCanonical(
\\test {
\\ for (self.entries.items) |*item, i| {}
\\ for (self.entries.items) |*item, i|
\\ for (self.entries.items, 0..) |*item, i| {}
\\ for (self.entries.items, 0..) |*item, i|
\\ a = b;
\\ for (self.entries.items) |*item, i| a = b;
\\ for (self.entries.items, 0..) |*item, i| a = b;
\\}
\\
);
@ -5471,7 +5498,7 @@ test "zig fmt: canonicalize symbols (primitive types)" {
\\ _ = @"void": {
\\ break :@"void";
\\ };
\\ for ("hi") |@"u3", @"i4"| {
\\ for ("hi", 0..) |@"u3", @"i4"| {
\\ _ = @"u3";
\\ _ = @"i4";
\\ }
@ -5523,7 +5550,7 @@ test "zig fmt: canonicalize symbols (primitive types)" {
\\ _ = void: {
\\ break :void;
\\ };
\\ for ("hi") |@"u3", @"i4"| {
\\ for ("hi", 0..) |@"u3", @"i4"| {
\\ _ = @"u3";
\\ _ = @"i4";
\\ }

View File

@ -353,6 +353,16 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
try renderToken(ais, tree, main_tokens[node], .none);
return renderExpression(gpa, ais, tree, infix.rhs, space);
},
.for_range => {
const infix = datas[node];
try renderExpression(gpa, ais, tree, infix.lhs, .none);
if (infix.rhs != 0) {
try renderToken(ais, tree, main_tokens[node], .none);
return renderExpression(gpa, ais, tree, infix.rhs, space);
} else {
return renderToken(ais, tree, main_tokens[node], space);
}
},
.add,
.add_wrap,
@ -694,9 +704,11 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
.while_simple,
.while_cont,
.@"while",
=> return renderWhile(gpa, ais, tree, tree.fullWhile(node).?, space),
.for_simple,
.@"for",
=> return renderWhile(gpa, ais, tree, tree.fullWhile(node).?, space),
=> return renderFor(gpa, ais, tree, tree.fullFor(node).?, space),
.if_simple,
.@"if",
@ -1054,10 +1066,9 @@ fn renderIf(gpa: Allocator, ais: *Ais, tree: Ast, if_node: Ast.full.If, space: S
}, space);
}
/// Note that this function is additionally used to render if and for expressions, with
/// Note that this function is additionally used to render if expressions, with
/// respective values set to null.
fn renderWhile(gpa: Allocator, ais: *Ais, tree: Ast, while_node: Ast.full.While, space: Space) Error!void {
const node_tags = tree.nodes.items(.tag);
const token_tags = tree.tokens.items(.tag);
if (while_node.label_token) |label| {
@ -1108,9 +1119,34 @@ fn renderWhile(gpa: Allocator, ais: *Ais, tree: Ast, while_node: Ast.full.While,
last_prefix_token = tree.lastToken(while_node.ast.cont_expr) + 1; // rparen
}
const then_expr_is_block = nodeIsBlock(node_tags[while_node.ast.then_expr]);
try renderThenElse(
gpa,
ais,
tree,
last_prefix_token,
while_node.ast.then_expr,
while_node.else_token,
while_node.error_token,
while_node.ast.else_expr,
space,
);
}
fn renderThenElse(
gpa: Allocator,
ais: *Ais,
tree: Ast,
last_prefix_token: Ast.TokenIndex,
then_expr: Ast.Node.Index,
else_token: Ast.TokenIndex,
maybe_error_token: ?Ast.TokenIndex,
else_expr: Ast.Node.Index,
space: Space,
) Error!void {
const node_tags = tree.nodes.items(.tag);
const then_expr_is_block = nodeIsBlock(node_tags[then_expr]);
const indent_then_expr = !then_expr_is_block and
!tree.tokensOnSameLine(last_prefix_token, tree.firstToken(while_node.ast.then_expr));
!tree.tokensOnSameLine(last_prefix_token, tree.firstToken(then_expr));
if (indent_then_expr or (then_expr_is_block and ais.isLineOverIndented())) {
ais.pushIndentNextLine();
try renderToken(ais, tree, last_prefix_token, .newline);
@ -1119,45 +1155,115 @@ fn renderWhile(gpa: Allocator, ais: *Ais, tree: Ast, while_node: Ast.full.While,
try renderToken(ais, tree, last_prefix_token, .space);
}
if (while_node.ast.else_expr != 0) {
if (else_expr != 0) {
if (indent_then_expr) {
ais.pushIndent();
try renderExpression(gpa, ais, tree, while_node.ast.then_expr, .newline);
try renderExpression(gpa, ais, tree, then_expr, .newline);
ais.popIndent();
} else {
try renderExpression(gpa, ais, tree, while_node.ast.then_expr, .space);
try renderExpression(gpa, ais, tree, then_expr, .space);
}
var last_else_token = while_node.else_token;
var last_else_token = else_token;
if (while_node.error_token) |error_token| {
try renderToken(ais, tree, while_node.else_token, .space); // else
if (maybe_error_token) |error_token| {
try renderToken(ais, tree, else_token, .space); // else
try renderToken(ais, tree, error_token - 1, .none); // |
try renderIdentifier(ais, tree, error_token, .none, .preserve_when_shadowing); // identifier
last_else_token = error_token + 1; // |
}
const indent_else_expr = indent_then_expr and
!nodeIsBlock(node_tags[while_node.ast.else_expr]) and
!nodeIsIfForWhileSwitch(node_tags[while_node.ast.else_expr]);
!nodeIsBlock(node_tags[else_expr]) and
!nodeIsIfForWhileSwitch(node_tags[else_expr]);
if (indent_else_expr) {
ais.pushIndentNextLine();
try renderToken(ais, tree, last_else_token, .newline);
ais.popIndent();
try renderExpressionIndented(gpa, ais, tree, while_node.ast.else_expr, space);
try renderExpressionIndented(gpa, ais, tree, else_expr, space);
} else {
try renderToken(ais, tree, last_else_token, .space);
try renderExpression(gpa, ais, tree, while_node.ast.else_expr, space);
try renderExpression(gpa, ais, tree, else_expr, space);
}
} else {
if (indent_then_expr) {
try renderExpressionIndented(gpa, ais, tree, while_node.ast.then_expr, space);
try renderExpressionIndented(gpa, ais, tree, then_expr, space);
} else {
try renderExpression(gpa, ais, tree, while_node.ast.then_expr, space);
try renderExpression(gpa, ais, tree, then_expr, space);
}
}
}
fn renderFor(gpa: Allocator, ais: *Ais, tree: Ast, for_node: Ast.full.For, space: Space) Error!void {
const token_tags = tree.tokens.items(.tag);
if (for_node.label_token) |label| {
try renderIdentifier(ais, tree, label, .none, .eagerly_unquote); // label
try renderToken(ais, tree, label + 1, .space); // :
}
if (for_node.inline_token) |inline_token| {
try renderToken(ais, tree, inline_token, .space); // inline
}
try renderToken(ais, tree, for_node.ast.for_token, .space); // if/for/while
const lparen = for_node.ast.for_token + 1;
try renderParamList(gpa, ais, tree, lparen, for_node.ast.inputs, .space);
var cur = for_node.payload_token;
const pipe = std.mem.indexOfScalarPos(std.zig.Token.Tag, token_tags, cur, .pipe).?;
if (token_tags[pipe - 1] == .comma) {
ais.pushIndentNextLine();
try renderToken(ais, tree, cur - 1, .newline); // |
while (true) {
if (token_tags[cur] == .asterisk) {
try renderToken(ais, tree, cur, .none); // *
cur += 1;
}
try renderIdentifier(ais, tree, cur, .none, .preserve_when_shadowing); // identifier
cur += 1;
if (token_tags[cur] == .comma) {
try renderToken(ais, tree, cur, .newline); // ,
cur += 1;
}
if (token_tags[cur] == .pipe) {
break;
}
}
ais.popIndent();
} else {
try renderToken(ais, tree, cur - 1, .none); // |
while (true) {
if (token_tags[cur] == .asterisk) {
try renderToken(ais, tree, cur, .none); // *
cur += 1;
}
try renderIdentifier(ais, tree, cur, .none, .preserve_when_shadowing); // identifier
cur += 1;
if (token_tags[cur] == .comma) {
try renderToken(ais, tree, cur, .space); // ,
cur += 1;
}
if (token_tags[cur] == .pipe) {
break;
}
}
}
try renderThenElse(
gpa,
ais,
tree,
cur,
for_node.ast.then_expr,
for_node.else_token,
null,
for_node.ast.else_expr,
space,
);
}
fn renderContainerField(
gpa: Allocator,
ais: *Ais,
@ -2206,15 +2312,23 @@ fn renderCall(
call: Ast.full.Call,
space: Space,
) Error!void {
const token_tags = tree.tokens.items(.tag);
if (call.async_token) |async_token| {
try renderToken(ais, tree, async_token, .space);
}
try renderExpression(gpa, ais, tree, call.ast.fn_expr, .none);
try renderParamList(gpa, ais, tree, call.ast.lparen, call.ast.params, space);
}
fn renderParamList(
gpa: Allocator,
ais: *Ais,
tree: Ast,
lparen: Ast.TokenIndex,
params: []const Ast.Node.Index,
space: Space,
) Error!void {
const token_tags = tree.tokens.items(.tag);
const lparen = call.ast.lparen;
const params = call.ast.params;
if (params.len == 0) {
ais.pushIndentNextLine();
try renderToken(ais, tree, lparen, .none);