parser: allow unnamed fields in structs
commit 6fb689e97a
parent 32ce2f91a9
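For context, this change lets a struct field be written without a name: the field is just a type, optionally with comptime, align(...), and a default value. The canonical form, taken from the "zig fmt: tuple struct" test added later in this diff:

const T = struct {
    comptime u32,
    *u32 = 1,
    // needs to be wrapped in parentheses to not be parsed as a function decl
    (fn () void) align(1),
};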
@@ -559,6 +559,7 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.container_field,
=> {
const name_token = main_tokens[n];
if (token_tags[name_token + 1] != .colon) return name_token - end_offset;
if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) {
end_offset += 1;
}
@@ -1320,33 +1321,39 @@ pub fn containerField(tree: Ast, node: Node.Index) full.ContainerField {
assert(tree.nodes.items(.tag)[node] == .container_field);
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.rhs, Node.ContainerField);
const main_token = tree.nodes.items(.main_token)[node];
return tree.fullContainerField(.{
.name_token = tree.nodes.items(.main_token)[node],
.main_token = main_token,
.type_expr = data.lhs,
.value_expr = extra.value_expr,
.align_expr = extra.align_expr,
.tuple_like = tree.tokens.items(.tag)[main_token + 1] != .colon,
});
}

pub fn containerFieldInit(tree: Ast, node: Node.Index) full.ContainerField {
assert(tree.nodes.items(.tag)[node] == .container_field_init);
const data = tree.nodes.items(.data)[node];
const main_token = tree.nodes.items(.main_token)[node];
return tree.fullContainerField(.{
.name_token = tree.nodes.items(.main_token)[node],
.main_token = main_token,
.type_expr = data.lhs,
.value_expr = data.rhs,
.align_expr = 0,
.tuple_like = tree.tokens.items(.tag)[main_token + 1] != .colon,
});
}

pub fn containerFieldAlign(tree: Ast, node: Node.Index) full.ContainerField {
assert(tree.nodes.items(.tag)[node] == .container_field_align);
const data = tree.nodes.items(.data)[node];
const main_token = tree.nodes.items(.main_token)[node];
return tree.fullContainerField(.{
.name_token = tree.nodes.items(.main_token)[node],
.main_token = main_token,
.type_expr = data.lhs,
.value_expr = 0,
.align_expr = data.rhs,
.tuple_like = tree.tokens.items(.tag)[main_token + 1] != .colon,
});
}
@@ -1944,10 +1951,14 @@ fn fullContainerField(tree: Ast, info: full.ContainerField.Components) full.Cont
.ast = info,
.comptime_token = null,
};
// comptime name: type = init,
// ^
if (info.name_token > 0 and token_tags[info.name_token - 1] == .keyword_comptime) {
result.comptime_token = info.name_token - 1;
if (token_tags[info.main_token] == .keyword_comptime) {
// comptime type = init,
// ^
result.comptime_token = info.main_token;
} else if (info.main_token > 0 and token_tags[info.main_token - 1] == .keyword_comptime) {
// comptime name: type = init,
// ^
result.comptime_token = info.main_token - 1;
}
return result;
}
@@ -2256,14 +2267,15 @@ pub const full = struct {
ast: Components,

pub const Components = struct {
name_token: TokenIndex,
main_token: TokenIndex,
type_expr: Node.Index,
value_expr: Node.Index,
align_expr: Node.Index,
tuple_like: bool,
};

pub fn firstToken(cf: ContainerField) TokenIndex {
return cf.comptime_token orelse cf.ast.name_token;
return cf.comptime_token orelse cf.ast.main_token;
}
};
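The new tuple_like component is computed by the containerField* accessors above: a field is tuple-like when the token after its main_token is not a colon, i.e. the field has no name. As a rough sketch of how an AST consumer could use the flag (the helper name here is made up; the switch mirrors the renderMembers loop later in this diff):

// Hypothetical helper: true when `member` is a container field with no name.
fn isUnnamedField(tree: Ast, member: Ast.Node.Index) bool {
    return switch (tree.nodes.items(.tag)[member]) {
        .container_field_init => tree.containerFieldInit(member).ast.tuple_like,
        .container_field_align => tree.containerFieldAlign(member).ast.tuple_like,
        .container_field => tree.containerField(member).ast.tuple_like,
        else => false,
    };
}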
@@ -272,53 +272,6 @@ const Parser = struct {
trailing = false;
},
.keyword_comptime => switch (p.token_tags[p.tok_i + 1]) {
.identifier => {
p.tok_i += 1;
const identifier = p.tok_i;
defer last_field = identifier;
const container_field = try p.expectContainerFieldRecoverable();
if (container_field != 0) {
switch (field_state) {
.none => field_state = .seen,
.err, .seen => {},
.end => |node| {
try p.warnMsg(.{
.tag = .decl_between_fields,
.token = p.nodes.items(.main_token)[node],
});
try p.warnMsg(.{
.tag = .previous_field,
.is_note = true,
.token = last_field,
});
try p.warnMsg(.{
.tag = .next_field,
.is_note = true,
.token = identifier,
});
// Continue parsing; error will be reported later.
field_state = .err;
},
}
try p.scratch.append(p.gpa, container_field);
switch (p.token_tags[p.tok_i]) {
.comma => {
p.tok_i += 1;
trailing = true;
continue;
},
.r_brace, .eof => {
trailing = false;
break;
},
else => {},
}
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.expected_comma_after_field);
p.findNextContainerMember();
}
},
.l_brace => {
if (doc_comment) |some| {
try p.warnMsg(.{ .tag = .test_doc_comment, .token = some });
@@ -349,7 +302,54 @@ const Parser = struct {
},
else => {
p.tok_i += 1;
try p.warn(.expected_block_or_field);
const identifier = p.tok_i;
defer last_field = identifier;
const container_field = p.expectContainerField() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
continue;
},
};
switch (field_state) {
.none => field_state = .seen,
.err, .seen => {},
.end => |node| {
try p.warnMsg(.{
.tag = .decl_between_fields,
.token = p.nodes.items(.main_token)[node],
});
try p.warnMsg(.{
.tag = .previous_field,
.is_note = true,
.token = last_field,
});
try p.warnMsg(.{
.tag = .next_field,
.is_note = true,
.token = identifier,
});
// Continue parsing; error will be reported later.
field_state = .err;
},
}
try p.scratch.append(p.gpa, container_field);
switch (p.token_tags[p.tok_i]) {
.comma => {
p.tok_i += 1;
trailing = true;
continue;
},
.r_brace, .eof => {
trailing = false;
break;
},
else => {},
}
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.expected_comma_after_field);
p.findNextContainerMember();
},
},
.keyword_pub => {
@@ -391,52 +391,6 @@ const Parser = struct {
}
trailing = p.token_tags[p.tok_i - 1] == .semicolon;
},
.identifier => {
const identifier = p.tok_i;
defer last_field = identifier;
const container_field = try p.expectContainerFieldRecoverable();
if (container_field != 0) {
switch (field_state) {
.none => field_state = .seen,
.err, .seen => {},
.end => |node| {
try p.warnMsg(.{
.tag = .decl_between_fields,
.token = p.nodes.items(.main_token)[node],
});
try p.warnMsg(.{
.tag = .previous_field,
.is_note = true,
.token = last_field,
});
try p.warnMsg(.{
.tag = .next_field,
.is_note = true,
.token = identifier,
});
// Continue parsing; error will be reported later.
field_state = .err;
},
}
try p.scratch.append(p.gpa, container_field);
switch (p.token_tags[p.tok_i]) {
.comma => {
p.tok_i += 1;
trailing = true;
continue;
},
.r_brace, .eof => {
trailing = false;
break;
},
else => {},
}
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.expected_comma_after_field);
p.findNextContainerMember();
}
},
.eof, .r_brace => {
if (doc_comment) |tok| {
try p.warnMsg(.{
@@ -451,11 +405,57 @@ const Parser = struct {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => false,
};
if (!c_container) {
try p.warn(.expected_container_members);
// This was likely not supposed to end yet; try to find the next declaration.
p.findNextContainerMember();
if (c_container) continue;

const identifier = p.tok_i;
defer last_field = identifier;
const container_field = p.expectContainerField() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
continue;
},
};
switch (field_state) {
.none => field_state = .seen,
.err, .seen => {},
.end => |node| {
try p.warnMsg(.{
.tag = .decl_between_fields,
.token = p.nodes.items(.main_token)[node],
});
try p.warnMsg(.{
.tag = .previous_field,
.is_note = true,
.token = last_field,
});
try p.warnMsg(.{
.tag = .next_field,
.is_note = true,
.token = identifier,
});
// Continue parsing; error will be reported later.
field_state = .err;
},
}
try p.scratch.append(p.gpa, container_field);
switch (p.token_tags[p.tok_i]) {
.comma => {
p.tok_i += 1;
trailing = true;
continue;
},
.r_brace, .eof => {
trailing = false;
break;
},
else => {},
}
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.expected_comma_after_field);
p.findNextContainerMember();
continue;
},
}
}
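All three field-parsing branches above emit the same trio of diagnostics when a declaration sits between two fields. A minimal sketch of source that reaches that path; the names are placeholders and the shape follows the existing "zig fmt: decl between fields" test referenced further down:

const S = struct {
    a: usize,
    const foo = 2; // .decl_between_fields points at this decl,
    b: usize, // with .previous_field noting `a` and .next_field noting `b`
};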
@@ -875,12 +875,16 @@ const Parser = struct {

/// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)?
fn expectContainerField(p: *Parser) !Node.Index {
var main_token = p.tok_i;
_ = p.eatToken(.keyword_comptime);
const name_token = p.assertToken(.identifier);
const tuple_like = p.token_tags[p.tok_i] != .identifier or p.token_tags[p.tok_i + 1] != .colon;
if (!tuple_like) {
main_token = p.assertToken(.identifier);
}

var align_expr: Node.Index = 0;
var type_expr: Node.Index = 0;
if (p.eatToken(.colon)) |_| {
if (p.eatToken(.colon) != null or tuple_like) {
type_expr = try p.expectTypeExpr();
align_expr = try p.parseByteAlign();
}
@@ -890,7 +894,7 @@ const Parser = struct {
if (align_expr == 0) {
return p.addNode(.{
.tag = .container_field_init,
.main_token = name_token,
.main_token = main_token,
.data = .{
.lhs = type_expr,
.rhs = value_expr,
@@ -899,7 +903,7 @@ const Parser = struct {
} else if (value_expr == 0) {
return p.addNode(.{
.tag = .container_field_align,
.main_token = name_token,
.main_token = main_token,
.data = .{
.lhs = type_expr,
.rhs = align_expr,
@@ -908,7 +912,7 @@ const Parser = struct {
} else {
return p.addNode(.{
.tag = .container_field,
.main_token = name_token,
.main_token = main_token,
.data = .{
.lhs = type_expr,
.rhs = try p.addExtra(Node.ContainerField{
@@ -920,16 +924,6 @@ const Parser = struct {
}
}

fn expectContainerFieldRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
return p.expectContainerField() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
return null_node;
},
};
}

/// Statement
/// <- KEYWORD_comptime? VarDecl
/// / KEYWORD_comptime BlockExprStatement
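In summary, expectContainerField now treats a field as tuple-like whenever it does not start with IDENTIFIER COLON, and the three node tags above are chosen by which optional parts are present. An illustrative container follows; the field names and types are placeholders, only parsing is shown, and later compile stages may still reject some combinations:

const Example = struct {
    a: u32, // .container_field_init (type only)
    b: u32 = 0, // .container_field_init (type and default value)
    c: u32 align(4), // .container_field_align
    d: u32 align(4) = 0, // .container_field (align and value via extra data)
    u8, // tuple-like: no name, just a type
    comptime e: u32 = 0, // comptime field; comptime_token is recovered in fullContainerField
};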
@@ -1,3 +1,15 @@
test "zig fmt: tuple struct" {
try testCanonical(
\\const T = struct {
\\ comptime u32,
\\ *u32 = 1,
\\ // needs to be wrapped in parentheses to not be parsed as a function decl
\\ (fn () void) align(1),
\\};
\\
);
}

test "zig fmt: preserves clobbers in inline asm with stray comma" {
try testCanonical(
\\fn foo() void {
@@ -265,14 +277,6 @@ test "zig fmt: decl between fields" {
});
}

test "zig fmt: eof after missing comma" {
try testError(
\\foo()
, &[_]Error{
.expected_comma_after_field,
});
}

test "zig fmt: errdefer with payload" {
try testCanonical(
\\pub fn main() anyerror!void {
@@ -5732,8 +5736,8 @@ test "recovery: missing semicolon" {
test "recovery: invalid container members" {
try testError(
\\usingnamespace;
\\foo+
\\bar@,
\\@foo()+
\\@bar()@,
\\while (a == 2) { test "" {}}
\\test "" {
\\ a & b
@@ -5741,7 +5745,7 @@ test "recovery: invalid container members" {
, &[_]Error{
.expected_expr,
.expected_comma_after_field,
.expected_container_members,
.expected_type_expr,
.expected_semi_after_stmt,
});
}
@@ -40,14 +40,34 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: Ast) Error!void {
/// Render all members in the given slice, keeping empty lines where appropriate
fn renderMembers(gpa: Allocator, ais: *Ais, tree: Ast, members: []const Ast.Node.Index) Error!void {
if (members.len == 0) return;
try renderMember(gpa, ais, tree, members[0], .newline);
var any_non_tuple_like_fields = false;
for (members) |member| {
const tuple_like = switch (tree.nodes.items(.tag)[member]) {
.container_field_init => tree.containerFieldInit(member).ast.tuple_like,
.container_field_align => tree.containerFieldAlign(member).ast.tuple_like,
.container_field => tree.containerField(member).ast.tuple_like,
else => continue,
};
if (!tuple_like) {
any_non_tuple_like_fields = true;
break;
}
}
try renderMember(gpa, ais, tree, members[0], any_non_tuple_like_fields, .newline);
for (members[1..]) |member| {
try renderExtraNewline(ais, tree, member);
try renderMember(gpa, ais, tree, member, .newline);
try renderMember(gpa, ais, tree, member, any_non_tuple_like_fields, .newline);
}
}

fn renderMember(gpa: Allocator, ais: *Ais, tree: Ast, decl: Ast.Node.Index, space: Space) Error!void {
fn renderMember(
gpa: Allocator,
ais: *Ais,
tree: Ast,
decl: Ast.Node.Index,
any_non_tuple_like_fields: bool,
space: Space,
) Error!void {
const token_tags = tree.tokens.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
const datas = tree.nodes.items(.data);
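Why renderMembers now precomputes any_non_tuple_like_fields: after this change a bare identifier member such as `foo,` parses as a tuple-like field whose type expression is the identifier foo. When the surrounding container also has named fields, renderContainerField below re-labels such a node as a named field with no type, so the identifier goes through the identifier-rendering path rather than through expression rendering. A rough illustration with placeholder names:

const Mixed = struct {
    x: u32,
    foo, // parsed as a field of type `foo`; rendered via the named-field path here
};

const Tuple = struct {
    u32,
    foo, // all members unnamed: stays a tuple-like field of type `foo`
};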
@@ -161,9 +181,9 @@ fn renderMember(gpa: Allocator, ais: *Ais, tree: Ast, decl: Ast.Node.Index, spac
try renderExpression(gpa, ais, tree, datas[decl].rhs, space);
},

.container_field_init => return renderContainerField(gpa, ais, tree, tree.containerFieldInit(decl), space),
.container_field_align => return renderContainerField(gpa, ais, tree, tree.containerFieldAlign(decl), space),
.container_field => return renderContainerField(gpa, ais, tree, tree.containerField(decl), space),
.container_field_init => return renderContainerField(gpa, ais, tree, tree.containerFieldInit(decl), any_non_tuple_like_fields, space),
.container_field_align => return renderContainerField(gpa, ais, tree, tree.containerFieldAlign(decl), any_non_tuple_like_fields, space),
.container_field => return renderContainerField(gpa, ais, tree, tree.containerField(decl), any_non_tuple_like_fields, space),
.@"comptime" => return renderExpression(gpa, ais, tree, decl, space),

.root => unreachable,
@@ -1158,18 +1178,31 @@ fn renderContainerField(
gpa: Allocator,
ais: *Ais,
tree: Ast,
field: Ast.full.ContainerField,
field_param: Ast.full.ContainerField,
any_non_tuple_like_fields: bool,
space: Space,
) Error!void {
var field = field_param;
if (field.ast.tuple_like and any_non_tuple_like_fields and field.ast.type_expr != 0
and tree.nodes.items(.tag)[field.ast.type_expr] == .identifier
) {
const ident = tree.nodes.items(.main_token)[field.ast.type_expr];
field.ast.tuple_like = false;
field.ast.main_token = ident;
field.ast.type_expr = 0;
}

if (field.comptime_token) |t| {
try renderToken(ais, tree, t, .space); // comptime
}
if (field.ast.type_expr == 0 and field.ast.value_expr == 0) {
return renderIdentifierComma(ais, tree, field.ast.name_token, space, .eagerly_unquote); // name
return renderIdentifierComma(ais, tree, field.ast.main_token, space, .eagerly_unquote); // name
}
if (field.ast.type_expr != 0 and field.ast.value_expr == 0) {
try renderIdentifier(ais, tree, field.ast.name_token, .none, .eagerly_unquote); // name
try renderToken(ais, tree, field.ast.name_token + 1, .space); // :
if (!field.ast.tuple_like) {
try renderIdentifier(ais, tree, field.ast.main_token, .none, .eagerly_unquote); // name
try renderToken(ais, tree, field.ast.main_token + 1, .space); // :
}

if (field.ast.align_expr != 0) {
try renderExpression(gpa, ais, tree, field.ast.type_expr, .space); // type
@@ -1184,13 +1217,14 @@ fn renderContainerField(
}
}
if (field.ast.type_expr == 0 and field.ast.value_expr != 0) {
try renderIdentifier(ais, tree, field.ast.name_token, .space, .eagerly_unquote); // name
try renderToken(ais, tree, field.ast.name_token + 1, .space); // =
try renderIdentifier(ais, tree, field.ast.main_token, .space, .eagerly_unquote); // name
try renderToken(ais, tree, field.ast.main_token + 1, .space); // =
return renderExpressionComma(gpa, ais, tree, field.ast.value_expr, space); // value
}

try renderIdentifier(ais, tree, field.ast.name_token, .none, .eagerly_unquote); // name
try renderToken(ais, tree, field.ast.name_token + 1, .space); // :
if (!field.ast.tuple_like) {
try renderIdentifier(ais, tree, field.ast.main_token, .none, .eagerly_unquote); // name
try renderToken(ais, tree, field.ast.main_token + 1, .space); // :
}
try renderExpression(gpa, ais, tree, field.ast.type_expr, .space); // type

if (field.ast.align_expr != 0) {
@@ -1901,6 +1935,20 @@ fn renderContainerDecl(
try renderToken(ais, tree, layout_token, .space);
}

var any_non_tuple_like_fields = token_tags[container_decl.ast.main_token] != .keyword_struct;
if (!any_non_tuple_like_fields) for (container_decl.ast.members) |member| {
const tuple_like = switch (tree.nodes.items(.tag)[member]) {
.container_field_init => tree.containerFieldInit(member).ast.tuple_like,
.container_field_align => tree.containerFieldAlign(member).ast.tuple_like,
.container_field => tree.containerField(member).ast.tuple_like,
else => continue,
};
if (!tuple_like) {
any_non_tuple_like_fields = true;
break;
}
};

var lbrace: Ast.TokenIndex = undefined;
if (container_decl.ast.enum_token) |enum_token| {
try renderToken(ais, tree, container_decl.ast.main_token, .none); // union
@@ -1967,7 +2015,7 @@ fn renderContainerDecl(
// Print all the declarations on the same line.
try renderToken(ais, tree, lbrace, .space); // lbrace
for (container_decl.ast.members) |member| {
try renderMember(gpa, ais, tree, member, .space);
try renderMember(gpa, ais, tree, member, any_non_tuple_like_fields, .space);
}
return renderToken(ais, tree, rbrace, space); // rbrace
}
@@ -1985,9 +2033,9 @@ fn renderContainerDecl(
.container_field_init,
.container_field_align,
.container_field,
=> try renderMember(gpa, ais, tree, member, .comma),
=> try renderMember(gpa, ais, tree, member, any_non_tuple_like_fields, .comma),

else => try renderMember(gpa, ais, tree, member, .newline),
else => try renderMember(gpa, ais, tree, member, any_non_tuple_like_fields, .newline),
}
}
ais.popIndent();