zig fmt: implement container decls

This commit is contained in:
Andrew Kelley 2021-02-05 15:47:18 -07:00
parent cf42ae178d
commit 16a2562c3f
4 changed files with 766 additions and 368 deletions

View File

@ -365,6 +365,26 @@ pub const Tree = struct {
} }
}, },
.ContainerDecl,
.ContainerDeclComma,
.ContainerDeclTwo,
.ContainerDeclTwoComma,
.ContainerDeclArg,
.ContainerDeclArgComma,
.TaggedUnion,
.TaggedUnionComma,
.TaggedUnionTwo,
.TaggedUnionTwoComma,
.TaggedUnionEnumTag,
.TaggedUnionEnumTagComma,
=> {
const main_token = main_tokens[n];
switch (token_tags[main_token - 1]) {
.Keyword_packed, .Keyword_extern => return main_token - 1,
else => return main_token,
}
},
.PtrTypeAligned => unreachable, // TODO .PtrTypeAligned => unreachable, // TODO
.PtrTypeSentinel => unreachable, // TODO .PtrTypeSentinel => unreachable, // TODO
.PtrType => unreachable, // TODO .PtrType => unreachable, // TODO
@ -375,10 +395,6 @@ pub const Tree = struct {
.While => unreachable, // TODO .While => unreachable, // TODO
.ForSimple => unreachable, // TODO .ForSimple => unreachable, // TODO
.For => unreachable, // TODO .For => unreachable, // TODO
.ContainerDecl => unreachable, // TODO
.ContainerDeclArg => unreachable, // TODO
.TaggedUnion => unreachable, // TODO
.TaggedUnionEnumTag => unreachable, // TODO
.AsmOutput => unreachable, // TODO .AsmOutput => unreachable, // TODO
.AsmInput => unreachable, // TODO .AsmInput => unreachable, // TODO
.ErrorValue => unreachable, // TODO .ErrorValue => unreachable, // TODO
@ -408,6 +424,7 @@ pub const Tree = struct {
.Break, .Break,
.Return, .Return,
.Nosuspend, .Nosuspend,
.Comptime,
=> n = datas[n].lhs, => n = datas[n].lhs,
.TestDecl, .TestDecl,
@ -455,7 +472,6 @@ pub const Tree = struct {
.BoolOr, .BoolOr,
.AnyFrameType, .AnyFrameType,
.ErrorUnion, .ErrorUnion,
.Comptime,
.IfSimple, .IfSimple,
.WhileSimple, .WhileSimple,
=> n = datas[n].rhs, => n = datas[n].rhs,
@ -490,13 +506,37 @@ pub const Tree = struct {
} }
n = tree.extra_data[params.end - 1]; // last parameter n = tree.extra_data[params.end - 1]; // last parameter
}, },
.Block => { .ContainerDeclArg => {
const members = tree.extraData(datas[n].rhs, Node.SubRange);
if (members.end - members.start == 0) {
end_offset += 1; // for the rparen
n = datas[n].lhs;
} else {
end_offset += 1; // for the rbrace
n = tree.extra_data[members.end - 1]; // last parameter
}
},
.ContainerDeclArgComma => {
const members = tree.extraData(datas[n].rhs, Node.SubRange);
assert(members.end - members.start > 0);
end_offset += 2; // for the comma + rbrace
n = tree.extra_data[members.end - 1]; // last parameter
},
.Block,
.ContainerDecl,
.TaggedUnion,
=> {
end_offset += 1; // for the rbrace end_offset += 1; // for the rbrace
if (datas[n].rhs - datas[n].lhs == 0) { if (datas[n].rhs - datas[n].lhs == 0) {
return main_tokens[n] + end_offset; return main_tokens[n] + end_offset;
} }
n = tree.extra_data[datas[n].rhs - 1]; // last statement n = tree.extra_data[datas[n].rhs - 1]; // last statement
}, },
.ContainerDeclComma, .TaggedUnionComma => {
assert(datas[n].rhs - datas[n].lhs > 0);
end_offset += 2; // for the comma + rbrace
n = tree.extra_data[datas[n].rhs - 1]; // last member
},
.CallOne, .CallOne,
.ArrayAccess, .ArrayAccess,
=> { => {
@ -511,6 +551,8 @@ pub const Tree = struct {
.BuiltinCallTwo, .BuiltinCallTwo,
.BlockTwo, .BlockTwo,
.StructInitDotTwo, .StructInitDotTwo,
.ContainerDeclTwo,
.TaggedUnionTwo,
=> { => {
end_offset += 1; // for the rparen/rbrace end_offset += 1; // for the rparen/rbrace
if (datas[n].rhs != 0) { if (datas[n].rhs != 0) {
@ -523,6 +565,8 @@ pub const Tree = struct {
}, },
.ArrayInitDotTwoComma, .ArrayInitDotTwoComma,
.StructInitDotTwoComma, .StructInitDotTwoComma,
.ContainerDeclTwoComma,
.TaggedUnionTwoComma,
=> { => {
end_offset += 2; // for the comma + rbrace end_offset += 2; // for the comma + rbrace
if (datas[n].rhs != 0) { if (datas[n].rhs != 0) {
@ -589,6 +633,38 @@ pub const Tree = struct {
} }
} }
}, },
.ContainerFieldInit => {
if (datas[n].rhs != 0) {
n = datas[n].rhs;
} else if (datas[n].lhs != 0) {
n = datas[n].lhs;
} else {
return main_tokens[n] + end_offset;
}
},
.ContainerFieldAlign => {
if (datas[n].rhs != 0) {
end_offset += 1; // for the rparen
n = datas[n].rhs;
} else if (datas[n].lhs != 0) {
n = datas[n].lhs;
} else {
return main_tokens[n] + end_offset;
}
},
.ContainerField => {
const extra = tree.extraData(datas[n].rhs, Node.ContainerField);
if (extra.value_expr != 0) {
n = extra.value_expr;
} else if (extra.align_expr != 0) {
end_offset += 1; // for the rparen
n = extra.align_expr;
} else if (datas[n].lhs != 0) {
n = datas[n].lhs;
} else {
return main_tokens[n] + end_offset;
}
},
// These are not supported by lastToken() because implementation would // These are not supported by lastToken() because implementation would
// require recursion due to the optional comma followed by rbrace. // require recursion due to the optional comma followed by rbrace.
@ -600,10 +676,9 @@ pub const Tree = struct {
.StructInit => unreachable, .StructInit => unreachable,
.StructInitOne => unreachable, .StructInitOne => unreachable,
.StructInitDot => unreachable, .StructInitDot => unreachable,
.ContainerFieldInit => unreachable,
.ContainerFieldAlign => unreachable,
.ContainerField => unreachable,
.TaggedUnionEnumTag => unreachable, // TODO
.TaggedUnionEnumTagComma => unreachable, // TODO
.Switch => unreachable, // TODO .Switch => unreachable, // TODO
.If => unreachable, // TODO .If => unreachable, // TODO
.Continue => unreachable, // TODO .Continue => unreachable, // TODO
@ -631,10 +706,6 @@ pub const Tree = struct {
.FnProtoMulti => unreachable, // TODO .FnProtoMulti => unreachable, // TODO
.FnProtoOne => unreachable, // TODO .FnProtoOne => unreachable, // TODO
.FnProto => unreachable, // TODO .FnProto => unreachable, // TODO
.ContainerDecl => unreachable, // TODO
.ContainerDeclArg => unreachable, // TODO
.TaggedUnion => unreachable, // TODO
.TaggedUnionEnumTag => unreachable, // TODO
.AsmOutput => unreachable, // TODO .AsmOutput => unreachable, // TODO
.AsmInput => unreachable, // TODO .AsmInput => unreachable, // TODO
.ErrorValue => unreachable, // TODO .ErrorValue => unreachable, // TODO
@ -952,6 +1023,93 @@ pub const Tree = struct {
}; };
} }
pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl {
assert(tree.nodes.items(.tag)[node] == .ContainerDeclTwo or
tree.nodes.items(.tag)[node] == .ContainerDeclTwoComma);
const data = tree.nodes.items(.data)[node];
buffer.* = .{ data.lhs, data.rhs };
const members = if (data.rhs != 0)
buffer[0..2]
else if (data.lhs != 0)
buffer[0..1]
else
buffer[0..0];
return tree.fullContainerDecl(.{
.main_token = tree.nodes.items(.main_token)[node],
.enum_token = null,
.members = members,
.arg = 0,
});
}
pub fn containerDecl(tree: Tree, node: Node.Index) Full.ContainerDecl {
assert(tree.nodes.items(.tag)[node] == .ContainerDecl or
tree.nodes.items(.tag)[node] == .ContainerDeclComma);
const data = tree.nodes.items(.data)[node];
return tree.fullContainerDecl(.{
.main_token = tree.nodes.items(.main_token)[node],
.enum_token = null,
.members = tree.extra_data[data.lhs..data.rhs],
.arg = 0,
});
}
pub fn containerDeclArg(tree: Tree, node: Node.Index) Full.ContainerDecl {
assert(tree.nodes.items(.tag)[node] == .ContainerDeclArg);
const data = tree.nodes.items(.data)[node];
const members_range = tree.extraData(data.rhs, Node.SubRange);
return tree.fullContainerDecl(.{
.main_token = tree.nodes.items(.main_token)[node],
.enum_token = null,
.members = tree.extra_data[members_range.start..members_range.end],
.arg = data.lhs,
});
}
pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl {
assert(tree.nodes.items(.tag)[node] == .TaggedUnionTwo);
const data = tree.nodes.items(.data)[node];
buffer.* = .{ data.lhs, data.rhs };
const members = if (data.rhs != 0)
buffer[0..2]
else if (data.lhs != 0)
buffer[0..1]
else
buffer[0..0];
const main_token = tree.nodes.items(.main_token)[node];
return tree.fullContainerDecl(.{
.main_token = main_token,
.enum_token = main_token + 2, // union lparen enum
.members = members,
.arg = 0,
});
}
pub fn taggedUnion(tree: Tree, node: Node.Index) Full.ContainerDecl {
assert(tree.nodes.items(.tag)[node] == .TaggedUnion);
const data = tree.nodes.items(.data)[node];
const main_token = tree.nodes.items(.main_token)[node];
return tree.fullContainerDecl(.{
.main_token = main_token,
.enum_token = main_token + 2, // union lparen enum
.members = tree.extra_data[data.lhs..data.rhs],
.arg = 0,
});
}
pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) Full.ContainerDecl {
assert(tree.nodes.items(.tag)[node] == .TaggedUnionEnumTag);
const data = tree.nodes.items(.data)[node];
const members_range = tree.extraData(data.rhs, Node.SubRange);
const main_token = tree.nodes.items(.main_token)[node];
return tree.fullContainerDecl(.{
.main_token = main_token,
.enum_token = main_token + 2, // union lparen enum
.members = tree.extra_data[data.lhs..data.rhs],
.arg = data.lhs,
});
}
fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl { fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl {
const token_tags = tree.tokens.items(.tag); const token_tags = tree.tokens.items(.tag);
var result: Full.VarDecl = .{ var result: Full.VarDecl = .{
@ -1031,6 +1189,19 @@ pub const Tree = struct {
}; };
return result; return result;
} }
fn fullContainerDecl(tree: Tree, info: Full.ContainerDecl.Ast) Full.ContainerDecl {
const token_tags = tree.tokens.items(.tag);
var result: Full.ContainerDecl = .{
.ast = info,
.layout_token = null,
};
switch (token_tags[info.main_token - 1]) {
.Keyword_extern, .Keyword_packed => result.layout_token = info.main_token - 1,
else => {},
}
return result;
}
}; };
/// Fully assembled AST node information. /// Fully assembled AST node information.
@ -1125,6 +1296,19 @@ pub const Full = struct {
elem_type: Node.Index, elem_type: Node.Index,
}; };
}; };
pub const ContainerDecl = struct {
layout_token: ?TokenIndex,
ast: Ast,
pub const Ast = struct {
main_token: TokenIndex,
/// Populated when main_token is Keyword_union.
enum_token: ?TokenIndex,
members: []const Node.Index,
arg: Node.Index,
};
};
}; };
pub const Error = union(enum) { pub const Error = union(enum) {
@ -1543,9 +1727,11 @@ pub const Node = struct {
StructInitOne, StructInitOne,
/// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted. /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted.
/// main_token is the lbrace. /// main_token is the lbrace.
/// No trailing comma before the rbrace.
StructInitDotTwo, StructInitDotTwo,
/// Same as `StructInitDotTwo` except there is known to be a trailing comma /// Same as `StructInitDotTwo` except there is known to be a trailing comma
/// before the final rbrace. /// before the final rbrace. This tag exists to facilitate lastToken() implemented
/// without recursion.
StructInitDotTwoComma, StructInitDotTwoComma,
/// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`. /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`.
/// main_token is the lbrace. /// main_token is the lbrace.
@ -1655,21 +1841,50 @@ pub const Node = struct {
/// `error{a, b}`. /// `error{a, b}`.
/// lhs and rhs both unused. /// lhs and rhs both unused.
ErrorSetDecl, ErrorSetDecl,
/// `struct {}`, `union {}`, etc. `sub_list[lhs..rhs]`. /// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`.
/// main_token is `struct`, `union`, `opaque`, `enum` keyword.
ContainerDecl, ContainerDecl,
/// `union(lhs)` / `enum(lhs)`. `sub_range_list[rhs]`. /// Same as ContainerDecl but there is known to be a trailing comma before the rbrace.
ContainerDeclComma,
/// `struct {lhs, rhs}`, `union {lhs, rhs}`, `opaque {lhs, rhs}`, `enum {lhs, rhs}`.
/// lhs or rhs can be omitted.
/// main_token is `struct`, `union`, `opaque`, `enum` keyword.
ContainerDeclTwo,
/// Same as ContainerDeclTwo except there is known to be a trailing comma
/// before the rbrace.
ContainerDeclTwoComma,
/// `union(lhs)` / `enum(lhs)`. `SubRange[rhs]`.
ContainerDeclArg, ContainerDeclArg,
/// Same as ContainerDeclArg but there is known to be a trailing comma before the rbrace.
ContainerDeclArgComma,
/// `union(enum) {}`. `sub_list[lhs..rhs]`. /// `union(enum) {}`. `sub_list[lhs..rhs]`.
/// Note that tagged unions with explicitly provided enums are represented /// Note that tagged unions with explicitly provided enums are represented
/// by `ContainerDeclArg`. /// by `ContainerDeclArg`.
TaggedUnion, TaggedUnion,
/// `union(enum(lhs)) {}`. `sub_list_range[rhs]`. /// Same as TaggedUnion but there is known to be a trailing comma before the rbrace.
TaggedUnionComma,
/// `union(enum) {lhs, rhs}`. lhs or rhs may be omitted.
/// Note that tagged unions with explicitly provided enums are represented
/// by `ContainerDeclArg`.
TaggedUnionTwo,
/// Same as TaggedUnionTwo but there is known to be a trailing comma before the rbrace.
TaggedUnionTwoComma,
/// `union(enum(lhs)) {}`. `SubRange[rhs]`.
TaggedUnionEnumTag, TaggedUnionEnumTag,
/// Same as TaggedUnionEnumTag but there is known to be a trailing comma
/// before the rbrace.
TaggedUnionEnumTagComma,
/// `a: lhs = rhs,`. lhs and rhs can be omitted. /// `a: lhs = rhs,`. lhs and rhs can be omitted.
/// main_token is the field name identifier.
/// lastToken() does not include the possible trailing comma.
ContainerFieldInit, ContainerFieldInit,
/// `a: lhs align(rhs),`. rhs can be omitted. /// `a: lhs align(rhs),`. rhs can be omitted.
/// main_token is the field name identifier.
/// lastToken() does not include the possible trailing comma.
ContainerFieldAlign, ContainerFieldAlign,
/// `a: lhs align(c) = d,`. `container_field_list[rhs]`. /// `a: lhs align(c) = d,`. `container_field_list[rhs]`.
/// main_token is the field name identifier.
/// lastToken() does not include the possible trailing comma.
ContainerField, ContainerField,
/// `anytype`. both lhs and rhs unused. /// `anytype`. both lhs and rhs unused.
/// Used by `ContainerField`. /// Used by `ContainerField`.
@ -1699,6 +1914,17 @@ pub const Node = struct {
ErrorValue, ErrorValue,
/// `lhs!rhs`. main_token is the `!`. /// `lhs!rhs`. main_token is the `!`.
ErrorUnion, ErrorUnion,
pub fn isContainerField(tag: Tag) bool {
return switch (tag) {
.ContainerFieldInit,
.ContainerFieldAlign,
.ContainerField,
=> true,
else => false,
};
}
}; };
pub const Data = struct { pub const Data = struct {

View File

@ -64,9 +64,10 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree {
.rhs = undefined, .rhs = undefined,
}, },
}); });
const root_decls = try parser.parseContainerMembers(true); const root_members = try parser.parseContainerMembers();
// parseContainerMembers will try to skip as much const root_decls = try root_members.toSpan(&parser);
// invalid tokens as it can, so we are now at EOF. // parseContainerMembers will try to skip as much invalid tokens as
// it can, so we are now at EOF.
assert(parser.token_tags[parser.tok_i] == .Eof); assert(parser.token_tags[parser.tok_i] == .Eof);
parser.nodes.items(.data)[0] = .{ parser.nodes.items(.data)[0] = .{
.lhs = root_decls.start, .lhs = root_decls.start,
@ -108,6 +109,22 @@ const Parser = struct {
} }
}; };
const Members = struct {
len: usize,
lhs: Node.Index,
rhs: Node.Index,
trailing_comma: bool,
fn toSpan(self: Members, p: *Parser) !Node.SubRange {
if (self.len <= 2) {
const nodes = [2]Node.Index{ self.lhs, self.rhs };
return p.listToSpan(nodes[0..self.len]);
} else {
return Node.SubRange{ .start = self.lhs, .end = self.rhs };
}
}
};
fn listToSpan(p: *Parser, list: []const Node.Index) !Node.SubRange { fn listToSpan(p: *Parser, list: []const Node.Index) !Node.SubRange {
try p.extra_data.appendSlice(p.gpa, list); try p.extra_data.appendSlice(p.gpa, list);
return Node.SubRange{ return Node.SubRange{
@ -151,171 +168,227 @@ const Parser = struct {
/// / ContainerField COMMA ContainerMembers /// / ContainerField COMMA ContainerMembers
/// / ContainerField /// / ContainerField
/// / /// /
fn parseContainerMembers(p: *Parser, top_level: bool) !Node.SubRange { /// TopLevelComptime <- KEYWORD_comptime BlockExpr
fn parseContainerMembers(p: *Parser) !Members {
var list = std.ArrayList(Node.Index).init(p.gpa); var list = std.ArrayList(Node.Index).init(p.gpa);
defer list.deinit(); defer list.deinit();
var field_state: union(enum) { var field_state: union(enum) {
/// no fields have been seen /// No fields have been seen.
none, none,
/// currently parsing fields /// Currently parsing fields.
seen, seen,
/// saw fields and then a declaration after them. /// Saw fields and then a declaration after them.
/// payload is first token of previous declaration. /// Payload is first token of previous declaration.
end: TokenIndex, end: Node.Index,
/// ther was a declaration between fields, don't report more errors /// There was a declaration between fields, don't report more errors.
err, err,
} = .none; } = .none;
// Skip container doc comments. // Skip container doc comments.
while (p.eatToken(.ContainerDocComment)) |_| {} while (p.eatToken(.ContainerDocComment)) |_| {}
var trailing_comma = false;
while (true) { while (true) {
const doc_comment = p.eatDocComments(); const doc_comment = p.eatDocComments();
const test_decl_node = p.parseTestDecl() catch |err| switch (err) { switch (p.token_tags[p.tok_i]) {
error.OutOfMemory => return error.OutOfMemory, .Keyword_test => {
error.ParseError => { const test_decl_node = try p.expectTestDeclRecoverable();
p.findNextContainerMember();
continue;
},
};
if (test_decl_node != 0) { if (test_decl_node != 0) {
if (field_state == .seen) { if (field_state == .seen) {
field_state = .{ .end = p.nodes.items(.main_token)[test_decl_node] }; field_state = .{ .end = test_decl_node };
} }
try list.append(test_decl_node); try list.append(test_decl_node);
continue;
} }
trailing_comma = false;
const comptime_node = p.parseTopLevelComptime() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
continue;
}, },
}; .Keyword_comptime => switch (p.token_tags[p.tok_i + 1]) {
if (comptime_node != 0) { .Identifier => {
if (field_state == .seen) { p.tok_i += 1;
field_state = .{ .end = p.nodes.items(.main_token)[comptime_node] }; const container_field = try p.expectContainerFieldRecoverable();
}
try list.append(comptime_node);
continue;
}
const visib_token = p.eatToken(.Keyword_pub);
const top_level_decl = p.parseTopLevelDecl() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
continue;
},
};
if (top_level_decl != 0) {
if (field_state == .seen) {
field_state = .{
.end = visib_token orelse p.nodes.items(.main_token)[top_level_decl],
};
}
try list.append(top_level_decl);
continue;
}
if (visib_token != null) {
try p.warn(.{ .ExpectedPubItem = .{ .token = p.tok_i } });
// ignore this pub
continue;
}
const container_field = p.parseContainerField() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
// attempt to recover
p.findNextContainerMember();
continue;
},
};
if (container_field != 0) { if (container_field != 0) {
switch (field_state) { switch (field_state) {
.none => field_state = .seen, .none => field_state = .seen,
.err, .seen => {}, .err, .seen => {},
.end => |tok| { .end => |node| {
try p.warn(.{ .DeclBetweenFields = .{ .token = tok } }); try p.warn(.{
// continue parsing, error will be reported later .DeclBetweenFields = .{ .token = p.nodes.items(.main_token)[node] },
});
// Continue parsing; error will be reported later.
field_state = .err; field_state = .err;
}, },
} }
try list.append(container_field); try list.append(container_field);
const comma = p.eatToken(.Comma) orelse { switch (p.token_tags[p.tok_i]) {
// try to continue parsing .Comma => {
const index = p.tok_i; p.tok_i += 1;
p.findNextContainerMember(); trailing_comma = true;
const next = p.token_tags[p.tok_i]; continue;
switch (next) { },
.Eof => { .RBrace, .Eof => {
// no invalid tokens were found trailing_comma = false;
if (index == p.tok_i) break; break;
},
// Invalid tokens, add error and exit else => {},
}
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.{ try p.warn(.{
.ExpectedToken = .{ .token = index, .expected_id = .Comma }, .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
}); });
p.findNextContainerMember();
}
},
.LBrace => {
const comptime_token = p.nextToken();
const block = p.parseBlock() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => blk: {
p.findNextContainerMember();
break :blk null_node;
},
};
if (block != 0) {
const comptime_node = try p.addNode(.{
.tag = .Comptime,
.main_token = comptime_token,
.data = .{
.lhs = block,
.rhs = undefined,
},
});
if (field_state == .seen) {
field_state = .{ .end = comptime_node };
}
try list.append(comptime_node);
}
trailing_comma = false;
},
else => {
p.tok_i += 1;
try p.warn(.{ .ExpectedBlockOrField = .{ .token = p.tok_i } });
},
},
.Keyword_pub => {
p.tok_i += 1;
const top_level_decl = try p.expectTopLevelDeclRecoverable();
if (top_level_decl != 0) {
if (field_state == .seen) {
field_state = .{ .end = top_level_decl };
}
try list.append(top_level_decl);
}
trailing_comma = false;
},
.Keyword_usingnamespace => {
const node = try p.expectUsingNamespaceRecoverable();
if (node != 0) {
if (field_state == .seen) {
field_state = .{ .end = node };
}
try list.append(node);
}
trailing_comma = false;
},
.Keyword_const,
.Keyword_var,
.Keyword_threadlocal,
.Keyword_export,
.Keyword_extern,
.Keyword_inline,
.Keyword_noinline,
.Keyword_fn,
=> {
const top_level_decl = try p.expectTopLevelDeclRecoverable();
if (top_level_decl != 0) {
if (field_state == .seen) {
field_state = .{ .end = top_level_decl };
}
try list.append(top_level_decl);
}
trailing_comma = false;
},
.Identifier => {
const container_field = try p.expectContainerFieldRecoverable();
if (container_field != 0) {
switch (field_state) {
.none => field_state = .seen,
.err, .seen => {},
.end => |node| {
try p.warn(.{
.DeclBetweenFields = .{ .token = p.nodes.items(.main_token)[node] },
});
// Continue parsing; error will be reported later.
field_state = .err;
},
}
try list.append(container_field);
switch (p.token_tags[p.tok_i]) {
.Comma => {
p.tok_i += 1;
trailing_comma = true;
continue;
},
.RBrace, .Eof => {
trailing_comma = false;
break;
},
else => {},
}
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.{
.ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
});
p.findNextContainerMember();
}
},
.Eof, .RBrace => {
if (doc_comment) |tok| {
try p.warn(.{ .UnattachedDocComment = .{ .token = tok } });
}
break; break;
}, },
else => { else => {
if (next == .RBrace) { try p.warn(.{ .ExpectedContainerMembers = .{ .token = p.tok_i } });
if (!top_level) break; // This was likely not supposed to end yet; try to find the next declaration.
p.tok_i += 1; p.findNextContainerMember();
}
// add error and continue
try p.warn(.{
.ExpectedToken = .{ .token = index, .expected_id = .Comma },
});
continue;
}, },
} }
};
continue;
} }
// Dangling doc comment switch (list.items.len) {
if (doc_comment) |tok| { 0 => return Members{
try p.warn(.{ .len = 0,
.UnattachedDocComment = .{ .token = tok }, .lhs = 0,
}); .rhs = 0,
} .trailing_comma = trailing_comma,
},
const next = p.token_tags[p.tok_i]; 1 => return Members{
switch (next) { .len = 1,
.Eof => break, .lhs = list.items[0],
.Keyword_comptime => { .rhs = 0,
p.tok_i += 1; .trailing_comma = trailing_comma,
try p.warn(.{ },
.ExpectedBlockOrField = .{ .token = p.tok_i }, 2 => return Members{
}); .len = 2,
.lhs = list.items[0],
.rhs = list.items[1],
.trailing_comma = trailing_comma,
}, },
else => { else => {
const index = p.tok_i; const span = try p.listToSpan(list.items);
if (next == .RBrace) { return Members{
if (!top_level) break; .len = list.items.len,
p.tok_i += 1; .lhs = span.start,
} .rhs = span.end,
.trailing_comma = trailing_comma,
// this was likely not supposed to end yet, };
// try to find the next declaration
p.findNextContainerMember();
try p.warn(.{
.ExpectedContainerMembers = .{ .token = index },
});
}, },
} }
} }
return p.listToSpan(list.items);
}
/// Attempts to find next container member by searching for certain tokens /// Attempts to find next container member by searching for certain tokens
fn findNextContainerMember(p: *Parser) void { fn findNextContainerMember(p: *Parser) void {
var level: u32 = 0; var level: u32 = 0;
@ -398,44 +471,36 @@ const Parser = struct {
} }
/// TestDecl <- KEYWORD_test STRINGLITERALSINGLE? Block /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE? Block
fn parseTestDecl(p: *Parser) !Node.Index { fn expectTestDecl(p: *Parser) !Node.Index {
const test_token = p.eatToken(.Keyword_test) orelse return null_node; const test_token = try p.expectToken(.Keyword_test);
const name_token = try p.expectToken(.StringLiteral); const name_token = p.eatToken(.StringLiteral);
const block_node = try p.parseBlock(); const block_node = try p.parseBlock();
if (block_node == 0) return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } }); if (block_node == 0) return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } });
return p.addNode(.{ return p.addNode(.{
.tag = .TestDecl, .tag = .TestDecl,
.main_token = test_token, .main_token = test_token,
.data = .{ .data = .{
.lhs = name_token, .lhs = name_token orelse 0,
.rhs = block_node, .rhs = block_node,
}, },
}); });
} }
/// TopLevelComptime <- KEYWORD_comptime BlockExpr fn expectTestDeclRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
fn parseTopLevelComptime(p: *Parser) !Node.Index { return p.expectTestDecl() catch |err| switch (err) {
if (p.token_tags[p.tok_i] == .Keyword_comptime and error.OutOfMemory => return error.OutOfMemory,
p.token_tags[p.tok_i + 1] == .LBrace) error.ParseError => {
{ p.findNextContainerMember();
return p.addNode(.{
.tag = .Comptime,
.main_token = p.nextToken(),
.data = .{
.lhs = try p.parseBlock(),
.rhs = undefined,
},
});
} else {
return null_node; return null_node;
} },
};
} }
/// TopLevelDecl /// TopLevelDecl
/// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block) /// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block)
/// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl /// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl
/// / KEYWORD_usingnamespace Expr SEMICOLON /// / KEYWORD_usingnamespace Expr SEMICOLON
fn parseTopLevelDecl(p: *Parser) !Node.Index { fn expectTopLevelDecl(p: *Parser) !Node.Index {
const extern_export_inline_token = p.nextToken(); const extern_export_inline_token = p.nextToken();
var expect_fn: bool = false; var expect_fn: bool = false;
var exported: bool = false; var exported: bool = false;
@ -496,7 +561,21 @@ const Parser = struct {
return p.fail(.{ .ExpectedVarDeclOrFn = .{ .token = p.tok_i } }); return p.fail(.{ .ExpectedVarDeclOrFn = .{ .token = p.tok_i } });
} }
const usingnamespace_token = p.eatToken(.Keyword_usingnamespace) orelse return null_node; return p.expectUsingNamespace();
}
fn expectTopLevelDeclRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
return p.expectTopLevelDecl() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
return null_node;
},
};
}
fn expectUsingNamespace(p: *Parser) !Node.Index {
const usingnamespace_token = try p.expectToken(.Keyword_usingnamespace);
const expr = try p.expectExpr(); const expr = try p.expectExpr();
const semicolon_token = try p.expectToken(.Semicolon); const semicolon_token = try p.expectToken(.Semicolon);
try p.parseAppendedDocComment(semicolon_token); try p.parseAppendedDocComment(semicolon_token);
@ -510,6 +589,16 @@ const Parser = struct {
}); });
} }
fn expectUsingNamespaceRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
return p.expectUsingNamespace() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
return null_node;
},
};
}
/// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? CallConv? EXCLAMATIONMARK? (Keyword_anytype / TypeExpr) /// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? CallConv? EXCLAMATIONMARK? (Keyword_anytype / TypeExpr)
fn parseFnProto(p: *Parser) !Node.Index { fn parseFnProto(p: *Parser) !Node.Index {
const fn_token = p.eatToken(.Keyword_fn) orelse return null_node; const fn_token = p.eatToken(.Keyword_fn) orelse return null_node;
@ -648,12 +737,9 @@ const Parser = struct {
} }
/// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)? /// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)?
fn parseContainerField(p: *Parser) !Node.Index { fn expectContainerField(p: *Parser) !Node.Index {
const comptime_token = p.eatToken(.Keyword_comptime); const comptime_token = p.eatToken(.Keyword_comptime);
const name_token = p.eatToken(.Identifier) orelse { const name_token = try p.expectToken(.Identifier);
if (comptime_token) |_| p.tok_i -= 1;
return null_node;
};
var align_expr: Node.Index = 0; var align_expr: Node.Index = 0;
var type_expr: Node.Index = 0; var type_expr: Node.Index = 0;
@ -708,6 +794,16 @@ const Parser = struct {
} }
} }
fn expectContainerFieldRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
return p.expectContainerField() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
return null_node;
},
};
}
/// Statement /// Statement
/// <- KEYWORD_comptime? VarDecl /// <- KEYWORD_comptime? VarDecl
/// / KEYWORD_comptime BlockExprStatement /// / KEYWORD_comptime BlockExprStatement
@ -3333,16 +3429,20 @@ const Parser = struct {
_ = try p.expectToken(.RParen); _ = try p.expectToken(.RParen);
_ = try p.expectToken(.LBrace); _ = try p.expectToken(.LBrace);
const members = try p.parseContainerMembers(false); const members = try p.parseContainerMembers();
const members_span = try members.toSpan(p);
_ = try p.expectToken(.RBrace); _ = try p.expectToken(.RBrace);
return p.addNode(.{ return p.addNode(.{
.tag = .TaggedUnionEnumTag, .tag = switch (members.trailing_comma) {
true => .TaggedUnionEnumTagComma,
false => .TaggedUnionEnumTag,
},
.main_token = main_token, .main_token = main_token,
.data = .{ .data = .{
.lhs = enum_tag_expr, .lhs = enum_tag_expr,
.rhs = try p.addExtra(Node.SubRange{ .rhs = try p.addExtra(Node.SubRange{
.start = members.start, .start = members_span.start,
.end = members.end, .end = members_span.end,
}), }),
}, },
}); });
@ -3350,16 +3450,34 @@ const Parser = struct {
_ = try p.expectToken(.RParen); _ = try p.expectToken(.RParen);
_ = try p.expectToken(.LBrace); _ = try p.expectToken(.LBrace);
const members = try p.parseContainerMembers(false); const members = try p.parseContainerMembers();
_ = try p.expectToken(.RBrace); _ = try p.expectToken(.RBrace);
if (members.len <= 2) {
return p.addNode(.{ return p.addNode(.{
.tag = .TaggedUnion, .tag = switch (members.trailing_comma) {
true => .TaggedUnionTwoComma,
false => .TaggedUnionTwo,
},
.main_token = main_token, .main_token = main_token,
.data = .{ .data = .{
.lhs = members.start, .lhs = members.lhs,
.rhs = members.end, .rhs = members.rhs,
}, },
}); });
} else {
const span = try members.toSpan(p);
return p.addNode(.{
.tag = switch (members.trailing_comma) {
true => .TaggedUnionComma,
false => .TaggedUnion,
},
.main_token = main_token,
.data = .{
.lhs = span.start,
.rhs = span.end,
},
});
}
} }
} else { } else {
const expr = try p.expectExpr(); const expr = try p.expectExpr();
@ -3373,26 +3491,48 @@ const Parser = struct {
else => unreachable, else => unreachable,
}; };
_ = try p.expectToken(.LBrace); _ = try p.expectToken(.LBrace);
const members = try p.parseContainerMembers(false); const members = try p.parseContainerMembers();
_ = try p.expectToken(.RBrace); _ = try p.expectToken(.RBrace);
if (arg_expr == 0) { if (arg_expr == 0) {
if (members.len <= 2) {
return p.addNode(.{ return p.addNode(.{
.tag = .ContainerDecl, .tag = switch (members.trailing_comma) {
true => .ContainerDeclTwoComma,
false => .ContainerDeclTwo,
},
.main_token = main_token, .main_token = main_token,
.data = .{ .data = .{
.lhs = members.start, .lhs = members.lhs,
.rhs = members.end, .rhs = members.rhs,
}, },
}); });
} else { } else {
const span = try members.toSpan(p);
return p.addNode(.{ return p.addNode(.{
.tag = .ContainerDeclArg, .tag = switch (members.trailing_comma) {
true => .ContainerDeclComma,
false => .ContainerDecl,
},
.main_token = main_token,
.data = .{
.lhs = span.start,
.rhs = span.end,
},
});
}
} else {
const span = try members.toSpan(p);
return p.addNode(.{
.tag = switch (members.trailing_comma) {
true => .ContainerDeclArgComma,
false => .ContainerDeclArg,
},
.main_token = main_token, .main_token = main_token,
.data = .{ .data = .{
.lhs = arg_expr, .lhs = arg_expr,
.rhs = try p.addExtra(Node.SubRange{ .rhs = try p.addExtra(Node.SubRange{
.start = members.start, .start = span.start,
.end = members.end, .end = span.end,
}), }),
}, },
}); });

View File

@ -149,45 +149,82 @@ test "zig fmt: nosuspend block" {
); );
} }
//test "zig fmt: nosuspend await" { test "zig fmt: nosuspend await" {
// try testCanonical( try testCanonical(
// \\fn foo() void { \\fn foo() void {
// \\ x = nosuspend await y; \\ x = nosuspend await y;
// \\} \\}
// \\ \\
// ); );
//} }
//
//test "zig fmt: trailing comma in container declaration" { test "zig fmt: container declaration, single line" {
// try testCanonical( try testCanonical(
// \\const X = struct { foo: i32 }; \\const X = struct { foo: i32 };
// \\const X = struct { foo: i32, bar: i32 }; \\const X = struct { foo: i32, bar: i32 };
// \\const X = struct { foo: i32 = 1, bar: i32 = 2 }; \\const X = struct { foo: i32 = 1, bar: i32 = 2 };
// \\const X = struct { foo: i32 align(4), bar: i32 align(4) }; \\const X = struct { foo: i32 align(4), bar: i32 align(4) };
// \\const X = struct { foo: i32 align(4) = 1, bar: i32 align(4) = 2 }; \\const X = struct { foo: i32 align(4) = 1, bar: i32 align(4) = 2 };
// \\ \\
// ); );
// try testCanonical( }
// \\test "" {
// \\ comptime { test "zig fmt: container declaration, one item, multi line trailing comma" {
// \\ const X = struct { try testCanonical(
// \\ x: i32 \\test "" {
// \\ }; \\ comptime {
// \\ } \\ const X = struct {
// \\} \\ x: i32,
// \\ \\ };
// ); \\ }
// try testTransform( \\}
// \\const X = struct { \\
// \\ foo: i32, bar: i8 }; );
// , }
// \\const X = struct {
// \\ foo: i32, bar: i8 test "zig fmt: container declaration, no trailing comma on separate line" {
// \\}; try testTransform(
// \\ \\test "" {
// ); \\ comptime {
//} \\ const X = struct {
// \\ x: i32
\\ };
\\ }
\\}
\\
,
\\test "" {
\\ comptime {
\\ const X = struct { x: i32 };
\\ }
\\}
\\
);
}
// Without a trailing comma, a member list that was broken across lines
// is collapsed back onto a single line by the formatter.
// NOTE(review): leading whitespace inside the `\\` string literals appears
// collapsed by the diff rendering — verify against the real source file.
test "zig fmt: container declaration, line break, no trailing comma" {
try testTransform(
\\const X = struct {
\\ foo: i32, bar: i8 };
,
\\const X = struct { foo: i32, bar: i8 };
\\
);
}
// A trailing comma after the last member forces one-member-per-line output,
// even when the source had the members on a single line.
// NOTE(review): leading whitespace inside the `\\` string literals appears
// collapsed by the diff rendering — verify against the real source file.
test "zig fmt: container declaration, transform trailing comma" {
try testTransform(
\\const X = struct {
\\ foo: i32, bar: i8, };
,
\\const X = struct {
\\ foo: i32,
\\ bar: i8,
\\};
\\
);
}
//test "zig fmt: trailing comma in fn parameter list" { //test "zig fmt: trailing comma in fn parameter list" {
// try testCanonical( // try testCanonical(
// \\pub fn f( // \\pub fn f(

View File

@ -68,7 +68,7 @@ fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void {
const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs]; const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs];
for (root_decls) |decl| { for (root_decls) |decl| {
try renderContainerDecl(ais, tree, decl, .Newline); try renderMember(ais, tree, decl, .Newline);
} }
} }
@ -84,7 +84,7 @@ fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, first_token: ast.TokenInde
} }
} }
fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void { fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void {
const token_tags = tree.tokens.items(.tag); const token_tags = tree.tokens.items(.tag);
const main_tokens = tree.nodes.items(.main_token); const main_tokens = tree.nodes.items(.main_token);
const datas = tree.nodes.items(.data); const datas = tree.nodes.items(.data);
@ -158,6 +158,8 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S
.ContainerFieldAlign => return renderContainerField(ais, tree, tree.containerFieldAlign(decl), space), .ContainerFieldAlign => return renderContainerField(ais, tree, tree.containerFieldAlign(decl), space),
.ContainerField => return renderContainerField(ais, tree, tree.containerField(decl), space), .ContainerField => return renderContainerField(ais, tree, tree.containerField(decl), space),
.Comptime => return renderExpression(ais, tree, decl, space), .Comptime => return renderExpression(ais, tree, decl, space),
.Root => unreachable,
else => unreachable, else => unreachable,
} }
} }
@ -195,7 +197,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac
// return renderToken(ais, tree, any_type.token, space); // return renderToken(ais, tree, any_type.token, space);
//}, //},
.BlockTwo => { .BlockTwo => {
var statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; const statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs };
if (datas[node].lhs == 0) { if (datas[node].lhs == 0) {
return renderBlock(ais, tree, main_tokens[node], statements[0..0], space); return renderBlock(ais, tree, main_tokens[node], statements[0..0], space);
} else if (datas[node].rhs == 0) { } else if (datas[node].rhs == 0) {
@ -667,124 +669,29 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac
// return renderToken(ais, tree, grouped_expr.rparen, space); // return renderToken(ais, tree, grouped_expr.rparen, space);
//}, //},
.ContainerDecl => unreachable, // TODO .ContainerDecl,
.ContainerDeclArg => unreachable, // TODO .ContainerDeclComma,
.TaggedUnion => unreachable, // TODO => return renderContainerDecl(ais, tree, tree.containerDecl(node), space),
.TaggedUnionEnumTag => unreachable, // TODO
//.ContainerDecl => {
// const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base);
// if (container_decl.layout_token) |layout_token| { .ContainerDeclTwo, .ContainerDeclTwoComma => {
// try renderToken(ais, tree, layout_token, Space.Space); var buffer: [2]ast.Node.Index = undefined;
// } return renderContainerDecl(ais, tree, tree.containerDeclTwo(&buffer, node), space);
},
.ContainerDeclArg,
.ContainerDeclArgComma,
=> return renderContainerDecl(ais, tree, tree.containerDeclArg(node), space),
// switch (container_decl.init_arg_expr) { .TaggedUnion,
// .None => { .TaggedUnionComma,
// try renderToken(ais, tree, container_decl.kind_token, Space.Space); // union => return renderContainerDecl(ais, tree, tree.taggedUnion(node), space),
// },
// .Enum => |enum_tag_type| {
// try renderToken(ais, tree, container_decl.kind_token, Space.None); // union
// const lparen = tree.nextToken(container_decl.kind_token); .TaggedUnionTwo, .TaggedUnionTwoComma => {
// const enum_token = tree.nextToken(lparen); var buffer: [2]ast.Node.Index = undefined;
return renderContainerDecl(ais, tree, tree.taggedUnionTwo(&buffer, node), space);
// try renderToken(ais, tree, lparen, Space.None); // ( },
// try renderToken(ais, tree, enum_token, Space.None); // enum .TaggedUnionEnumTag,
.TaggedUnionEnumTagComma,
// if (enum_tag_type) |expr| { => return renderContainerDecl(ais, tree, tree.taggedUnionEnumTag(node), space),
// try renderToken(ais, tree, tree.nextToken(enum_token), Space.None); // (
// try renderExpression(ais, tree, expr, Space.None);
// const rparen = tree.nextToken(expr.lastToken());
// try renderToken(ais, tree, rparen, Space.None); // )
// try renderToken(ais, tree, tree.nextToken(rparen), Space.Space); // )
// } else {
// try renderToken(ais, tree, tree.nextToken(enum_token), Space.Space); // )
// }
// },
// .Type => |type_expr| {
// try renderToken(ais, tree, container_decl.kind_token, Space.None); // union
// const lparen = tree.nextToken(container_decl.kind_token);
// const rparen = tree.nextToken(type_expr.lastToken());
// try renderToken(ais, tree, lparen, Space.None); // (
// try renderExpression(ais, tree, type_expr, Space.None);
// try renderToken(ais, tree, rparen, Space.Space); // )
// },
// }
// if (container_decl.fields_and_decls_len == 0) {
// {
// ais.pushIndentNextLine();
// defer ais.popIndent();
// try renderToken(ais, tree, container_decl.lbrace_token, Space.None); // lbrace
// }
// return renderToken(ais, tree, container_decl.rbrace_token, space); // rbrace
// }
// const src_has_trailing_comma = blk: {
// var maybe_comma = tree.prevToken(container_decl.lastToken());
// // Doc comments for a field may also appear after the comma, eg.
// // field_name: T, // comment attached to field_name
// if (tree.token_tags[maybe_comma] == .DocComment)
// maybe_comma = tree.prevToken(maybe_comma);
// break :blk tree.token_tags[maybe_comma] == .Comma;
// };
// const fields_and_decls = container_decl.fieldsAndDecls();
// // Check if the first declaration and the { are on the same line
// const src_has_newline = !tree.tokensOnSameLine(
// container_decl.lbrace_token,
// fields_and_decls[0].firstToken(),
// );
// // We can only print all the elements in-line if all the
// // declarations inside are fields
// const src_has_only_fields = blk: {
// for (fields_and_decls) |decl| {
// if (decl.tag != .ContainerField) break :blk false;
// }
// break :blk true;
// };
// if (src_has_trailing_comma or !src_has_only_fields) {
// // One declaration per line
// ais.pushIndentNextLine();
// defer ais.popIndent();
// try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // lbrace
// for (fields_and_decls) |decl, i| {
// try renderContainerDecl(allocator, ais, tree, decl, .Newline);
// if (i + 1 < fields_and_decls.len) {
// try renderExtraNewline(ais, tree, fields_and_decls[i + 1]);
// }
// }
// } else if (src_has_newline) {
// // All the declarations on the same line, but place the items on
// // their own line
// try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // lbrace
// ais.pushIndent();
// defer ais.popIndent();
// for (fields_and_decls) |decl, i| {
// const space_after_decl: Space = if (i + 1 >= fields_and_decls.len) .Newline else .Space;
// try renderContainerDecl(allocator, ais, tree, decl, space_after_decl);
// }
// } else {
// // All the declarations on the same line
// try renderToken(ais, tree, container_decl.lbrace_token, .Space); // lbrace
// for (fields_and_decls) |decl| {
// try renderContainerDecl(allocator, ais, tree, decl, .Space);
// }
// }
// return renderToken(ais, tree, container_decl.rbrace_token, space); // rbrace
//},
.ErrorSetDecl => unreachable, // TODO .ErrorSetDecl => unreachable, // TODO
//.ErrorSetDecl => { //.ErrorSetDecl => {
@ -1949,6 +1856,94 @@ fn renderArrayInit(
} }
} }
/// Renders a full container declaration: optional layout token
/// (`extern`/`packed`), the container keyword, an optional `(enum ...)` or
/// `(Arg)` clause, and the brace-enclosed member list. `space` controls the
/// whitespace emitted after the closing rbrace.
///
/// Token positions (lparen, rparen, lbrace, rbrace) are computed
/// arithmetically from known neighbor tokens rather than re-scanned, so the
/// offsets below mirror the exact token layout produced by the parser.
fn renderContainerDecl(
    ais: *Ais,
    tree: ast.Tree,
    container_decl: ast.Full.ContainerDecl,
    space: Space,
) Error!void {
    const token_tags = tree.tokens.items(.tag);
    const node_tags = tree.nodes.items(.tag);
    if (container_decl.layout_token) |layout_token| {
        try renderToken(ais, tree, layout_token, .Space);
    }
    // Render everything up to (but not including) the lbrace, recording where
    // the lbrace must be for each of the three header shapes.
    var lbrace: ast.TokenIndex = undefined;
    if (container_decl.ast.enum_token) |enum_token| {
        // `union(enum)` or `union(enum(TagExpr))` form.
        try renderToken(ais, tree, container_decl.ast.main_token, .None); // union
        try renderToken(ais, tree, enum_token - 1, .None); // lparen
        try renderToken(ais, tree, enum_token, .None); // enum
        if (container_decl.ast.arg != 0) {
            // Explicit tag type: `(enum(TagExpr))` — two rparens close the clause.
            try renderToken(ais, tree, enum_token + 1, .None); // lparen
            try renderExpression(ais, tree, container_decl.ast.arg, .None);
            const rparen = tree.lastToken(container_decl.ast.arg) + 1;
            try renderToken(ais, tree, rparen, .None); // rparen
            try renderToken(ais, tree, rparen + 1, .Space); // rparen
            lbrace = rparen + 2;
        } else {
            try renderToken(ais, tree, enum_token + 1, .Space); // rparen
            lbrace = enum_token + 2;
        }
    } else if (container_decl.ast.arg != 0) {
        // Argument form without `enum`, e.g. `enum(u8)`.
        try renderToken(ais, tree, container_decl.ast.main_token, .None); // union
        try renderToken(ais, tree, container_decl.ast.main_token + 1, .None); // lparen
        try renderExpression(ais, tree, container_decl.ast.arg, .None);
        const rparen = tree.lastToken(container_decl.ast.arg) + 1;
        try renderToken(ais, tree, rparen, .Space); // rparen
        lbrace = rparen + 1;
    } else {
        // Bare keyword, e.g. `struct`.
        try renderToken(ais, tree, container_decl.ast.main_token, .Space); // union
        lbrace = container_decl.ast.main_token + 1;
    }
    // Empty container: emit `{}` inline and return.
    if (container_decl.ast.members.len == 0) {
        try renderToken(ais, tree, lbrace, Space.None); // lbrace
        return renderToken(ais, tree, lbrace + 1, space); // rbrace
    }
    // Locate the rbrace relative to the last member's last token, stepping
    // over an optional trailing comma and a doc comment that may appear after
    // the comma (e.g. `field: T, // comment attached to field`).
    const last_member = container_decl.ast.members[container_decl.ast.members.len - 1];
    const last_member_token = tree.lastToken(last_member);
    const rbrace = switch (token_tags[last_member_token + 1]) {
        .DocComment => last_member_token + 2,
        .Comma => switch (token_tags[last_member_token + 2]) {
            .DocComment => last_member_token + 3,
            .RBrace => last_member_token + 2,
            else => unreachable,
        },
        .RBrace => last_member_token + 1,
        else => unreachable,
    };
    // A trailing comma in the source forces one-member-per-line output.
    const src_has_trailing_comma = token_tags[last_member_token + 1] == .Comma;
    if (!src_has_trailing_comma) one_line: {
        // We can only print all the members in-line if all the members are fields.
        for (container_decl.ast.members) |member| {
            if (!node_tags[member].isContainerField()) break :one_line;
        }
        // All the declarations on the same line.
        try renderToken(ais, tree, lbrace, .Space); // lbrace
        for (container_decl.ast.members) |member| {
            try renderMember(ais, tree, member, .Space);
        }
        return renderToken(ais, tree, rbrace, space); // rbrace
    }
    // One member per line, indented one level past the braces.
    ais.pushIndent();
    try renderToken(ais, tree, lbrace, .Newline); // lbrace
    for (container_decl.ast.members) |member, i| {
        try renderMember(ais, tree, member, .Newline);
        if (i + 1 < container_decl.ast.members.len) {
            // Preserve a single blank line between members if the source had one.
            try renderExtraNewline(ais, tree, container_decl.ast.members[i + 1]);
        }
    }
    ais.popIndent();
    return renderToken(ais, tree, rbrace, space); // rbrace
}
/// Render an expression, and the comma that follows it, if it is present in the source. /// Render an expression, and the comma that follows it, if it is present in the source.
fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void {
const token_tags = tree.tokens.items(.tag); const token_tags = tree.tokens.items(.tag);