zig fmt: implement container decls

This commit is contained in:
Andrew Kelley 2021-02-05 15:47:18 -07:00
parent cf42ae178d
commit 16a2562c3f
4 changed files with 766 additions and 368 deletions

View File

@ -365,6 +365,26 @@ pub const Tree = struct {
}
},
.ContainerDecl,
.ContainerDeclComma,
.ContainerDeclTwo,
.ContainerDeclTwoComma,
.ContainerDeclArg,
.ContainerDeclArgComma,
.TaggedUnion,
.TaggedUnionComma,
.TaggedUnionTwo,
.TaggedUnionTwoComma,
.TaggedUnionEnumTag,
.TaggedUnionEnumTagComma,
=> {
const main_token = main_tokens[n];
switch (token_tags[main_token - 1]) {
.Keyword_packed, .Keyword_extern => return main_token - 1,
else => return main_token,
}
},
.PtrTypeAligned => unreachable, // TODO
.PtrTypeSentinel => unreachable, // TODO
.PtrType => unreachable, // TODO
@ -375,10 +395,6 @@ pub const Tree = struct {
.While => unreachable, // TODO
.ForSimple => unreachable, // TODO
.For => unreachable, // TODO
.ContainerDecl => unreachable, // TODO
.ContainerDeclArg => unreachable, // TODO
.TaggedUnion => unreachable, // TODO
.TaggedUnionEnumTag => unreachable, // TODO
.AsmOutput => unreachable, // TODO
.AsmInput => unreachable, // TODO
.ErrorValue => unreachable, // TODO
@ -408,6 +424,7 @@ pub const Tree = struct {
.Break,
.Return,
.Nosuspend,
.Comptime,
=> n = datas[n].lhs,
.TestDecl,
@ -455,7 +472,6 @@ pub const Tree = struct {
.BoolOr,
.AnyFrameType,
.ErrorUnion,
.Comptime,
.IfSimple,
.WhileSimple,
=> n = datas[n].rhs,
@ -490,13 +506,37 @@ pub const Tree = struct {
}
n = tree.extra_data[params.end - 1]; // last parameter
},
.Block => {
.ContainerDeclArg => {
const members = tree.extraData(datas[n].rhs, Node.SubRange);
if (members.end - members.start == 0) {
end_offset += 1; // for the rparen
n = datas[n].lhs;
} else {
end_offset += 1; // for the rbrace
n = tree.extra_data[members.end - 1]; // last parameter
}
},
.ContainerDeclArgComma => {
const members = tree.extraData(datas[n].rhs, Node.SubRange);
assert(members.end - members.start > 0);
end_offset += 2; // for the comma + rbrace
n = tree.extra_data[members.end - 1]; // last parameter
},
.Block,
.ContainerDecl,
.TaggedUnion,
=> {
end_offset += 1; // for the rbrace
if (datas[n].rhs - datas[n].lhs == 0) {
return main_tokens[n] + end_offset;
}
n = tree.extra_data[datas[n].rhs - 1]; // last statement
},
.ContainerDeclComma, .TaggedUnionComma => {
assert(datas[n].rhs - datas[n].lhs > 0);
end_offset += 2; // for the comma + rbrace
n = tree.extra_data[datas[n].rhs - 1]; // last member
},
.CallOne,
.ArrayAccess,
=> {
@ -511,6 +551,8 @@ pub const Tree = struct {
.BuiltinCallTwo,
.BlockTwo,
.StructInitDotTwo,
.ContainerDeclTwo,
.TaggedUnionTwo,
=> {
end_offset += 1; // for the rparen/rbrace
if (datas[n].rhs != 0) {
@ -523,6 +565,8 @@ pub const Tree = struct {
},
.ArrayInitDotTwoComma,
.StructInitDotTwoComma,
.ContainerDeclTwoComma,
.TaggedUnionTwoComma,
=> {
end_offset += 2; // for the comma + rbrace
if (datas[n].rhs != 0) {
@ -589,6 +633,38 @@ pub const Tree = struct {
}
}
},
.ContainerFieldInit => {
if (datas[n].rhs != 0) {
n = datas[n].rhs;
} else if (datas[n].lhs != 0) {
n = datas[n].lhs;
} else {
return main_tokens[n] + end_offset;
}
},
.ContainerFieldAlign => {
if (datas[n].rhs != 0) {
end_offset += 1; // for the rparen
n = datas[n].rhs;
} else if (datas[n].lhs != 0) {
n = datas[n].lhs;
} else {
return main_tokens[n] + end_offset;
}
},
.ContainerField => {
const extra = tree.extraData(datas[n].rhs, Node.ContainerField);
if (extra.value_expr != 0) {
n = extra.value_expr;
} else if (extra.align_expr != 0) {
end_offset += 1; // for the rparen
n = extra.align_expr;
} else if (datas[n].lhs != 0) {
n = datas[n].lhs;
} else {
return main_tokens[n] + end_offset;
}
},
// These are not supported by lastToken() because implementation would
// require recursion due to the optional comma followed by rbrace.
@ -600,10 +676,9 @@ pub const Tree = struct {
.StructInit => unreachable,
.StructInitOne => unreachable,
.StructInitDot => unreachable,
.ContainerFieldInit => unreachable,
.ContainerFieldAlign => unreachable,
.ContainerField => unreachable,
.TaggedUnionEnumTag => unreachable, // TODO
.TaggedUnionEnumTagComma => unreachable, // TODO
.Switch => unreachable, // TODO
.If => unreachable, // TODO
.Continue => unreachable, // TODO
@ -631,10 +706,6 @@ pub const Tree = struct {
.FnProtoMulti => unreachable, // TODO
.FnProtoOne => unreachable, // TODO
.FnProto => unreachable, // TODO
.ContainerDecl => unreachable, // TODO
.ContainerDeclArg => unreachable, // TODO
.TaggedUnion => unreachable, // TODO
.TaggedUnionEnumTag => unreachable, // TODO
.AsmOutput => unreachable, // TODO
.AsmInput => unreachable, // TODO
.ErrorValue => unreachable, // TODO
@ -952,6 +1023,93 @@ pub const Tree = struct {
};
}
/// Assemble a Full.ContainerDecl for a container whose (up to two) members
/// are stored inline in the node's lhs/rhs, using `buffer` as backing
/// storage for the members slice. The returned value borrows `buffer`.
pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl {
    const tag = tree.nodes.items(.tag)[node];
    assert(tag == .ContainerDeclTwo or tag == .ContainerDeclTwoComma);
    const data = tree.nodes.items(.data)[node];
    buffer.* = .{ data.lhs, data.rhs };
    // A present rhs implies a present lhs; count the populated slots.
    const member_count: usize = if (data.rhs != 0) 2 else if (data.lhs != 0) 1 else 0;
    return tree.fullContainerDecl(.{
        .main_token = tree.nodes.items(.main_token)[node],
        .enum_token = null,
        .members = buffer[0..member_count],
        .arg = 0,
    });
}
/// Assemble a Full.ContainerDecl for a container whose members live
/// directly in `extra_data[lhs..rhs]`. There is no arg expression for
/// these node tags.
pub fn containerDecl(tree: Tree, node: Node.Index) Full.ContainerDecl {
    const tag = tree.nodes.items(.tag)[node];
    assert(tag == .ContainerDecl or tag == .ContainerDeclComma);
    const data = tree.nodes.items(.data)[node];
    return tree.fullContainerDecl(.{
        .main_token = tree.nodes.items(.main_token)[node],
        .enum_token = null,
        .members = tree.extra_data[data.lhs..data.rhs],
        .arg = 0,
    });
}
/// Assemble a Full.ContainerDecl for `union(lhs)` / `enum(lhs)`:
/// lhs is the arg expression, rhs indexes a SubRange in extra_data
/// holding the member list.
pub fn containerDeclArg(tree: Tree, node: Node.Index) Full.ContainerDecl {
    const tag = tree.nodes.items(.tag)[node];
    // The renderer dispatches both the plain and the trailing-comma
    // variants to this helper, so accept both tags here.
    assert(tag == .ContainerDeclArg or tag == .ContainerDeclArgComma);
    const data = tree.nodes.items(.data)[node];
    const members_range = tree.extraData(data.rhs, Node.SubRange);
    return tree.fullContainerDecl(.{
        .main_token = tree.nodes.items(.main_token)[node],
        .enum_token = null,
        .members = tree.extra_data[members_range.start..members_range.end],
        .arg = data.lhs,
    });
}
/// Assemble a Full.ContainerDecl for `union(enum) {lhs, rhs}` with the
/// (up to two) members stored inline, using `buffer` as backing storage
/// for the members slice. The returned value borrows `buffer`.
pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl {
    const tag = tree.nodes.items(.tag)[node];
    // Accept the trailing-comma variant too, mirroring containerDeclTwo;
    // the parser emits .TaggedUnionTwoComma for trailing commas.
    assert(tag == .TaggedUnionTwo or tag == .TaggedUnionTwoComma);
    const data = tree.nodes.items(.data)[node];
    buffer.* = .{ data.lhs, data.rhs };
    const members = if (data.rhs != 0)
        buffer[0..2]
    else if (data.lhs != 0)
        buffer[0..1]
    else
        buffer[0..0];
    const main_token = tree.nodes.items(.main_token)[node];
    return tree.fullContainerDecl(.{
        .main_token = main_token,
        .enum_token = main_token + 2, // union lparen enum
        .members = members,
        .arg = 0,
    });
}
/// Assemble a Full.ContainerDecl for `union(enum) {}` with members in
/// `extra_data[lhs..rhs]`.
pub fn taggedUnion(tree: Tree, node: Node.Index) Full.ContainerDecl {
    const tag = tree.nodes.items(.tag)[node];
    // The renderer dispatches both .TaggedUnion and .TaggedUnionComma
    // here, so the assert must accept both.
    assert(tag == .TaggedUnion or tag == .TaggedUnionComma);
    const data = tree.nodes.items(.data)[node];
    const main_token = tree.nodes.items(.main_token)[node];
    return tree.fullContainerDecl(.{
        .main_token = main_token,
        .enum_token = main_token + 2, // union lparen enum
        .members = tree.extra_data[data.lhs..data.rhs],
        .arg = 0,
    });
}
/// Assemble a Full.ContainerDecl for `union(enum(lhs)) {}`: lhs is the
/// enum tag type expression, rhs indexes a SubRange in extra_data
/// holding the member list.
pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) Full.ContainerDecl {
    const tag = tree.nodes.items(.tag)[node];
    // Accept the trailing-comma variant as well.
    assert(tag == .TaggedUnionEnumTag or tag == .TaggedUnionEnumTagComma);
    const data = tree.nodes.items(.data)[node];
    const members_range = tree.extraData(data.rhs, Node.SubRange);
    const main_token = tree.nodes.items(.main_token)[node];
    return tree.fullContainerDecl(.{
        .main_token = main_token,
        .enum_token = main_token + 2, // union lparen enum
        // Fix: slice by the decoded SubRange. The previous code sliced
        // extra_data[data.lhs..data.rhs], but lhs is the enum tag
        // expression node and rhs is an extra_data index, not bounds.
        .members = tree.extra_data[members_range.start..members_range.end],
        .arg = data.lhs,
    });
}
fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl {
const token_tags = tree.tokens.items(.tag);
var result: Full.VarDecl = .{
@ -1031,6 +1189,19 @@ pub const Tree = struct {
};
return result;
}
/// Common tail for all the container-decl helpers: wraps the pre-computed
/// Ast info and detects an optional `extern`/`packed` layout keyword
/// sitting immediately before the container keyword.
fn fullContainerDecl(tree: Tree, info: Full.ContainerDecl.Ast) Full.ContainerDecl {
const token_tags = tree.tokens.items(.tag);
var result: Full.ContainerDecl = .{
.ast = info,
.layout_token = null,
};
// main_token is the struct/union/enum/opaque keyword; the token before
// it, if any, can only be a layout keyword for these node kinds.
switch (token_tags[info.main_token - 1]) {
.Keyword_extern, .Keyword_packed => result.layout_token = info.main_token - 1,
else => {},
}
return result;
}
};
/// Fully assembled AST node information.
@ -1125,6 +1296,19 @@ pub const Full = struct {
elem_type: Node.Index,
};
};
/// Fully assembled view of a container declaration
/// (struct/union/enum/opaque), shared by all the tag-specific helpers.
pub const ContainerDecl = struct {
/// The `extern` or `packed` keyword, if present.
layout_token: ?TokenIndex,
ast: Ast,
pub const Ast = struct {
/// The `struct`, `union`, `opaque`, or `enum` keyword.
main_token: TokenIndex,
/// Populated when main_token is Keyword_union.
enum_token: ?TokenIndex,
/// Fields and declarations inside the braces.
members: []const Node.Index,
/// Node index of the `union(arg)` / `enum(arg)` expression; 0 if absent.
arg: Node.Index,
};
};
};
pub const Error = union(enum) {
@ -1543,9 +1727,11 @@ pub const Node = struct {
StructInitOne,
/// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted.
/// main_token is the lbrace.
/// No trailing comma before the rbrace.
StructInitDotTwo,
/// Same as `StructInitDotTwo` except there is known to be a trailing comma
/// before the final rbrace.
/// before the final rbrace. This tag exists to facilitate lastToken() implemented
/// without recursion.
StructInitDotTwoComma,
/// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`.
/// main_token is the lbrace.
@ -1655,21 +1841,50 @@ pub const Node = struct {
/// `error{a, b}`.
/// lhs and rhs both unused.
ErrorSetDecl,
/// `struct {}`, `union {}`, etc. `sub_list[lhs..rhs]`.
/// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`.
/// main_token is `struct`, `union`, `opaque`, `enum` keyword.
ContainerDecl,
/// `union(lhs)` / `enum(lhs)`. `sub_range_list[rhs]`.
/// Same as ContainerDecl but there is known to be a trailing comma before the rbrace.
ContainerDeclComma,
/// `struct {lhs, rhs}`, `union {lhs, rhs}`, `opaque {lhs, rhs}`, `enum {lhs, rhs}`.
/// lhs or rhs can be omitted.
/// main_token is `struct`, `union`, `opaque`, `enum` keyword.
ContainerDeclTwo,
/// Same as ContainerDeclTwo except there is known to be a trailing comma
/// before the rbrace.
ContainerDeclTwoComma,
/// `union(lhs)` / `enum(lhs)`. `SubRange[rhs]`.
ContainerDeclArg,
/// Same as ContainerDeclArg but there is known to be a trailing comma before the rbrace.
ContainerDeclArgComma,
/// `union(enum) {}`. `sub_list[lhs..rhs]`.
/// Note that tagged unions with explicitly provided enums are represented
/// by `ContainerDeclArg`.
TaggedUnion,
/// `union(enum(lhs)) {}`. `sub_list_range[rhs]`.
/// Same as TaggedUnion but there is known to be a trailing comma before the rbrace.
TaggedUnionComma,
/// `union(enum) {lhs, rhs}`. lhs or rhs may be omitted.
/// Note that tagged unions with explicitly provided enums are represented
/// by `ContainerDeclArg`.
TaggedUnionTwo,
/// Same as TaggedUnionTwo but there is known to be a trailing comma before the rbrace.
TaggedUnionTwoComma,
/// `union(enum(lhs)) {}`. `SubRange[rhs]`.
TaggedUnionEnumTag,
/// Same as TaggedUnionEnumTag but there is known to be a trailing comma
/// before the rbrace.
TaggedUnionEnumTagComma,
/// `a: lhs = rhs,`. lhs and rhs can be omitted.
/// main_token is the field name identifier.
/// lastToken() does not include the possible trailing comma.
ContainerFieldInit,
/// `a: lhs align(rhs),`. rhs can be omitted.
/// main_token is the field name identifier.
/// lastToken() does not include the possible trailing comma.
ContainerFieldAlign,
/// `a: lhs align(c) = d,`. `container_field_list[rhs]`.
/// main_token is the field name identifier.
/// lastToken() does not include the possible trailing comma.
ContainerField,
/// `anytype`. both lhs and rhs unused.
/// Used by `ContainerField`.
@ -1699,6 +1914,17 @@ pub const Node = struct {
ErrorValue,
/// `lhs!rhs`. main_token is the `!`.
ErrorUnion,
/// Whether this tag is one of the three container-field node kinds
/// (`a: T`, `a: T align(x)`, `a: T align(x) = v`).
pub fn isContainerField(tag: Tag) bool {
    return tag == .ContainerFieldInit or
        tag == .ContainerFieldAlign or
        tag == .ContainerField;
}
};
pub const Data = struct {

View File

@ -64,9 +64,10 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree {
.rhs = undefined,
},
});
const root_decls = try parser.parseContainerMembers(true);
// parseContainerMembers will try to skip as much
// invalid tokens as it can, so we are now at EOF.
const root_members = try parser.parseContainerMembers();
const root_decls = try root_members.toSpan(&parser);
// parseContainerMembers will try to skip as much invalid tokens as
// it can, so we are now at EOF.
assert(parser.token_tags[parser.tok_i] == .Eof);
parser.nodes.items(.data)[0] = .{
.lhs = root_decls.start,
@ -108,6 +109,22 @@ const Parser = struct {
}
};
/// Result of parseContainerMembers: up to two members are carried inline
/// in lhs/rhs; longer lists are already spilled to extra_data, in which
/// case lhs/rhs are the span bounds.
const Members = struct {
    len: usize,
    lhs: Node.Index,
    rhs: Node.Index,
    trailing_comma: bool,

    /// Materialize the member list as a SubRange in extra_data.
    fn toSpan(self: Members, p: *Parser) !Node.SubRange {
        if (self.len > 2) {
            // Already stored in extra_data; lhs/rhs are the bounds.
            return Node.SubRange{ .start = self.lhs, .end = self.rhs };
        }
        // Append the populated inline slots to extra_data to form a span.
        const inline_members = [2]Node.Index{ self.lhs, self.rhs };
        return p.listToSpan(inline_members[0..self.len]);
    }
};
fn listToSpan(p: *Parser, list: []const Node.Index) !Node.SubRange {
try p.extra_data.appendSlice(p.gpa, list);
return Node.SubRange{
@ -151,169 +168,225 @@ const Parser = struct {
/// / ContainerField COMMA ContainerMembers
/// / ContainerField
/// /
fn parseContainerMembers(p: *Parser, top_level: bool) !Node.SubRange {
/// TopLevelComptime <- KEYWORD_comptime BlockExpr
fn parseContainerMembers(p: *Parser) !Members {
var list = std.ArrayList(Node.Index).init(p.gpa);
defer list.deinit();
var field_state: union(enum) {
/// no fields have been seen
/// No fields have been seen.
none,
/// currently parsing fields
/// Currently parsing fields.
seen,
/// saw fields and then a declaration after them.
/// payload is first token of previous declaration.
end: TokenIndex,
/// ther was a declaration between fields, don't report more errors
/// Saw fields and then a declaration after them.
/// Payload is first token of previous declaration.
end: Node.Index,
/// There was a declaration between fields, don't report more errors.
err,
} = .none;
// Skip container doc comments.
while (p.eatToken(.ContainerDocComment)) |_| {}
var trailing_comma = false;
while (true) {
const doc_comment = p.eatDocComments();
const test_decl_node = p.parseTestDecl() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
continue;
},
};
if (test_decl_node != 0) {
if (field_state == .seen) {
field_state = .{ .end = p.nodes.items(.main_token)[test_decl_node] };
}
try list.append(test_decl_node);
continue;
}
const comptime_node = p.parseTopLevelComptime() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
continue;
},
};
if (comptime_node != 0) {
if (field_state == .seen) {
field_state = .{ .end = p.nodes.items(.main_token)[comptime_node] };
}
try list.append(comptime_node);
continue;
}
const visib_token = p.eatToken(.Keyword_pub);
const top_level_decl = p.parseTopLevelDecl() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
continue;
},
};
if (top_level_decl != 0) {
if (field_state == .seen) {
field_state = .{
.end = visib_token orelse p.nodes.items(.main_token)[top_level_decl],
};
}
try list.append(top_level_decl);
continue;
}
if (visib_token != null) {
try p.warn(.{ .ExpectedPubItem = .{ .token = p.tok_i } });
// ignore this pub
continue;
}
const container_field = p.parseContainerField() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
// attempt to recover
p.findNextContainerMember();
continue;
},
};
if (container_field != 0) {
switch (field_state) {
.none => field_state = .seen,
.err, .seen => {},
.end => |tok| {
try p.warn(.{ .DeclBetweenFields = .{ .token = tok } });
// continue parsing, error will be reported later
field_state = .err;
},
}
try list.append(container_field);
const comma = p.eatToken(.Comma) orelse {
// try to continue parsing
const index = p.tok_i;
p.findNextContainerMember();
const next = p.token_tags[p.tok_i];
switch (next) {
.Eof => {
// no invalid tokens were found
if (index == p.tok_i) break;
// Invalid tokens, add error and exit
try p.warn(.{
.ExpectedToken = .{ .token = index, .expected_id = .Comma },
});
break;
},
else => {
if (next == .RBrace) {
if (!top_level) break;
p.tok_i += 1;
}
// add error and continue
try p.warn(.{
.ExpectedToken = .{ .token = index, .expected_id = .Comma },
});
continue;
},
switch (p.token_tags[p.tok_i]) {
.Keyword_test => {
const test_decl_node = try p.expectTestDeclRecoverable();
if (test_decl_node != 0) {
if (field_state == .seen) {
field_state = .{ .end = test_decl_node };
}
try list.append(test_decl_node);
}
};
continue;
}
// Dangling doc comment
if (doc_comment) |tok| {
try p.warn(.{
.UnattachedDocComment = .{ .token = tok },
});
}
const next = p.token_tags[p.tok_i];
switch (next) {
.Eof => break,
.Keyword_comptime => {
trailing_comma = false;
},
.Keyword_comptime => switch (p.token_tags[p.tok_i + 1]) {
.Identifier => {
p.tok_i += 1;
const container_field = try p.expectContainerFieldRecoverable();
if (container_field != 0) {
switch (field_state) {
.none => field_state = .seen,
.err, .seen => {},
.end => |node| {
try p.warn(.{
.DeclBetweenFields = .{ .token = p.nodes.items(.main_token)[node] },
});
// Continue parsing; error will be reported later.
field_state = .err;
},
}
try list.append(container_field);
switch (p.token_tags[p.tok_i]) {
.Comma => {
p.tok_i += 1;
trailing_comma = true;
continue;
},
.RBrace, .Eof => {
trailing_comma = false;
break;
},
else => {},
}
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.{
.ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
});
p.findNextContainerMember();
}
},
.LBrace => {
const comptime_token = p.nextToken();
const block = p.parseBlock() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => blk: {
p.findNextContainerMember();
break :blk null_node;
},
};
if (block != 0) {
const comptime_node = try p.addNode(.{
.tag = .Comptime,
.main_token = comptime_token,
.data = .{
.lhs = block,
.rhs = undefined,
},
});
if (field_state == .seen) {
field_state = .{ .end = comptime_node };
}
try list.append(comptime_node);
}
trailing_comma = false;
},
else => {
p.tok_i += 1;
try p.warn(.{ .ExpectedBlockOrField = .{ .token = p.tok_i } });
},
},
.Keyword_pub => {
p.tok_i += 1;
try p.warn(.{
.ExpectedBlockOrField = .{ .token = p.tok_i },
});
const top_level_decl = try p.expectTopLevelDeclRecoverable();
if (top_level_decl != 0) {
if (field_state == .seen) {
field_state = .{ .end = top_level_decl };
}
try list.append(top_level_decl);
}
trailing_comma = false;
},
.Keyword_usingnamespace => {
const node = try p.expectUsingNamespaceRecoverable();
if (node != 0) {
if (field_state == .seen) {
field_state = .{ .end = node };
}
try list.append(node);
}
trailing_comma = false;
},
.Keyword_const,
.Keyword_var,
.Keyword_threadlocal,
.Keyword_export,
.Keyword_extern,
.Keyword_inline,
.Keyword_noinline,
.Keyword_fn,
=> {
const top_level_decl = try p.expectTopLevelDeclRecoverable();
if (top_level_decl != 0) {
if (field_state == .seen) {
field_state = .{ .end = top_level_decl };
}
try list.append(top_level_decl);
}
trailing_comma = false;
},
.Identifier => {
const container_field = try p.expectContainerFieldRecoverable();
if (container_field != 0) {
switch (field_state) {
.none => field_state = .seen,
.err, .seen => {},
.end => |node| {
try p.warn(.{
.DeclBetweenFields = .{ .token = p.nodes.items(.main_token)[node] },
});
// Continue parsing; error will be reported later.
field_state = .err;
},
}
try list.append(container_field);
switch (p.token_tags[p.tok_i]) {
.Comma => {
p.tok_i += 1;
trailing_comma = true;
continue;
},
.RBrace, .Eof => {
trailing_comma = false;
break;
},
else => {},
}
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.{
.ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
});
p.findNextContainerMember();
}
},
.Eof, .RBrace => {
if (doc_comment) |tok| {
try p.warn(.{ .UnattachedDocComment = .{ .token = tok } });
}
break;
},
else => {
const index = p.tok_i;
if (next == .RBrace) {
if (!top_level) break;
p.tok_i += 1;
}
// this was likely not supposed to end yet,
// try to find the next declaration
try p.warn(.{ .ExpectedContainerMembers = .{ .token = p.tok_i } });
// This was likely not supposed to end yet; try to find the next declaration.
p.findNextContainerMember();
try p.warn(.{
.ExpectedContainerMembers = .{ .token = index },
});
},
}
}
return p.listToSpan(list.items);
switch (list.items.len) {
0 => return Members{
.len = 0,
.lhs = 0,
.rhs = 0,
.trailing_comma = trailing_comma,
},
1 => return Members{
.len = 1,
.lhs = list.items[0],
.rhs = 0,
.trailing_comma = trailing_comma,
},
2 => return Members{
.len = 2,
.lhs = list.items[0],
.rhs = list.items[1],
.trailing_comma = trailing_comma,
},
else => {
const span = try p.listToSpan(list.items);
return Members{
.len = list.items.len,
.lhs = span.start,
.rhs = span.end,
.trailing_comma = trailing_comma,
};
},
}
}
/// Attempts to find next container member by searching for certain tokens
@ -398,44 +471,36 @@ const Parser = struct {
}
/// TestDecl <- KEYWORD_test STRINGLITERALSINGLE? Block
fn parseTestDecl(p: *Parser) !Node.Index {
const test_token = p.eatToken(.Keyword_test) orelse return null_node;
const name_token = try p.expectToken(.StringLiteral);
/// TestDecl <- KEYWORD_test STRINGLITERALSINGLE? Block
/// The `test` keyword must be present; the name string is optional and a
/// block body is mandatory.
fn expectTestDecl(p: *Parser) !Node.Index {
    const test_token = try p.expectToken(.Keyword_test);
    const name_token = p.eatToken(.StringLiteral);
    const block_node = try p.parseBlock();
    if (block_node == 0) {
        return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } });
    }
    return p.addNode(.{
        .tag = .TestDecl,
        .main_token = test_token,
        .data = .{
            // 0 encodes "no name string".
            .lhs = name_token orelse 0,
            .rhs = block_node,
        },
    });
}
/// TopLevelComptime <- KEYWORD_comptime BlockExpr
fn parseTopLevelComptime(p: *Parser) !Node.Index {
if (p.token_tags[p.tok_i] == .Keyword_comptime and
p.token_tags[p.tok_i + 1] == .LBrace)
{
return p.addNode(.{
.tag = .Comptime,
.main_token = p.nextToken(),
.data = .{
.lhs = try p.parseBlock(),
.rhs = undefined,
},
});
} else {
return null_node;
}
/// Like expectTestDecl, but on a parse error resynchronizes at the next
/// container member and returns null_node instead of propagating.
fn expectTestDeclRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
    if (p.expectTestDecl()) |node| {
        return node;
    } else |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        error.ParseError => {
            p.findNextContainerMember();
            return null_node;
        },
    }
}
/// TopLevelDecl
/// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block)
/// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl
/// / KEYWORD_usingnamespace Expr SEMICOLON
fn parseTopLevelDecl(p: *Parser) !Node.Index {
fn expectTopLevelDecl(p: *Parser) !Node.Index {
const extern_export_inline_token = p.nextToken();
var expect_fn: bool = false;
var exported: bool = false;
@ -496,7 +561,21 @@ const Parser = struct {
return p.fail(.{ .ExpectedVarDeclOrFn = .{ .token = p.tok_i } });
}
const usingnamespace_token = p.eatToken(.Keyword_usingnamespace) orelse return null_node;
return p.expectUsingNamespace();
}
/// Like expectTopLevelDecl, but on a parse error resynchronizes at the
/// next container member and returns null_node instead of propagating.
fn expectTopLevelDeclRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
    if (p.expectTopLevelDecl()) |node| {
        return node;
    } else |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        error.ParseError => {
            p.findNextContainerMember();
            return null_node;
        },
    }
}
fn expectUsingNamespace(p: *Parser) !Node.Index {
const usingnamespace_token = try p.expectToken(.Keyword_usingnamespace);
const expr = try p.expectExpr();
const semicolon_token = try p.expectToken(.Semicolon);
try p.parseAppendedDocComment(semicolon_token);
@ -510,6 +589,16 @@ const Parser = struct {
});
}
/// Like expectUsingNamespace, but on a parse error resynchronizes at the
/// next container member and returns null_node instead of propagating.
fn expectUsingNamespaceRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
    if (p.expectUsingNamespace()) |node| {
        return node;
    } else |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        error.ParseError => {
            p.findNextContainerMember();
            return null_node;
        },
    }
}
/// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? CallConv? EXCLAMATIONMARK? (Keyword_anytype / TypeExpr)
fn parseFnProto(p: *Parser) !Node.Index {
const fn_token = p.eatToken(.Keyword_fn) orelse return null_node;
@ -648,12 +737,9 @@ const Parser = struct {
}
/// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)?
fn parseContainerField(p: *Parser) !Node.Index {
fn expectContainerField(p: *Parser) !Node.Index {
const comptime_token = p.eatToken(.Keyword_comptime);
const name_token = p.eatToken(.Identifier) orelse {
if (comptime_token) |_| p.tok_i -= 1;
return null_node;
};
const name_token = try p.expectToken(.Identifier);
var align_expr: Node.Index = 0;
var type_expr: Node.Index = 0;
@ -708,6 +794,16 @@ const Parser = struct {
}
}
/// Like expectContainerField, but on a parse error resynchronizes at the
/// next container member and returns null_node instead of propagating.
fn expectContainerFieldRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
    if (p.expectContainerField()) |node| {
        return node;
    } else |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        error.ParseError => {
            p.findNextContainerMember();
            return null_node;
        },
    }
}
/// Statement
/// <- KEYWORD_comptime? VarDecl
/// / KEYWORD_comptime BlockExprStatement
@ -3333,16 +3429,20 @@ const Parser = struct {
_ = try p.expectToken(.RParen);
_ = try p.expectToken(.LBrace);
const members = try p.parseContainerMembers(false);
const members = try p.parseContainerMembers();
const members_span = try members.toSpan(p);
_ = try p.expectToken(.RBrace);
return p.addNode(.{
.tag = .TaggedUnionEnumTag,
.tag = switch (members.trailing_comma) {
true => .TaggedUnionEnumTagComma,
false => .TaggedUnionEnumTag,
},
.main_token = main_token,
.data = .{
.lhs = enum_tag_expr,
.rhs = try p.addExtra(Node.SubRange{
.start = members.start,
.end = members.end,
.start = members_span.start,
.end = members_span.end,
}),
},
});
@ -3350,16 +3450,34 @@ const Parser = struct {
_ = try p.expectToken(.RParen);
_ = try p.expectToken(.LBrace);
const members = try p.parseContainerMembers(false);
const members = try p.parseContainerMembers();
_ = try p.expectToken(.RBrace);
return p.addNode(.{
.tag = .TaggedUnion,
.main_token = main_token,
.data = .{
.lhs = members.start,
.rhs = members.end,
},
});
if (members.len <= 2) {
return p.addNode(.{
.tag = switch (members.trailing_comma) {
true => .TaggedUnionTwoComma,
false => .TaggedUnionTwo,
},
.main_token = main_token,
.data = .{
.lhs = members.lhs,
.rhs = members.rhs,
},
});
} else {
const span = try members.toSpan(p);
return p.addNode(.{
.tag = switch (members.trailing_comma) {
true => .TaggedUnionComma,
false => .TaggedUnion,
},
.main_token = main_token,
.data = .{
.lhs = span.start,
.rhs = span.end,
},
});
}
}
} else {
const expr = try p.expectExpr();
@ -3373,26 +3491,48 @@ const Parser = struct {
else => unreachable,
};
_ = try p.expectToken(.LBrace);
const members = try p.parseContainerMembers(false);
const members = try p.parseContainerMembers();
_ = try p.expectToken(.RBrace);
if (arg_expr == 0) {
return p.addNode(.{
.tag = .ContainerDecl,
.main_token = main_token,
.data = .{
.lhs = members.start,
.rhs = members.end,
},
});
if (members.len <= 2) {
return p.addNode(.{
.tag = switch (members.trailing_comma) {
true => .ContainerDeclTwoComma,
false => .ContainerDeclTwo,
},
.main_token = main_token,
.data = .{
.lhs = members.lhs,
.rhs = members.rhs,
},
});
} else {
const span = try members.toSpan(p);
return p.addNode(.{
.tag = switch (members.trailing_comma) {
true => .ContainerDeclComma,
false => .ContainerDecl,
},
.main_token = main_token,
.data = .{
.lhs = span.start,
.rhs = span.end,
},
});
}
} else {
const span = try members.toSpan(p);
return p.addNode(.{
.tag = .ContainerDeclArg,
.tag = switch (members.trailing_comma) {
true => .ContainerDeclArgComma,
false => .ContainerDeclArg,
},
.main_token = main_token,
.data = .{
.lhs = arg_expr,
.rhs = try p.addExtra(Node.SubRange{
.start = members.start,
.end = members.end,
.start = span.start,
.end = span.end,
}),
},
});

View File

@ -149,45 +149,82 @@ test "zig fmt: nosuspend block" {
);
}
//test "zig fmt: nosuspend await" {
// try testCanonical(
// \\fn foo() void {
// \\ x = nosuspend await y;
// \\}
// \\
// );
//}
//
//test "zig fmt: trailing comma in container declaration" {
// try testCanonical(
// \\const X = struct { foo: i32 };
// \\const X = struct { foo: i32, bar: i32 };
// \\const X = struct { foo: i32 = 1, bar: i32 = 2 };
// \\const X = struct { foo: i32 align(4), bar: i32 align(4) };
// \\const X = struct { foo: i32 align(4) = 1, bar: i32 align(4) = 2 };
// \\
// );
// try testCanonical(
// \\test "" {
// \\ comptime {
// \\ const X = struct {
// \\ x: i32
// \\ };
// \\ }
// \\}
// \\
// );
// try testTransform(
// \\const X = struct {
// \\ foo: i32, bar: i8 };
// ,
// \\const X = struct {
// \\ foo: i32, bar: i8
// \\};
// \\
// );
//}
//
test "zig fmt: nosuspend await" {
try testCanonical(
\\fn foo() void {
\\ x = nosuspend await y;
\\}
\\
);
}
test "zig fmt: container declaration, single line" {
try testCanonical(
\\const X = struct { foo: i32 };
\\const X = struct { foo: i32, bar: i32 };
\\const X = struct { foo: i32 = 1, bar: i32 = 2 };
\\const X = struct { foo: i32 align(4), bar: i32 align(4) };
\\const X = struct { foo: i32 align(4) = 1, bar: i32 align(4) = 2 };
\\
);
}
test "zig fmt: container declaration, one item, multi line trailing comma" {
try testCanonical(
\\test "" {
\\ comptime {
\\ const X = struct {
\\ x: i32,
\\ };
\\ }
\\}
\\
);
}
test "zig fmt: container declaration, no trailing comma on separate line" {
try testTransform(
\\test "" {
\\ comptime {
\\ const X = struct {
\\ x: i32
\\ };
\\ }
\\}
\\
,
\\test "" {
\\ comptime {
\\ const X = struct { x: i32 };
\\ }
\\}
\\
);
}
test "zig fmt: container declaration, line break, no trailing comma" {
try testTransform(
\\const X = struct {
\\ foo: i32, bar: i8 };
,
\\const X = struct { foo: i32, bar: i8 };
\\
);
}
test "zig fmt: container declaration, transform trailing comma" {
try testTransform(
\\const X = struct {
\\ foo: i32, bar: i8, };
,
\\const X = struct {
\\ foo: i32,
\\ bar: i8,
\\};
\\
);
}
//test "zig fmt: trailing comma in fn parameter list" {
// try testCanonical(
// \\pub fn f(

View File

@ -68,7 +68,7 @@ fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void {
const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs];
for (root_decls) |decl| {
try renderContainerDecl(ais, tree, decl, .Newline);
try renderMember(ais, tree, decl, .Newline);
}
}
@ -84,7 +84,7 @@ fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, first_token: ast.TokenInde
}
}
fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void {
fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void {
const token_tags = tree.tokens.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
const datas = tree.nodes.items(.data);
@ -158,6 +158,8 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S
.ContainerFieldAlign => return renderContainerField(ais, tree, tree.containerFieldAlign(decl), space),
.ContainerField => return renderContainerField(ais, tree, tree.containerField(decl), space),
.Comptime => return renderExpression(ais, tree, decl, space),
.Root => unreachable,
else => unreachable,
}
}
@ -195,7 +197,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac
// return renderToken(ais, tree, any_type.token, space);
//},
.BlockTwo => {
var statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs };
const statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs };
if (datas[node].lhs == 0) {
return renderBlock(ais, tree, main_tokens[node], statements[0..0], space);
} else if (datas[node].rhs == 0) {
@ -667,124 +669,29 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac
// return renderToken(ais, tree, grouped_expr.rparen, space);
//},
.ContainerDecl => unreachable, // TODO
.ContainerDeclArg => unreachable, // TODO
.TaggedUnion => unreachable, // TODO
.TaggedUnionEnumTag => unreachable, // TODO
//.ContainerDecl => {
// const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base);
.ContainerDecl,
.ContainerDeclComma,
=> return renderContainerDecl(ais, tree, tree.containerDecl(node), space),
// if (container_decl.layout_token) |layout_token| {
// try renderToken(ais, tree, layout_token, Space.Space);
// }
.ContainerDeclTwo, .ContainerDeclTwoComma => {
var buffer: [2]ast.Node.Index = undefined;
return renderContainerDecl(ais, tree, tree.containerDeclTwo(&buffer, node), space);
},
.ContainerDeclArg,
.ContainerDeclArgComma,
=> return renderContainerDecl(ais, tree, tree.containerDeclArg(node), space),
// switch (container_decl.init_arg_expr) {
// .None => {
// try renderToken(ais, tree, container_decl.kind_token, Space.Space); // union
// },
// .Enum => |enum_tag_type| {
// try renderToken(ais, tree, container_decl.kind_token, Space.None); // union
.TaggedUnion,
.TaggedUnionComma,
=> return renderContainerDecl(ais, tree, tree.taggedUnion(node), space),
// const lparen = tree.nextToken(container_decl.kind_token);
// const enum_token = tree.nextToken(lparen);
// try renderToken(ais, tree, lparen, Space.None); // (
// try renderToken(ais, tree, enum_token, Space.None); // enum
// if (enum_tag_type) |expr| {
// try renderToken(ais, tree, tree.nextToken(enum_token), Space.None); // (
// try renderExpression(ais, tree, expr, Space.None);
// const rparen = tree.nextToken(expr.lastToken());
// try renderToken(ais, tree, rparen, Space.None); // )
// try renderToken(ais, tree, tree.nextToken(rparen), Space.Space); // )
// } else {
// try renderToken(ais, tree, tree.nextToken(enum_token), Space.Space); // )
// }
// },
// .Type => |type_expr| {
// try renderToken(ais, tree, container_decl.kind_token, Space.None); // union
// const lparen = tree.nextToken(container_decl.kind_token);
// const rparen = tree.nextToken(type_expr.lastToken());
// try renderToken(ais, tree, lparen, Space.None); // (
// try renderExpression(ais, tree, type_expr, Space.None);
// try renderToken(ais, tree, rparen, Space.Space); // )
// },
// }
// if (container_decl.fields_and_decls_len == 0) {
// {
// ais.pushIndentNextLine();
// defer ais.popIndent();
// try renderToken(ais, tree, container_decl.lbrace_token, Space.None); // lbrace
// }
// return renderToken(ais, tree, container_decl.rbrace_token, space); // rbrace
// }
// const src_has_trailing_comma = blk: {
// var maybe_comma = tree.prevToken(container_decl.lastToken());
// // Doc comments for a field may also appear after the comma, eg.
// // field_name: T, // comment attached to field_name
// if (tree.token_tags[maybe_comma] == .DocComment)
// maybe_comma = tree.prevToken(maybe_comma);
// break :blk tree.token_tags[maybe_comma] == .Comma;
// };
// const fields_and_decls = container_decl.fieldsAndDecls();
// // Check if the first declaration and the { are on the same line
// const src_has_newline = !tree.tokensOnSameLine(
// container_decl.lbrace_token,
// fields_and_decls[0].firstToken(),
// );
// // We can only print all the elements in-line if all the
// // declarations inside are fields
// const src_has_only_fields = blk: {
// for (fields_and_decls) |decl| {
// if (decl.tag != .ContainerField) break :blk false;
// }
// break :blk true;
// };
// if (src_has_trailing_comma or !src_has_only_fields) {
// // One declaration per line
// ais.pushIndentNextLine();
// defer ais.popIndent();
// try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // lbrace
// for (fields_and_decls) |decl, i| {
// try renderContainerDecl(allocator, ais, tree, decl, .Newline);
// if (i + 1 < fields_and_decls.len) {
// try renderExtraNewline(ais, tree, fields_and_decls[i + 1]);
// }
// }
// } else if (src_has_newline) {
// // All the declarations on the same line, but place the items on
// // their own line
// try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // lbrace
// ais.pushIndent();
// defer ais.popIndent();
// for (fields_and_decls) |decl, i| {
// const space_after_decl: Space = if (i + 1 >= fields_and_decls.len) .Newline else .Space;
// try renderContainerDecl(allocator, ais, tree, decl, space_after_decl);
// }
// } else {
// // All the declarations on the same line
// try renderToken(ais, tree, container_decl.lbrace_token, .Space); // lbrace
// for (fields_and_decls) |decl| {
// try renderContainerDecl(allocator, ais, tree, decl, .Space);
// }
// }
// return renderToken(ais, tree, container_decl.rbrace_token, space); // rbrace
//},
.TaggedUnionTwo, .TaggedUnionTwoComma => {
var buffer: [2]ast.Node.Index = undefined;
return renderContainerDecl(ais, tree, tree.taggedUnionTwo(&buffer, node), space);
},
.TaggedUnionEnumTag,
.TaggedUnionEnumTagComma,
=> return renderContainerDecl(ais, tree, tree.taggedUnionEnumTag(node), space),
.ErrorSetDecl => unreachable, // TODO
//.ErrorSetDecl => {
@@ -1949,6 +1856,94 @@ fn renderArrayInit(
}
}
/// Renders a full container declaration (`struct`/`union`/`enum` with optional
/// `packed`/`extern` layout keyword, optional `(arg)` or `(enum)`/`(enum(tag))`
/// suffix, and its member list), followed by `space` after the closing brace.
/// Members are rendered one per line unless the source has no trailing comma
/// and every member is a container field, in which case they stay on one line.
fn renderContainerDecl(
    ais: *Ais,
    tree: ast.Tree,
    container_decl: ast.Full.ContainerDecl,
    space: Space,
) Error!void {
    const token_tags = tree.tokens.items(.tag);
    const node_tags = tree.nodes.items(.tag);

    // Optional `packed` / `extern` keyword preceding the container keyword.
    // NOTE(review): accessed as `container_decl.layout_token` while every other
    // field below goes through `container_decl.ast.*` — confirm this matches
    // the ast.Full.ContainerDecl field layout.
    if (container_decl.layout_token) |layout_token| {
        try renderToken(ais, tree, layout_token, .Space);
    }

    // Compute the lbrace token index while rendering the header tokens.
    // Token positions are derived by arithmetic from known anchors
    // (main_token, enum_token, lastToken(arg)), so the order of the renders
    // below must match the source token order exactly.
    var lbrace: ast.TokenIndex = undefined;
    if (container_decl.ast.enum_token) |enum_token| {
        // `union(enum)` or `union(enum(TagType))` form.
        try renderToken(ais, tree, container_decl.ast.main_token, .None); // union
        try renderToken(ais, tree, enum_token - 1, .None); // lparen
        try renderToken(ais, tree, enum_token, .None); // enum
        if (container_decl.ast.arg != 0) {
            // Explicit tag type: `union(enum(T))`.
            try renderToken(ais, tree, enum_token + 1, .None); // lparen
            try renderExpression(ais, tree, container_decl.ast.arg, .None);
            const rparen = tree.lastToken(container_decl.ast.arg) + 1;
            try renderToken(ais, tree, rparen, .None); // rparen
            try renderToken(ais, tree, rparen + 1, .Space); // rparen
            lbrace = rparen + 2;
        } else {
            try renderToken(ais, tree, enum_token + 1, .Space); // rparen
            lbrace = enum_token + 2;
        }
    } else if (container_decl.ast.arg != 0) {
        // Container with an argument but no enum token, e.g. `enum(u8)`.
        try renderToken(ais, tree, container_decl.ast.main_token, .None); // union
        try renderToken(ais, tree, container_decl.ast.main_token + 1, .None); // lparen
        try renderExpression(ais, tree, container_decl.ast.arg, .None);
        const rparen = tree.lastToken(container_decl.ast.arg) + 1;
        try renderToken(ais, tree, rparen, .Space); // rparen
        lbrace = rparen + 1;
    } else {
        // Bare container keyword, e.g. `struct` / `union` / `enum`.
        try renderToken(ais, tree, container_decl.ast.main_token, .Space); // union
        lbrace = container_decl.ast.main_token + 1;
    }

    // Empty container: render `{}` inline and return.
    if (container_decl.ast.members.len == 0) {
        try renderToken(ais, tree, lbrace, Space.None); // lbrace
        return renderToken(ais, tree, lbrace + 1, space); // rbrace
    }

    // Locate the rbrace: it follows the last member, possibly separated by a
    // trailing comma and/or a doc comment that appears after the comma, e.g.
    // `field_name: T, // doc comment attached to field_name`.
    const last_member = container_decl.ast.members[container_decl.ast.members.len - 1];
    const last_member_token = tree.lastToken(last_member);
    const rbrace = switch (token_tags[last_member_token + 1]) {
        .DocComment => last_member_token + 2,
        .Comma => switch (token_tags[last_member_token + 2]) {
            .DocComment => last_member_token + 3,
            .RBrace => last_member_token + 2,
            else => unreachable,
        },
        .RBrace => last_member_token + 1,
        else => unreachable,
    };

    // A trailing comma in the source forces one-member-per-line output.
    const src_has_trailing_comma = token_tags[last_member_token + 1] == .Comma;
    if (!src_has_trailing_comma) one_line: {
        // We can only print all the members in-line if all the members are fields.
        for (container_decl.ast.members) |member| {
            if (!node_tags[member].isContainerField()) break :one_line;
        }
        // All the declarations on the same line.
        try renderToken(ais, tree, lbrace, .Space); // lbrace
        for (container_decl.ast.members) |member| {
            try renderMember(ais, tree, member, .Space);
        }
        return renderToken(ais, tree, rbrace, space); // rbrace
    }

    // One member per line, indented one level past the braces.
    ais.pushIndent();
    try renderToken(ais, tree, lbrace, .Newline); // lbrace
    for (container_decl.ast.members) |member, i| {
        try renderMember(ais, tree, member, .Newline);
        if (i + 1 < container_decl.ast.members.len) {
            // Preserve a blank line between members when present in the source.
            try renderExtraNewline(ais, tree, container_decl.ast.members[i + 1]);
        }
    }
    ais.popIndent();
    return renderToken(ais, tree, rbrace, space); // rbrace
}
/// Render an expression, and the comma that follows it, if it is present in the source.
fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void {
const token_tags = tree.tokens.items(.tag);