Implemented container-level doc comments (`//!`) in the stage 2 parser, renderer, and tokenizer

This commit is contained in:
Vexu 2019-10-12 12:38:09 +03:00
parent d89f39d719
commit b92f42d1f4
No known key found for this signature in database
GPG Key ID: 5AEABFCAFF5CD8D6
6 changed files with 108 additions and 10 deletions

View File

@ -576,7 +576,6 @@ pub const Node = struct {
pub const Root = struct {
base: Node,
doc_comments: ?*DocComment,
decls: DeclList,
eof_token: TokenIndex,

View File

@ -58,13 +58,6 @@ fn parseRoot(arena: *Allocator, it: *TokenIterator, tree: *Tree) Allocator.Error
node.* = Node.Root{
.base = Node{ .id = .Root },
.decls = undefined,
// TODO: Because zig fmt collapses consecutive comments separated by blank lines into
// a single multi-line comment, it is currently impossible to have a container-level
// doc comment and NO doc comment on the first decl. For now, simply
// ignore the problem and assume that there will be no container-level
// doc comments.
// See: https://github.com/ziglang/zig/issues/2288
.doc_comments = null,
.eof_token = undefined,
};
node.decls = parseContainerMembers(arena, it, tree) catch |err| {
@ -94,6 +87,11 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No
var list = Node.Root.DeclList.init(arena);
while (true) {
if (try parseContainerDocComments(arena, it, tree)) |node| {
try list.push(node);
continue;
}
const doc_comments = try parseDocComment(arena, it, tree);
if (try parseTestDecl(arena, it, tree)) |node| {
@ -155,12 +153,35 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No
continue;
}
// Dangling doc comment
if (doc_comments != null) {
try tree.errors.push(AstError{
.UnattachedDocComment = AstError.UnattachedDocComment{ .token = doc_comments.?.firstToken() },
});
}
break;
}
return list;
}
/// Eat a multiline container doc comment
/// Eat a multiline container doc comment (a run of consecutive `//!` lines).
/// Returns null when the iterator is not positioned at a ContainerDocComment
/// token; otherwise returns a DocComment node owned by `arena`.
fn parseContainerDocComments(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    // Gather every consecutive ContainerDocComment token into one node.
    var line_list = Node.DocComment.LineList.init(arena);
    while (eatToken(it, .ContainerDocComment)) |doc_line| {
        try line_list.push(doc_line);
    }

    // Nothing consumed: there is no container doc comment here.
    if (line_list.len == 0) return null;

    const comment_node = try arena.create(Node.DocComment);
    comment_node.* = Node.DocComment{
        .base = Node{ .id = .DocComment },
        .lines = line_list,
    };
    return &comment_node.base;
}
/// TestDecl <- KEYWORD_test STRINGLITERAL Block
fn parseTestDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const test_token = eatToken(it, .Keyword_test) orelse return null;

View File

@ -2556,6 +2556,62 @@ test "zig fmt: comments at several places in struct init" {
);
}
test "zig fmt: top level doc comments" {
// Container-level ("//!") doc comments must round-trip unchanged through zig fmt:
// at file scope, inside struct bodies, mixed with "///" decl doc comments, and
// even when a container has multiple "//!" groups interleaved with fields (C).
try testCanonical(
\\//! tld 1
\\//! tld 2
\\//! tld 3
\\
\\// comment
\\
\\/// A doc
\\const A = struct {
\\ //! A tld 1
\\ //! A tld 2
\\ //! A tld 3
\\};
\\
\\/// B doc
\\const B = struct {
\\ //! B tld 1
\\ //! B tld 2
\\ //! B tld 3
\\
\\ /// b doc
\\ b: u32,
\\};
\\
\\/// C doc
\\const C = struct {
\\ //! C tld 1
\\ //! C tld 2
\\ //! C tld 3
\\
\\ /// c1 doc
\\ c1: u32,
\\
\\ //! C tld 4
\\ //! C tld 5
\\ //! C tld 6
\\
\\ /// c2 doc
\\ c2: u32,
\\};
\\
);
// A single file-level group followed by a documented pub decl.
try testCanonical(
\\//! Top-level documentation.
\\
\\/// This is A
\\pub const A = usize;
\\
);
// A file containing only a container doc comment and no decls at all.
try testCanonical(
\\//! Nothing here
\\
);
}
const std = @import("std");
const mem = std.mem;
const warn = std.debug.warn;

View File

@ -299,6 +299,17 @@ fn renderTopLevelDecl(allocator: *mem.Allocator, stream: var, tree: *ast.Tree, i
assert(!decl.requireSemiColon());
try renderExpression(allocator, stream, tree, indent, start_col, decl, Space.Newline);
},
ast.Node.Id.DocComment => {
const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl);
var it = comment.lines.iterator(0);
while (it.next()) |line_token_index| {
try renderToken(tree, stream, line_token_index.*, indent, start_col, Space.Newline);
if (it.peek()) |_| {
try stream.writeByteNTimes(' ', indent);
}
}
},
else => unreachable,
}
}

View File

@ -142,6 +142,7 @@ pub const Token = struct {
FloatLiteral,
LineComment,
DocComment,
ContainerDocComment,
BracketStarBracket,
BracketStarCBracket,
ShebangLine,
@ -211,6 +212,7 @@ pub const Token = struct {
.FloatLiteral => "FloatLiteral",
.LineComment => "LineComment",
.DocComment => "DocComment",
.ContainerDocComment => "ContainerDocComment",
.ShebangLine => "ShebangLine",
.Bang => "!",
@ -387,6 +389,7 @@ pub const Tokenizer = struct {
LineComment,
DocCommentStart,
DocComment,
ContainerDocComment,
Zero,
IntegerLiteral,
IntegerLiteralWithRadix,
@ -1076,6 +1079,10 @@ pub const Tokenizer = struct {
'/' => {
state = State.DocCommentStart;
},
'!' => {
result.id = Token.Id.ContainerDocComment;
state = State.ContainerDocComment;
},
'\n' => break,
else => {
state = State.LineComment;
@ -1096,7 +1103,7 @@ pub const Tokenizer = struct {
self.checkLiteralCharacter();
},
},
State.LineComment, State.DocComment => switch (c) {
State.LineComment, State.DocComment, State.ContainerDocComment => switch (c) {
'\n' => break,
else => self.checkLiteralCharacter(),
},
@ -1234,6 +1241,9 @@ pub const Tokenizer = struct {
State.DocComment, State.DocCommentStart => {
result.id = Token.Id.DocComment;
},
State.ContainerDocComment => {
result.id = Token.Id.ContainerDocComment;
},
State.NumberDot,
State.NumberDotHex,
@ -1601,6 +1611,8 @@ test "tokenizer - line comment and doc comment" {
testTokenize("/// a", [_]Token.Id{Token.Id.DocComment});
testTokenize("///", [_]Token.Id{Token.Id.DocComment});
testTokenize("////", [_]Token.Id{Token.Id.LineComment});
testTokenize("//!", [_]Token.Id{Token.Id.ContainerDocComment});
testTokenize("//!!", [_]Token.Id{Token.Id.ContainerDocComment});
}
test "tokenizer - line comment followed by identifier" {

View File

@ -174,7 +174,6 @@ pub fn translate(
tree.root_node.* = ast.Node.Root{
.base = ast.Node{ .id = ast.Node.Id.Root },
.decls = ast.Node.Root.DeclList.init(arena),
.doc_comments = null,
// initialized with the eof token at the end
.eof_token = undefined,
};