Merge pull request #5872 from ziglang/ast-flatten-2

stage2: introduce the concept of "trailer flags" to the AST
This commit is contained in:
Andrew Kelley 2020-07-15 18:34:06 +00:00 committed by GitHub
commit 6b103324d2
9 changed files with 775 additions and 423 deletions
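
In short: AST nodes such as VarDecl and FnProto stop carrying every optional component as an inline `?T` field. Instead, each node stores a compact `trailer_flags` bit set, and only the components that are actually present are allocated directly after the node (its "trailers"), each at its natural alignment, accessed via `getTrailer`/`setTrailer`. A minimal self-contained sketch of the underlying `std.meta.TrailerFlags` mechanism (hypothetical field names; `u32` stands in for the real node pointers):

    const std = @import("std");

    test "trailer flags sketch" {
        const Flags = std.meta.TrailerFlags(struct {
            init_node: u32, // stand-in for the real AST pointer
            align_node: u32,
        });
        // Only `init_node` is provided, so only its bit is set and only its
        // storage is counted by sizeInBytes().
        const flags = Flags.init(.{ .init_node = @as(u32, 1) });
        const buf = try std.testing.allocator.allocAdvanced(u8, 4, flags.sizeInBytes(), .exact);
        defer std.testing.allocator.free(buf);
        flags.setMany(buf.ptr, .{ .init_node = @as(u32, 1) });
        std.testing.expect(flags.get(buf.ptr, "init_node").? == 1);
        std.testing.expect(flags.get(buf.ptr, "align_node") == null);
    }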

View File

@ -6,6 +6,7 @@ const math = std.math;
const testing = std.testing;
pub const trait = @import("meta/trait.zig");
pub const TrailerFlags = @import("meta/trailer_flags.zig").TrailerFlags;
const TypeInfo = builtin.TypeInfo;

View File

@ -0,0 +1,145 @@
const std = @import("../std.zig");
const meta = std.meta;
const testing = std.testing;
const mem = std.mem;
const assert = std.debug.assert;
/// This is useful for saving memory when allocating an object that has many
/// optional components. The optional objects are allocated sequentially in
/// memory, and a single integer records which of the optional objects are
/// present, one bit per field.
pub fn TrailerFlags(comptime Fields: type) type {
return struct {
bits: Int,
pub const Int = @Type(.{ .Int = .{ .bits = bit_count, .is_signed = false } });
pub const bit_count = @typeInfo(Fields).Struct.fields.len;
pub const Self = @This();
pub fn has(self: Self, comptime name: []const u8) bool {
const field_index = meta.fieldIndex(Fields, name).?;
return (self.bits & (1 << field_index)) != 0;
}
pub fn get(self: Self, p: [*]align(@alignOf(Fields)) const u8, comptime name: []const u8) ?Field(name) {
if (!self.has(name))
return null;
return self.ptrConst(p, name).*;
}
pub fn setFlag(self: *Self, comptime name: []const u8) void {
const field_index = meta.fieldIndex(Fields, name).?;
self.bits |= 1 << field_index;
}
/// `fields` is a struct with each field set to an optional value.
/// Missing fields are assumed to be `null`.
/// Only the non-null fields are observed; each one sets its corresponding flag bit.
pub fn init(fields: anytype) Self {
var self: Self = .{ .bits = 0 };
inline for (@typeInfo(@TypeOf(fields)).Struct.fields) |field| {
const opt: ?Field(field.name) = @field(fields, field.name);
const field_index = meta.fieldIndex(Fields, field.name).?;
self.bits |= @as(Int, @boolToInt(opt != null)) << field_index;
}
return self;
}
/// `fields` is a struct with each field set to an optional value (same as `init`).
/// Missing fields are assumed to be `null`.
pub fn setMany(self: Self, p: [*]align(@alignOf(Fields)) u8, fields: anytype) void {
inline for (@typeInfo(@TypeOf(fields)).Struct.fields) |field| {
const opt: ?Field(field.name) = @field(fields, field.name);
if (opt) |value| {
self.set(p, field.name, value);
}
}
}
pub fn set(
self: Self,
p: [*]align(@alignOf(Fields)) u8,
comptime name: []const u8,
value: Field(name),
) void {
self.ptr(p, name).* = value;
}
pub fn ptr(self: Self, p: [*]align(@alignOf(Fields)) u8, comptime name: []const u8) *Field(name) {
if (@sizeOf(Field(name)) == 0)
return undefined;
const off = self.offset(p, name);
return @ptrCast(*Field(name), @alignCast(@alignOf(Field(name)), p + off));
}
pub fn ptrConst(self: Self, p: [*]align(@alignOf(Fields)) const u8, comptime name: []const u8) *const Field(name) {
if (@sizeOf(Field(name)) == 0)
return undefined;
const off = self.offset(p, name);
return @ptrCast(*const Field(name), @alignCast(@alignOf(Field(name)), p + off));
}
pub fn offset(self: Self, p: [*]align(@alignOf(Fields)) const u8, comptime name: []const u8) usize {
var off: usize = 0;
inline for (@typeInfo(Fields).Struct.fields) |field, i| {
const active = (self.bits & (1 << i)) != 0;
if (comptime mem.eql(u8, field.name, name)) {
assert(active);
return mem.alignForwardGeneric(usize, off, @alignOf(field.field_type));
} else if (active) {
off = mem.alignForwardGeneric(usize, off, @alignOf(field.field_type));
off += @sizeOf(field.field_type);
}
}
@compileError("no field named " ++ name ++ " in type " ++ @typeName(Fields));
}
pub fn Field(comptime name: []const u8) type {
return meta.fieldInfo(Fields, name).field_type;
}
pub fn sizeInBytes(self: Self) usize {
var off: usize = 0;
inline for (@typeInfo(Fields).Struct.fields) |field, i| {
if (@sizeOf(field.field_type) == 0)
continue;
if ((self.bits & (1 << i)) != 0) {
off = mem.alignForwardGeneric(usize, off, @alignOf(field.field_type));
off += @sizeOf(field.field_type);
}
}
return off;
}
};
}
test "TrailerFlags" {
const Flags = TrailerFlags(struct {
a: i32,
b: bool,
c: u64,
});
var flags = Flags.init(.{
.b = true,
.c = 1234,
});
const slice = try testing.allocator.allocAdvanced(u8, 8, flags.sizeInBytes(), .exact);
defer testing.allocator.free(slice);
flags.set(slice.ptr, "b", false);
flags.set(slice.ptr, "c", 12345678);
testing.expect(flags.get(slice.ptr, "a") == null);
testing.expect(!flags.get(slice.ptr, "b").?);
testing.expect(flags.get(slice.ptr, "c").? == 12345678);
flags.setMany(slice.ptr, .{
.b = true,
.c = 5678,
});
testing.expect(flags.get(slice.ptr, "a") == null);
testing.expect(flags.get(slice.ptr, "b").?);
testing.expect(flags.get(slice.ptr, "c").? == 5678);
}
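// Illustrative sketch (not part of this commit): for the struct used in the
// test above, with only `b` and `c` present, `offset` and `sizeInBytes`
// compute the following trailer layout:
//   b: bool -> align 1, stored at offset 0
//   c: u64  -> align 8, stored at offset 8
//   total size: 16 bytes; the absent `a` costs nothing
test "TrailerFlags layout sketch" {
    const Flags = TrailerFlags(struct {
        a: i32,
        b: bool,
        c: u64,
    });
    const flags = Flags.init(.{ .b = true, .c = @as(u64, 1234) });
    testing.expect(flags.sizeInBytes() == 16);
    testing.expect(flags.has("b") and flags.has("c"));
    testing.expect(!flags.has("a"));
}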

View File

@ -675,42 +675,84 @@ pub const Node = struct {
}
};
/// Possibly trailed in memory by several optional components; the presence of
/// each one is determined by a bit in `trailer_flags`.
pub const VarDecl = struct {
base: Node = Node{ .id = .VarDecl },
doc_comments: ?*DocComment,
visib_token: ?TokenIndex,
thread_local_token: ?TokenIndex,
name_token: TokenIndex,
eq_token: ?TokenIndex,
trailer_flags: TrailerFlags,
mut_token: TokenIndex,
comptime_token: ?TokenIndex,
extern_export_token: ?TokenIndex,
lib_name: ?*Node,
type_node: ?*Node,
align_node: ?*Node,
section_node: ?*Node,
init_node: ?*Node,
name_token: TokenIndex,
semicolon_token: TokenIndex,
pub const TrailerFlags = std.meta.TrailerFlags(struct {
doc_comments: *DocComment,
visib_token: TokenIndex,
thread_local_token: TokenIndex,
eq_token: TokenIndex,
comptime_token: TokenIndex,
extern_export_token: TokenIndex,
lib_name: *Node,
type_node: *Node,
align_node: *Node,
section_node: *Node,
init_node: *Node,
});
pub const RequiredFields = struct {
mut_token: TokenIndex,
name_token: TokenIndex,
semicolon_token: TokenIndex,
};
pub fn getTrailer(self: *const VarDecl, comptime name: []const u8) ?TrailerFlags.Field(name) {
const trailers_start = @ptrCast([*]const u8, self) + @sizeOf(VarDecl);
return self.trailer_flags.get(trailers_start, name);
}
pub fn setTrailer(self: *VarDecl, comptime name: []const u8, value: TrailerFlags.Field(name)) void {
const trailers_start = @ptrCast([*]u8, self) + @sizeOf(VarDecl);
self.trailer_flags.set(trailers_start, name, value);
}
pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: anytype) !*VarDecl {
const trailer_flags = TrailerFlags.init(trailers);
const bytes = try allocator.alignedAlloc(u8, @alignOf(VarDecl), sizeInBytes(trailer_flags));
const var_decl = @ptrCast(*VarDecl, bytes.ptr);
var_decl.* = .{
.trailer_flags = trailer_flags,
.mut_token = required.mut_token,
.name_token = required.name_token,
.semicolon_token = required.semicolon_token,
};
const trailers_start = bytes.ptr + @sizeOf(VarDecl);
trailer_flags.setMany(trailers_start, trailers);
return var_decl;
}
pub fn destroy(self: *VarDecl, allocator: *mem.Allocator) void {
const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.trailer_flags)];
allocator.free(bytes);
}
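// Example of the new construction pattern (a sketch, not code from this diff;
// `allocator` and the token/node values are hypothetical caller locals):
//
//   const var_decl = try VarDecl.create(allocator, .{
//       .mut_token = mut_token,
//       .name_token = name_token,
//       .semicolon_token = semicolon_token,
//   }, .{
//       .init_node = init_node, // only the trailers passed here get allocated
//   });
//   defer var_decl.destroy(allocator);
//   const maybe_init = var_decl.getTrailer("init_node"); // ?*Node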
pub fn iterate(self: *const VarDecl, index: usize) ?*Node {
var i = index;
if (self.type_node) |type_node| {
if (self.getTrailer("type_node")) |type_node| {
if (i < 1) return type_node;
i -= 1;
}
if (self.align_node) |align_node| {
if (self.getTrailer("align_node")) |align_node| {
if (i < 1) return align_node;
i -= 1;
}
if (self.section_node) |section_node| {
if (self.getTrailer("section_node")) |section_node| {
if (i < 1) return section_node;
i -= 1;
}
if (self.init_node) |init_node| {
if (self.getTrailer("init_node")) |init_node| {
if (i < 1) return init_node;
i -= 1;
}
@ -719,17 +761,21 @@ pub const Node = struct {
}
pub fn firstToken(self: *const VarDecl) TokenIndex {
if (self.visib_token) |visib_token| return visib_token;
if (self.thread_local_token) |thread_local_token| return thread_local_token;
if (self.comptime_token) |comptime_token| return comptime_token;
if (self.extern_export_token) |extern_export_token| return extern_export_token;
assert(self.lib_name == null);
if (self.getTrailer("visib_token")) |visib_token| return visib_token;
if (self.getTrailer("thread_local_token")) |thread_local_token| return thread_local_token;
if (self.getTrailer("comptime_token")) |comptime_token| return comptime_token;
if (self.getTrailer("extern_export_token")) |extern_export_token| return extern_export_token;
assert(self.getTrailer("lib_name") == null);
return self.mut_token;
}
pub fn lastToken(self: *const VarDecl) TokenIndex {
return self.semicolon_token;
}
fn sizeInBytes(trailer_flags: TrailerFlags) usize {
return @sizeOf(VarDecl) + trailer_flags.sizeInBytes();
}
};
pub const Use = struct {
@ -972,25 +1018,34 @@ pub const Node = struct {
};
/// The params are directly after the FnProto in memory.
/// TODO have a flags field for the optional nodes, and have them appended
/// before or after the parameters in memory.
/// Next come the optional trailers; the presence of each is determined by a bit in `trailer_flags`.
pub const FnProto = struct {
base: Node = Node{ .id = .FnProto },
doc_comments: ?*DocComment,
visib_token: ?TokenIndex,
trailer_flags: TrailerFlags,
fn_token: TokenIndex,
name_token: ?TokenIndex,
params_len: NodeIndex,
return_type: ReturnType,
var_args_token: ?TokenIndex,
extern_export_inline_token: ?TokenIndex,
body_node: ?*Node,
lib_name: ?*Node, // populated if this is an extern declaration
align_expr: ?*Node, // populated if align(A) is present
section_expr: ?*Node, // populated if linksection(A) is present
callconv_expr: ?*Node, // populated if callconv(A) is present
is_extern_prototype: bool = false, // TODO: Remove once extern fn rewriting is
is_async: bool = false, // TODO: remove once async fn rewriting is
pub const TrailerFlags = std.meta.TrailerFlags(struct {
doc_comments: *DocComment,
body_node: *Node,
lib_name: *Node, // populated if this is an extern declaration
align_expr: *Node, // populated if align(A) is present
section_expr: *Node, // populated if linksection(A) is present
callconv_expr: *Node, // populated if callconv(A) is present
visib_token: TokenIndex,
name_token: TokenIndex,
var_args_token: TokenIndex,
extern_export_inline_token: TokenIndex,
is_extern_prototype: void, // TODO: Remove once extern fn rewriting is
is_async: void, // TODO: remove once async fn rewriting is
});
pub const RequiredFields = struct {
fn_token: TokenIndex,
params_len: NodeIndex,
return_type: ReturnType,
};
pub const ReturnType = union(enum) {
Explicit: *Node,
@ -1007,7 +1062,6 @@ pub const Node = struct {
pub const ParamType = union(enum) {
any_type: *Node,
var_args: TokenIndex,
type_expr: *Node,
};
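// The `...` var args marker is represented by FnProto's `var_args_token`
// trailer rather than by a ParamType variant.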
@ -1016,7 +1070,6 @@ pub const Node = struct {
if (i < 1) {
switch (self.param_type) {
.var_args => return null,
.any_type, .type_expr => |node| return node,
}
}
@ -1030,34 +1083,79 @@ pub const Node = struct {
if (self.noalias_token) |noalias_token| return noalias_token;
if (self.name_token) |name_token| return name_token;
switch (self.param_type) {
.var_args => |tok| return tok,
.any_type, .type_expr => |node| return node.firstToken(),
}
}
pub fn lastToken(self: *const ParamDecl) TokenIndex {
switch (self.param_type) {
.var_args => |tok| return tok,
.any_type, .type_expr => |node| return node.lastToken(),
}
}
};
/// After this the caller must initialize the params list.
pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*FnProto {
const bytes = try allocator.alignedAlloc(u8, @alignOf(FnProto), sizeInBytes(params_len));
return @ptrCast(*FnProto, bytes.ptr);
/// For debugging purposes.
pub fn dump(self: *const FnProto) void {
const trailers_start = @alignCast(
@alignOf(ParamDecl),
@ptrCast([*]const u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
);
std.debug.print("{*} flags: {b} name_token: {} {*} params_len: {}\n", .{
self,
self.trailer_flags.bits,
self.getTrailer("name_token"),
self.trailer_flags.ptrConst(trailers_start, "name_token"),
self.params_len,
});
}
pub fn free(self: *FnProto, allocator: *mem.Allocator) void {
const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)];
pub fn getTrailer(self: *const FnProto, comptime name: []const u8) ?TrailerFlags.Field(name) {
const trailers_start = @alignCast(
@alignOf(ParamDecl),
@ptrCast([*]const u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
);
return self.trailer_flags.get(trailers_start, name);
}
pub fn setTrailer(self: *FnProto, comptime name: []const u8, value: TrailerFlags.Field(name)) void {
const trailers_start = @alignCast(
@alignOf(ParamDecl),
@ptrCast([*]u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
);
self.trailer_flags.set(trailers_start, name, value);
}
/// After this the caller must initialize the params list.
pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: anytype) !*FnProto {
const trailer_flags = TrailerFlags.init(trailers);
const bytes = try allocator.alignedAlloc(u8, @alignOf(FnProto), sizeInBytes(
required.params_len,
trailer_flags,
));
const fn_proto = @ptrCast(*FnProto, bytes.ptr);
fn_proto.* = .{
.trailer_flags = trailer_flags,
.fn_token = required.fn_token,
.params_len = required.params_len,
.return_type = required.return_type,
};
const trailers_start = @alignCast(
@alignOf(ParamDecl),
bytes.ptr + @sizeOf(FnProto) + @sizeOf(ParamDecl) * required.params_len,
);
trailer_flags.setMany(trailers_start, trailers);
return fn_proto;
}
pub fn destroy(self: *FnProto, allocator: *mem.Allocator) void {
const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len, self.trailer_flags)];
allocator.free(bytes);
}
pub fn iterate(self: *const FnProto, index: usize) ?*Node {
var i = index;
if (self.lib_name) |lib_name| {
if (self.getTrailer("lib_name")) |lib_name| {
if (i < 1) return lib_name;
i -= 1;
}
@ -1066,23 +1164,21 @@ pub const Node = struct {
0
else switch (self.paramsConst()[self.params_len - 1].param_type) {
.any_type, .type_expr => self.params_len,
.var_args => self.params_len - 1,
};
if (i < params_len) {
switch (self.paramsConst()[i].param_type) {
.any_type => |n| return n,
.var_args => unreachable,
.type_expr => |n| return n,
}
}
i -= params_len;
if (self.align_expr) |align_expr| {
if (self.getTrailer("align_expr")) |align_expr| {
if (i < 1) return align_expr;
i -= 1;
}
if (self.section_expr) |section_expr| {
if (self.getTrailer("section_expr")) |section_expr| {
if (i < 1) return section_expr;
i -= 1;
}
@ -1095,7 +1191,7 @@ pub const Node = struct {
.Invalid => {},
}
if (self.body_node) |body_node| {
if (self.getTrailer("body_node")) |body_node| {
if (i < 1) return body_node;
i -= 1;
}
@ -1104,14 +1200,14 @@ pub const Node = struct {
}
pub fn firstToken(self: *const FnProto) TokenIndex {
if (self.visib_token) |visib_token| return visib_token;
if (self.extern_export_inline_token) |extern_export_inline_token| return extern_export_inline_token;
assert(self.lib_name == null);
if (self.getTrailer("visib_token")) |visib_token| return visib_token;
if (self.getTrailer("extern_export_inline_token")) |extern_export_inline_token| return extern_export_inline_token;
assert(self.getTrailer("lib_name") == null);
return self.fn_token;
}
pub fn lastToken(self: *const FnProto) TokenIndex {
if (self.body_node) |body_node| return body_node.lastToken();
if (self.getTrailer("body_node")) |body_node| return body_node.lastToken();
switch (self.return_type) {
.Explicit, .InferErrorSet => |node| return node.lastToken(),
.Invalid => |tok| return tok,
@ -1119,17 +1215,17 @@ pub const Node = struct {
}
pub fn params(self: *FnProto) []ParamDecl {
const decls_start = @ptrCast([*]u8, self) + @sizeOf(FnProto);
return @ptrCast([*]ParamDecl, decls_start)[0..self.params_len];
const params_start = @ptrCast([*]u8, self) + @sizeOf(FnProto);
return @ptrCast([*]ParamDecl, params_start)[0..self.params_len];
}
pub fn paramsConst(self: *const FnProto) []const ParamDecl {
const decls_start = @ptrCast([*]const u8, self) + @sizeOf(FnProto);
return @ptrCast([*]const ParamDecl, decls_start)[0..self.params_len];
const params_start = @ptrCast([*]const u8, self) + @sizeOf(FnProto);
return @ptrCast([*]const ParamDecl, params_start)[0..self.params_len];
}
fn sizeInBytes(params_len: NodeIndex) usize {
return @sizeOf(FnProto) + @sizeOf(ParamDecl) * @as(usize, params_len);
fn sizeInBytes(params_len: NodeIndex, trailer_flags: TrailerFlags) usize {
return @sizeOf(FnProto) + @sizeOf(ParamDecl) * @as(usize, params_len) + trailer_flags.sizeInBytes();
}
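// Resulting memory layout (sketch):
//   [ FnProto ][ ParamDecl * params_len ][ present trailers, each aligned ]
// getTrailer/setTrailer recompute the trailer start from `params_len`, so the
// params length must be correct before any trailer is read or written.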
};
@ -2829,6 +2925,9 @@ pub const Node = struct {
}
};
/// TODO remove from the Node base struct
/// TODO actually maybe remove entirely in favor of iterating backward from Node.firstToken()
/// and forwards to find same-line doc comments.
pub const DocComment = struct {
base: Node = Node{ .id = .DocComment },
/// Points to the first doc comment token. API users are expected to iterate over the

View File

@ -150,7 +150,7 @@ const Parser = struct {
const visib_token = p.eatToken(.Keyword_pub);
if (p.parseTopLevelDecl() catch |err| switch (err) {
if (p.parseTopLevelDecl(doc_comments, visib_token) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
@ -160,30 +160,7 @@ const Parser = struct {
if (field_state == .seen) {
field_state = .{ .end = visib_token orelse node.firstToken() };
}
switch (node.id) {
.FnProto => {
node.cast(Node.FnProto).?.doc_comments = doc_comments;
node.cast(Node.FnProto).?.visib_token = visib_token;
},
.VarDecl => {
node.cast(Node.VarDecl).?.doc_comments = doc_comments;
node.cast(Node.VarDecl).?.visib_token = visib_token;
},
.Use => {
node.cast(Node.Use).?.doc_comments = doc_comments;
node.cast(Node.Use).?.visib_token = visib_token;
},
else => unreachable,
}
try list.append(node);
if (try p.parseAppendedDocComment(node.lastToken())) |appended_comment| {
switch (node.id) {
.FnProto => {},
.VarDecl => node.cast(Node.VarDecl).?.doc_comments = appended_comment,
.Use => node.cast(Node.Use).?.doc_comments = appended_comment,
else => unreachable,
}
}
continue;
}
@ -417,7 +394,7 @@ const Parser = struct {
/// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block)
/// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl
/// / KEYWORD_usingnamespace Expr SEMICOLON
fn parseTopLevelDecl(p: *Parser) !?*Node {
fn parseTopLevelDecl(p: *Parser, doc_comments: ?*Node.DocComment, visib_token: ?TokenIndex) !?*Node {
var lib_name: ?*Node = null;
const extern_export_inline_token = blk: {
if (p.eatToken(.Keyword_export)) |token| break :blk token;
@ -430,20 +407,12 @@ const Parser = struct {
break :blk null;
};
if (try p.parseFnProto()) |node| {
const fn_node = node.cast(Node.FnProto).?;
fn_node.*.extern_export_inline_token = extern_export_inline_token;
fn_node.*.lib_name = lib_name;
if (p.eatToken(.Semicolon)) |_| return node;
if (try p.expectNodeRecoverable(parseBlock, .{
// since parseBlock only return error.ParseError on
// a missing '}' we can assume this function was
// supposed to end here.
.ExpectedSemiOrLBrace = .{ .token = p.tok_i },
})) |body_node| {
fn_node.body_node = body_node;
}
if (try p.parseFnProto(.top_level, .{
.doc_comments = doc_comments,
.visib_token = visib_token,
.extern_export_inline_token = extern_export_inline_token,
.lib_name = lib_name,
})) |node| {
return node;
}
@ -460,12 +429,13 @@ const Parser = struct {
const thread_local_token = p.eatToken(.Keyword_threadlocal);
if (try p.parseVarDecl()) |node| {
var var_decl = node.cast(Node.VarDecl).?;
var_decl.*.thread_local_token = thread_local_token;
var_decl.*.comptime_token = null;
var_decl.*.extern_export_token = extern_export_inline_token;
var_decl.*.lib_name = lib_name;
if (try p.parseVarDecl(.{
.doc_comments = doc_comments,
.visib_token = visib_token,
.thread_local_token = thread_local_token,
.extern_export_token = extern_export_inline_token,
.lib_name = lib_name,
})) |node| {
return node;
}
@ -485,21 +455,41 @@ const Parser = struct {
return error.ParseError;
}
return p.parseUse();
const use_token = p.eatToken(.Keyword_usingnamespace) orelse return null;
const expr = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
const semicolon_token = try p.expectToken(.Semicolon);
const node = try p.arena.allocator.create(Node.Use);
node.* = .{
.doc_comments = doc_comments orelse try p.parseAppendedDocComment(semicolon_token),
.visib_token = visib_token,
.use_token = use_token,
.expr = expr,
.semicolon_token = semicolon_token,
};
return &node.base;
}
/// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? EXCLAMATIONMARK? (Keyword_anytype / TypeExpr)
fn parseFnProto(p: *Parser) !?*Node {
fn parseFnProto(p: *Parser, level: enum { top_level, as_type }, fields: struct {
doc_comments: ?*Node.DocComment = null,
visib_token: ?TokenIndex = null,
extern_export_inline_token: ?TokenIndex = null,
lib_name: ?*Node = null,
}) !?*Node {
// TODO: Remove once extern/async fn rewriting is
var is_async = false;
var is_extern = false;
var is_async: ?void = null;
var is_extern_prototype: ?void = null;
const cc_token: ?TokenIndex = blk: {
if (p.eatToken(.Keyword_extern)) |token| {
is_extern = true;
is_extern_prototype = {};
break :blk token;
}
if (p.eatToken(.Keyword_async)) |token| {
is_async = true;
is_async = {};
break :blk token;
}
break :blk null;
@ -513,6 +503,7 @@ const Parser = struct {
const lparen = try p.expectToken(.LParen);
const params = try p.parseParamDeclList();
defer p.gpa.free(params);
const var_args_token = p.eatToken(.Ellipsis3);
const rparen = try p.expectToken(.RParen);
const align_expr = try p.parseByteAlign();
const section_expr = try p.parseLinkSection();
@ -535,37 +526,53 @@ const Parser = struct {
else
R{ .Explicit = return_type_expr.? };
const var_args_token = if (params.len > 0) blk: {
const param_type = params[params.len - 1].param_type;
break :blk if (param_type == .var_args) param_type.var_args else null;
} else
null;
const body_node: ?*Node = switch (level) {
.top_level => blk: {
if (p.eatToken(.Semicolon)) |_| {
break :blk null;
}
break :blk try p.expectNodeRecoverable(parseBlock, .{
// Since parseBlock only returns error.ParseError on
// a missing '}' we can assume this function was
// supposed to end here.
.ExpectedSemiOrLBrace = .{ .token = p.tok_i },
});
},
.as_type => null,
};
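// Illustrative inputs for the two levels (hypothetical examples, not from this diff):
//   .top_level: `fn foo() void {}` or `extern fn bar(x: c_int) void;` (body or `;` required)
//   .as_type:   `const Callback = fn (i32) void;` (a type expression; never has a body)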
const fn_proto_node = try Node.FnProto.alloc(&p.arena.allocator, params.len);
fn_proto_node.* = .{
.doc_comments = null,
.visib_token = null,
.fn_token = fn_token,
.name_token = name_token,
const fn_proto_node = try Node.FnProto.create(&p.arena.allocator, .{
.params_len = params.len,
.fn_token = fn_token,
.return_type = return_type,
}, .{
.doc_comments = fields.doc_comments,
.visib_token = fields.visib_token,
.name_token = name_token,
.var_args_token = var_args_token,
.extern_export_inline_token = null,
.body_node = null,
.lib_name = null,
.extern_export_inline_token = fields.extern_export_inline_token,
.body_node = body_node,
.lib_name = fields.lib_name,
.align_expr = align_expr,
.section_expr = section_expr,
.callconv_expr = callconv_expr,
.is_extern_prototype = is_extern,
.is_extern_prototype = is_extern_prototype,
.is_async = is_async,
};
});
std.mem.copy(Node.FnProto.ParamDecl, fn_proto_node.params(), params);
return &fn_proto_node.base;
}
/// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? SEMICOLON
fn parseVarDecl(p: *Parser) !?*Node {
fn parseVarDecl(p: *Parser, fields: struct {
doc_comments: ?*Node.DocComment = null,
visib_token: ?TokenIndex = null,
thread_local_token: ?TokenIndex = null,
extern_export_token: ?TokenIndex = null,
lib_name: ?*Node = null,
comptime_token: ?TokenIndex = null,
}) !?*Node {
const mut_token = p.eatToken(.Keyword_const) orelse
p.eatToken(.Keyword_var) orelse
return null;
@ -587,23 +594,25 @@ const Parser = struct {
} else null;
const semicolon_token = try p.expectToken(.Semicolon);
const node = try p.arena.allocator.create(Node.VarDecl);
node.* = .{
.doc_comments = null,
.visib_token = null,
.thread_local_token = null,
.name_token = name_token,
.eq_token = eq_token,
const doc_comments = fields.doc_comments orelse try p.parseAppendedDocComment(semicolon_token);
const node = try Node.VarDecl.create(&p.arena.allocator, .{
.mut_token = mut_token,
.comptime_token = null,
.extern_export_token = null,
.lib_name = null,
.name_token = name_token,
.semicolon_token = semicolon_token,
}, .{
.doc_comments = doc_comments,
.visib_token = fields.visib_token,
.thread_local_token = fields.thread_local_token,
.eq_token = eq_token,
.comptime_token = fields.comptime_token,
.extern_export_token = fields.extern_export_token,
.lib_name = fields.lib_name,
.type_node = type_node,
.align_node = align_node,
.section_node = section_node,
.init_node = init_node,
.semicolon_token = semicolon_token,
};
});
return &node.base;
}
@ -663,10 +672,9 @@ const Parser = struct {
fn parseStatement(p: *Parser) Error!?*Node {
const comptime_token = p.eatToken(.Keyword_comptime);
const var_decl_node = try p.parseVarDecl();
if (var_decl_node) |node| {
const var_decl = node.cast(Node.VarDecl).?;
var_decl.comptime_token = comptime_token;
if (try p.parseVarDecl(.{
.comptime_token = comptime_token,
})) |node| {
return node;
}
@ -1527,7 +1535,7 @@ const Parser = struct {
if (try p.parseAnonLiteral()) |node| return node;
if (try p.parseErrorSetDecl()) |node| return node;
if (try p.parseFloatLiteral()) |node| return node;
if (try p.parseFnProto()) |node| return node;
if (try p.parseFnProto(.as_type, .{})) |node| return node;
if (try p.parseGroupedExpr()) |node| return node;
if (try p.parseLabeledTypeExpr()) |node| return node;
if (try p.parseIdentifier()) |node| return node;
@ -2028,7 +2036,6 @@ const Parser = struct {
// TODO cast from tuple to error union is broken
const P = Node.FnProto.ParamDecl.ParamType;
if (try p.parseAnyType()) |node| return P{ .any_type = node };
if (p.eatToken(.Ellipsis3)) |token| return P{ .var_args = token };
if (try p.parseTypeExpr()) |node| return P{ .type_expr = node };
return null;
}
@ -3149,21 +3156,6 @@ const Parser = struct {
return &node.base;
}
fn parseUse(p: *Parser) !?*Node {
const token = p.eatToken(.Keyword_usingnamespace) orelse return null;
const node = try p.arena.allocator.create(Node.Use);
node.* = .{
.doc_comments = null,
.visib_token = null,
.use_token = token,
.expr = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
}),
.semicolon_token = try p.expectToken(.Semicolon),
};
return &node.base;
}
/// IfPrefix Body (KEYWORD_else Payload? Body)?
fn parseIf(p: *Parser, bodyParseFn: NodeParseFn) !?*Node {
const node = (try p.parseIfPrefix()) orelse return null;

View File

@ -1,4 +1,32 @@
const builtin = @import("builtin");
test "zig fmt: convert var to anytype" {
// TODO remove in next release cycle
try testTransform(
\\pub fn main(
\\ a: var,
\\ bar: var,
\\) void {}
,
\\pub fn main(
\\ a: anytype,
\\ bar: anytype,
\\) void {}
\\
);
}
test "zig fmt: noasync to nosuspend" {
// TODO: remove this
try testTransform(
\\pub fn main() void {
\\ noasync call();
\\}
,
\\pub fn main() void {
\\ nosuspend call();
\\}
\\
);
}
test "recovery: top level" {
try testError(
@ -3146,20 +3174,6 @@ test "zig fmt: hexadecimal float literals with underscore separators" {
);
}
test "zig fmt: noasync to nosuspend" {
// TODO: remove this
try testTransform(
\\pub fn main() void {
\\ noasync call();
\\}
,
\\pub fn main() void {
\\ nosuspend call();
\\}
\\
);
}
test "zig fmt: convert async fn into callconv(.Async)" {
try testTransform(
\\async fn foo() void {}
@ -3180,18 +3194,9 @@ test "zig fmt: convert extern fn proto into callconv(.C)" {
);
}
test "zig fmt: convert var to anytype" {
// TODO remove in next release cycle
try testTransform(
\\pub fn main(
\\ a: var,
\\ bar: var,
\\) void {}
,
\\pub fn main(
\\ a: anytype,
\\ bar: anytype,
\\) void {}
test "zig fmt: C var args" {
try testCanonical(
\\pub extern "c" fn printf(format: [*:0]const u8, ...) c_int;
\\
);
}

View File

@ -227,9 +227,9 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tr
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
try renderDocComments(tree, stream, fn_proto, indent, start_col);
try renderDocComments(tree, stream, fn_proto, fn_proto.getTrailer("doc_comments"), indent, start_col);
if (fn_proto.body_node) |body_node| {
if (fn_proto.getTrailer("body_node")) |body_node| {
try renderExpression(allocator, stream, tree, indent, start_col, decl, .Space);
try renderExpression(allocator, stream, tree, indent, start_col, body_node, space);
} else {
@ -252,14 +252,14 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tr
.VarDecl => {
const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl);
try renderDocComments(tree, stream, var_decl, indent, start_col);
try renderDocComments(tree, stream, var_decl, var_decl.getTrailer("doc_comments"), indent, start_col);
try renderVarDecl(allocator, stream, tree, indent, start_col, var_decl);
},
.TestDecl => {
const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl);
try renderDocComments(tree, stream, test_decl, indent, start_col);
try renderDocComments(tree, stream, test_decl, test_decl.doc_comments, indent, start_col);
try renderToken(tree, stream, test_decl.test_token, indent, start_col, .Space);
try renderExpression(allocator, stream, tree, indent, start_col, test_decl.name, .Space);
try renderExpression(allocator, stream, tree, indent, start_col, test_decl.body_node, space);
@ -268,7 +268,7 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tr
.ContainerField => {
const field = @fieldParentPtr(ast.Node.ContainerField, "base", decl);
try renderDocComments(tree, stream, field, indent, start_col);
try renderDocComments(tree, stream, field, field.doc_comments, indent, start_col);
if (field.comptime_token) |t| {
try renderToken(tree, stream, t, indent, start_col, .Space); // comptime
}
@ -1409,7 +1409,7 @@ fn renderExpression(
.ErrorTag => {
const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", base);
try renderDocComments(tree, stream, tag, indent, start_col);
try renderDocComments(tree, stream, tag, tag.doc_comments, indent, start_col);
return renderToken(tree, stream, tag.name_token, indent, start_col, space); // name
},
@ -1483,23 +1483,23 @@ fn renderExpression(
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base);
if (fn_proto.visib_token) |visib_token_index| {
if (fn_proto.getTrailer("visib_token")) |visib_token_index| {
const visib_token = tree.token_ids[visib_token_index];
assert(visib_token == .Keyword_pub or visib_token == .Keyword_export);
try renderToken(tree, stream, visib_token_index, indent, start_col, Space.Space); // pub
}
if (fn_proto.extern_export_inline_token) |extern_export_inline_token| {
if (!fn_proto.is_extern_prototype)
if (fn_proto.getTrailer("extern_export_inline_token")) |extern_export_inline_token| {
if (fn_proto.getTrailer("is_extern_prototype") == null)
try renderToken(tree, stream, extern_export_inline_token, indent, start_col, Space.Space); // extern/export/inline
}
if (fn_proto.lib_name) |lib_name| {
if (fn_proto.getTrailer("lib_name")) |lib_name| {
try renderExpression(allocator, stream, tree, indent, start_col, lib_name, Space.Space);
}
const lparen = if (fn_proto.name_token) |name_token| blk: {
const lparen = if (fn_proto.getTrailer("name_token")) |name_token| blk: {
try renderToken(tree, stream, fn_proto.fn_token, indent, start_col, Space.Space); // fn
try renderToken(tree, stream, name_token, indent, start_col, Space.None); // name
break :blk tree.nextToken(name_token);
@ -1512,11 +1512,11 @@ fn renderExpression(
const rparen = tree.prevToken(
// the first token for the annotation expressions is the left
// parenthesis, hence the need for two prevToken
if (fn_proto.align_expr) |align_expr|
if (fn_proto.getTrailer("align_expr")) |align_expr|
tree.prevToken(tree.prevToken(align_expr.firstToken()))
else if (fn_proto.section_expr) |section_expr|
else if (fn_proto.getTrailer("section_expr")) |section_expr|
tree.prevToken(tree.prevToken(section_expr.firstToken()))
else if (fn_proto.callconv_expr) |callconv_expr|
else if (fn_proto.getTrailer("callconv_expr")) |callconv_expr|
tree.prevToken(tree.prevToken(callconv_expr.firstToken()))
else switch (fn_proto.return_type) {
.Explicit => |node| node.firstToken(),
@ -1537,11 +1537,14 @@ fn renderExpression(
for (fn_proto.params()) |param_decl, i| {
try renderParamDecl(allocator, stream, tree, indent, start_col, param_decl, Space.None);
if (i + 1 < fn_proto.params_len) {
if (i + 1 < fn_proto.params_len or fn_proto.getTrailer("var_args_token") != null) {
const comma = tree.nextToken(param_decl.lastToken());
try renderToken(tree, stream, comma, indent, start_col, Space.Space); // ,
}
}
if (fn_proto.getTrailer("var_args_token")) |var_args_token| {
try renderToken(tree, stream, var_args_token, indent, start_col, Space.None);
}
} else {
// one param per line
const new_indent = indent + indent_delta;
@ -1551,12 +1554,16 @@ fn renderExpression(
try stream.writeByteNTimes(' ', new_indent);
try renderParamDecl(allocator, stream, tree, new_indent, start_col, param_decl, Space.Comma);
}
if (fn_proto.getTrailer("var_args_token")) |var_args_token| {
try stream.writeByteNTimes(' ', new_indent);
try renderToken(tree, stream, var_args_token, new_indent, start_col, Space.Comma);
}
try stream.writeByteNTimes(' ', indent);
}
try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // )
if (fn_proto.align_expr) |align_expr| {
if (fn_proto.getTrailer("align_expr")) |align_expr| {
const align_rparen = tree.nextToken(align_expr.lastToken());
const align_lparen = tree.prevToken(align_expr.firstToken());
const align_kw = tree.prevToken(align_lparen);
@ -1567,7 +1574,7 @@ fn renderExpression(
try renderToken(tree, stream, align_rparen, indent, start_col, Space.Space); // )
}
if (fn_proto.section_expr) |section_expr| {
if (fn_proto.getTrailer("section_expr")) |section_expr| {
const section_rparen = tree.nextToken(section_expr.lastToken());
const section_lparen = tree.prevToken(section_expr.firstToken());
const section_kw = tree.prevToken(section_lparen);
@ -1578,7 +1585,7 @@ fn renderExpression(
try renderToken(tree, stream, section_rparen, indent, start_col, Space.Space); // )
}
if (fn_proto.callconv_expr) |callconv_expr| {
if (fn_proto.getTrailer("callconv_expr")) |callconv_expr| {
const callconv_rparen = tree.nextToken(callconv_expr.lastToken());
const callconv_lparen = tree.prevToken(callconv_expr.firstToken());
const callconv_kw = tree.prevToken(callconv_lparen);
@ -1587,9 +1594,9 @@ fn renderExpression(
try renderToken(tree, stream, callconv_lparen, indent, start_col, Space.None); // (
try renderExpression(allocator, stream, tree, indent, start_col, callconv_expr, Space.None);
try renderToken(tree, stream, callconv_rparen, indent, start_col, Space.Space); // )
} else if (fn_proto.is_extern_prototype) {
} else if (fn_proto.getTrailer("is_extern_prototype") != null) {
try stream.writeAll("callconv(.C) ");
} else if (fn_proto.is_async) {
} else if (fn_proto.getTrailer("is_async") != null) {
try stream.writeAll("callconv(.Async) ");
}
@ -2177,64 +2184,69 @@ fn renderVarDecl(
start_col: *usize,
var_decl: *ast.Node.VarDecl,
) (@TypeOf(stream).Error || Error)!void {
if (var_decl.visib_token) |visib_token| {
if (var_decl.getTrailer("visib_token")) |visib_token| {
try renderToken(tree, stream, visib_token, indent, start_col, Space.Space); // pub
}
if (var_decl.extern_export_token) |extern_export_token| {
if (var_decl.getTrailer("extern_export_token")) |extern_export_token| {
try renderToken(tree, stream, extern_export_token, indent, start_col, Space.Space); // extern
if (var_decl.lib_name) |lib_name| {
if (var_decl.getTrailer("lib_name")) |lib_name| {
try renderExpression(allocator, stream, tree, indent, start_col, lib_name, Space.Space); // "lib"
}
}
if (var_decl.comptime_token) |comptime_token| {
if (var_decl.getTrailer("comptime_token")) |comptime_token| {
try renderToken(tree, stream, comptime_token, indent, start_col, Space.Space); // comptime
}
if (var_decl.thread_local_token) |thread_local_token| {
if (var_decl.getTrailer("thread_local_token")) |thread_local_token| {
try renderToken(tree, stream, thread_local_token, indent, start_col, Space.Space); // threadlocal
}
try renderToken(tree, stream, var_decl.mut_token, indent, start_col, Space.Space); // var
const name_space = if (var_decl.type_node == null and (var_decl.align_node != null or
var_decl.section_node != null or var_decl.init_node != null)) Space.Space else Space.None;
const name_space = if (var_decl.getTrailer("type_node") == null and
(var_decl.getTrailer("align_node") != null or
var_decl.getTrailer("section_node") != null or
var_decl.getTrailer("init_node") != null))
Space.Space
else
Space.None;
try renderToken(tree, stream, var_decl.name_token, indent, start_col, name_space);
if (var_decl.type_node) |type_node| {
if (var_decl.getTrailer("type_node")) |type_node| {
try renderToken(tree, stream, tree.nextToken(var_decl.name_token), indent, start_col, Space.Space);
const s = if (var_decl.align_node != null or
var_decl.section_node != null or
var_decl.init_node != null) Space.Space else Space.None;
const s = if (var_decl.getTrailer("align_node") != null or
var_decl.getTrailer("section_node") != null or
var_decl.getTrailer("init_node") != null) Space.Space else Space.None;
try renderExpression(allocator, stream, tree, indent, start_col, type_node, s);
}
if (var_decl.align_node) |align_node| {
if (var_decl.getTrailer("align_node")) |align_node| {
const lparen = tree.prevToken(align_node.firstToken());
const align_kw = tree.prevToken(lparen);
const rparen = tree.nextToken(align_node.lastToken());
try renderToken(tree, stream, align_kw, indent, start_col, Space.None); // align
try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
try renderExpression(allocator, stream, tree, indent, start_col, align_node, Space.None);
const s = if (var_decl.section_node != null or var_decl.init_node != null) Space.Space else Space.None;
const s = if (var_decl.getTrailer("section_node") != null or var_decl.getTrailer("init_node") != null) Space.Space else Space.None;
try renderToken(tree, stream, rparen, indent, start_col, s); // )
}
if (var_decl.section_node) |section_node| {
if (var_decl.getTrailer("section_node")) |section_node| {
const lparen = tree.prevToken(section_node.firstToken());
const section_kw = tree.prevToken(lparen);
const rparen = tree.nextToken(section_node.lastToken());
try renderToken(tree, stream, section_kw, indent, start_col, Space.None); // linksection
try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
try renderExpression(allocator, stream, tree, indent, start_col, section_node, Space.None);
const s = if (var_decl.init_node != null) Space.Space else Space.None;
const s = if (var_decl.getTrailer("init_node") != null) Space.Space else Space.None;
try renderToken(tree, stream, rparen, indent, start_col, s); // )
}
if (var_decl.init_node) |init_node| {
if (var_decl.getTrailer("init_node")) |init_node| {
const s = if (init_node.id == .MultilineStringLiteral) Space.None else Space.Space;
try renderToken(tree, stream, var_decl.eq_token.?, indent, start_col, s); // =
try renderToken(tree, stream, var_decl.getTrailer("eq_token").?, indent, start_col, s); // =
try renderExpression(allocator, stream, tree, indent, start_col, init_node, Space.None);
}
@ -2250,7 +2262,7 @@ fn renderParamDecl(
param_decl: ast.Node.FnProto.ParamDecl,
space: Space,
) (@TypeOf(stream).Error || Error)!void {
try renderDocComments(tree, stream, param_decl, indent, start_col);
try renderDocComments(tree, stream, param_decl, param_decl.doc_comments, indent, start_col);
if (param_decl.comptime_token) |comptime_token| {
try renderToken(tree, stream, comptime_token, indent, start_col, Space.Space);
@ -2263,7 +2275,6 @@ fn renderParamDecl(
try renderToken(tree, stream, tree.nextToken(name_token), indent, start_col, Space.Space); // :
}
switch (param_decl.param_type) {
.var_args => |token| try renderToken(tree, stream, token, indent, start_col, space),
.any_type, .type_expr => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, space),
}
}
@ -2519,10 +2530,11 @@ fn renderDocComments(
tree: *ast.Tree,
stream: anytype,
node: anytype,
doc_comments: ?*ast.Node.DocComment,
indent: usize,
start_col: *usize,
) (@TypeOf(stream).Error || Error)!void {
const comment = node.doc_comments orelse return;
const comment = doc_comments orelse return;
return renderDocCommentsToken(tree, stream, comment, node.firstToken(), indent, start_col);
}
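// `doc_comments` is passed in explicitly by callers; for nodes like FnProto and
// VarDecl it comes from getTrailer("doc_comments"), since those nodes no longer
// have an inline doc_comments field (see the call sites above).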

View File

@ -1130,7 +1130,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
};
defer fn_type_scope.instructions.deinit(self.gpa);
const body_node = fn_proto.body_node orelse
const body_node = fn_proto.getTrailer("body_node") orelse
return self.failTok(&fn_type_scope.base, fn_proto.fn_token, "TODO implement extern functions", .{});
const param_decls = fn_proto.params();
@ -1138,21 +1138,23 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
for (param_decls) |param_decl, i| {
const param_type_node = switch (param_decl.param_type) {
.any_type => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement anytype parameter", .{}),
.var_args => |tok| return self.failTok(&fn_type_scope.base, tok, "TODO implement var args", .{}),
.type_expr => |node| node,
};
param_types[i] = try self.astGenExpr(&fn_type_scope.base, param_type_node);
}
if (fn_proto.lib_name) |lib_name| {
if (fn_proto.getTrailer("var_args_token")) |var_args_token| {
return self.failTok(&fn_type_scope.base, var_args_token, "TODO implement var args", .{});
}
if (fn_proto.getTrailer("lib_name")) |lib_name| {
return self.failNode(&fn_type_scope.base, lib_name, "TODO implement function library name", .{});
}
if (fn_proto.align_expr) |align_expr| {
if (fn_proto.getTrailer("align_expr")) |align_expr| {
return self.failNode(&fn_type_scope.base, align_expr, "TODO implement function align expression", .{});
}
if (fn_proto.section_expr) |sect_expr| {
if (fn_proto.getTrailer("section_expr")) |sect_expr| {
return self.failNode(&fn_type_scope.base, sect_expr, "TODO implement function section expression", .{});
}
if (fn_proto.callconv_expr) |callconv_expr| {
if (fn_proto.getTrailer("callconv_expr")) |callconv_expr| {
return self.failNode(
&fn_type_scope.base,
callconv_expr,
@ -1265,10 +1267,10 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
self.bin_file.freeDecl(decl);
}
if (fn_proto.extern_export_inline_token) |maybe_export_token| {
if (fn_proto.getTrailer("extern_export_inline_token")) |maybe_export_token| {
if (tree.token_ids[maybe_export_token] == .Keyword_export) {
const export_src = tree.token_locs[maybe_export_token].start;
const name_loc = tree.token_locs[fn_proto.name_token.?];
const name_loc = tree.token_locs[fn_proto.getTrailer("name_token").?];
const name = tree.tokenSliceLoc(name_loc);
// The scope needs to have the decl in it.
try self.analyzeExport(&block_scope.base, export_src, name, decl);
@ -1867,7 +1869,7 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void {
for (decls) |src_decl, decl_i| {
if (src_decl.cast(ast.Node.FnProto)) |fn_proto| {
// We will create a Decl for it regardless of analysis status.
const name_tok = fn_proto.name_token orelse {
const name_tok = fn_proto.getTrailer("name_token") orelse {
@panic("TODO missing function name");
};
@ -1893,7 +1895,7 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void {
} else {
const new_decl = try self.createNewDecl(&root_scope.base, name, decl_i, name_hash, contents_hash);
root_scope.decls.appendAssumeCapacity(new_decl);
if (fn_proto.extern_export_inline_token) |maybe_export_token| {
if (fn_proto.getTrailer("extern_export_inline_token")) |maybe_export_token| {
if (tree.token_ids[maybe_export_token] == .Keyword_export) {
self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
}

View File

@ -586,11 +586,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const ZigClangFunctionDecl) Error!void {
for (proto_node.params()) |*param, i| {
const param_name = if (param.name_token) |name_tok|
tokenSlice(c, name_tok)
else if (param.param_type == .var_args) {
assert(i + 1 == proto_node.params_len);
proto_node.params_len -= 1;
break;
} else
else
return failDecl(c, fn_decl_loc, fn_name, "function {} parameter has no name", .{fn_name});
const c_param = ZigClangFunctionDecl_getParamDecl(fn_decl, param_id);
@ -602,10 +598,20 @@ fn visitFnDecl(c: *Context, fn_decl: *const ZigClangFunctionDecl) Error!void {
if (!is_const) {
const bare_arg_name = try std.fmt.allocPrint(c.arena, "arg_{}", .{mangled_param_name});
const arg_name = try block_scope.makeMangledName(c, bare_arg_name);
const node = try transCreateNodeVarDecl(c, false, false, mangled_param_name);
node.eq_token = try appendToken(c, .Equal, "=");
node.init_node = try transCreateNodeIdentifier(c, arg_name);
node.semicolon_token = try appendToken(c, .Semicolon, ";");
const mut_tok = try appendToken(c, .Keyword_var, "var");
const name_tok = try appendIdentifier(c, mangled_param_name);
const eq_token = try appendToken(c, .Equal, "=");
const init_node = try transCreateNodeIdentifier(c, arg_name);
const semicolon_token = try appendToken(c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(c.arena, .{
.mut_token = mut_tok,
.name_token = name_tok,
.semicolon_token = semicolon_token,
}, .{
.eq_token = eq_token,
.init_node = init_node,
});
try block_scope.statements.append(&node.base);
param.name_token = try appendIdentifier(c, arg_name);
_ = try appendToken(c, .Colon, ":");
@ -622,7 +628,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const ZigClangFunctionDecl) Error!void {
=> return failDecl(c, fn_decl_loc, fn_name, "unable to translate function", .{}),
};
const body_node = try block_scope.complete(rp.c);
proto_node.body_node = &body_node.base;
proto_node.setTrailer("body_node", &body_node.base);
return addTopLevelDecl(c, fn_name, &proto_node.base);
}
@ -725,23 +731,20 @@ fn visitVarDecl(c: *Context, var_decl: *const ZigClangVarDecl) Error!void {
break :blk null;
};
const node = try c.arena.create(ast.Node.VarDecl);
node.* = .{
.doc_comments = null,
const node = try ast.Node.VarDecl.create(c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = try appendToken(c, .Semicolon, ";"),
}, .{
.visib_token = visib_tok,
.thread_local_token = thread_local_token,
.name_token = name_tok,
.eq_token = eq_tok,
.mut_token = mut_tok,
.comptime_token = null,
.extern_export_token = extern_tok,
.lib_name = null,
.type_node = type_node,
.align_node = align_expr,
.section_node = linksection_expr,
.init_node = init_node,
.semicolon_token = try appendToken(c, .Semicolon, ";"),
};
});
return addTopLevelDecl(c, checked_name, &node.base);
}
@ -795,26 +798,41 @@ fn transTypeDef(c: *Context, typedef_decl: *const ZigClangTypedefNameDecl, top_l
_ = try c.decl_table.put(@ptrToInt(ZigClangTypedefNameDecl_getCanonicalDecl(typedef_decl)), checked_name);
const node = (try transCreateNodeTypedef(rp, typedef_decl, true, checked_name)) orelse return null;
try addTopLevelDecl(c, checked_name, &node.base);
try addTopLevelDecl(c, checked_name, node);
return transCreateNodeIdentifier(c, checked_name);
}
fn transCreateNodeTypedef(rp: RestorePoint, typedef_decl: *const ZigClangTypedefNameDecl, toplevel: bool, checked_name: []const u8) Error!?*ast.Node.VarDecl {
const node = try transCreateNodeVarDecl(rp.c, toplevel, true, checked_name);
node.eq_token = try appendToken(rp.c, .Equal, "=");
fn transCreateNodeTypedef(
rp: RestorePoint,
typedef_decl: *const ZigClangTypedefNameDecl,
toplevel: bool,
checked_name: []const u8,
) Error!?*ast.Node {
const visib_tok = if (toplevel) try appendToken(rp.c, .Keyword_pub, "pub") else null;
const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
const name_tok = try appendIdentifier(rp.c, checked_name);
const eq_token = try appendToken(rp.c, .Equal, "=");
const child_qt = ZigClangTypedefNameDecl_getUnderlyingType(typedef_decl);
const typedef_loc = ZigClangTypedefNameDecl_getLocation(typedef_decl);
node.init_node = transQualType(rp, child_qt, typedef_loc) catch |err| switch (err) {
const init_node = transQualType(rp, child_qt, typedef_loc) catch |err| switch (err) {
error.UnsupportedType => {
try failDecl(rp.c, typedef_loc, checked_name, "unable to resolve typedef child type", .{});
return null;
},
error.OutOfMemory => |e| return e,
};
const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
return node;
const node = try ast.Node.VarDecl.create(rp.c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon_token,
}, .{
.visib_token = visib_tok,
.eq_token = eq_token,
.init_node = init_node,
});
return &node.base;
}
fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*ast.Node {
@ -847,12 +865,14 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*
const name = try std.fmt.allocPrint(c.arena, "{}_{}", .{ container_kind_name, bare_name });
_ = try c.decl_table.put(@ptrToInt(ZigClangRecordDecl_getCanonicalDecl(record_decl)), name);
const node = try transCreateNodeVarDecl(c, !is_unnamed, true, name);
const visib_tok = if (!is_unnamed) try appendToken(c, .Keyword_pub, "pub") else null;
const mut_tok = try appendToken(c, .Keyword_const, "const");
const name_tok = try appendIdentifier(c, name);
node.eq_token = try appendToken(c, .Equal, "=");
const eq_token = try appendToken(c, .Equal, "=");
var semicolon: ast.TokenIndex = undefined;
node.init_node = blk: {
const init_node = blk: {
const rp = makeRestorePoint(c);
const record_def = ZigClangRecordDecl_getDefinition(record_decl) orelse {
const opaque = try transCreateNodeOpaqueType(c);
@ -959,7 +979,16 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*
semicolon = try appendToken(c, .Semicolon, ";");
break :blk &container_node.base;
};
node.semicolon_token = semicolon;
const node = try ast.Node.VarDecl.create(c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon,
}, .{
.visib_token = visib_tok,
.eq_token = eq_token,
.init_node = init_node,
});
try addTopLevelDecl(c, name, &node.base);
if (!is_unnamed)
@ -982,10 +1011,13 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
const name = try std.fmt.allocPrint(c.arena, "enum_{}", .{bare_name});
_ = try c.decl_table.put(@ptrToInt(ZigClangEnumDecl_getCanonicalDecl(enum_decl)), name);
const node = try transCreateNodeVarDecl(c, !is_unnamed, true, name);
node.eq_token = try appendToken(c, .Equal, "=");
node.init_node = if (ZigClangEnumDecl_getDefinition(enum_decl)) |enum_def| blk: {
const visib_tok = if (!is_unnamed) try appendToken(c, .Keyword_pub, "pub") else null;
const mut_tok = try appendToken(c, .Keyword_const, "const");
const name_tok = try appendIdentifier(c, name);
const eq_token = try appendToken(c, .Equal, "=");
const init_node = if (ZigClangEnumDecl_getDefinition(enum_decl)) |enum_def| blk: {
var pure_enum = true;
var it = ZigClangEnumDecl_enumerator_begin(enum_def);
var end_it = ZigClangEnumDecl_enumerator_end(enum_def);
@ -1063,8 +1095,10 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
// In C each enum value is in the global namespace. So we put them there too.
// At this point we can rely on the enum emitting successfully.
const tld_node = try transCreateNodeVarDecl(c, true, true, enum_val_name);
tld_node.eq_token = try appendToken(c, .Equal, "=");
const tld_visib_tok = try appendToken(c, .Keyword_pub, "pub");
const tld_mut_tok = try appendToken(c, .Keyword_const, "const");
const tld_name_tok = try appendIdentifier(c, enum_val_name);
const tld_eq_token = try appendToken(c, .Equal, "=");
const cast_node = try rp.c.createBuiltinCall("@enumToInt", 1);
const enum_ident = try transCreateNodeIdentifier(c, name);
const period_tok = try appendToken(c, .Period, ".");
@ -1078,8 +1112,17 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
};
cast_node.params()[0] = &field_access_node.base;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
tld_node.init_node = &cast_node.base;
tld_node.semicolon_token = try appendToken(c, .Semicolon, ";");
const tld_init_node = &cast_node.base;
const tld_semicolon_token = try appendToken(c, .Semicolon, ";");
const tld_node = try ast.Node.VarDecl.create(c.arena, .{
.name_token = tld_name_tok,
.mut_token = tld_mut_tok,
.semicolon_token = tld_semicolon_token,
}, .{
.visib_token = tld_visib_tok,
.eq_token = tld_eq_token,
.init_node = tld_init_node,
});
try addTopLevelDecl(c, field_name, &tld_node.base);
}
// make non exhaustive
@ -1109,7 +1152,16 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
} else
try transCreateNodeOpaqueType(c);
node.semicolon_token = try appendToken(c, .Semicolon, ";");
const semicolon_token = try appendToken(c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon_token,
}, .{
.visib_token = visib_tok,
.eq_token = eq_token,
.init_node = init_node,
});
try addTopLevelDecl(c, name, &node.base);
if (!is_unnamed)
@ -1118,10 +1170,22 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
}
fn createAlias(c: *Context, alias: anytype) !void {
const node = try transCreateNodeVarDecl(c, true, true, alias.alias);
node.eq_token = try appendToken(c, .Equal, "=");
node.init_node = try transCreateNodeIdentifier(c, alias.name);
node.semicolon_token = try appendToken(c, .Semicolon, ";");
const visib_tok = try appendToken(c, .Keyword_pub, "pub");
const mut_tok = try appendToken(c, .Keyword_const, "const");
const name_tok = try appendIdentifier(c, alias.alias);
const eq_token = try appendToken(c, .Equal, "=");
const init_node = try transCreateNodeIdentifier(c, alias.name);
const semicolon_token = try appendToken(c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon_token,
}, .{
.visib_token = visib_tok,
.eq_token = eq_token,
.init_node = init_node,
});
return addTopLevelDecl(c, alias.alias, &node.base);
}
@ -1461,13 +1525,17 @@ fn transDeclStmtOne(
@ptrCast(*const ZigClangNamedDecl, var_decl),
));
const mangled_name = try block_scope.makeMangledName(c, name);
const node = try transCreateNodeVarDecl(c, false, ZigClangQualType_isConstQualified(qual_type), mangled_name);
const mut_tok = if (ZigClangQualType_isConstQualified(qual_type))
try appendToken(c, .Keyword_const, "const")
else
try appendToken(c, .Keyword_var, "var");
const name_tok = try appendIdentifier(c, mangled_name);
_ = try appendToken(c, .Colon, ":");
const loc = ZigClangDecl_getLocation(decl);
node.type_node = try transQualType(rp, qual_type, loc);
const type_node = try transQualType(rp, qual_type, loc);
node.eq_token = try appendToken(c, .Equal, "=");
const eq_token = try appendToken(c, .Equal, "=");
var init_node = if (ZigClangVarDecl_getInit(var_decl)) |expr|
try transExprCoercing(rp, scope, expr, .used, .r_value)
else
@ -1478,8 +1546,17 @@ fn transDeclStmtOne(
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
init_node = &builtin_node.base;
}
node.init_node = init_node;
node.semicolon_token = try appendToken(c, .Semicolon, ";");
const semicolon_token = try appendToken(c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon_token,
}, .{
.thread_local_token = thread_local_token,
.eq_token = eq_token,
.type_node = type_node,
.init_node = init_node,
});
return &node.base;
},
.Typedef => {
@ -1494,7 +1571,7 @@ fn transDeclStmtOne(
const mangled_name = try block_scope.makeMangledName(c, name);
const node = (try transCreateNodeTypedef(rp, typedef_decl, false, mangled_name)) orelse
return error.UnsupportedTranslation;
return &node.base;
return node;
},
else => |kind| return revertAndWarn(
rp,
@ -3105,12 +3182,21 @@ fn transCreatePreCrement(
defer block_scope.deinit();
const ref = try block_scope.makeMangledName(rp.c, "ref");
const node = try transCreateNodeVarDecl(rp.c, false, true, ref);
node.eq_token = try appendToken(rp.c, .Equal, "=");
const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
const name_tok = try appendIdentifier(rp.c, ref);
const eq_token = try appendToken(rp.c, .Equal, "=");
const rhs_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&");
rhs_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value);
node.init_node = &rhs_node.base;
node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const init_node = &rhs_node.base;
const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(rp.c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon_token,
}, .{
.eq_token = eq_token,
.init_node = init_node,
});
try block_scope.statements.append(&node.base);
const lhs_node = try transCreateNodeIdentifier(rp.c, ref);
@ -3171,12 +3257,21 @@ fn transCreatePostCrement(
defer block_scope.deinit();
const ref = try block_scope.makeMangledName(rp.c, "ref");
const node = try transCreateNodeVarDecl(rp.c, false, true, ref);
node.eq_token = try appendToken(rp.c, .Equal, "=");
const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
const name_tok = try appendIdentifier(rp.c, ref);
const eq_token = try appendToken(rp.c, .Equal, "=");
const rhs_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&");
rhs_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value);
node.init_node = &rhs_node.base;
node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const init_node = &rhs_node.base;
const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(rp.c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon_token,
}, .{
.eq_token = eq_token,
.init_node = init_node,
});
try block_scope.statements.append(&node.base);
const lhs_node = try transCreateNodeIdentifier(rp.c, ref);
@ -3184,10 +3279,19 @@ fn transCreatePostCrement(
_ = try appendToken(rp.c, .Semicolon, ";");
const tmp = try block_scope.makeMangledName(rp.c, "tmp");
const tmp_node = try transCreateNodeVarDecl(rp.c, false, true, tmp);
tmp_node.eq_token = try appendToken(rp.c, .Equal, "=");
tmp_node.init_node = ref_node;
tmp_node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const tmp_mut_tok = try appendToken(rp.c, .Keyword_const, "const");
const tmp_name_tok = try appendIdentifier(rp.c, tmp);
const tmp_eq_token = try appendToken(rp.c, .Equal, "=");
const tmp_init_node = ref_node;
const tmp_semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const tmp_node = try ast.Node.VarDecl.create(rp.c.arena, .{
.name_token = tmp_name_tok,
.mut_token = tmp_mut_tok,
.semicolon_token = tmp_semicolon_token,
}, .{
.eq_token = tmp_eq_token,
.init_node = tmp_init_node,
});
try block_scope.statements.append(&tmp_node.base);
const token = try appendToken(rp.c, op_tok_id, bytes);
@ -3325,12 +3429,21 @@ fn transCreateCompoundAssign(
defer block_scope.deinit();
const ref = try block_scope.makeMangledName(rp.c, "ref");
const node = try transCreateNodeVarDecl(rp.c, false, true, ref);
node.eq_token = try appendToken(rp.c, .Equal, "=");
const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
const name_tok = try appendIdentifier(rp.c, ref);
const eq_token = try appendToken(rp.c, .Equal, "=");
const addr_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&");
addr_node.rhs = try transExpr(rp, scope, lhs, .used, .l_value);
node.init_node = &addr_node.base;
node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const init_node = &addr_node.base;
const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(rp.c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon_token,
}, .{
.eq_token = eq_token,
.init_node = init_node,
});
try block_scope.statements.append(&node.base);
const lhs_node = try transCreateNodeIdentifier(rp.c, ref);
@ -3375,8 +3488,8 @@ fn transCreateCompoundAssign(
const rhs_bin = try transCreateNodeInfixOp(rp, scope, ref_node, bin_op, bin_token, rhs_node, .used, false);
_ = try appendToken(rp.c, .Semicolon, ";");
const eq_token = try appendToken(rp.c, .Equal, "=");
const assign = try transCreateNodeInfixOp(rp, scope, ref_node, .Assign, eq_token, rhs_bin, .used, false);
const ass_eq_token = try appendToken(rp.c, .Equal, "=");
const assign = try transCreateNodeInfixOp(rp, scope, ref_node, .Assign, ass_eq_token, rhs_bin, .used, false);
try block_scope.statements.append(assign);
}
@ -3494,10 +3607,19 @@ fn transBinaryConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const
defer block_scope.deinit();
const mangled_name = try block_scope.makeMangledName(rp.c, "cond_temp");
const tmp_var = try transCreateNodeVarDecl(rp.c, false, true, mangled_name);
tmp_var.eq_token = try appendToken(rp.c, .Equal, "=");
tmp_var.init_node = try transExpr(rp, &block_scope.base, cond_expr, .used, .r_value);
tmp_var.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
const name_tok = try appendIdentifier(rp.c, mangled_name);
const eq_token = try appendToken(rp.c, .Equal, "=");
const init_node = try transExpr(rp, &block_scope.base, cond_expr, .used, .r_value);
const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const tmp_var = try ast.Node.VarDecl.create(rp.c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon_token,
}, .{
.eq_token = eq_token,
.init_node = init_node,
});
try block_scope.statements.append(&tmp_var.base);
const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
@ -3932,9 +4054,9 @@ fn transCreateNodeAssign(
defer block_scope.deinit();
const tmp = try block_scope.makeMangledName(rp.c, "tmp");
const node = try transCreateNodeVarDecl(rp.c, false, true, tmp);
node.eq_token = try appendToken(rp.c, .Equal, "=");
const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
const name_tok = try appendIdentifier(rp.c, tmp);
const eq_token = try appendToken(rp.c, .Equal, "=");
var rhs_node = try transExpr(rp, &block_scope.base, rhs, .used, .r_value);
if (!exprIsBooleanType(lhs) and isBoolRes(rhs_node)) {
const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
@ -3942,16 +4064,24 @@ fn transCreateNodeAssign(
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
rhs_node = &builtin_node.base;
}
node.init_node = rhs_node;
node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const init_node = rhs_node;
const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(rp.c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon_token,
}, .{
.eq_token = eq_token,
.init_node = init_node,
});
try block_scope.statements.append(&node.base);
const lhs_node = try transExpr(rp, &block_scope.base, lhs, .used, .l_value);
const eq_token = try appendToken(rp.c, .Equal, "=");
const lhs_eq_token = try appendToken(rp.c, .Equal, "=");
const ident = try transCreateNodeIdentifier(rp.c, tmp);
_ = try appendToken(rp.c, .Semicolon, ";");
const assign = try transCreateNodeInfixOp(rp, &block_scope.base, lhs_node, .Assign, eq_token, ident, .used, false);
const assign = try transCreateNodeInfixOp(rp, &block_scope.base, lhs_node, .Assign, lhs_eq_token, ident, .used, false);
try block_scope.statements.append(assign);
const break_node = try transCreateNodeBreak(rp.c, label_name);
@ -4232,28 +4362,10 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_a
_ = try appendToken(c, .RParen, ")");
const fn_proto = try ast.Node.FnProto.alloc(c.arena, fn_params.items.len);
fn_proto.* = .{
.doc_comments = null,
.visib_token = pub_tok,
.fn_token = fn_tok,
.name_token = name_tok,
.params_len = fn_params.items.len,
.return_type = proto_alias.return_type,
.var_args_token = null,
.extern_export_inline_token = inline_tok,
.body_node = null,
.lib_name = null,
.align_expr = null,
.section_expr = null,
.callconv_expr = null,
};
mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
const block_lbrace = try appendToken(c, .LBrace, "{");
const return_expr = try transCreateNodeReturnExpr(c);
const unwrap_expr = try transCreateNodeUnwrapNull(c, ref.cast(ast.Node.VarDecl).?.init_node.?);
const unwrap_expr = try transCreateNodeUnwrapNull(c, ref.cast(ast.Node.VarDecl).?.getTrailer("init_node").?);
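// `getTrailer("init_node")` is assumed to replace direct access to the former
// optional field: it returns null when the trailer was never allocated, so the
// `.?` unwrap above still applies.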
const call_expr = try c.createCall(unwrap_expr, fn_params.items.len);
const call_params = call_expr.params();
@ -4277,7 +4389,18 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_a
.rbrace = try appendToken(c, .RBrace, "}"),
};
block.statements()[0] = &return_expr.base;
fn_proto.body_node = &block.base;
const fn_proto = try ast.Node.FnProto.create(c.arena, .{
.params_len = fn_params.items.len,
.fn_token = fn_tok,
.return_type = proto_alias.return_type,
}, .{
.visib_token = pub_tok,
.name_token = name_tok,
.extern_export_inline_token = inline_tok,
.body_node = &block.base,
});
mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
return &fn_proto.base;
}
@ -4356,31 +4479,6 @@ fn transCreateNodeBreak(c: *Context, label: ?[]const u8) !*ast.Node.ControlFlowE
return node;
}
fn transCreateNodeVarDecl(c: *Context, is_pub: bool, is_const: bool, name: []const u8) !*ast.Node.VarDecl {
const visib_tok = if (is_pub) try appendToken(c, .Keyword_pub, "pub") else null;
const mut_tok = if (is_const) try appendToken(c, .Keyword_const, "const") else try appendToken(c, .Keyword_var, "var");
const name_tok = try appendIdentifier(c, name);
const node = try c.arena.create(ast.Node.VarDecl);
node.* = .{
.doc_comments = null,
.visib_token = visib_tok,
.thread_local_token = null,
.name_token = name_tok,
.eq_token = undefined,
.mut_token = mut_tok,
.comptime_token = null,
.extern_export_token = null,
.lib_name = null,
.type_node = null,
.align_node = null,
.section_node = null,
.init_node = null,
.semicolon_token = undefined,
};
return node;
}
fn transCreateNodeWhile(c: *Context) !*ast.Node.While {
const while_tok = try appendToken(c, .Keyword_while, "while");
_ = try appendToken(c, .LParen, "(");
@ -4782,19 +4880,12 @@ fn finishTransFnProto(
}
}
if (is_var_args) {
const var_args_token: ?ast.TokenIndex = if (is_var_args) blk: {
if (param_count > 0) {
_ = try appendToken(rp.c, .Comma, ",");
}
fn_params.addOneAssumeCapacity().* = .{
.doc_comments = null,
.comptime_token = null,
.noalias_token = null,
.name_token = null,
.param_type = .{ .var_args = try appendToken(rp.c, .Ellipsis3, "...") },
};
}
break :blk try appendToken(rp.c, .Ellipsis3, "...");
} else null;
const rparen_tok = try appendToken(rp.c, .RParen, ")");
@ -4860,22 +4951,31 @@ fn finishTransFnProto(
}
};
const fn_proto = try ast.Node.FnProto.alloc(rp.c.arena, fn_params.items.len);
fn_proto.* = .{
.doc_comments = null,
.visib_token = pub_tok,
.fn_token = fn_tok,
.name_token = name_tok,
// We need to reserve an undefined (but non-null) body node to set later.
var body_node: ?*ast.Node = null;
if (fn_decl_context) |ctx| {
if (ctx.has_body) {
// TODO: we should be able to use undefined here, but
// doing so causes a bug. This value is effectively undefined,
// the Zig language just is not aware of it.
body_node = @intToPtr(*ast.Node, 0x08);
}
}
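// Presumably necessary because `create` reserves trailing storage only for the
// optional fields that are non-null at creation time: passing a dummy pointer
// keeps the body_node slot allocated so the real block can be written into it
// once it has been built.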
const fn_proto = try ast.Node.FnProto.create(rp.c.arena, .{
.params_len = fn_params.items.len,
.return_type = .{ .Explicit = return_type_node },
.var_args_token = null, // TODO this field is broken in the AST data model
.fn_token = fn_tok,
}, .{
.visib_token = pub_tok,
.name_token = name_tok,
.extern_export_inline_token = extern_export_inline_tok,
.body_node = null,
.lib_name = null,
.align_expr = align_expr,
.section_expr = linksection_expr,
.callconv_expr = callconv_expr,
};
.body_node = body_node,
.var_args_token = var_args_token,
});
mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
return fn_proto;
}
@ -4923,23 +5023,15 @@ pub fn failDecl(c: *Context, loc: ZigClangSourceLocation, name: []const u8, comp
};
call_node.params()[0] = &msg_node.base;
const var_decl_node = try c.arena.create(ast.Node.VarDecl);
var_decl_node.* = .{
.doc_comments = null,
.visib_token = pub_tok,
.thread_local_token = null,
const var_decl_node = try ast.Node.VarDecl.create(c.arena, .{
.name_token = name_tok,
.eq_token = eq_tok,
.mut_token = const_tok,
.comptime_token = null,
.extern_export_token = null,
.lib_name = null,
.type_node = null,
.align_node = null,
.section_node = null,
.init_node = &call_node.base,
.semicolon_token = semi_tok,
};
}, .{
.visib_token = pub_tok,
.eq_token = eq_tok,
.init_node = &call_node.base,
});
try addTopLevelDecl(c, name, &var_decl_node.base);
}
@ -5132,10 +5224,12 @@ fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
fn transMacroDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
const scope = &c.global_scope.base;
const node = try transCreateNodeVarDecl(c, true, true, name);
node.eq_token = try appendToken(c, .Equal, "=");
const visib_tok = try appendToken(c, .Keyword_pub, "pub");
const mut_tok = try appendToken(c, .Keyword_const, "const");
const name_tok = try appendIdentifier(c, name);
const eq_token = try appendToken(c, .Equal, "=");
node.init_node = try parseCExpr(c, it, source, source_loc, scope);
const init_node = try parseCExpr(c, it, source, source_loc, scope);
const last = it.next().?;
if (last.id != .Eof and last.id != .Nl)
return failDecl(
@ -5146,7 +5240,16 @@ fn transMacroDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, n
.{@tagName(last.id)},
);
node.semicolon_token = try appendToken(c, .Semicolon, ";");
const semicolon_token = try appendToken(c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(c.arena, .{
.name_token = name_tok,
.mut_token = mut_tok,
.semicolon_token = semicolon_token,
}, .{
.visib_token = visib_tok,
.eq_token = eq_token,
.init_node = init_node,
});
_ = try c.global_scope.macro_table.put(name, &node.base);
}
@ -5223,24 +5326,6 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
const type_of = try c.createBuiltinCall("@TypeOf", 1);
const fn_proto = try ast.Node.FnProto.alloc(c.arena, fn_params.items.len);
fn_proto.* = .{
.visib_token = pub_tok,
.extern_export_inline_token = inline_tok,
.fn_token = fn_tok,
.name_token = name_tok,
.params_len = fn_params.items.len,
.return_type = .{ .Explicit = &type_of.base },
.doc_comments = null,
.var_args_token = null,
.body_node = null,
.lib_name = null,
.align_expr = null,
.section_expr = null,
.callconv_expr = null,
};
mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
const return_expr = try transCreateNodeReturnExpr(c);
const expr = try parseCExpr(c, it, source, source_loc, scope);
const last = it.next().?;
@ -5266,7 +5351,18 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
try block_scope.statements.append(&return_expr.base);
const block_node = try block_scope.complete(c);
fn_proto.body_node = &block_node.base;
const fn_proto = try ast.Node.FnProto.create(c.arena, .{
.fn_token = fn_tok,
.params_len = fn_params.items.len,
.return_type = .{ .Explicit = &type_of.base },
}, .{
.visib_token = pub_tok,
.extern_export_inline_token = inline_tok,
.name_token = name_tok,
.body_node = &block_node.base,
});
mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
_ = try c.global_scope.macro_table.put(name, &fn_proto.base);
}
@ -6030,7 +6126,7 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
const ident = node.cast(ast.Node.Identifier).?;
if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| {
if (value.cast(ast.Node.VarDecl)) |var_decl|
return getContainer(c, var_decl.init_node.?);
return getContainer(c, var_decl.getTrailer("init_node").?);
}
},
@ -6060,7 +6156,7 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
if (ref.cast(ast.Node.Identifier)) |ident| {
if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| {
if (value.cast(ast.Node.VarDecl)) |var_decl| {
if (var_decl.type_node) |ty|
if (var_decl.getTrailer("type_node")) |ty|
return getContainer(c, ty);
}
}
@ -6084,7 +6180,7 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
}
fn getFnProto(c: *Context, ref: *ast.Node) ?*ast.Node.FnProto {
const init = if (ref.cast(ast.Node.VarDecl)) |v| v.init_node.? else return null;
const init = if (ref.cast(ast.Node.VarDecl)) |v| v.getTrailer("init_node").? else return null;
if (getContainerTypeOf(c, init)) |ty_node| {
if (ty_node.cast(ast.Node.OptionalType)) |prefix| {
if (prefix.rhs.cast(ast.Node.FnProto)) |fn_proto| {
View File
@ -2797,7 +2797,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub fn a() callconv(.C) void {}
\\pub fn b() callconv(.C) void {}
\\pub export fn c() void {}
\\pub fn foo() callconv(.C) void {}
\\pub fn foo(...) callconv(.C) void {}
});
cases.add("casting away const and volatile",