Mirror of https://github.com/ziglang/zig.git
Merge pull request #6184 from tadeokondrak/TrailerFlags-update
std.meta.TrailerFlags: use @Type to improve API
Commit 43ac5e5dac
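This commit changes the std.meta.TrailerFlags API: fields are now identified by enum literals of a generated FieldEnum instead of comptime name strings, and init/setMany take a generated InitStruct (every field optional, defaulting to null) instead of anytype. A condensed before/after sketch of a call site, adapted from the updated test further down (Flags, slice, and the values are illustrative):

    const std = @import("std");
    const Flags = std.meta.TrailerFlags(struct { a: i32, b: bool, c: u64 });
    var flags = Flags.init(.{ .b = true, .c = 1234 });
    // before: flags.set(slice.ptr, "c", 12345678);  flags.get(slice.ptr, "c")
    // after:  flags.set(slice.ptr, .c, 12345678);   flags.get(slice.ptr, .c)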
@@ -8,6 +8,7 @@ const meta = std.meta;
const testing = std.testing;
const mem = std.mem;
const assert = std.debug.assert;
const TypeInfo = std.builtin.TypeInfo;
/// This is useful for saving memory when allocating an object that has many
/// optional components. The optional objects are allocated sequentially in
@@ -17,91 +18,125 @@ pub fn TrailerFlags(comptime Fields: type) type {
return struct {
bits: Int,
pub const Int = @Type(.{ .Int = .{ .bits = bit_count, .is_signed = false } });
pub const Int = meta.Int(false, bit_count);
pub const bit_count = @typeInfo(Fields).Struct.fields.len;
pub const FieldEnum = blk: {
comptime var fields: [bit_count]TypeInfo.EnumField = undefined;
inline for (@typeInfo(Fields).Struct.fields) |struct_field, i|
fields[i] = .{ .name = struct_field.name, .value = i };
break :blk @Type(.{
.Enum = .{
.layout = .Auto,
.tag_type = std.math.IntFittingRange(0, bit_count - 1),
.fields = &fields,
.decls = &[_]TypeInfo.Declaration{},
.is_exhaustive = true,
},
});
};
pub const InitStruct = blk: {
comptime var fields: [bit_count]TypeInfo.StructField = undefined;
inline for (@typeInfo(Fields).Struct.fields) |struct_field, i| {
fields[i] = TypeInfo.StructField{
.name = struct_field.name,
.field_type = ?struct_field.field_type,
.default_value = @as(
??struct_field.field_type,
@as(?struct_field.field_type, null),
),
};
}
break :blk @Type(.{
.Struct = .{
.layout = .Auto,
.fields = &fields,
.decls = &[_]TypeInfo.Declaration{},
.is_tuple = false,
},
});
};
pub const Self = @This();
pub fn has(self: Self, comptime name: []const u8) bool {
const field_index = meta.fieldIndex(Fields, name).?;
pub fn has(self: Self, comptime field: FieldEnum) bool {
const field_index = @enumToInt(field);
return (self.bits & (1 << field_index)) != 0;
}
pub fn get(self: Self, p: [*]align(@alignOf(Fields)) const u8, comptime name: []const u8) ?Field(name) {
if (!self.has(name))
pub fn get(self: Self, p: [*]align(@alignOf(Fields)) const u8, comptime field: FieldEnum) ?Field(field) {
if (!self.has(field))
return null;
return self.ptrConst(p, name).*;
return self.ptrConst(p, field).*;
}
pub fn setFlag(self: *Self, comptime name: []const u8) void {
const field_index = meta.fieldIndex(Fields, name).?;
pub fn setFlag(self: *Self, comptime field: FieldEnum) void {
const field_index = @enumToInt(field);
self.bits |= 1 << field_index;
}
/// `fields` is a struct with each field set to an optional value.
/// Missing fields are assumed to be `null`.
/// Only the non-null bits are observed and are used to set the flag bits.
pub fn init(fields: anytype) Self {
pub fn init(fields: InitStruct) Self {
var self: Self = .{ .bits = 0 };
inline for (@typeInfo(@TypeOf(fields)).Struct.fields) |field| {
const opt: ?Field(field.name) = @field(fields, field.name);
const field_index = meta.fieldIndex(Fields, field.name).?;
self.bits |= @as(Int, @boolToInt(opt != null)) << field_index;
inline for (@typeInfo(Fields).Struct.fields) |field, i| {
if (@field(fields, field.name)) |_|
self.bits |= 1 << i;
}
return self;
}
/// `fields` is a struct with each field set to an optional value (same as `init`).
/// Missing fields are assumed to be `null`.
pub fn setMany(self: Self, p: [*]align(@alignOf(Fields)) u8, fields: anytype) void {
inline for (@typeInfo(@TypeOf(fields)).Struct.fields) |field| {
const opt: ?Field(field.name) = @field(fields, field.name);
if (opt) |value| {
self.set(p, field.name, value);
}
pub fn setMany(self: Self, p: [*]align(@alignOf(Fields)) u8, fields: InitStruct) void {
inline for (@typeInfo(Fields).Struct.fields) |field, i| {
if (@field(fields, field.name)) |value|
self.set(p, @intToEnum(FieldEnum, i), value);
}
}
pub fn set(
self: Self,
p: [*]align(@alignOf(Fields)) u8,
comptime name: []const u8,
value: Field(name),
comptime field: FieldEnum,
value: Field(field),
) void {
self.ptr(p, name).* = value;
self.ptr(p, field).* = value;
}
pub fn ptr(self: Self, p: [*]align(@alignOf(Fields)) u8, comptime name: []const u8) *Field(name) {
if (@sizeOf(Field(name)) == 0)
pub fn ptr(self: Self, p: [*]align(@alignOf(Fields)) u8, comptime field: FieldEnum) *Field(field) {
if (@sizeOf(Field(field)) == 0)
return undefined;
const off = self.offset(p, name);
return @ptrCast(*Field(name), @alignCast(@alignOf(Field(name)), p + off));
const off = self.offset(p, field);
return @ptrCast(*Field(field), @alignCast(@alignOf(Field(field)), p + off));
}
pub fn ptrConst(self: Self, p: [*]align(@alignOf(Fields)) const u8, comptime name: []const u8) *const Field(name) {
if (@sizeOf(Field(name)) == 0)
pub fn ptrConst(self: Self, p: [*]align(@alignOf(Fields)) const u8, comptime field: FieldEnum) *const Field(field) {
if (@sizeOf(Field(field)) == 0)
return undefined;
const off = self.offset(p, name);
return @ptrCast(*const Field(name), @alignCast(@alignOf(Field(name)), p + off));
const off = self.offset(p, field);
return @ptrCast(*const Field(field), @alignCast(@alignOf(Field(field)), p + off));
}
pub fn offset(self: Self, p: [*]align(@alignOf(Fields)) const u8, comptime name: []const u8) usize {
pub fn offset(self: Self, p: [*]align(@alignOf(Fields)) const u8, comptime field: FieldEnum) usize {
var off: usize = 0;
inline for (@typeInfo(Fields).Struct.fields) |field, i| {
inline for (@typeInfo(Fields).Struct.fields) |field_info, i| {
const active = (self.bits & (1 << i)) != 0;
if (comptime mem.eql(u8, field.name, name)) {
if (i == @enumToInt(field)) {
assert(active);
return mem.alignForwardGeneric(usize, off, @alignOf(field.field_type));
return mem.alignForwardGeneric(usize, off, @alignOf(field_info.field_type));
} else if (active) {
off = mem.alignForwardGeneric(usize, off, @alignOf(field.field_type));
off += @sizeOf(field.field_type);
off = mem.alignForwardGeneric(usize, off, @alignOf(field_info.field_type));
off += @sizeOf(field_info.field_type);
}
}
@compileError("no field named " ++ name ++ " in type " ++ @typeName(Fields));
}
pub fn Field(comptime name: []const u8) type {
return meta.fieldInfo(Fields, name).field_type;
pub fn Field(comptime field: FieldEnum) type {
inline for (@typeInfo(Fields).Struct.fields) |field_info, i| {
if (i == @enumToInt(field))
return field_info.field_type;
}
}
pub fn sizeInBytes(self: Self) usize {
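For illustration, given the same kind of Fields struct as the test below uses, the generated declarations are roughly equivalent to the following (a sketch of what the @Type calls above produce, not literal compiler output):

    // Fields     = struct { a: i32, b: bool, c: u64 }
    // FieldEnum  -> enum(u2) { a, b, c }
    // InitStruct -> struct { a: ?i32 = null, b: ?bool = null, c: ?u64 = null }

so callers write .b instead of "b", and init/setMany get field names and value types checked at compile time.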
@@ -125,6 +160,8 @@ test "TrailerFlags" {
b: bool,
c: u64,
});
testing.expectEqual(u2, @TagType(Flags.FieldEnum));
var flags = Flags.init(.{
.b = true,
.c = 1234,
@@ -132,19 +169,19 @@ test "TrailerFlags" {
const slice = try testing.allocator.allocAdvanced(u8, 8, flags.sizeInBytes(), .exact);
defer testing.allocator.free(slice);
flags.set(slice.ptr, "b", false);
flags.set(slice.ptr, "c", 12345678);
flags.set(slice.ptr, .b, false);
flags.set(slice.ptr, .c, 12345678);
testing.expect(flags.get(slice.ptr, "a") == null);
testing.expect(!flags.get(slice.ptr, "b").?);
testing.expect(flags.get(slice.ptr, "c").? == 12345678);
testing.expect(flags.get(slice.ptr, .a) == null);
testing.expect(!flags.get(slice.ptr, .b).?);
testing.expect(flags.get(slice.ptr, .c).? == 12345678);
flags.setMany(slice.ptr, .{
.b = true,
.c = 5678,
});
testing.expect(flags.get(slice.ptr, "a") == null);
testing.expect(flags.get(slice.ptr, "b").?);
testing.expect(flags.get(slice.ptr, "c").? == 5678);
testing.expect(flags.get(slice.ptr, .a) == null);
testing.expect(flags.get(slice.ptr, .b).?);
testing.expect(flags.get(slice.ptr, .c).? == 5678);
}
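As a worked example of the layout logic (an illustration following offset() above and the test's Flags type): with only .b and .c set, active fields are laid out in declaration order, so flags.offset(slice.ptr, .b) is 0 and flags.offset(slice.ptr, .c) is mem.alignForwardGeneric(usize, 1, @alignOf(u64)) = 8; under the same accounting sizeInBytes() would be 16, which is what the test allocates. Unset fields such as .a occupy no space in the trailer.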
@@ -915,23 +915,111 @@ pub const Node = struct {
init_node: *Node,
});
pub fn getDocComments(self: *const VarDecl) ?*DocComment {
return self.getTrailer(.doc_comments);
}
pub fn setDocComments(self: *VarDecl, value: *DocComment) void {
self.setTrailer(.doc_comments, value);
}
pub fn getVisibToken(self: *const VarDecl) ?TokenIndex {
return self.getTrailer(.visib_token);
}
pub fn setVisibToken(self: *VarDecl, value: TokenIndex) void {
self.setTrailer(.visib_token, value);
}
pub fn getThreadLocalToken(self: *const VarDecl) ?TokenIndex {
return self.getTrailer(.thread_local_token);
}
pub fn setThreadLocalToken(self: *VarDecl, value: TokenIndex) void {
self.setTrailer(.thread_local_token, value);
}
pub fn getEqToken(self: *const VarDecl) ?TokenIndex {
return self.getTrailer(.eq_token);
}
pub fn setEqToken(self: *VarDecl, value: TokenIndex) void {
self.setTrailer(.eq_token, value);
}
pub fn getComptimeToken(self: *const VarDecl) ?TokenIndex {
return self.getTrailer(.comptime_token);
}
pub fn setComptimeToken(self: *VarDecl, value: TokenIndex) void {
self.setTrailer(.comptime_token, value);
}
pub fn getExternExportToken(self: *const VarDecl) ?TokenIndex {
return self.getTrailer(.extern_export_token);
}
pub fn setExternExportToken(self: *VarDecl, value: TokenIndex) void {
self.setTrailer(.extern_export_token, value);
}
pub fn getLibName(self: *const VarDecl) ?*Node {
return self.getTrailer(.lib_name);
}
pub fn setLibName(self: *VarDecl, value: *Node) void {
self.setTrailer(.lib_name, value);
}
pub fn getTypeNode(self: *const VarDecl) ?*Node {
return self.getTrailer(.type_node);
}
pub fn setTypeNode(self: *VarDecl, value: *Node) void {
self.setTrailer(.type_node, value);
}
pub fn getAlignNode(self: *const VarDecl) ?*Node {
return self.getTrailer(.align_node);
}
pub fn setAlignNode(self: *VarDecl, value: *Node) void {
self.setTrailer(.align_node, value);
}
pub fn getSectionNode(self: *const VarDecl) ?*Node {
return self.getTrailer(.section_node);
}
pub fn setSectionNode(self: *VarDecl, value: *Node) void {
self.setTrailer(.section_node, value);
}
pub fn getInitNode(self: *const VarDecl) ?*Node {
return self.getTrailer(.init_node);
}
pub fn setInitNode(self: *VarDecl, value: *Node) void {
self.setTrailer(.init_node, value);
}
pub const RequiredFields = struct {
mut_token: TokenIndex,
name_token: TokenIndex,
semicolon_token: TokenIndex,
};
pub fn getTrailer(self: *const VarDecl, comptime name: []const u8) ?TrailerFlags.Field(name) {
fn getTrailer(self: *const VarDecl, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) {
const trailers_start = @ptrCast([*]const u8, self) + @sizeOf(VarDecl);
return self.trailer_flags.get(trailers_start, name);
return self.trailer_flags.get(trailers_start, field);
}
pub fn setTrailer(self: *VarDecl, comptime name: []const u8, value: TrailerFlags.Field(name)) void {
fn setTrailer(self: *VarDecl, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void {
const trailers_start = @ptrCast([*]u8, self) + @sizeOf(VarDecl);
self.trailer_flags.set(trailers_start, name, value);
self.trailer_flags.set(trailers_start, field, value);
}
pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: anytype) !*VarDecl {
pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*VarDecl {
const trailer_flags = TrailerFlags.init(trailers);
const bytes = try allocator.alignedAlloc(u8, @alignOf(VarDecl), sizeInBytes(trailer_flags));
const var_decl = @ptrCast(*VarDecl, bytes.ptr);
@@ -954,22 +1042,22 @@ pub const Node = struct {
pub fn iterate(self: *const VarDecl, index: usize) ?*Node {
var i = index;
if (self.getTrailer("type_node")) |type_node| {
if (self.getTypeNode()) |type_node| {
if (i < 1) return type_node;
i -= 1;
}
if (self.getTrailer("align_node")) |align_node| {
if (self.getAlignNode()) |align_node| {
if (i < 1) return align_node;
i -= 1;
}
if (self.getTrailer("section_node")) |section_node| {
if (self.getSectionNode()) |section_node| {
if (i < 1) return section_node;
i -= 1;
}
if (self.getTrailer("init_node")) |init_node| {
if (self.getInitNode()) |init_node| {
if (i < 1) return init_node;
i -= 1;
}
@@ -978,11 +1066,11 @@ pub const Node = struct {
}
pub fn firstToken(self: *const VarDecl) TokenIndex {
if (self.getTrailer("visib_token")) |visib_token| return visib_token;
if (self.getTrailer("thread_local_token")) |thread_local_token| return thread_local_token;
if (self.getTrailer("comptime_token")) |comptime_token| return comptime_token;
if (self.getTrailer("extern_export_token")) |extern_export_token| return extern_export_token;
assert(self.getTrailer("lib_name") == null);
if (self.getVisibToken()) |visib_token| return visib_token;
if (self.getThreadLocalToken()) |thread_local_token| return thread_local_token;
if (self.getComptimeToken()) |comptime_token| return comptime_token;
if (self.getExternExportToken()) |extern_export_token| return extern_export_token;
assert(self.getLibName() == null);
return self.mut_token;
}
@@ -1320,34 +1408,126 @@ pub const Node = struct {
std.debug.print("{*} flags: {b} name_token: {} {*} params_len: {}\n", .{
self,
self.trailer_flags.bits,
self.getTrailer("name_token"),
self.trailer_flags.ptrConst(trailers_start, "name_token"),
self.getNameToken(),
self.trailer_flags.ptrConst(trailers_start, .name_token),
self.params_len,
});
}
pub fn body(self: *const FnProto) ?*Node {
return self.getTrailer("body_node");
pub fn getDocComments(self: *const FnProto) ?*DocComment {
return self.getTrailer(.doc_comments);
}
pub fn getTrailer(self: *const FnProto, comptime name: []const u8) ?TrailerFlags.Field(name) {
pub fn setDocComments(self: *FnProto, value: *DocComment) void {
self.setTrailer(.doc_comments, value);
}
pub fn getBodyNode(self: *const FnProto) ?*Node {
return self.getTrailer(.body_node);
}
pub fn setBodyNode(self: *FnProto, value: *Node) void {
self.setTrailer(.body_node, value);
}
pub fn getLibName(self: *const FnProto) ?*Node {
return self.getTrailer(.lib_name);
}
pub fn setLibName(self: *FnProto, value: *Node) void {
self.setTrailer(.lib_name, value);
}
pub fn getAlignExpr(self: *const FnProto) ?*Node {
return self.getTrailer(.align_expr);
}
pub fn setAlignExpr(self: *FnProto, value: *Node) void {
self.setTrailer(.align_expr, value);
}
pub fn getSectionExpr(self: *const FnProto) ?*Node {
return self.getTrailer(.section_expr);
}
pub fn setSectionExpr(self: *FnProto, value: *Node) void {
self.setTrailer(.section_expr, value);
}
pub fn getCallconvExpr(self: *const FnProto) ?*Node {
return self.getTrailer(.callconv_expr);
}
pub fn setCallconvExpr(self: *FnProto, value: *Node) void {
self.setTrailer(.callconv_expr, value);
}
pub fn getVisibToken(self: *const FnProto) ?TokenIndex {
return self.getTrailer(.visib_token);
}
pub fn setVisibToken(self: *FnProto, value: TokenIndex) void {
self.setTrailer(.visib_token, value);
}
pub fn getNameToken(self: *const FnProto) ?TokenIndex {
return self.getTrailer(.name_token);
}
pub fn setNameToken(self: *FnProto, value: TokenIndex) void {
self.setTrailer(.name_token, value);
}
pub fn getVarArgsToken(self: *const FnProto) ?TokenIndex {
return self.getTrailer(.var_args_token);
}
pub fn setVarArgsToken(self: *FnProto, value: TokenIndex) void {
self.setTrailer(.var_args_token, value);
}
pub fn getExternExportInlineToken(self: *const FnProto) ?TokenIndex {
return self.getTrailer(.extern_export_inline_token);
}
pub fn setExternExportInlineToken(self: *FnProto, value: TokenIndex) void {
self.setTrailer(.extern_export_inline_token, value);
}
pub fn getIsExternPrototype(self: *const FnProto) ?void {
return self.getTrailer(.is_extern_prototype);
}
pub fn setIsExternPrototype(self: *FnProto, value: void) void {
self.setTrailer(.is_extern_prototype, value);
}
pub fn getIsAsync(self: *const FnProto) ?void {
return self.getTrailer(.is_async);
}
pub fn setIsAsync(self: *FnProto, value: void) void {
self.setTrailer(.is_async, value);
}
fn getTrailer(self: *const FnProto, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) {
const trailers_start = @alignCast(
@alignOf(ParamDecl),
@ptrCast([*]const u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
);
return self.trailer_flags.get(trailers_start, name);
return self.trailer_flags.get(trailers_start, field);
}
pub fn setTrailer(self: *FnProto, comptime name: []const u8, value: TrailerFlags.Field(name)) void {
fn setTrailer(self: *FnProto, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void {
const trailers_start = @alignCast(
@alignOf(ParamDecl),
@ptrCast([*]u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
);
self.trailer_flags.set(trailers_start, name, value);
self.trailer_flags.set(trailers_start, field, value);
}
/// After this the caller must initialize the params list.
pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: anytype) !*FnProto {
pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*FnProto {
const trailer_flags = TrailerFlags.init(trailers);
const bytes = try allocator.alignedAlloc(u8, @alignOf(FnProto), sizeInBytes(
required.params_len,
@@ -1376,7 +1556,7 @@ pub const Node = struct {
pub fn iterate(self: *const FnProto, index: usize) ?*Node {
var i = index;
if (self.getTrailer("lib_name")) |lib_name| {
if (self.getLibName()) |lib_name| {
if (i < 1) return lib_name;
i -= 1;
}
@@ -1394,12 +1574,12 @@ pub const Node = struct {
}
i -= params_len;
if (self.getTrailer("align_expr")) |align_expr| {
if (self.getAlignExpr()) |align_expr| {
if (i < 1) return align_expr;
i -= 1;
}
if (self.getTrailer("section_expr")) |section_expr| {
if (self.getSectionExpr()) |section_expr| {
if (i < 1) return section_expr;
i -= 1;
}
@@ -1412,7 +1592,7 @@ pub const Node = struct {
.Invalid => {},
}
if (self.body()) |body_node| {
if (self.getBodyNode()) |body_node| {
if (i < 1) return body_node;
i -= 1;
}
@@ -1421,14 +1601,14 @@ pub const Node = struct {
}
pub fn firstToken(self: *const FnProto) TokenIndex {
if (self.getTrailer("visib_token")) |visib_token| return visib_token;
if (self.getTrailer("extern_export_inline_token")) |extern_export_inline_token| return extern_export_inline_token;
assert(self.getTrailer("lib_name") == null);
if (self.getVisibToken()) |visib_token| return visib_token;
if (self.getExternExportInlineToken()) |extern_export_inline_token| return extern_export_inline_token;
assert(self.getLibName() == null);
return self.fn_token;
}
pub fn lastToken(self: *const FnProto) TokenIndex {
if (self.body()) |body_node| return body_node.lastToken();
if (self.getBodyNode()) |body_node| return body_node.lastToken();
switch (self.return_type) {
.Explicit, .InferErrorSet => |node| return node.lastToken(),
.Invalid => |tok| return tok,
@@ -2673,24 +2853,32 @@ pub const Node = struct {
};
pub fn getRHS(self: *const ControlFlowExpression) ?*Node {
return self.getTrailer("rhs");
return self.getTrailer(.rhs);
}
pub fn setRHS(self: *ControlFlowExpression, value: *Node) void {
self.setTrailer(.rhs, value);
}
pub fn getLabel(self: *const ControlFlowExpression) ?TokenIndex {
return self.getTrailer("label");
return self.getTrailer(.label);
}
pub fn getTrailer(self: *const ControlFlowExpression, comptime name: []const u8) ?TrailerFlags.Field(name) {
pub fn setLabel(self: *ControlFlowExpression, value: TokenIndex) void {
self.setTrailer(.label, value);
}
fn getTrailer(self: *const ControlFlowExpression, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) {
const trailers_start = @ptrCast([*]const u8, self) + @sizeOf(ControlFlowExpression);
return self.trailer_flags.get(trailers_start, name);
return self.trailer_flags.get(trailers_start, field);
}
pub fn setTrailer(self: *ControlFlowExpression, comptime name: []const u8, value: TrailerFlags.Field(name)) void {
fn setTrailer(self: *ControlFlowExpression, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void {
const trailers_start = @ptrCast([*]u8, self) + @sizeOf(ControlFlowExpression);
self.trailer_flags.set(trailers_start, name, value);
self.trailer_flags.set(trailers_start, field, value);
}
pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: anytype) !*ControlFlowExpression {
pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*ControlFlowExpression {
const trailer_flags = TrailerFlags.init(trailers);
const bytes = try allocator.alignedAlloc(u8, @alignOf(ControlFlowExpression), sizeInBytes(trailer_flags));
const ctrl_flow_expr = @ptrCast(*ControlFlowExpression, bytes.ptr);
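All three node types updated above (VarDecl, FnProto, ControlFlowExpression) keep their optional trailer objects directly after the node's fixed data; this is what the getTrailer/setTrailer pointer arithmetic encodes. A rough picture of the allocations made by the create() functions (an inference from that code, not literal source):

    // VarDecl, ControlFlowExpression: [ node struct ][ active trailers, in field declaration order ]
    // FnProto:                        [ FnProto ][ ParamDecl * params_len ][ active trailers, in field declaration order ]
    // trailers_start = node pointer + @sizeOf(node)  (+ @sizeOf(ParamDecl) * params_len for FnProto)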
@@ -232,9 +232,9 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tr
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
try renderDocComments(tree, stream, fn_proto, fn_proto.getTrailer("doc_comments"), indent, start_col);
try renderDocComments(tree, stream, fn_proto, fn_proto.getDocComments(), indent, start_col);
if (fn_proto.getTrailer("body_node")) |body_node| {
if (fn_proto.getBodyNode()) |body_node| {
try renderExpression(allocator, stream, tree, indent, start_col, decl, .Space);
try renderExpression(allocator, stream, tree, indent, start_col, body_node, space);
} else {
@@ -257,7 +257,7 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tr
.VarDecl => {
const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl);
try renderDocComments(tree, stream, var_decl, var_decl.getTrailer("doc_comments"), indent, start_col);
try renderDocComments(tree, stream, var_decl, var_decl.getDocComments(), indent, start_col);
try renderVarDecl(allocator, stream, tree, indent, start_col, var_decl);
},
@@ -1520,23 +1520,23 @@ fn renderExpression(
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base);
if (fn_proto.getTrailer("visib_token")) |visib_token_index| {
if (fn_proto.getVisibToken()) |visib_token_index| {
const visib_token = tree.token_ids[visib_token_index];
assert(visib_token == .Keyword_pub or visib_token == .Keyword_export);
try renderToken(tree, stream, visib_token_index, indent, start_col, Space.Space); // pub
}
if (fn_proto.getTrailer("extern_export_inline_token")) |extern_export_inline_token| {
if (fn_proto.getTrailer("is_extern_prototype") == null)
if (fn_proto.getExternExportInlineToken()) |extern_export_inline_token| {
if (fn_proto.getIsExternPrototype() == null)
try renderToken(tree, stream, extern_export_inline_token, indent, start_col, Space.Space); // extern/export/inline
}
if (fn_proto.getTrailer("lib_name")) |lib_name| {
if (fn_proto.getLibName()) |lib_name| {
try renderExpression(allocator, stream, tree, indent, start_col, lib_name, Space.Space);
}
const lparen = if (fn_proto.getTrailer("name_token")) |name_token| blk: {
const lparen = if (fn_proto.getNameToken()) |name_token| blk: {
try renderToken(tree, stream, fn_proto.fn_token, indent, start_col, Space.Space); // fn
try renderToken(tree, stream, name_token, indent, start_col, Space.None); // name
break :blk tree.nextToken(name_token);
@@ -1549,11 +1549,11 @@ fn renderExpression(
const rparen = tree.prevToken(
// the first token for the annotation expressions is the left
// parenthesis, hence the need for two prevToken
if (fn_proto.getTrailer("align_expr")) |align_expr|
if (fn_proto.getAlignExpr()) |align_expr|
tree.prevToken(tree.prevToken(align_expr.firstToken()))
else if (fn_proto.getTrailer("section_expr")) |section_expr|
else if (fn_proto.getSectionExpr()) |section_expr|
tree.prevToken(tree.prevToken(section_expr.firstToken()))
else if (fn_proto.getTrailer("callconv_expr")) |callconv_expr|
else if (fn_proto.getCallconvExpr()) |callconv_expr|
tree.prevToken(tree.prevToken(callconv_expr.firstToken()))
else switch (fn_proto.return_type) {
.Explicit => |node| node.firstToken(),
@@ -1574,12 +1574,12 @@ fn renderExpression(
for (fn_proto.params()) |param_decl, i| {
try renderParamDecl(allocator, stream, tree, indent, start_col, param_decl, Space.None);
if (i + 1 < fn_proto.params_len or fn_proto.getTrailer("var_args_token") != null) {
if (i + 1 < fn_proto.params_len or fn_proto.getVarArgsToken() != null) {
const comma = tree.nextToken(param_decl.lastToken());
try renderToken(tree, stream, comma, indent, start_col, Space.Space); // ,
}
}
if (fn_proto.getTrailer("var_args_token")) |var_args_token| {
if (fn_proto.getVarArgsToken()) |var_args_token| {
try renderToken(tree, stream, var_args_token, indent, start_col, Space.None);
}
} else {
@@ -1591,7 +1591,7 @@ fn renderExpression(
try stream.writeByteNTimes(' ', new_indent);
try renderParamDecl(allocator, stream, tree, new_indent, start_col, param_decl, Space.Comma);
}
if (fn_proto.getTrailer("var_args_token")) |var_args_token| {
if (fn_proto.getVarArgsToken()) |var_args_token| {
try stream.writeByteNTimes(' ', new_indent);
try renderToken(tree, stream, var_args_token, new_indent, start_col, Space.Comma);
}
@@ -1600,7 +1600,7 @@ fn renderExpression(
try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // )
if (fn_proto.getTrailer("align_expr")) |align_expr| {
if (fn_proto.getAlignExpr()) |align_expr| {
const align_rparen = tree.nextToken(align_expr.lastToken());
const align_lparen = tree.prevToken(align_expr.firstToken());
const align_kw = tree.prevToken(align_lparen);
@@ -1611,7 +1611,7 @@ fn renderExpression(
try renderToken(tree, stream, align_rparen, indent, start_col, Space.Space); // )
}
if (fn_proto.getTrailer("section_expr")) |section_expr| {
if (fn_proto.getSectionExpr()) |section_expr| {
const section_rparen = tree.nextToken(section_expr.lastToken());
const section_lparen = tree.prevToken(section_expr.firstToken());
const section_kw = tree.prevToken(section_lparen);
@@ -1622,7 +1622,7 @@ fn renderExpression(
try renderToken(tree, stream, section_rparen, indent, start_col, Space.Space); // )
}
if (fn_proto.getTrailer("callconv_expr")) |callconv_expr| {
if (fn_proto.getCallconvExpr()) |callconv_expr| {
const callconv_rparen = tree.nextToken(callconv_expr.lastToken());
const callconv_lparen = tree.prevToken(callconv_expr.firstToken());
const callconv_kw = tree.prevToken(callconv_lparen);
@@ -1631,9 +1631,9 @@ fn renderExpression(
try renderToken(tree, stream, callconv_lparen, indent, start_col, Space.None); // (
try renderExpression(allocator, stream, tree, indent, start_col, callconv_expr, Space.None);
try renderToken(tree, stream, callconv_rparen, indent, start_col, Space.Space); // )
} else if (fn_proto.getTrailer("is_extern_prototype") != null) {
} else if (fn_proto.getIsExternPrototype() != null) {
try stream.writeAll("callconv(.C) ");
} else if (fn_proto.getTrailer("is_async") != null) {
} else if (fn_proto.getIsAsync() != null) {
try stream.writeAll("callconv(.Async) ");
}
@@ -2221,69 +2221,69 @@ fn renderVarDecl(
start_col: *usize,
var_decl: *ast.Node.VarDecl,
) (@TypeOf(stream).Error || Error)!void {
if (var_decl.getTrailer("visib_token")) |visib_token| {
if (var_decl.getVisibToken()) |visib_token| {
try renderToken(tree, stream, visib_token, indent, start_col, Space.Space); // pub
}
if (var_decl.getTrailer("extern_export_token")) |extern_export_token| {
if (var_decl.getExternExportToken()) |extern_export_token| {
try renderToken(tree, stream, extern_export_token, indent, start_col, Space.Space); // extern
if (var_decl.getTrailer("lib_name")) |lib_name| {
if (var_decl.getLibName()) |lib_name| {
try renderExpression(allocator, stream, tree, indent, start_col, lib_name, Space.Space); // "lib"
}
}
if (var_decl.getTrailer("comptime_token")) |comptime_token| {
if (var_decl.getComptimeToken()) |comptime_token| {
try renderToken(tree, stream, comptime_token, indent, start_col, Space.Space); // comptime
}
if (var_decl.getTrailer("thread_local_token")) |thread_local_token| {
if (var_decl.getThreadLocalToken()) |thread_local_token| {
try renderToken(tree, stream, thread_local_token, indent, start_col, Space.Space); // threadlocal
}
try renderToken(tree, stream, var_decl.mut_token, indent, start_col, Space.Space); // var
const name_space = if (var_decl.getTrailer("type_node") == null and
(var_decl.getTrailer("align_node") != null or
var_decl.getTrailer("section_node") != null or
var_decl.getTrailer("init_node") != null))
const name_space = if (var_decl.getTypeNode() == null and
(var_decl.getAlignNode() != null or
var_decl.getSectionNode() != null or
var_decl.getInitNode() != null))
Space.Space
else
Space.None;
try renderToken(tree, stream, var_decl.name_token, indent, start_col, name_space);
if (var_decl.getTrailer("type_node")) |type_node| {
if (var_decl.getTypeNode()) |type_node| {
try renderToken(tree, stream, tree.nextToken(var_decl.name_token), indent, start_col, Space.Space);
const s = if (var_decl.getTrailer("align_node") != null or
var_decl.getTrailer("section_node") != null or
var_decl.getTrailer("init_node") != null) Space.Space else Space.None;
const s = if (var_decl.getAlignNode() != null or
var_decl.getSectionNode() != null or
var_decl.getInitNode() != null) Space.Space else Space.None;
try renderExpression(allocator, stream, tree, indent, start_col, type_node, s);
}
if (var_decl.getTrailer("align_node")) |align_node| {
if (var_decl.getAlignNode()) |align_node| {
const lparen = tree.prevToken(align_node.firstToken());
const align_kw = tree.prevToken(lparen);
const rparen = tree.nextToken(align_node.lastToken());
try renderToken(tree, stream, align_kw, indent, start_col, Space.None); // align
try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
try renderExpression(allocator, stream, tree, indent, start_col, align_node, Space.None);
const s = if (var_decl.getTrailer("section_node") != null or var_decl.getTrailer("init_node") != null) Space.Space else Space.None;
const s = if (var_decl.getSectionNode() != null or var_decl.getInitNode() != null) Space.Space else Space.None;
try renderToken(tree, stream, rparen, indent, start_col, s); // )
}
if (var_decl.getTrailer("section_node")) |section_node| {
if (var_decl.getSectionNode()) |section_node| {
const lparen = tree.prevToken(section_node.firstToken());
const section_kw = tree.prevToken(lparen);
const rparen = tree.nextToken(section_node.lastToken());
try renderToken(tree, stream, section_kw, indent, start_col, Space.None); // linksection
try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
try renderExpression(allocator, stream, tree, indent, start_col, section_node, Space.None);
const s = if (var_decl.getTrailer("init_node") != null) Space.Space else Space.None;
const s = if (var_decl.getInitNode() != null) Space.Space else Space.None;
try renderToken(tree, stream, rparen, indent, start_col, s); // )
}
if (var_decl.getTrailer("init_node")) |init_node| {
if (var_decl.getInitNode()) |init_node| {
const s = if (init_node.tag == .MultilineStringLiteral) Space.None else Space.Space;
try renderToken(tree, stream, var_decl.getTrailer("eq_token").?, indent, start_col, s); // =
try renderToken(tree, stream, var_decl.getEqToken().?, indent, start_col, s); // =
try renderExpression(allocator, stream, tree, indent, start_col, init_node, Space.None);
}
@@ -1256,8 +1256,8 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
};
defer fn_type_scope.instructions.deinit(self.gpa);
decl.is_pub = fn_proto.getTrailer("visib_token") != null;
const body_node = fn_proto.getTrailer("body_node") orelse
decl.is_pub = fn_proto.getVisibToken() != null;
const body_node = fn_proto.getBodyNode() orelse
return self.failTok(&fn_type_scope.base, fn_proto.fn_token, "TODO implement extern functions", .{});
const param_decls = fn_proto.params();
@@ -1276,19 +1276,19 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
};
param_types[i] = try astgen.expr(self, &fn_type_scope.base, type_type_rl, param_type_node);
}
if (fn_proto.getTrailer("var_args_token")) |var_args_token| {
if (fn_proto.getVarArgsToken()) |var_args_token| {
return self.failTok(&fn_type_scope.base, var_args_token, "TODO implement var args", .{});
}
if (fn_proto.getTrailer("lib_name")) |lib_name| {
if (fn_proto.getLibName()) |lib_name| {
return self.failNode(&fn_type_scope.base, lib_name, "TODO implement function library name", .{});
}
if (fn_proto.getTrailer("align_expr")) |align_expr| {
if (fn_proto.getAlignExpr()) |align_expr| {
return self.failNode(&fn_type_scope.base, align_expr, "TODO implement function align expression", .{});
}
if (fn_proto.getTrailer("section_expr")) |sect_expr| {
if (fn_proto.getSectionExpr()) |sect_expr| {
return self.failNode(&fn_type_scope.base, sect_expr, "TODO implement function section expression", .{});
}
if (fn_proto.getTrailer("callconv_expr")) |callconv_expr| {
if (fn_proto.getCallconvExpr()) |callconv_expr| {
return self.failNode(
&fn_type_scope.base,
callconv_expr,
@@ -1430,10 +1430,10 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
self.bin_file.freeDecl(decl);
}
if (fn_proto.getTrailer("extern_export_inline_token")) |maybe_export_token| {
if (fn_proto.getExternExportInlineToken()) |maybe_export_token| {
if (tree.token_ids[maybe_export_token] == .Keyword_export) {
const export_src = tree.token_locs[maybe_export_token].start;
const name_loc = tree.token_locs[fn_proto.getTrailer("name_token").?];
const name_loc = tree.token_locs[fn_proto.getNameToken().?];
const name = tree.tokenSliceLoc(name_loc);
// The scope needs to have the decl in it.
try self.analyzeExport(&block_scope.base, export_src, name, decl);
@@ -1460,37 +1460,37 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
};
defer block_scope.instructions.deinit(self.gpa);
decl.is_pub = var_decl.getTrailer("visib_token") != null;
decl.is_pub = var_decl.getVisibToken() != null;
const is_extern = blk: {
const maybe_extern_token = var_decl.getTrailer("extern_export_token") orelse
const maybe_extern_token = var_decl.getExternExportToken() orelse
break :blk false;
if (tree.token_ids[maybe_extern_token] != .Keyword_extern) break :blk false;
if (var_decl.getTrailer("init_node")) |some| {
if (var_decl.getInitNode()) |some| {
return self.failNode(&block_scope.base, some, "extern variables have no initializers", .{});
}
break :blk true;
};
if (var_decl.getTrailer("lib_name")) |lib_name| {
if (var_decl.getLibName()) |lib_name| {
assert(is_extern);
return self.failNode(&block_scope.base, lib_name, "TODO implement function library name", .{});
}
const is_mutable = tree.token_ids[var_decl.mut_token] == .Keyword_var;
const is_threadlocal = if (var_decl.getTrailer("thread_local_token")) |some| blk: {
const is_threadlocal = if (var_decl.getThreadLocalToken()) |some| blk: {
if (!is_mutable) {
return self.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{});
}
break :blk true;
} else false;
assert(var_decl.getTrailer("comptime_token") == null);
if (var_decl.getTrailer("align_node")) |align_expr| {
assert(var_decl.getComptimeToken() == null);
if (var_decl.getAlignNode()) |align_expr| {
return self.failNode(&block_scope.base, align_expr, "TODO implement function align expression", .{});
}
if (var_decl.getTrailer("section_node")) |sect_expr| {
if (var_decl.getSectionNode()) |sect_expr| {
return self.failNode(&block_scope.base, sect_expr, "TODO implement function section expression", .{});
}
const explicit_type = blk: {
const type_node = var_decl.getTrailer("type_node") orelse
const type_node = var_decl.getTypeNode() orelse
break :blk null;
// Temporary arena for the zir instructions.
@@ -1517,7 +1517,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
};
var var_type: Type = undefined;
const value: ?Value = if (var_decl.getTrailer("init_node")) |init_node| blk: {
const value: ?Value = if (var_decl.getInitNode()) |init_node| blk: {
var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
defer gen_scope_arena.deinit();
var gen_scope: Scope.GenZIR = .{
@@ -1602,7 +1602,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
decl.analysis = .complete;
decl.generation = self.generation;
if (var_decl.getTrailer("extern_export_token")) |maybe_export_token| {
if (var_decl.getExternExportToken()) |maybe_export_token| {
if (tree.token_ids[maybe_export_token] == .Keyword_export) {
const export_src = tree.token_locs[maybe_export_token].start;
const name_loc = tree.token_locs[var_decl.name_token];
@@ -1768,7 +1768,7 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void {
for (decls) |src_decl, decl_i| {
if (src_decl.cast(ast.Node.FnProto)) |fn_proto| {
// We will create a Decl for it regardless of analysis status.
const name_tok = fn_proto.getTrailer("name_token") orelse {
const name_tok = fn_proto.getNameToken() orelse {
@panic("TODO missing function name");
};
@@ -1804,7 +1804,7 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void {
} else {
const new_decl = try self.createNewDecl(&root_scope.base, name, decl_i, name_hash, contents_hash);
root_scope.decls.appendAssumeCapacity(new_decl);
if (fn_proto.getTrailer("extern_export_inline_token")) |maybe_export_token| {
if (fn_proto.getExternExportInlineToken()) |maybe_export_token| {
if (tree.token_ids[maybe_export_token] == .Keyword_export) {
self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
}
@@ -1831,7 +1831,7 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void {
} else {
const new_decl = try self.createNewDecl(&root_scope.base, name, decl_i, name_hash, contents_hash);
root_scope.decls.appendAssumeCapacity(new_decl);
if (var_decl.getTrailer("extern_export_token")) |maybe_export_token| {
if (var_decl.getExternExportToken()) |maybe_export_token| {
if (tree.token_ids[maybe_export_token] == .Keyword_export) {
self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
}
@@ -451,16 +451,16 @@ fn varDecl(
block_arena: *Allocator,
) InnerError!*Scope {
// TODO implement detection of shadowing
if (node.getTrailer("comptime_token")) |comptime_token| {
if (node.getComptimeToken()) |comptime_token| {
return mod.failTok(scope, comptime_token, "TODO implement comptime locals", .{});
}
if (node.getTrailer("align_node")) |align_node| {
if (node.getAlignNode()) |align_node| {
return mod.failNode(scope, align_node, "TODO implement alignment on locals", .{});
}
const tree = scope.tree();
const name_src = tree.token_locs[node.name_token].start;
const ident_name = try identifierTokenString(mod, scope, node.name_token);
const init_node = node.getTrailer("init_node") orelse
const init_node = node.getInitNode() orelse
return mod.fail(scope, name_src, "variables must be initialized", .{});
switch (tree.token_ids[node.mut_token]) {
@@ -469,7 +469,7 @@ fn varDecl(
// or an rvalue as a result location. If it is an rvalue, we can use the instruction as
// the variable, no memory location needed.
const result_loc = if (nodeMayNeedMemoryLocation(init_node)) r: {
if (node.getTrailer("type_node")) |type_node| {
if (node.getTypeNode()) |type_node| {
const type_inst = try typeExpr(mod, scope, type_node);
const alloc = try addZIRUnOp(mod, scope, name_src, .alloc, type_inst);
break :r ResultLoc{ .ptr = alloc };
@@ -478,7 +478,7 @@ fn varDecl(
break :r ResultLoc{ .inferred_ptr = alloc };
}
} else r: {
if (node.getTrailer("type_node")) |type_node|
if (node.getTypeNode()) |type_node|
break :r ResultLoc{ .ty = try typeExpr(mod, scope, type_node) }
else
break :r .none;
@@ -494,7 +494,7 @@ fn varDecl(
return &sub_scope.base;
},
.Keyword_var => {
const var_data: struct { result_loc: ResultLoc, alloc: *zir.Inst } = if (node.getTrailer("type_node")) |type_node| a: {
const var_data: struct { result_loc: ResultLoc, alloc: *zir.Inst } = if (node.getTypeNode()) |type_node| a: {
const type_inst = try typeExpr(mod, scope, type_node);
const alloc = try addZIRUnOp(mod, scope, name_src, .alloc, type_inst);
break :a .{ .alloc = alloc, .result_loc = .{ .ptr = alloc } };
@@ -439,7 +439,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
if (module_fn.owner_decl.scope.cast(Module.Scope.File)) |scope_file| {
const tree = scope_file.contents.tree;
const fn_proto = tree.root_node.decls()[module_fn.owner_decl.src_index].castTag(.FnProto).?;
const block = fn_proto.body().?.castTag(.Block).?;
const block = fn_proto.getBodyNode().?.castTag(.Block).?;
const lbrace_src = tree.token_locs[block.lbrace].start;
const rbrace_src = tree.token_locs[block.rbrace].start;
break :blk .{ .lbrace_src = lbrace_src, .rbrace_src = rbrace_src, .source = tree.source };
@@ -1661,7 +1661,7 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
// TODO Look into improving the performance here by adding a token-index-to-line
// lookup table. Currently this involves scanning over the source code for newlines.
const fn_proto = file_ast_decls[decl.src_index].castTag(.FnProto).?;
const block = fn_proto.body().?.castTag(.Block).?;
const block = fn_proto.getBodyNode().?.castTag(.Block).?;
const line_delta = std.zig.lineDelta(tree.source, 0, tree.token_locs[block.lbrace].start);
break :blk @intCast(u28, line_delta);
} else if (decl.scope.cast(Module.Scope.ZIRModule)) |zir_module| {
@@ -2160,7 +2160,7 @@ pub fn updateDeclLineNumber(self: *Elf, module: *Module, decl: *const Module.Dec
// TODO Look into improving the performance here by adding a token-index-to-line
// lookup table. Currently this involves scanning over the source code for newlines.
const fn_proto = file_ast_decls[decl.src_index].castTag(.FnProto).?;
const block = fn_proto.body().?.castTag(.Block).?;
const block = fn_proto.getBodyNode().?.castTag(.Block).?;
const line_delta = std.zig.lineDelta(tree.source, 0, tree.token_locs[block.lbrace].start);
const casted_line_off = @intCast(u28, line_delta);
@@ -675,7 +675,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const ZigClangFunctionDecl) Error!void {
}
const body_node = try block_scope.complete(rp.c);
proto_node.setTrailer("body_node", body_node);
proto_node.setBodyNode(body_node);
return addTopLevelDecl(c, fn_name, &proto_node.base);
}
@@ -4493,7 +4493,7 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_a
const block_lbrace = try appendToken(c, .LBrace, "{");
const return_kw = try appendToken(c, .Keyword_return, "return");
const unwrap_expr = try transCreateNodeUnwrapNull(c, ref.cast(ast.Node.VarDecl).?.getTrailer("init_node").?);
const unwrap_expr = try transCreateNodeUnwrapNull(c, ref.cast(ast.Node.VarDecl).?.getInitNode().?);
const call_expr = try c.createCall(unwrap_expr, fn_params.items.len);
const call_params = call_expr.params();
@@ -6361,7 +6361,7 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
const ident = node.castTag(.Identifier).?;
if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| {
if (value.cast(ast.Node.VarDecl)) |var_decl|
return getContainer(c, var_decl.getTrailer("init_node").?);
return getContainer(c, var_decl.getInitNode().?);
}
},
@@ -6390,7 +6390,7 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
if (ref.castTag(.Identifier)) |ident| {
if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| {
if (value.cast(ast.Node.VarDecl)) |var_decl| {
if (var_decl.getTrailer("type_node")) |ty|
if (var_decl.getTypeNode()) |ty|
return getContainer(c, ty);
}
}
@@ -6412,7 +6412,7 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
}
fn getFnProto(c: *Context, ref: *ast.Node) ?*ast.Node.FnProto {
const init = if (ref.cast(ast.Node.VarDecl)) |v| v.getTrailer("init_node").? else return null;
const init = if (ref.cast(ast.Node.VarDecl)) |v| v.getInitNode().? else return null;
if (getContainerTypeOf(c, init)) |ty_node| {
if (ty_node.castTag(.OptionalType)) |prefix| {
if (prefix.rhs.cast(ast.Node.FnProto)) |fn_proto| {