stage2: breaking AST memory layout modifications

ast.Node.Id => ast.Node.Tag, matching recent style conventions.

Now multiple different AST node tags can map to the same AST node data
structures. In this commit, simple prefix operators now all map to
SimplePrefixOp.

`ast.Node.castTag` is now preferred over `ast.Node.cast`.

Upcoming: InfixOp flattened out.
This commit is contained in:
Andrew Kelley 2020-07-15 18:15:59 -07:00
parent e70d6d19f5
commit f119092273
6 changed files with 450 additions and 316 deletions

View File

@ -323,8 +323,8 @@ pub const Error = union(enum) {
node: *Node,
pub fn render(self: *const ExpectedCall, tokens: []const Token.Id, stream: anytype) !void {
return stream.print("expected " ++ @tagName(Node.Id.Call) ++ ", found {}", .{
@tagName(self.node.id),
return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ ", found {}", .{
@tagName(self.node.tag),
});
}
};
@ -333,8 +333,8 @@ pub const Error = union(enum) {
node: *Node,
pub fn render(self: *const ExpectedCallOrFnProto, tokens: []const Token.Id, stream: anytype) !void {
return stream.print("expected " ++ @tagName(Node.Id.Call) ++ " or " ++
@tagName(Node.Id.FnProto) ++ ", found {}", .{@tagName(self.node.id)});
return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ " or " ++
@tagName(Node.Tag.FnProto) ++ ", found {}", .{@tagName(self.node.tag)});
}
};
@ -396,9 +396,9 @@ pub const Error = union(enum) {
};
pub const Node = struct {
id: Id,
tag: Tag,
pub const Id = enum {
pub const Tag = enum {
// Top level
Root,
Use,
@ -484,49 +484,129 @@ pub const Node = struct {
ContainerField,
ErrorTag,
FieldInitializer,
pub fn Type(tag: Tag) type {
return switch (tag) {
.Root => Root,
.Use => Use,
.TestDecl => TestDecl,
.VarDecl => VarDecl,
.Defer => Defer,
.InfixOp => InfixOp,
.AddressOf,
.Await,
.BitNot,
.BoolNot,
.OptionalType,
.Negation,
.NegationWrap,
.Resume,
.Try,
=> SimplePrefixOp,
.ArrayType => ArrayType,
.ArrayTypeSentinel => ArrayTypeSentinel,
.PtrType => PtrType,
.SliceType => SliceType,
.SuffixOp => SuffixOp,
.ArrayInitializer => ArrayInitializer,
.ArrayInitializerDot => ArrayInitializerDot,
.StructInitializer => StructInitializer,
.StructInitializerDot => StructInitializerDot,
.Call => Call,
.Switch => Switch,
.While => While,
.For => For,
.If => If,
.ControlFlowExpression => ControlFlowExpression,
.Suspend => Suspend,
.AnyType => AnyType,
.ErrorType => ErrorType,
.FnProto => FnProto,
.AnyFrameType => AnyFrameType,
.IntegerLiteral => IntegerLiteral,
.FloatLiteral => FloatLiteral,
.EnumLiteral => EnumLiteral,
.StringLiteral => StringLiteral,
.MultilineStringLiteral => MultilineStringLiteral,
.CharLiteral => CharLiteral,
.BoolLiteral => BoolLiteral,
.NullLiteral => NullLiteral,
.UndefinedLiteral => UndefinedLiteral,
.Unreachable => Unreachable,
.Identifier => Identifier,
.GroupedExpression => GroupedExpression,
.BuiltinCall => BuiltinCall,
.ErrorSetDecl => ErrorSetDecl,
.ContainerDecl => ContainerDecl,
.Asm => Asm,
.Comptime => Comptime,
.Nosuspend => Nosuspend,
.Block => Block,
.DocComment => DocComment,
.SwitchCase => SwitchCase,
.SwitchElse => SwitchElse,
.Else => Else,
.Payload => Payload,
.PointerPayload => PointerPayload,
.PointerIndexPayload => PointerIndexPayload,
.ContainerField => ContainerField,
.ErrorTag => ErrorTag,
.FieldInitializer => FieldInitializer,
};
}
};
/// Prefer `castTag` to this.
pub fn cast(base: *Node, comptime T: type) ?*T {
if (base.id == comptime typeToId(T)) {
return @fieldParentPtr(T, "base", base);
if (std.meta.fieldInfo(T, "base").default_value) |default_base| {
return base.castTag(default_base.tag);
}
inline for (@typeInfo(Tag).Enum.fields) |field| {
const tag = @intToEnum(Tag, field.value);
if (base.tag == tag) {
if (T == tag.Type()) {
return @fieldParentPtr(T, "base", base);
}
return null;
}
}
unreachable;
}
pub fn castTag(base: *Node, comptime tag: Tag) ?*tag.Type() {
if (base.tag == tag) {
return @fieldParentPtr(tag.Type(), "base", base);
}
return null;
}
pub fn iterate(base: *Node, index: usize) ?*Node {
inline for (@typeInfo(Id).Enum.fields) |f| {
if (base.id == @field(Id, f.name)) {
const T = @field(Node, f.name);
return @fieldParentPtr(T, "base", base).iterate(index);
inline for (@typeInfo(Tag).Enum.fields) |field| {
const tag = @intToEnum(Tag, field.value);
if (base.tag == tag) {
return @fieldParentPtr(tag.Type(), "base", base).iterate(index);
}
}
unreachable;
}
pub fn firstToken(base: *const Node) TokenIndex {
inline for (@typeInfo(Id).Enum.fields) |f| {
if (base.id == @field(Id, f.name)) {
const T = @field(Node, f.name);
return @fieldParentPtr(T, "base", base).firstToken();
inline for (@typeInfo(Tag).Enum.fields) |field| {
const tag = @intToEnum(Tag, field.value);
if (base.tag == tag) {
return @fieldParentPtr(tag.Type(), "base", base).firstToken();
}
}
unreachable;
}
pub fn lastToken(base: *const Node) TokenIndex {
inline for (@typeInfo(Id).Enum.fields) |f| {
if (base.id == @field(Id, f.name)) {
const T = @field(Node, f.name);
return @fieldParentPtr(T, "base", base).lastToken();
}
}
unreachable;
}
pub fn typeToId(comptime T: type) Id {
inline for (@typeInfo(Id).Enum.fields) |f| {
if (T == @field(Node, f.name)) {
return @field(Id, f.name);
inline for (@typeInfo(Tag).Enum.fields) |field| {
const tag = @intToEnum(Tag, field.value);
if (base.tag == tag) {
return @fieldParentPtr(tag.Type(), "base", base).lastToken();
}
}
unreachable;
@ -535,7 +615,7 @@ pub const Node = struct {
pub fn requireSemiColon(base: *const Node) bool {
var n = base;
while (true) {
switch (n.id) {
switch (n.tag) {
.Root,
.ContainerField,
.Block,
@ -556,7 +636,7 @@ pub const Node = struct {
continue;
}
return while_node.body.id != .Block;
return while_node.body.tag != .Block;
},
.For => {
const for_node = @fieldParentPtr(For, "base", n);
@ -565,7 +645,7 @@ pub const Node = struct {
continue;
}
return for_node.body.id != .Block;
return for_node.body.tag != .Block;
},
.If => {
const if_node = @fieldParentPtr(If, "base", n);
@ -574,7 +654,7 @@ pub const Node = struct {
continue;
}
return if_node.body.id != .Block;
return if_node.body.tag != .Block;
},
.Else => {
const else_node = @fieldParentPtr(Else, "base", n);
@ -583,23 +663,23 @@ pub const Node = struct {
},
.Defer => {
const defer_node = @fieldParentPtr(Defer, "base", n);
return defer_node.expr.id != .Block;
return defer_node.expr.tag != .Block;
},
.Comptime => {
const comptime_node = @fieldParentPtr(Comptime, "base", n);
return comptime_node.expr.id != .Block;
return comptime_node.expr.tag != .Block;
},
.Suspend => {
const suspend_node = @fieldParentPtr(Suspend, "base", n);
if (suspend_node.body) |body| {
return body.id != .Block;
return body.tag != .Block;
}
return true;
},
.Nosuspend => {
const nosuspend_node = @fieldParentPtr(Nosuspend, "base", n);
return nosuspend_node.expr.id != .Block;
return nosuspend_node.expr.tag != .Block;
},
else => return true,
}
@ -613,7 +693,7 @@ pub const Node = struct {
std.debug.warn(" ", .{});
}
}
std.debug.warn("{}\n", .{@tagName(self.id)});
std.debug.warn("{}\n", .{@tagName(self.tag)});
var child_i: usize = 0;
while (self.iterate(child_i)) |child| : (child_i += 1) {
@ -623,7 +703,7 @@ pub const Node = struct {
/// The decls data follows this struct in memory as an array of Node pointers.
pub const Root = struct {
base: Node = Node{ .id = .Root },
base: Node = Node{ .tag = .Root },
eof_token: TokenIndex,
decls_len: NodeIndex,
@ -678,7 +758,7 @@ pub const Node = struct {
/// Trailed in memory by possibly many things, with each optional thing
/// determined by a bit in `trailer_flags`.
pub const VarDecl = struct {
base: Node = Node{ .id = .VarDecl },
base: Node = Node{ .tag = .VarDecl },
trailer_flags: TrailerFlags,
mut_token: TokenIndex,
name_token: TokenIndex,
@ -779,7 +859,7 @@ pub const Node = struct {
};
pub const Use = struct {
base: Node = Node{ .id = .Use },
base: Node = Node{ .tag = .Use },
doc_comments: ?*DocComment,
visib_token: ?TokenIndex,
use_token: TokenIndex,
@ -806,7 +886,7 @@ pub const Node = struct {
};
pub const ErrorSetDecl = struct {
base: Node = Node{ .id = .ErrorSetDecl },
base: Node = Node{ .tag = .ErrorSetDecl },
error_token: TokenIndex,
rbrace_token: TokenIndex,
decls_len: NodeIndex,
@ -856,7 +936,7 @@ pub const Node = struct {
/// The fields and decls Node pointers directly follow this struct in memory.
pub const ContainerDecl = struct {
base: Node = Node{ .id = .ContainerDecl },
base: Node = Node{ .tag = .ContainerDecl },
kind_token: TokenIndex,
layout_token: ?TokenIndex,
lbrace_token: TokenIndex,
@ -925,7 +1005,7 @@ pub const Node = struct {
};
pub const ContainerField = struct {
base: Node = Node{ .id = .ContainerField },
base: Node = Node{ .tag = .ContainerField },
doc_comments: ?*DocComment,
comptime_token: ?TokenIndex,
name_token: TokenIndex,
@ -976,7 +1056,7 @@ pub const Node = struct {
};
pub const ErrorTag = struct {
base: Node = Node{ .id = .ErrorTag },
base: Node = Node{ .tag = .ErrorTag },
doc_comments: ?*DocComment,
name_token: TokenIndex,
@ -1001,7 +1081,7 @@ pub const Node = struct {
};
pub const Identifier = struct {
base: Node = Node{ .id = .Identifier },
base: Node = Node{ .tag = .Identifier },
token: TokenIndex,
pub fn iterate(self: *const Identifier, index: usize) ?*Node {
@ -1020,7 +1100,7 @@ pub const Node = struct {
/// The params are directly after the FnProto in memory.
/// Next, each optional thing determined by a bit in `trailer_flags`.
pub const FnProto = struct {
base: Node = Node{ .id = .FnProto },
base: Node = Node{ .tag = .FnProto },
trailer_flags: TrailerFlags,
fn_token: TokenIndex,
params_len: NodeIndex,
@ -1230,7 +1310,7 @@ pub const Node = struct {
};
pub const AnyFrameType = struct {
base: Node = Node{ .id = .AnyFrameType },
base: Node = Node{ .tag = .AnyFrameType },
anyframe_token: TokenIndex,
result: ?Result,
@ -1262,7 +1342,7 @@ pub const Node = struct {
/// The statements of the block follow Block directly in memory.
pub const Block = struct {
base: Node = Node{ .id = .Block },
base: Node = Node{ .tag = .Block },
statements_len: NodeIndex,
lbrace: TokenIndex,
rbrace: TokenIndex,
@ -1316,7 +1396,7 @@ pub const Node = struct {
};
pub const Defer = struct {
base: Node = Node{ .id = .Defer },
base: Node = Node{ .tag = .Defer },
defer_token: TokenIndex,
payload: ?*Node,
expr: *Node,
@ -1340,7 +1420,7 @@ pub const Node = struct {
};
pub const Comptime = struct {
base: Node = Node{ .id = .Comptime },
base: Node = Node{ .tag = .Comptime },
doc_comments: ?*DocComment,
comptime_token: TokenIndex,
expr: *Node,
@ -1364,7 +1444,7 @@ pub const Node = struct {
};
pub const Nosuspend = struct {
base: Node = Node{ .id = .Nosuspend },
base: Node = Node{ .tag = .Nosuspend },
nosuspend_token: TokenIndex,
expr: *Node,
@ -1387,7 +1467,7 @@ pub const Node = struct {
};
pub const Payload = struct {
base: Node = Node{ .id = .Payload },
base: Node = Node{ .tag = .Payload },
lpipe: TokenIndex,
error_symbol: *Node,
rpipe: TokenIndex,
@ -1411,7 +1491,7 @@ pub const Node = struct {
};
pub const PointerPayload = struct {
base: Node = Node{ .id = .PointerPayload },
base: Node = Node{ .tag = .PointerPayload },
lpipe: TokenIndex,
ptr_token: ?TokenIndex,
value_symbol: *Node,
@ -1436,7 +1516,7 @@ pub const Node = struct {
};
pub const PointerIndexPayload = struct {
base: Node = Node{ .id = .PointerIndexPayload },
base: Node = Node{ .tag = .PointerIndexPayload },
lpipe: TokenIndex,
ptr_token: ?TokenIndex,
value_symbol: *Node,
@ -1467,7 +1547,7 @@ pub const Node = struct {
};
pub const Else = struct {
base: Node = Node{ .id = .Else },
base: Node = Node{ .tag = .Else },
else_token: TokenIndex,
payload: ?*Node,
body: *Node,
@ -1498,7 +1578,7 @@ pub const Node = struct {
/// The cases node pointers are found in memory after Switch.
/// They must be SwitchCase or SwitchElse nodes.
pub const Switch = struct {
base: Node = Node{ .id = .Switch },
base: Node = Node{ .tag = .Switch },
switch_token: TokenIndex,
rbrace: TokenIndex,
cases_len: NodeIndex,
@ -1552,7 +1632,7 @@ pub const Node = struct {
/// Items sub-nodes appear in memory directly following SwitchCase.
pub const SwitchCase = struct {
base: Node = Node{ .id = .SwitchCase },
base: Node = Node{ .tag = .SwitchCase },
arrow_token: TokenIndex,
payload: ?*Node,
expr: *Node,
@ -1610,7 +1690,7 @@ pub const Node = struct {
};
pub const SwitchElse = struct {
base: Node = Node{ .id = .SwitchElse },
base: Node = Node{ .tag = .SwitchElse },
token: TokenIndex,
pub fn iterate(self: *const SwitchElse, index: usize) ?*Node {
@ -1627,7 +1707,7 @@ pub const Node = struct {
};
pub const While = struct {
base: Node = Node{ .id = .While },
base: Node = Node{ .tag = .While },
label: ?TokenIndex,
inline_token: ?TokenIndex,
while_token: TokenIndex,
@ -1686,7 +1766,7 @@ pub const Node = struct {
};
pub const For = struct {
base: Node = Node{ .id = .For },
base: Node = Node{ .tag = .For },
label: ?TokenIndex,
inline_token: ?TokenIndex,
for_token: TokenIndex,
@ -1737,7 +1817,7 @@ pub const Node = struct {
};
pub const If = struct {
base: Node = Node{ .id = .If },
base: Node = Node{ .tag = .If },
if_token: TokenIndex,
condition: *Node,
payload: ?*Node,
@ -1779,8 +1859,9 @@ pub const Node = struct {
}
};
/// TODO split up and make every op its own AST Node tag
pub const InfixOp = struct {
base: Node = Node{ .id = .InfixOp },
base: Node = Node{ .tag = .InfixOp },
op_token: TokenIndex,
lhs: *Node,
op: Op,
@ -1906,41 +1987,29 @@ pub const Node = struct {
}
};
pub const AddressOf = SimplePrefixOp(.AddressOf);
pub const Await = SimplePrefixOp(.Await);
pub const BitNot = SimplePrefixOp(.BitNot);
pub const BoolNot = SimplePrefixOp(.BoolNot);
pub const OptionalType = SimplePrefixOp(.OptionalType);
pub const Negation = SimplePrefixOp(.Negation);
pub const NegationWrap = SimplePrefixOp(.NegationWrap);
pub const Resume = SimplePrefixOp(.Resume);
pub const Try = SimplePrefixOp(.Try);
pub const SimplePrefixOp = struct {
base: Node,
op_token: TokenIndex,
rhs: *Node,
pub fn SimplePrefixOp(comptime tag: Id) type {
return struct {
base: Node = Node{ .id = tag },
op_token: TokenIndex,
rhs: *Node,
const Self = @This();
const Self = @This();
pub fn iterate(self: *const Self, index: usize) ?*Node {
if (index == 0) return self.rhs;
return null;
}
pub fn iterate(self: *const Self, index: usize) ?*Node {
if (index == 0) return self.rhs;
return null;
}
pub fn firstToken(self: *const Self) TokenIndex {
return self.op_token;
}
pub fn firstToken(self: *const Self) TokenIndex {
return self.op_token;
}
pub fn lastToken(self: *const Self) TokenIndex {
return self.rhs.lastToken();
}
};
}
pub fn lastToken(self: *const Self) TokenIndex {
return self.rhs.lastToken();
}
};
pub const ArrayType = struct {
base: Node = Node{ .id = .ArrayType },
base: Node = Node{ .tag = .ArrayType },
op_token: TokenIndex,
rhs: *Node,
len_expr: *Node,
@ -1967,7 +2036,7 @@ pub const Node = struct {
};
pub const ArrayTypeSentinel = struct {
base: Node = Node{ .id = .ArrayTypeSentinel },
base: Node = Node{ .tag = .ArrayTypeSentinel },
op_token: TokenIndex,
rhs: *Node,
len_expr: *Node,
@ -1998,7 +2067,7 @@ pub const Node = struct {
};
pub const PtrType = struct {
base: Node = Node{ .id = .PtrType },
base: Node = Node{ .tag = .PtrType },
op_token: TokenIndex,
rhs: *Node,
/// TODO Add a u8 flags field to Node where it would otherwise be padding, and each bit represents
@ -2034,7 +2103,7 @@ pub const Node = struct {
};
pub const SliceType = struct {
base: Node = Node{ .id = .SliceType },
base: Node = Node{ .tag = .SliceType },
op_token: TokenIndex,
rhs: *Node,
/// TODO Add a u8 flags field to Node where it would otherwise be padding, and each bit represents
@ -2070,7 +2139,7 @@ pub const Node = struct {
};
pub const FieldInitializer = struct {
base: Node = Node{ .id = .FieldInitializer },
base: Node = Node{ .tag = .FieldInitializer },
period_token: TokenIndex,
name_token: TokenIndex,
expr: *Node,
@ -2095,7 +2164,7 @@ pub const Node = struct {
/// Elements occur directly in memory after ArrayInitializer.
pub const ArrayInitializer = struct {
base: Node = Node{ .id = .ArrayInitializer },
base: Node = Node{ .tag = .ArrayInitializer },
rtoken: TokenIndex,
list_len: NodeIndex,
lhs: *Node,
@ -2148,7 +2217,7 @@ pub const Node = struct {
/// Elements occur directly in memory after ArrayInitializerDot.
pub const ArrayInitializerDot = struct {
base: Node = Node{ .id = .ArrayInitializerDot },
base: Node = Node{ .tag = .ArrayInitializerDot },
dot: TokenIndex,
rtoken: TokenIndex,
list_len: NodeIndex,
@ -2198,7 +2267,7 @@ pub const Node = struct {
/// Elements occur directly in memory after StructInitializer.
pub const StructInitializer = struct {
base: Node = Node{ .id = .StructInitializer },
base: Node = Node{ .tag = .StructInitializer },
rtoken: TokenIndex,
list_len: NodeIndex,
lhs: *Node,
@ -2251,7 +2320,7 @@ pub const Node = struct {
/// Elements occur directly in memory after StructInitializerDot.
pub const StructInitializerDot = struct {
base: Node = Node{ .id = .StructInitializerDot },
base: Node = Node{ .tag = .StructInitializerDot },
dot: TokenIndex,
rtoken: TokenIndex,
list_len: NodeIndex,
@ -2301,7 +2370,7 @@ pub const Node = struct {
/// Parameter nodes directly follow Call in memory.
pub const Call = struct {
base: Node = Node{ .id = .Call },
base: Node = Node{ .tag = .Call },
lhs: *Node,
rtoken: TokenIndex,
params_len: NodeIndex,
@ -2355,7 +2424,7 @@ pub const Node = struct {
};
pub const SuffixOp = struct {
base: Node = Node{ .id = .SuffixOp },
base: Node = Node{ .tag = .SuffixOp },
op: Op,
lhs: *Node,
rtoken: TokenIndex,
@ -2415,7 +2484,7 @@ pub const Node = struct {
};
pub const GroupedExpression = struct {
base: Node = Node{ .id = .GroupedExpression },
base: Node = Node{ .tag = .GroupedExpression },
lparen: TokenIndex,
expr: *Node,
rparen: TokenIndex,
@ -2441,7 +2510,7 @@ pub const Node = struct {
/// TODO break this into separate Break, Continue, Return AST Nodes to save memory.
/// Could be further broken into LabeledBreak, LabeledContinue, and ReturnVoid to save even more.
pub const ControlFlowExpression = struct {
base: Node = Node{ .id = .ControlFlowExpression },
base: Node = Node{ .tag = .ControlFlowExpression },
ltoken: TokenIndex,
kind: Kind,
rhs: ?*Node,
@ -2496,7 +2565,7 @@ pub const Node = struct {
};
pub const Suspend = struct {
base: Node = Node{ .id = .Suspend },
base: Node = Node{ .tag = .Suspend },
suspend_token: TokenIndex,
body: ?*Node,
@ -2525,7 +2594,7 @@ pub const Node = struct {
};
pub const IntegerLiteral = struct {
base: Node = Node{ .id = .IntegerLiteral },
base: Node = Node{ .tag = .IntegerLiteral },
token: TokenIndex,
pub fn iterate(self: *const IntegerLiteral, index: usize) ?*Node {
@ -2542,7 +2611,7 @@ pub const Node = struct {
};
pub const EnumLiteral = struct {
base: Node = Node{ .id = .EnumLiteral },
base: Node = Node{ .tag = .EnumLiteral },
dot: TokenIndex,
name: TokenIndex,
@ -2560,7 +2629,7 @@ pub const Node = struct {
};
pub const FloatLiteral = struct {
base: Node = Node{ .id = .FloatLiteral },
base: Node = Node{ .tag = .FloatLiteral },
token: TokenIndex,
pub fn iterate(self: *const FloatLiteral, index: usize) ?*Node {
@ -2578,7 +2647,7 @@ pub const Node = struct {
/// Parameters are in memory following BuiltinCall.
pub const BuiltinCall = struct {
base: Node = Node{ .id = .BuiltinCall },
base: Node = Node{ .tag = .BuiltinCall },
params_len: NodeIndex,
builtin_token: TokenIndex,
rparen_token: TokenIndex,
@ -2627,7 +2696,7 @@ pub const Node = struct {
};
pub const StringLiteral = struct {
base: Node = Node{ .id = .StringLiteral },
base: Node = Node{ .tag = .StringLiteral },
token: TokenIndex,
pub fn iterate(self: *const StringLiteral, index: usize) ?*Node {
@ -2645,7 +2714,7 @@ pub const Node = struct {
/// The string literal tokens appear directly in memory after MultilineStringLiteral.
pub const MultilineStringLiteral = struct {
base: Node = Node{ .id = .MultilineStringLiteral },
base: Node = Node{ .tag = .MultilineStringLiteral },
lines_len: TokenIndex,
/// After this the caller must initialize the lines list.
@ -2687,7 +2756,7 @@ pub const Node = struct {
};
pub const CharLiteral = struct {
base: Node = Node{ .id = .CharLiteral },
base: Node = Node{ .tag = .CharLiteral },
token: TokenIndex,
pub fn iterate(self: *const CharLiteral, index: usize) ?*Node {
@ -2704,7 +2773,7 @@ pub const Node = struct {
};
pub const BoolLiteral = struct {
base: Node = Node{ .id = .BoolLiteral },
base: Node = Node{ .tag = .BoolLiteral },
token: TokenIndex,
pub fn iterate(self: *const BoolLiteral, index: usize) ?*Node {
@ -2721,7 +2790,7 @@ pub const Node = struct {
};
pub const NullLiteral = struct {
base: Node = Node{ .id = .NullLiteral },
base: Node = Node{ .tag = .NullLiteral },
token: TokenIndex,
pub fn iterate(self: *const NullLiteral, index: usize) ?*Node {
@ -2738,7 +2807,7 @@ pub const Node = struct {
};
pub const UndefinedLiteral = struct {
base: Node = Node{ .id = .UndefinedLiteral },
base: Node = Node{ .tag = .UndefinedLiteral },
token: TokenIndex,
pub fn iterate(self: *const UndefinedLiteral, index: usize) ?*Node {
@ -2755,7 +2824,7 @@ pub const Node = struct {
};
pub const Asm = struct {
base: Node = Node{ .id = .Asm },
base: Node = Node{ .tag = .Asm },
asm_token: TokenIndex,
rparen: TokenIndex,
volatile_token: ?TokenIndex,
@ -2875,7 +2944,7 @@ pub const Node = struct {
};
pub const Unreachable = struct {
base: Node = Node{ .id = .Unreachable },
base: Node = Node{ .tag = .Unreachable },
token: TokenIndex,
pub fn iterate(self: *const Unreachable, index: usize) ?*Node {
@ -2892,7 +2961,7 @@ pub const Node = struct {
};
pub const ErrorType = struct {
base: Node = Node{ .id = .ErrorType },
base: Node = Node{ .tag = .ErrorType },
token: TokenIndex,
pub fn iterate(self: *const ErrorType, index: usize) ?*Node {
@ -2909,7 +2978,7 @@ pub const Node = struct {
};
pub const AnyType = struct {
base: Node = Node{ .id = .AnyType },
base: Node = Node{ .tag = .AnyType },
token: TokenIndex,
pub fn iterate(self: *const AnyType, index: usize) ?*Node {
@ -2929,7 +2998,7 @@ pub const Node = struct {
/// TODO actually maybe remove entirely in favor of iterating backward from Node.firstToken()
/// and forwards to find same-line doc comments.
pub const DocComment = struct {
base: Node = Node{ .id = .DocComment },
base: Node = Node{ .tag = .DocComment },
/// Points to the first doc comment token. API users are expected to iterate over the
/// tokens array, looking for more doc comments, ignoring line comments, and stopping
/// at the first other token.
@ -2951,7 +3020,7 @@ pub const Node = struct {
};
pub const TestDecl = struct {
base: Node = Node{ .id = .TestDecl },
base: Node = Node{ .tag = .TestDecl },
doc_comments: ?*DocComment,
test_token: TokenIndex,
name: *Node,
@ -2996,7 +3065,7 @@ pub const PtrInfo = struct {
test "iterate" {
var root = Node.Root{
.base = Node{ .id = Node.Id.Root },
.base = Node{ .tag = Node.Tag.Root },
.decls_len = 0,
.eof_token = 0,
};

View File

@ -1128,8 +1128,9 @@ const Parser = struct {
const expr_node = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
const node = try p.arena.allocator.create(Node.Resume);
const node = try p.arena.allocator.create(Node.SimplePrefixOp);
node.* = .{
.base = .{ .tag = .Resume },
.op_token = token,
.rhs = expr_node,
};
@ -1439,7 +1440,7 @@ const Parser = struct {
});
while (try p.parseSuffixOp()) |node| {
switch (node.id) {
switch (node.tag) {
.SuffixOp => node.cast(Node.SuffixOp).?.lhs = res,
.InfixOp => node.cast(Node.InfixOp).?.lhs = res,
else => unreachable,
@ -1470,7 +1471,7 @@ const Parser = struct {
while (true) {
if (try p.parseSuffixOp()) |node| {
switch (node.id) {
switch (node.tag) {
.SuffixOp => node.cast(Node.SuffixOp).?.lhs = res,
.InfixOp => node.cast(Node.InfixOp).?.lhs = res,
else => unreachable,
@ -1660,7 +1661,7 @@ const Parser = struct {
}
if (try p.parseLoopTypeExpr()) |node| {
switch (node.id) {
switch (node.tag) {
.For => node.cast(Node.For).?.label = label,
.While => node.cast(Node.While).?.label = label,
else => unreachable,
@ -2434,9 +2435,10 @@ const Parser = struct {
}
}
fn allocSimplePrefixOp(p: *Parser, comptime tag: Node.Id, token: TokenIndex) !?*Node {
const node = try p.arena.allocator.create(Node.SimplePrefixOp(tag));
fn allocSimplePrefixOp(p: *Parser, comptime tag: Node.Tag, token: TokenIndex) !?*Node {
const node = try p.arena.allocator.create(Node.SimplePrefixOp);
node.* = .{
.base = .{ .tag = tag },
.op_token = token,
.rhs = undefined, // set by caller
};
@ -2457,8 +2459,9 @@ const Parser = struct {
/// / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
fn parsePrefixTypeOp(p: *Parser) !?*Node {
if (p.eatToken(.QuestionMark)) |token| {
const node = try p.arena.allocator.create(Node.OptionalType);
const node = try p.arena.allocator.create(Node.SimplePrefixOp);
node.* = .{
.base = .{ .tag = .OptionalType },
.op_token = token,
.rhs = undefined, // set by caller
};
@ -3072,7 +3075,6 @@ const Parser = struct {
fn createLiteral(p: *Parser, comptime T: type, token: TokenIndex) !*Node {
const result = try p.arena.allocator.create(T);
result.* = T{
.base = Node{ .id = Node.typeToId(T) },
.token = token,
};
return &result.base;
@ -3148,8 +3150,9 @@ const Parser = struct {
fn parseTry(p: *Parser) !?*Node {
const token = p.eatToken(.Keyword_try) orelse return null;
const node = try p.arena.allocator.create(Node.Try);
const node = try p.arena.allocator.create(Node.SimplePrefixOp);
node.* = .{
.base = .{ .tag = .Try },
.op_token = token,
.rhs = undefined, // set by caller
};
@ -3213,58 +3216,19 @@ const Parser = struct {
if (try opParseFn(p)) |first_op| {
var rightmost_op = first_op;
while (true) {
switch (rightmost_op.id) {
.AddressOf => {
switch (rightmost_op.tag) {
.AddressOf,
.Await,
.BitNot,
.BoolNot,
.OptionalType,
.Negation,
.NegationWrap,
.Resume,
.Try,
=> {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.AddressOf).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.Await => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.Await).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.BitNot => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.BitNot).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.BoolNot => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.BoolNot).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.OptionalType => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.OptionalType).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.Negation => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.Negation).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.NegationWrap => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.NegationWrap).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.Resume => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.Resume).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.Try => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.Try).?.rhs = rhs;
rightmost_op.cast(Node.SimplePrefixOp).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
@ -3310,57 +3274,18 @@ const Parser = struct {
}
// If any prefix op existed, a child node on the RHS is required
switch (rightmost_op.id) {
.AddressOf => {
const prefix_op = rightmost_op.cast(Node.AddressOf).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.Await => {
const prefix_op = rightmost_op.cast(Node.Await).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.BitNot => {
const prefix_op = rightmost_op.cast(Node.BitNot).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.BoolNot => {
const prefix_op = rightmost_op.cast(Node.BoolNot).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.OptionalType => {
const prefix_op = rightmost_op.cast(Node.OptionalType).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.Negation => {
const prefix_op = rightmost_op.cast(Node.Negation).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.NegationWrap => {
const prefix_op = rightmost_op.cast(Node.NegationWrap).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.Resume => {
const prefix_op = rightmost_op.cast(Node.Resume).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.Try => {
const prefix_op = rightmost_op.cast(Node.Try).?;
switch (rightmost_op.tag) {
.AddressOf,
.Await,
.BitNot,
.BoolNot,
.OptionalType,
.Negation,
.NegationWrap,
.Resume,
.Try,
=> {
const prefix_op = rightmost_op.cast(Node.SimplePrefixOp).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});

View File

@ -223,7 +223,7 @@ fn renderTopLevelDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tre
}
fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, indent: usize, start_col: *usize, decl: *ast.Node, space: Space) (@TypeOf(stream).Error || Error)!void {
switch (decl.id) {
switch (decl.tag) {
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
@ -365,7 +365,7 @@ fn renderExpression(
base: *ast.Node,
space: Space,
) (@TypeOf(stream).Error || Error)!void {
switch (base.id) {
switch (base.tag) {
.Identifier => {
const identifier = @fieldParentPtr(ast.Node.Identifier, "base", base);
return renderToken(tree, stream, identifier.token, indent, start_col, space);
@ -468,50 +468,25 @@ fn renderExpression(
return renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.rhs, space);
},
.BitNot => {
const bit_not = @fieldParentPtr(ast.Node.BitNot, "base", base);
try renderToken(tree, stream, bit_not.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, bit_not.rhs, space);
.BitNot,
.BoolNot,
.Negation,
.NegationWrap,
.OptionalType,
.AddressOf,
=> {
const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base);
try renderToken(tree, stream, casted_node.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, casted_node.rhs, space);
},
.BoolNot => {
const bool_not = @fieldParentPtr(ast.Node.BoolNot, "base", base);
try renderToken(tree, stream, bool_not.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, bool_not.rhs, space);
},
.Negation => {
const negation = @fieldParentPtr(ast.Node.Negation, "base", base);
try renderToken(tree, stream, negation.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, negation.rhs, space);
},
.NegationWrap => {
const negation_wrap = @fieldParentPtr(ast.Node.NegationWrap, "base", base);
try renderToken(tree, stream, negation_wrap.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, negation_wrap.rhs, space);
},
.OptionalType => {
const opt_type = @fieldParentPtr(ast.Node.OptionalType, "base", base);
try renderToken(tree, stream, opt_type.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, opt_type.rhs, space);
},
.AddressOf => {
const addr_of = @fieldParentPtr(ast.Node.AddressOf, "base", base);
try renderToken(tree, stream, addr_of.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, addr_of.rhs, space);
},
.Try => {
const try_node = @fieldParentPtr(ast.Node.Try, "base", base);
try renderToken(tree, stream, try_node.op_token, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, try_node.rhs, space);
},
.Resume => {
const resume_node = @fieldParentPtr(ast.Node.Resume, "base", base);
try renderToken(tree, stream, resume_node.op_token, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, resume_node.rhs, space);
},
.Await => {
const await_node = @fieldParentPtr(ast.Node.Await, "base", base);
try renderToken(tree, stream, await_node.op_token, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, await_node.rhs, space);
.Try,
.Resume,
.Await,
=> {
const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base);
try renderToken(tree, stream, casted_node.op_token, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, casted_node.rhs, space);
},
.ArrayType => {
@ -659,7 +634,7 @@ fn renderExpression(
.ArrayInitializer, .ArrayInitializerDot => {
var rtoken: ast.TokenIndex = undefined;
var exprs: []*ast.Node = undefined;
const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.id) {
const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.tag) {
.ArrayInitializerDot => blk: {
const casted = @fieldParentPtr(ast.Node.ArrayInitializerDot, "base", base);
rtoken = casted.rtoken;
@ -793,14 +768,14 @@ fn renderExpression(
}
try renderExtraNewline(tree, stream, start_col, next_expr);
if (next_expr.id != .MultilineStringLiteral) {
if (next_expr.tag != .MultilineStringLiteral) {
try stream.writeByteNTimes(' ', new_indent);
}
} else {
try renderExpression(allocator, stream, tree, new_indent, start_col, expr, Space.Comma); // ,
}
}
if (exprs[exprs.len - 1].id != .MultilineStringLiteral) {
if (exprs[exprs.len - 1].tag != .MultilineStringLiteral) {
try stream.writeByteNTimes(' ', indent);
}
return renderToken(tree, stream, rtoken, indent, start_col, space);
@ -823,7 +798,7 @@ fn renderExpression(
.StructInitializer, .StructInitializerDot => {
var rtoken: ast.TokenIndex = undefined;
var field_inits: []*ast.Node = undefined;
const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.id) {
const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.tag) {
.StructInitializerDot => blk: {
const casted = @fieldParentPtr(ast.Node.StructInitializerDot, "base", base);
rtoken = casted.rtoken;
@ -877,7 +852,7 @@ fn renderExpression(
if (field_inits.len == 1) blk: {
const field_init = field_inits[0].cast(ast.Node.FieldInitializer).?;
switch (field_init.expr.id) {
switch (field_init.expr.tag) {
.StructInitializer,
.StructInitializerDot,
=> break :blk,
@ -974,7 +949,7 @@ fn renderExpression(
const params = call.params();
for (params) |param_node, i| {
const param_node_new_indent = if (param_node.id == .MultilineStringLiteral) blk: {
const param_node_new_indent = if (param_node.tag == .MultilineStringLiteral) blk: {
break :blk indent;
} else blk: {
try stream.writeByteNTimes(' ', new_indent);
@ -1284,7 +1259,7 @@ fn renderExpression(
// declarations inside are fields
const src_has_only_fields = blk: {
for (fields_and_decls) |decl| {
if (decl.id != .ContainerField) break :blk false;
if (decl.tag != .ContainerField) break :blk false;
}
break :blk true;
};
@ -1831,7 +1806,7 @@ fn renderExpression(
const rparen = tree.nextToken(for_node.array_expr.lastToken());
const body_is_block = for_node.body.id == .Block;
const body_is_block = for_node.body.tag == .Block;
const src_one_line_to_body = !body_is_block and tree.tokensOnSameLine(rparen, for_node.body.firstToken());
const body_on_same_line = body_is_block or src_one_line_to_body;
@ -1874,7 +1849,7 @@ fn renderExpression(
try renderExpression(allocator, stream, tree, indent, start_col, if_node.condition, Space.None); // condition
const body_is_if_block = if_node.body.id == .If;
const body_is_if_block = if_node.body.tag == .If;
const body_is_block = nodeIsBlock(if_node.body);
if (body_is_if_block) {
@ -1978,7 +1953,7 @@ fn renderExpression(
const indent_once = indent + indent_delta;
if (asm_node.template.id == .MultilineStringLiteral) {
if (asm_node.template.tag == .MultilineStringLiteral) {
// After rendering a multiline string literal the cursor is
// already offset by indent
try stream.writeByteNTimes(' ', indent_delta);
@ -2245,7 +2220,7 @@ fn renderVarDecl(
}
if (var_decl.getTrailer("init_node")) |init_node| {
const s = if (init_node.id == .MultilineStringLiteral) Space.None else Space.Space;
const s = if (init_node.tag == .MultilineStringLiteral) Space.None else Space.Space;
try renderToken(tree, stream, var_decl.getTrailer("eq_token").?, indent, start_col, s); // =
try renderExpression(allocator, stream, tree, indent, start_col, init_node, Space.None);
}
@ -2287,7 +2262,7 @@ fn renderStatement(
start_col: *usize,
base: *ast.Node,
) (@TypeOf(stream).Error || Error)!void {
switch (base.id) {
switch (base.tag) {
.VarDecl => {
const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", base);
try renderVarDecl(allocator, stream, tree, indent, start_col, var_decl);
@ -2566,7 +2541,7 @@ fn renderDocCommentsToken(
}
fn nodeIsBlock(base: *const ast.Node) bool {
return switch (base.id) {
return switch (base.tag) {
.Block,
.If,
.For,

View File

@ -212,6 +212,7 @@ pub const Decl = struct {
},
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
}
}
@ -307,6 +308,7 @@ pub const Scope = struct {
.block => return self.cast(Block).?.arena,
.decl => return &self.cast(DeclAnalysis).?.arena.allocator,
.gen_zir => return self.cast(GenZIR).?.arena,
.local_var => return self.cast(LocalVar).?.gen_zir.arena,
.zir_module => return &self.cast(ZIRModule).?.contents.module.arena.allocator,
.file => unreachable,
}
@ -318,6 +320,7 @@ pub const Scope = struct {
return switch (self.tag) {
.block => self.cast(Block).?.decl,
.gen_zir => self.cast(GenZIR).?.decl,
.local_var => return self.cast(LocalVar).?.gen_zir.decl,
.decl => self.cast(DeclAnalysis).?.decl,
.zir_module => null,
.file => null,
@ -330,6 +333,7 @@ pub const Scope = struct {
switch (self.tag) {
.block => return self.cast(Block).?.decl.scope,
.gen_zir => return self.cast(GenZIR).?.decl.scope,
.local_var => return self.cast(LocalVar).?.gen_zir.decl.scope,
.decl => return self.cast(DeclAnalysis).?.decl.scope,
.zir_module, .file => return self,
}
@ -342,6 +346,7 @@ pub const Scope = struct {
switch (self.tag) {
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
.zir_module => return self.cast(ZIRModule).?.fullyQualifiedNameHash(name),
.file => return self.cast(File).?.fullyQualifiedNameHash(name),
@ -356,9 +361,22 @@ pub const Scope = struct {
.decl => return self.cast(DeclAnalysis).?.decl.scope.cast(File).?.contents.tree,
.block => return self.cast(Block).?.decl.scope.cast(File).?.contents.tree,
.gen_zir => return self.cast(GenZIR).?.decl.scope.cast(File).?.contents.tree,
.local_var => return self.cast(LocalVar).?.gen_zir.decl.scope.cast(File).?.contents.tree,
}
}
/// Asserts the scope is a child of a `GenZIR` and returns it.
/// Only scopes that can occur during AST->ZIR generation (`gen_zir`
/// itself, or a `local_var` chained under one) are valid here; all
/// other scope tags are a caller bug.
pub fn getGenZIR(self: *Scope) *GenZIR {
    return switch (self.tag) {
        .block => unreachable,
        .gen_zir => self.cast(GenZIR).?,
        // No redundant `return` here: each arm is already the value of
        // the surrounding `return switch` expression.
        .local_var => self.cast(LocalVar).?.gen_zir,
        .decl => unreachable,
        .zir_module => unreachable,
        .file => unreachable,
    };
}
pub fn dumpInst(self: *Scope, inst: *Inst) void {
const zir_module = self.namespace();
const loc = std.zig.findLineColumn(zir_module.source.bytes, inst.src);
@ -379,6 +397,7 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).sub_file_path,
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
}
}
@ -389,6 +408,7 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).unload(gpa),
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
}
}
@ -398,6 +418,7 @@ pub const Scope = struct {
.file => return @fieldParentPtr(File, "base", base).getSource(module),
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).getSource(module),
.gen_zir => unreachable,
.local_var => unreachable,
.block => unreachable,
.decl => unreachable,
}
@ -410,6 +431,7 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).removeDecl(child),
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
}
}
@ -429,6 +451,7 @@ pub const Scope = struct {
},
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
}
}
@ -449,6 +472,7 @@ pub const Scope = struct {
block,
decl,
gen_zir,
local_var,
};
pub const File = struct {
@ -680,6 +704,18 @@ pub const Scope = struct {
arena: *Allocator,
instructions: std.ArrayListUnmanaged(*zir.Inst) = .{},
};
/// This structure lives as long as the AST generation of the Block
/// node that contains the variable. This struct's parents can be
/// other `LocalVar` and finally a `GenZIR` at the top.
pub const LocalVar = struct {
    // Tag used to identify this scope kind when walking a generic `*Scope`.
    pub const base_tag: Tag = .local_var;
    base: Scope = Scope{ .tag = base_tag },
    // The enclosing AST->ZIR generation scope this variable belongs to.
    gen_zir: *GenZIR,
    // Parent scope; either another `LocalVar` or the `gen_zir` itself.
    parent: *Scope,
    // Identifier text of the variable. NOTE(review): appears to alias the
    // token slice of the source tree rather than owning a copy — confirm
    // the tree outlives this scope.
    name: []const u8,
    // The ZIR instruction whose result is the value of this local.
    inst: *zir.Inst,
};
};
pub const AllErrors = struct {
@ -1114,7 +1150,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const file_scope = decl.scope.cast(Scope.File).?;
const tree = try self.getAstTree(file_scope);
const ast_node = tree.root_node.decls()[decl.src_index];
switch (ast_node.id) {
switch (ast_node.tag) {
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", ast_node);
@ -3247,6 +3283,12 @@ fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, src: usize, err_msg: *Err
gen_zir.decl.generation = self.generation;
self.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
},
.local_var => {
const gen_zir = scope.cast(Scope.LocalVar).?.gen_zir;
gen_zir.decl.analysis = .sema_failure;
gen_zir.decl.generation = self.generation;
self.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
},
.zir_module => {
const zir_module = scope.cast(Scope.ZIRModule).?;
zir_module.status = .loaded_sema_failure;

View File

@ -11,8 +11,11 @@ const trace = @import("tracy.zig").trace;
const Scope = Module.Scope;
const InnerError = Module.InnerError;
/// Turn Zig AST into untyped ZIR instructions.
pub fn expr(mod: *Module, scope: *Scope, ast_node: *ast.Node) InnerError!*zir.Inst {
switch (ast_node.id) {
switch (ast_node.tag) {
.VarDecl => unreachable, // Handled in `blockExpr`.
.Identifier => return identifier(mod, scope, @fieldParentPtr(ast.Node.Identifier, "base", ast_node)),
.Asm => return assembly(mod, scope, @fieldParentPtr(ast.Node.Asm, "base", ast_node)),
.StringLiteral => return stringLiteral(mod, scope, @fieldParentPtr(ast.Node.StringLiteral, "base", ast_node)),
@ -23,29 +26,72 @@ pub fn expr(mod: *Module, scope: *Scope, ast_node: *ast.Node) InnerError!*zir.In
.ControlFlowExpression => return controlFlowExpr(mod, scope, @fieldParentPtr(ast.Node.ControlFlowExpression, "base", ast_node)),
.If => return ifExpr(mod, scope, @fieldParentPtr(ast.Node.If, "base", ast_node)),
.InfixOp => return infixOp(mod, scope, @fieldParentPtr(ast.Node.InfixOp, "base", ast_node)),
.BoolNot => return boolNot(mod, scope, @fieldParentPtr(ast.Node.BoolNot, "base", ast_node)),
.VarDecl => return varDecl(mod, scope, @fieldParentPtr(ast.Node.VarDecl, "base", ast_node)),
else => return mod.failNode(scope, ast_node, "TODO implement astgen.Expr for {}", .{@tagName(ast_node.id)}),
.BoolNot => return boolNot(mod, scope, @fieldParentPtr(ast.Node.SimplePrefixOp, "base", ast_node)),
else => return mod.failNode(scope, ast_node, "TODO implement astgen.Expr for {}", .{@tagName(ast_node.tag)}),
}
}
pub fn blockExpr(mod: *Module, scope: *Scope, block_node: *ast.Node.Block) !void {
pub fn blockExpr(mod: *Module, parent_scope: *Scope, block_node: *ast.Node.Block) !void {
const tracy = trace(@src());
defer tracy.end();
if (block_node.label) |label| {
return mod.failTok(scope, label, "TODO implement labeled blocks", .{});
return mod.failTok(parent_scope, label, "TODO implement labeled blocks", .{});
}
var block_arena = std.heap.ArenaAllocator.init(mod.gpa);
defer block_arena.deinit();
var scope = parent_scope;
for (block_node.statements()) |statement| {
_ = try expr(mod, scope, statement);
switch (statement.tag) {
.VarDecl => {
const sub_scope = try block_arena.allocator.create(Scope.LocalVar);
const var_decl_node = @fieldParentPtr(ast.Node.VarDecl, "base", statement);
sub_scope.* = try varDecl(mod, scope, var_decl_node);
scope = &sub_scope.base;
},
else => _ = try expr(mod, scope, statement),
}
}
}
fn varDecl(mod: *Module, scope: *Scope, node: *ast.Node.VarDecl) InnerError!*zir.Inst {
return mod.failNode(scope, &node.base, "TODO implement var decls", .{});
/// Lowers a local `const` declaration to a `Scope.LocalVar`.
/// Forms not implemented yet (comptime locals, aligned locals, typed
/// locals, `var`, and initializers needing a result location) produce
/// a "TODO" compile error; the check order determines which error the
/// user sees first.
fn varDecl(mod: *Module, scope: *Scope, node: *ast.Node.VarDecl) InnerError!Scope.LocalVar {
    if (node.getTrailer("comptime_token")) |comptime_token| {
        return mod.failTok(scope, comptime_token, "TODO implement comptime locals", .{});
    }
    if (node.getTrailer("align_node")) |align_node| {
        return mod.failNode(scope, align_node, "TODO implement alignment on locals", .{});
    }
    if (node.getTrailer("type_node")) |type_node| {
        return mod.failNode(scope, type_node, "TODO implement typed locals", .{});
    }

    const ast_tree = scope.tree();
    switch (ast_tree.token_ids[node.mut_token]) {
        .Keyword_var => {
            return mod.failTok(scope, node.mut_token, "TODO implement mutable locals", .{});
        },
        .Keyword_const => {},
        else => unreachable,
    }

    // Depending on the type of AST the initialization expression is, we may need an lvalue
    // or an rvalue as a result location. If it is an rvalue, we can use the instruction as
    // the variable, no memory location needed.
    const init_node = node.getTrailer("init_node").?;
    if (nodeNeedsMemoryLocation(init_node)) {
        return mod.failNode(scope, init_node, "TODO implement result locations", .{});
    }
    const init_inst = try expr(mod, scope, init_node);

    const name = ast_tree.tokenSlice(node.name_token); // TODO support @"aoeu" identifiers
    return Scope.LocalVar{
        .parent = scope,
        .gen_zir = scope.getGenZIR(),
        .name = name,
        .inst = init_inst,
    };
}
fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.BoolNot) InnerError!*zir.Inst {
fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
const operand = try expr(mod, scope, node.rhs);
const tree = scope.tree();
const src = tree.token_locs[node.op_token].start;
@ -55,7 +101,7 @@ fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.BoolNot) InnerError!*zir
fn infixOp(mod: *Module, scope: *Scope, infix_node: *ast.Node.InfixOp) InnerError!*zir.Inst {
switch (infix_node.op) {
.Assign => {
if (infix_node.lhs.id == .Identifier) {
if (infix_node.lhs.tag == .Identifier) {
const ident = @fieldParentPtr(ast.Node.Identifier, "base", infix_node.lhs);
const tree = scope.tree();
const ident_name = tree.tokenSlice(ident.token);
@ -474,3 +520,79 @@ fn getSimplePrimitiveValue(name: []const u8) ?TypedValue {
}
return null;
}
/// Returns whether evaluating `node` as an initialization expression
/// requires a result memory location (an lvalue the expression writes
/// into) rather than producing a plain rvalue instruction. Used by
/// `varDecl` to decide whether result-location support is needed.
/// The switch is deliberately exhaustive over `ast.Node.Tag` so that
/// adding a new tag forces a decision here at compile time.
fn nodeNeedsMemoryLocation(node: *ast.Node) bool {
    return switch (node.tag) {
        // These tags are never reachable in expression position.
        .Root,
        .Use,
        .TestDecl,
        .DocComment,
        .SwitchCase,
        .SwitchElse,
        .Else,
        .Payload,
        .PointerPayload,
        .PointerIndexPayload,
        .ContainerField,
        .ErrorTag,
        .FieldInitializer,
        => unreachable,

        // Expressions whose results fit in a single instruction; no
        // memory location needed.
        .ControlFlowExpression,
        .BitNot,
        .BoolNot,
        .VarDecl,
        .Defer,
        .AddressOf,
        .OptionalType,
        .Negation,
        .NegationWrap,
        .Resume,
        .ArrayType,
        .ArrayTypeSentinel,
        .PtrType,
        .SliceType,
        .Suspend,
        .AnyType,
        .ErrorType,
        .FnProto,
        .AnyFrameType,
        .IntegerLiteral,
        .FloatLiteral,
        .EnumLiteral,
        .StringLiteral,
        .MultilineStringLiteral,
        .CharLiteral,
        .BoolLiteral,
        .NullLiteral,
        .UndefinedLiteral,
        .Unreachable,
        .Identifier,
        .ErrorSetDecl,
        .ContainerDecl,
        .Asm,
        => false,

        // Aggregate initializers construct their result in place, so
        // they always need a destination.
        .ArrayInitializer,
        .ArrayInitializerDot,
        .StructInitializer,
        .StructInitializerDot,
        => true,

        // Parentheses are transparent: defer to the inner expression.
        .GroupedExpression => nodeNeedsMemoryLocation(node.cast(ast.Node.GroupedExpression).?.expr),

        // Not yet classified; each of these needs an explicit decision
        // before the corresponding astgen support lands.
        .InfixOp => @panic("TODO nodeNeedsMemoryLocation for InfixOp"),
        .Await => @panic("TODO nodeNeedsMemoryLocation for Await"),
        .Try => @panic("TODO nodeNeedsMemoryLocation for Try"),
        .If => @panic("TODO nodeNeedsMemoryLocation for If"),
        .SuffixOp => @panic("TODO nodeNeedsMemoryLocation for SuffixOp"),
        .Call => @panic("TODO nodeNeedsMemoryLocation for Call"),
        .Switch => @panic("TODO nodeNeedsMemoryLocation for Switch"),
        .While => @panic("TODO nodeNeedsMemoryLocation for While"),
        .For => @panic("TODO nodeNeedsMemoryLocation for For"),
        .BuiltinCall => @panic("TODO nodeNeedsMemoryLocation for BuiltinCall"),
        .Comptime => @panic("TODO nodeNeedsMemoryLocation for Comptime"),
        .Nosuspend => @panic("TODO nodeNeedsMemoryLocation for Nosuspend"),
        .Block => @panic("TODO nodeNeedsMemoryLocation for Block"),
    };
}

View File

@ -1219,7 +1219,7 @@ fn transStmt(
.StringLiteralClass => return transStringLiteral(rp, scope, @ptrCast(*const ZigClangStringLiteral, stmt), result_used),
.ParenExprClass => {
const expr = try transExpr(rp, scope, ZigClangParenExpr_getSubExpr(@ptrCast(*const ZigClangParenExpr, stmt)), .used, lrvalue);
if (expr.id == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
if (expr.tag == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
const node = try rp.c.arena.create(ast.Node.GroupedExpression);
node.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
@ -1264,7 +1264,7 @@ fn transStmt(
.OpaqueValueExprClass => {
const source_expr = ZigClangOpaqueValueExpr_getSourceExpr(@ptrCast(*const ZigClangOpaqueValueExpr, stmt)).?;
const expr = try transExpr(rp, scope, source_expr, .used, lrvalue);
if (expr.id == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
if (expr.tag == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
const node = try rp.c.arena.create(ast.Node.GroupedExpression);
node.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
@ -1693,7 +1693,7 @@ fn transBoolExpr(
var res = try transExpr(rp, scope, expr, used, lrvalue);
if (isBoolRes(res)) {
if (!grouped and res.id == .GroupedExpression) {
if (!grouped and res.tag == .GroupedExpression) {
const group = @fieldParentPtr(ast.Node.GroupedExpression, "base", res);
res = group.expr;
// get zig fmt to work properly
@ -1736,7 +1736,7 @@ fn exprIsStringLiteral(expr: *const ZigClangExpr) bool {
}
fn isBoolRes(res: *ast.Node) bool {
switch (res.id) {
switch (res.tag) {
.InfixOp => switch (@fieldParentPtr(ast.Node.InfixOp, "base", res).op) {
.BoolOr,
.BoolAnd,
@ -4107,12 +4107,13 @@ fn transCreateNodeFieldAccess(c: *Context, container: *ast.Node, field_name: []c
fn transCreateNodeSimplePrefixOp(
c: *Context,
comptime tag: ast.Node.Id,
comptime tag: ast.Node.Tag,
op_tok_id: std.zig.Token.Id,
bytes: []const u8,
) !*ast.Node.SimplePrefixOp(tag) {
const node = try c.arena.create(ast.Node.SimplePrefixOp(tag));
) !*ast.Node.SimplePrefixOp {
const node = try c.arena.create(ast.Node.SimplePrefixOp);
node.* = .{
.base = .{ .tag = tag },
.op_token = try appendToken(c, op_tok_id, bytes),
.rhs = undefined, // translate and set afterward
};
@ -5338,10 +5339,10 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
.{@tagName(last.id)},
);
_ = try appendToken(c, .Semicolon, ";");
const type_of_arg = if (expr.id != .Block) expr else blk: {
const type_of_arg = if (expr.tag != .Block) expr else blk: {
const blk = @fieldParentPtr(ast.Node.Block, "base", expr);
const blk_last = blk.statements()[blk.statements_len - 1];
std.debug.assert(blk_last.id == .ControlFlowExpression);
std.debug.assert(blk_last.tag == .ControlFlowExpression);
const br = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", blk_last);
break :blk br.rhs.?;
};
@ -5788,7 +5789,7 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
fn macroBoolToInt(c: *Context, node: *ast.Node) !*ast.Node {
if (!isBoolRes(node)) {
if (node.id != .InfixOp) return node;
if (node.tag != .InfixOp) return node;
const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
@ -5807,7 +5808,7 @@ fn macroBoolToInt(c: *Context, node: *ast.Node) !*ast.Node {
fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node {
if (isBoolRes(node)) {
if (node.id != .InfixOp) return node;
if (node.tag != .InfixOp) return node;
const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
@ -6105,7 +6106,7 @@ fn tokenSlice(c: *Context, token: ast.TokenIndex) []u8 {
}
fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
switch (node.id) {
switch (node.tag) {
.ContainerDecl,
.AddressOf,
.Await,
@ -6182,7 +6183,7 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
fn getFnProto(c: *Context, ref: *ast.Node) ?*ast.Node.FnProto {
const init = if (ref.cast(ast.Node.VarDecl)) |v| v.getTrailer("init_node").? else return null;
if (getContainerTypeOf(c, init)) |ty_node| {
if (ty_node.cast(ast.Node.OptionalType)) |prefix| {
if (ty_node.castTag(.OptionalType)) |prefix| {
if (prefix.rhs.cast(ast.Node.FnProto)) |fn_proto| {
return fn_proto;
}