Merge pull request #5885 from ziglang/stage2-locals

self-hosted compiler local consts
This commit is contained in:
Andrew Kelley 2020-07-16 05:57:32 +00:00 committed by GitHub
commit e079fdeee7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 1505 additions and 1003 deletions

File diff suppressed because it is too large Load Diff

View File

@ -1015,7 +1015,7 @@ const Parser = struct {
/// BoolOrExpr <- BoolAndExpr (KEYWORD_or BoolAndExpr)*
fn parseBoolOrExpr(p: *Parser) !?*Node {
return p.parseBinOpExpr(
SimpleBinOpParseFn(.Keyword_or, Node.InfixOp.Op.BoolOr),
SimpleBinOpParseFn(.Keyword_or, .BoolOr),
parseBoolAndExpr,
.Infinitely,
);
@ -1128,8 +1128,9 @@ const Parser = struct {
const expr_node = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
const node = try p.arena.allocator.create(Node.Resume);
const node = try p.arena.allocator.create(Node.SimplePrefixOp);
node.* = .{
.base = .{ .tag = .Resume },
.op_token = token,
.rhs = expr_node,
};
@ -1404,8 +1405,8 @@ const Parser = struct {
fn parseErrorUnionExpr(p: *Parser) !?*Node {
const suffix_expr = (try p.parseSuffixExpr()) orelse return null;
if (try SimpleBinOpParseFn(.Bang, Node.InfixOp.Op.ErrorUnion)(p)) |node| {
const error_union = node.cast(Node.InfixOp).?;
if (try SimpleBinOpParseFn(.Bang, .ErrorUnion)(p)) |node| {
const error_union = node.castTag(.ErrorUnion).?;
const type_expr = try p.expectNode(parseTypeExpr, .{
.ExpectedTypeExpr = .{ .token = p.tok_i },
});
@ -1438,10 +1439,56 @@ const Parser = struct {
.ExpectedPrimaryTypeExpr = .{ .token = p.tok_i },
});
// TODO pass `res` into `parseSuffixOp` rather than patching it up afterwards.
while (try p.parseSuffixOp()) |node| {
switch (node.id) {
switch (node.tag) {
.SuffixOp => node.cast(Node.SuffixOp).?.lhs = res,
.InfixOp => node.cast(Node.InfixOp).?.lhs = res,
.Catch => node.castTag(.Catch).?.lhs = res,
.Add,
.AddWrap,
.ArrayCat,
.ArrayMult,
.Assign,
.AssignBitAnd,
.AssignBitOr,
.AssignBitShiftLeft,
.AssignBitShiftRight,
.AssignBitXor,
.AssignDiv,
.AssignSub,
.AssignSubWrap,
.AssignMod,
.AssignAdd,
.AssignAddWrap,
.AssignMul,
.AssignMulWrap,
.BangEqual,
.BitAnd,
.BitOr,
.BitShiftLeft,
.BitShiftRight,
.BitXor,
.BoolAnd,
.BoolOr,
.Div,
.EqualEqual,
.ErrorUnion,
.GreaterOrEqual,
.GreaterThan,
.LessOrEqual,
.LessThan,
.MergeErrorSets,
.Mod,
.Mul,
.MulWrap,
.Period,
.Range,
.Sub,
.SubWrap,
.UnwrapOptional,
=> node.cast(Node.SimpleInfixOp).?.lhs = res,
else => unreachable,
}
res = node;
@ -1469,10 +1516,55 @@ const Parser = struct {
var res = expr;
while (true) {
// TODO pass `res` into `parseSuffixOp` rather than patching it up afterwards.
if (try p.parseSuffixOp()) |node| {
switch (node.id) {
switch (node.tag) {
.SuffixOp => node.cast(Node.SuffixOp).?.lhs = res,
.InfixOp => node.cast(Node.InfixOp).?.lhs = res,
.Catch => node.castTag(.Catch).?.lhs = res,
.Add,
.AddWrap,
.ArrayCat,
.ArrayMult,
.Assign,
.AssignBitAnd,
.AssignBitOr,
.AssignBitShiftLeft,
.AssignBitShiftRight,
.AssignBitXor,
.AssignDiv,
.AssignSub,
.AssignSubWrap,
.AssignMod,
.AssignAdd,
.AssignAddWrap,
.AssignMul,
.AssignMulWrap,
.BangEqual,
.BitAnd,
.BitOr,
.BitShiftLeft,
.BitShiftRight,
.BitXor,
.BoolAnd,
.BoolOr,
.Div,
.EqualEqual,
.ErrorUnion,
.GreaterOrEqual,
.GreaterThan,
.LessOrEqual,
.LessThan,
.MergeErrorSets,
.Mod,
.Mul,
.MulWrap,
.Period,
.Range,
.Sub,
.SubWrap,
.UnwrapOptional,
=> node.cast(Node.SimpleInfixOp).?.lhs = res,
else => unreachable,
}
res = node;
@ -1559,11 +1651,11 @@ const Parser = struct {
const global_error_set = try p.createLiteral(Node.ErrorType, token);
if (period == null or identifier == null) return global_error_set;
const node = try p.arena.allocator.create(Node.InfixOp);
const node = try p.arena.allocator.create(Node.SimpleInfixOp);
node.* = .{
.base = Node{ .tag = .Period },
.op_token = period.?,
.lhs = global_error_set,
.op = .Period,
.rhs = identifier.?,
};
return &node.base;
@ -1660,7 +1752,7 @@ const Parser = struct {
}
if (try p.parseLoopTypeExpr()) |node| {
switch (node.id) {
switch (node.tag) {
.For => node.cast(Node.For).?.label = label,
.While => node.cast(Node.While).?.label = label,
else => unreachable,
@ -2236,11 +2328,11 @@ const Parser = struct {
.ExpectedExpr = .{ .token = p.tok_i },
});
const node = try p.arena.allocator.create(Node.InfixOp);
const node = try p.arena.allocator.create(Node.SimpleInfixOp);
node.* = .{
.base = Node{ .tag = .Range },
.op_token = token,
.lhs = expr,
.op = .Range,
.rhs = range_end,
};
return &node.base;
@ -2265,7 +2357,7 @@ const Parser = struct {
/// / EQUAL
fn parseAssignOp(p: *Parser) !?*Node {
const token = p.nextToken();
const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
const op: Node.Tag = switch (p.token_ids[token]) {
.AsteriskEqual => .AssignMul,
.SlashEqual => .AssignDiv,
.PercentEqual => .AssignMod,
@ -2286,11 +2378,11 @@ const Parser = struct {
},
};
const node = try p.arena.allocator.create(Node.InfixOp);
const node = try p.arena.allocator.create(Node.SimpleInfixOp);
node.* = .{
.base = .{ .tag = op },
.op_token = token,
.lhs = undefined, // set by caller
.op = op,
.rhs = undefined, // set by caller
};
return &node.base;
@ -2305,7 +2397,7 @@ const Parser = struct {
/// / RARROWEQUAL
fn parseCompareOp(p: *Parser) !?*Node {
const token = p.nextToken();
const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
const op: Node.Tag = switch (p.token_ids[token]) {
.EqualEqual => .EqualEqual,
.BangEqual => .BangEqual,
.AngleBracketLeft => .LessThan,
@ -2329,12 +2421,22 @@ const Parser = struct {
/// / KEYWORD_catch Payload?
fn parseBitwiseOp(p: *Parser) !?*Node {
const token = p.nextToken();
const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
const op: Node.Tag = switch (p.token_ids[token]) {
.Ampersand => .BitAnd,
.Caret => .BitXor,
.Pipe => .BitOr,
.Keyword_orelse => .UnwrapOptional,
.Keyword_catch => .{ .Catch = try p.parsePayload() },
.Keyword_catch => {
const payload = try p.parsePayload();
const node = try p.arena.allocator.create(Node.Catch);
node.* = .{
.op_token = token,
.lhs = undefined, // set by caller
.rhs = undefined, // set by caller
.payload = payload,
};
return &node.base;
},
else => {
p.putBackToken(token);
return null;
@ -2349,7 +2451,7 @@ const Parser = struct {
/// / RARROW2
fn parseBitShiftOp(p: *Parser) !?*Node {
const token = p.nextToken();
const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
const op: Node.Tag = switch (p.token_ids[token]) {
.AngleBracketAngleBracketLeft => .BitShiftLeft,
.AngleBracketAngleBracketRight => .BitShiftRight,
else => {
@ -2369,7 +2471,7 @@ const Parser = struct {
/// / MINUSPERCENT
fn parseAdditionOp(p: *Parser) !?*Node {
const token = p.nextToken();
const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
const op: Node.Tag = switch (p.token_ids[token]) {
.Plus => .Add,
.Minus => .Sub,
.PlusPlus => .ArrayCat,
@ -2393,7 +2495,7 @@ const Parser = struct {
/// / ASTERISKPERCENT
fn parseMultiplyOp(p: *Parser) !?*Node {
const token = p.nextToken();
const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
const op: Node.Tag = switch (p.token_ids[token]) {
.PipePipe => .MergeErrorSets,
.Asterisk => .Mul,
.Slash => .Div,
@ -2434,9 +2536,10 @@ const Parser = struct {
}
}
fn allocSimplePrefixOp(p: *Parser, comptime tag: Node.Id, token: TokenIndex) !?*Node {
const node = try p.arena.allocator.create(Node.SimplePrefixOp(tag));
fn allocSimplePrefixOp(p: *Parser, comptime tag: Node.Tag, token: TokenIndex) !?*Node {
const node = try p.arena.allocator.create(Node.SimplePrefixOp);
node.* = .{
.base = .{ .tag = tag },
.op_token = token,
.rhs = undefined, // set by caller
};
@ -2457,8 +2560,9 @@ const Parser = struct {
/// / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
fn parsePrefixTypeOp(p: *Parser) !?*Node {
if (p.eatToken(.QuestionMark)) |token| {
const node = try p.arena.allocator.create(Node.OptionalType);
const node = try p.arena.allocator.create(Node.SimplePrefixOp);
node.* = .{
.base = .{ .tag = .OptionalType },
.op_token = token,
.rhs = undefined, // set by caller
};
@ -2670,14 +2774,14 @@ const Parser = struct {
if (p.eatToken(.Period)) |period| {
if (try p.parseIdentifier()) |identifier| {
// TODO: It's a bit weird to return an InfixOp from the SuffixOp parser.
// TODO: It's a bit weird to return a SimpleInfixOp from the SuffixOp parser.
// Should there be a Node.SuffixOp.FieldAccess variant? Or should
// this grammar rule be altered?
const node = try p.arena.allocator.create(Node.InfixOp);
const node = try p.arena.allocator.create(Node.SimpleInfixOp);
node.* = .{
.base = Node{ .tag = .Period },
.op_token = period,
.lhs = undefined, // set by caller
.op = .Period,
.rhs = identifier,
};
return &node.base;
@ -2984,7 +3088,7 @@ const Parser = struct {
}.parse;
}
fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.InfixOp.Op) NodeParseFn {
fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.Tag) NodeParseFn {
return struct {
pub fn parse(p: *Parser) Error!?*Node {
const op_token = if (token == .Keyword_and) switch (p.token_ids[p.tok_i]) {
@ -2998,11 +3102,11 @@ const Parser = struct {
else => return null,
} else p.eatToken(token) orelse return null;
const node = try p.arena.allocator.create(Node.InfixOp);
const node = try p.arena.allocator.create(Node.SimpleInfixOp);
node.* = .{
.base = .{ .tag = op },
.op_token = op_token,
.lhs = undefined, // set by caller
.op = op,
.rhs = undefined, // set by caller
};
return &node.base;
@ -3072,7 +3176,6 @@ const Parser = struct {
fn createLiteral(p: *Parser, comptime T: type, token: TokenIndex) !*Node {
const result = try p.arena.allocator.create(T);
result.* = T{
.base = Node{ .id = Node.typeToId(T) },
.token = token,
};
return &result.base;
@ -3148,8 +3251,9 @@ const Parser = struct {
fn parseTry(p: *Parser) !?*Node {
const token = p.eatToken(.Keyword_try) orelse return null;
const node = try p.arena.allocator.create(Node.Try);
const node = try p.arena.allocator.create(Node.SimplePrefixOp);
node.* = .{
.base = .{ .tag = .Try },
.op_token = token,
.rhs = undefined, // set by caller
};
@ -3213,58 +3317,19 @@ const Parser = struct {
if (try opParseFn(p)) |first_op| {
var rightmost_op = first_op;
while (true) {
switch (rightmost_op.id) {
.AddressOf => {
switch (rightmost_op.tag) {
.AddressOf,
.Await,
.BitNot,
.BoolNot,
.OptionalType,
.Negation,
.NegationWrap,
.Resume,
.Try,
=> {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.AddressOf).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.Await => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.Await).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.BitNot => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.BitNot).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.BoolNot => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.BoolNot).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.OptionalType => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.OptionalType).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.Negation => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.Negation).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.NegationWrap => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.NegationWrap).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.Resume => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.Resume).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.Try => {
if (try opParseFn(p)) |rhs| {
rightmost_op.cast(Node.Try).?.rhs = rhs;
rightmost_op.cast(Node.SimplePrefixOp).?.rhs = rhs;
rightmost_op = rhs;
} else break;
},
@ -3310,57 +3375,18 @@ const Parser = struct {
}
// If any prefix op existed, a child node on the RHS is required
switch (rightmost_op.id) {
.AddressOf => {
const prefix_op = rightmost_op.cast(Node.AddressOf).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.Await => {
const prefix_op = rightmost_op.cast(Node.Await).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.BitNot => {
const prefix_op = rightmost_op.cast(Node.BitNot).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.BoolNot => {
const prefix_op = rightmost_op.cast(Node.BoolNot).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.OptionalType => {
const prefix_op = rightmost_op.cast(Node.OptionalType).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.Negation => {
const prefix_op = rightmost_op.cast(Node.Negation).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.NegationWrap => {
const prefix_op = rightmost_op.cast(Node.NegationWrap).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.Resume => {
const prefix_op = rightmost_op.cast(Node.Resume).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.Try => {
const prefix_op = rightmost_op.cast(Node.Try).?;
switch (rightmost_op.tag) {
.AddressOf,
.Await,
.BitNot,
.BoolNot,
.OptionalType,
.Negation,
.NegationWrap,
.Resume,
.Try,
=> {
const prefix_op = rightmost_op.cast(Node.SimplePrefixOp).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
@ -3425,9 +3451,13 @@ const Parser = struct {
const left = res;
res = node;
const op = node.cast(Node.InfixOp).?;
op.*.lhs = left;
op.*.rhs = right;
if (node.castTag(.Catch)) |op| {
op.lhs = left;
op.rhs = right;
} else if (node.cast(Node.SimpleInfixOp)) |op| {
op.lhs = left;
op.rhs = right;
}
switch (chain) {
.Once => break,
@ -3438,12 +3468,12 @@ const Parser = struct {
return res;
}
fn createInfixOp(p: *Parser, index: TokenIndex, op: Node.InfixOp.Op) !*Node {
const node = try p.arena.allocator.create(Node.InfixOp);
fn createInfixOp(p: *Parser, op_token: TokenIndex, tag: Node.Tag) !*Node {
const node = try p.arena.allocator.create(Node.SimpleInfixOp);
node.* = .{
.op_token = index,
.base = Node{ .tag = tag },
.op_token = op_token,
.lhs = undefined, // set by caller
.op = op,
.rhs = undefined, // set by caller
};
return &node.base;

View File

@ -223,7 +223,7 @@ fn renderTopLevelDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tre
}
fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, indent: usize, start_col: *usize, decl: *ast.Node, space: Space) (@TypeOf(stream).Error || Error)!void {
switch (decl.id) {
switch (decl.tag) {
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
@ -365,7 +365,7 @@ fn renderExpression(
base: *ast.Node,
space: Space,
) (@TypeOf(stream).Error || Error)!void {
switch (base.id) {
switch (base.tag) {
.Identifier => {
const identifier = @fieldParentPtr(ast.Node.Identifier, "base", base);
return renderToken(tree, stream, identifier.token, indent, start_col, space);
@ -436,11 +436,79 @@ fn renderExpression(
}
},
.InfixOp => {
const infix_op_node = @fieldParentPtr(ast.Node.InfixOp, "base", base);
.Catch => {
const infix_op_node = @fieldParentPtr(ast.Node.Catch, "base", base);
const op_space = switch (infix_op_node.op) {
ast.Node.InfixOp.Op.Period, ast.Node.InfixOp.Op.ErrorUnion, ast.Node.InfixOp.Op.Range => Space.None,
const op_space = Space.Space;
try renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.lhs, op_space);
const after_op_space = blk: {
const loc = tree.tokenLocation(tree.token_locs[infix_op_node.op_token].end, tree.nextToken(infix_op_node.op_token));
break :blk if (loc.line == 0) op_space else Space.Newline;
};
try renderToken(tree, stream, infix_op_node.op_token, indent, start_col, after_op_space);
if (after_op_space == Space.Newline and
tree.token_ids[tree.nextToken(infix_op_node.op_token)] != .MultilineStringLiteralLine)
{
try stream.writeByteNTimes(' ', indent + indent_delta);
start_col.* = indent + indent_delta;
}
if (infix_op_node.payload) |payload| {
try renderExpression(allocator, stream, tree, indent, start_col, payload, Space.Space);
}
return renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.rhs, space);
},
.Add,
.AddWrap,
.ArrayCat,
.ArrayMult,
.Assign,
.AssignBitAnd,
.AssignBitOr,
.AssignBitShiftLeft,
.AssignBitShiftRight,
.AssignBitXor,
.AssignDiv,
.AssignSub,
.AssignSubWrap,
.AssignMod,
.AssignAdd,
.AssignAddWrap,
.AssignMul,
.AssignMulWrap,
.BangEqual,
.BitAnd,
.BitOr,
.BitShiftLeft,
.BitShiftRight,
.BitXor,
.BoolAnd,
.BoolOr,
.Div,
.EqualEqual,
.ErrorUnion,
.GreaterOrEqual,
.GreaterThan,
.LessOrEqual,
.LessThan,
.MergeErrorSets,
.Mod,
.Mul,
.MulWrap,
.Period,
.Range,
.Sub,
.SubWrap,
.UnwrapOptional,
=> {
const infix_op_node = @fieldParentPtr(ast.Node.SimpleInfixOp, "base", base);
const op_space = switch (base.tag) {
.Period, .ErrorUnion, .Range => Space.None,
else => Space.Space,
};
try renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.lhs, op_space);
@ -458,60 +526,28 @@ fn renderExpression(
start_col.* = indent + indent_delta;
}
switch (infix_op_node.op) {
ast.Node.InfixOp.Op.Catch => |maybe_payload| if (maybe_payload) |payload| {
try renderExpression(allocator, stream, tree, indent, start_col, payload, Space.Space);
},
else => {},
}
return renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.rhs, space);
},
.BitNot => {
const bit_not = @fieldParentPtr(ast.Node.BitNot, "base", base);
try renderToken(tree, stream, bit_not.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, bit_not.rhs, space);
.BitNot,
.BoolNot,
.Negation,
.NegationWrap,
.OptionalType,
.AddressOf,
=> {
const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base);
try renderToken(tree, stream, casted_node.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, casted_node.rhs, space);
},
.BoolNot => {
const bool_not = @fieldParentPtr(ast.Node.BoolNot, "base", base);
try renderToken(tree, stream, bool_not.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, bool_not.rhs, space);
},
.Negation => {
const negation = @fieldParentPtr(ast.Node.Negation, "base", base);
try renderToken(tree, stream, negation.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, negation.rhs, space);
},
.NegationWrap => {
const negation_wrap = @fieldParentPtr(ast.Node.NegationWrap, "base", base);
try renderToken(tree, stream, negation_wrap.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, negation_wrap.rhs, space);
},
.OptionalType => {
const opt_type = @fieldParentPtr(ast.Node.OptionalType, "base", base);
try renderToken(tree, stream, opt_type.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, opt_type.rhs, space);
},
.AddressOf => {
const addr_of = @fieldParentPtr(ast.Node.AddressOf, "base", base);
try renderToken(tree, stream, addr_of.op_token, indent, start_col, Space.None);
return renderExpression(allocator, stream, tree, indent, start_col, addr_of.rhs, space);
},
.Try => {
const try_node = @fieldParentPtr(ast.Node.Try, "base", base);
try renderToken(tree, stream, try_node.op_token, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, try_node.rhs, space);
},
.Resume => {
const resume_node = @fieldParentPtr(ast.Node.Resume, "base", base);
try renderToken(tree, stream, resume_node.op_token, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, resume_node.rhs, space);
},
.Await => {
const await_node = @fieldParentPtr(ast.Node.Await, "base", base);
try renderToken(tree, stream, await_node.op_token, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, await_node.rhs, space);
.Try,
.Resume,
.Await,
=> {
const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base);
try renderToken(tree, stream, casted_node.op_token, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, casted_node.rhs, space);
},
.ArrayType => {
@ -659,7 +695,7 @@ fn renderExpression(
.ArrayInitializer, .ArrayInitializerDot => {
var rtoken: ast.TokenIndex = undefined;
var exprs: []*ast.Node = undefined;
const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.id) {
const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.tag) {
.ArrayInitializerDot => blk: {
const casted = @fieldParentPtr(ast.Node.ArrayInitializerDot, "base", base);
rtoken = casted.rtoken;
@ -793,14 +829,14 @@ fn renderExpression(
}
try renderExtraNewline(tree, stream, start_col, next_expr);
if (next_expr.id != .MultilineStringLiteral) {
if (next_expr.tag != .MultilineStringLiteral) {
try stream.writeByteNTimes(' ', new_indent);
}
} else {
try renderExpression(allocator, stream, tree, new_indent, start_col, expr, Space.Comma); // ,
}
}
if (exprs[exprs.len - 1].id != .MultilineStringLiteral) {
if (exprs[exprs.len - 1].tag != .MultilineStringLiteral) {
try stream.writeByteNTimes(' ', indent);
}
return renderToken(tree, stream, rtoken, indent, start_col, space);
@ -823,7 +859,7 @@ fn renderExpression(
.StructInitializer, .StructInitializerDot => {
var rtoken: ast.TokenIndex = undefined;
var field_inits: []*ast.Node = undefined;
const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.id) {
const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.tag) {
.StructInitializerDot => blk: {
const casted = @fieldParentPtr(ast.Node.StructInitializerDot, "base", base);
rtoken = casted.rtoken;
@ -877,7 +913,7 @@ fn renderExpression(
if (field_inits.len == 1) blk: {
const field_init = field_inits[0].cast(ast.Node.FieldInitializer).?;
switch (field_init.expr.id) {
switch (field_init.expr.tag) {
.StructInitializer,
.StructInitializerDot,
=> break :blk,
@ -974,7 +1010,7 @@ fn renderExpression(
const params = call.params();
for (params) |param_node, i| {
const param_node_new_indent = if (param_node.id == .MultilineStringLiteral) blk: {
const param_node_new_indent = if (param_node.tag == .MultilineStringLiteral) blk: {
break :blk indent;
} else blk: {
try stream.writeByteNTimes(' ', new_indent);
@ -1284,7 +1320,7 @@ fn renderExpression(
// declarations inside are fields
const src_has_only_fields = blk: {
for (fields_and_decls) |decl| {
if (decl.id != .ContainerField) break :blk false;
if (decl.tag != .ContainerField) break :blk false;
}
break :blk true;
};
@ -1831,7 +1867,7 @@ fn renderExpression(
const rparen = tree.nextToken(for_node.array_expr.lastToken());
const body_is_block = for_node.body.id == .Block;
const body_is_block = for_node.body.tag == .Block;
const src_one_line_to_body = !body_is_block and tree.tokensOnSameLine(rparen, for_node.body.firstToken());
const body_on_same_line = body_is_block or src_one_line_to_body;
@ -1874,7 +1910,7 @@ fn renderExpression(
try renderExpression(allocator, stream, tree, indent, start_col, if_node.condition, Space.None); // condition
const body_is_if_block = if_node.body.id == .If;
const body_is_if_block = if_node.body.tag == .If;
const body_is_block = nodeIsBlock(if_node.body);
if (body_is_if_block) {
@ -1978,7 +2014,7 @@ fn renderExpression(
const indent_once = indent + indent_delta;
if (asm_node.template.id == .MultilineStringLiteral) {
if (asm_node.template.tag == .MultilineStringLiteral) {
// After rendering a multiline string literal the cursor is
// already offset by indent
try stream.writeByteNTimes(' ', indent_delta);
@ -2245,7 +2281,7 @@ fn renderVarDecl(
}
if (var_decl.getTrailer("init_node")) |init_node| {
const s = if (init_node.id == .MultilineStringLiteral) Space.None else Space.Space;
const s = if (init_node.tag == .MultilineStringLiteral) Space.None else Space.Space;
try renderToken(tree, stream, var_decl.getTrailer("eq_token").?, indent, start_col, s); // =
try renderExpression(allocator, stream, tree, indent, start_col, init_node, Space.None);
}
@ -2287,7 +2323,7 @@ fn renderStatement(
start_col: *usize,
base: *ast.Node,
) (@TypeOf(stream).Error || Error)!void {
switch (base.id) {
switch (base.tag) {
.VarDecl => {
const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", base);
try renderVarDecl(allocator, stream, tree, indent, start_col, var_decl);
@ -2566,7 +2602,7 @@ fn renderDocCommentsToken(
}
fn nodeIsBlock(base: *const ast.Node) bool {
return switch (base.id) {
return switch (base.tag) {
.Block,
.If,
.For,
@ -2578,10 +2614,52 @@ fn nodeIsBlock(base: *const ast.Node) bool {
}
fn nodeCausesSliceOpSpace(base: *ast.Node) bool {
const infix_op = base.cast(ast.Node.InfixOp) orelse return false;
return switch (infix_op.op) {
ast.Node.InfixOp.Op.Period => false,
else => true,
return switch (base.tag) {
.Catch,
.Add,
.AddWrap,
.ArrayCat,
.ArrayMult,
.Assign,
.AssignBitAnd,
.AssignBitOr,
.AssignBitShiftLeft,
.AssignBitShiftRight,
.AssignBitXor,
.AssignDiv,
.AssignSub,
.AssignSubWrap,
.AssignMod,
.AssignAdd,
.AssignAddWrap,
.AssignMul,
.AssignMulWrap,
.BangEqual,
.BitAnd,
.BitOr,
.BitShiftLeft,
.BitShiftRight,
.BitXor,
.BoolAnd,
.BoolOr,
.Div,
.EqualEqual,
.ErrorUnion,
.GreaterOrEqual,
.GreaterThan,
.LessOrEqual,
.LessThan,
.MergeErrorSets,
.Mod,
.Mul,
.MulWrap,
.Range,
.Sub,
.SubWrap,
.UnwrapOptional,
=> true,
else => false,
};
}

View File

@ -19,6 +19,7 @@ const Body = ir.Body;
const ast = std.zig.ast;
const trace = @import("tracy.zig").trace;
const liveness = @import("liveness.zig");
const astgen = @import("astgen.zig");
/// General-purpose allocator. Used for both temporary and long-term storage.
gpa: *Allocator,
@ -76,6 +77,8 @@ deletion_set: std.ArrayListUnmanaged(*Decl) = .{},
keep_source_files_loaded: bool,
pub const InnerError = error{ OutOfMemory, AnalysisFail };
const WorkItem = union(enum) {
/// Write the machine code for a Decl to the output file.
codegen_decl: *Decl,
@ -209,6 +212,7 @@ pub const Decl = struct {
},
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
}
}
@ -304,6 +308,7 @@ pub const Scope = struct {
.block => return self.cast(Block).?.arena,
.decl => return &self.cast(DeclAnalysis).?.arena.allocator,
.gen_zir => return self.cast(GenZIR).?.arena,
.local_var => return self.cast(LocalVar).?.gen_zir.arena,
.zir_module => return &self.cast(ZIRModule).?.contents.module.arena.allocator,
.file => unreachable,
}
@ -315,6 +320,7 @@ pub const Scope = struct {
return switch (self.tag) {
.block => self.cast(Block).?.decl,
.gen_zir => self.cast(GenZIR).?.decl,
.local_var => return self.cast(LocalVar).?.gen_zir.decl,
.decl => self.cast(DeclAnalysis).?.decl,
.zir_module => null,
.file => null,
@ -327,6 +333,7 @@ pub const Scope = struct {
switch (self.tag) {
.block => return self.cast(Block).?.decl.scope,
.gen_zir => return self.cast(GenZIR).?.decl.scope,
.local_var => return self.cast(LocalVar).?.gen_zir.decl.scope,
.decl => return self.cast(DeclAnalysis).?.decl.scope,
.zir_module, .file => return self,
}
@ -339,6 +346,7 @@ pub const Scope = struct {
switch (self.tag) {
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
.zir_module => return self.cast(ZIRModule).?.fullyQualifiedNameHash(name),
.file => return self.cast(File).?.fullyQualifiedNameHash(name),
@ -353,9 +361,22 @@ pub const Scope = struct {
.decl => return self.cast(DeclAnalysis).?.decl.scope.cast(File).?.contents.tree,
.block => return self.cast(Block).?.decl.scope.cast(File).?.contents.tree,
.gen_zir => return self.cast(GenZIR).?.decl.scope.cast(File).?.contents.tree,
.local_var => return self.cast(LocalVar).?.gen_zir.decl.scope.cast(File).?.contents.tree,
}
}
/// Asserts the scope is a child of a `GenZIR` and returns it.
pub fn getGenZIR(self: *Scope) *GenZIR {
return switch (self.tag) {
.block => unreachable,
.gen_zir => self.cast(GenZIR).?,
.local_var => return self.cast(LocalVar).?.gen_zir,
.decl => unreachable,
.zir_module => unreachable,
.file => unreachable,
};
}
pub fn dumpInst(self: *Scope, inst: *Inst) void {
const zir_module = self.namespace();
const loc = std.zig.findLineColumn(zir_module.source.bytes, inst.src);
@ -376,6 +397,7 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).sub_file_path,
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
}
}
@ -386,6 +408,7 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).unload(gpa),
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
}
}
@ -395,6 +418,7 @@ pub const Scope = struct {
.file => return @fieldParentPtr(File, "base", base).getSource(module),
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).getSource(module),
.gen_zir => unreachable,
.local_var => unreachable,
.block => unreachable,
.decl => unreachable,
}
@ -407,6 +431,7 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).removeDecl(child),
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
}
}
@ -426,6 +451,7 @@ pub const Scope = struct {
},
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.decl => unreachable,
}
}
@ -446,6 +472,7 @@ pub const Scope = struct {
block,
decl,
gen_zir,
local_var,
};
pub const File = struct {
@ -673,10 +700,25 @@ pub const Scope = struct {
pub const GenZIR = struct {
pub const base_tag: Tag = .gen_zir;
base: Scope = Scope{ .tag = base_tag },
/// Parents can be: `GenZIR`, `ZIRModule`, `File`
parent: *Scope,
decl: *Decl,
arena: *Allocator,
/// The first N instructions in a function body ZIR are arg instructions.
instructions: std.ArrayListUnmanaged(*zir.Inst) = .{},
};
/// This structure lives as long as the AST generation of the Block
/// node that contains the variable.
pub const LocalVar = struct {
pub const base_tag: Tag = .local_var;
base: Scope = Scope{ .tag = base_tag },
/// Parents can be: `LocalVar`, `GenZIR`.
parent: *Scope,
gen_zir: *GenZIR,
name: []const u8,
inst: *zir.Inst,
};
};
pub const AllErrors = struct {
@ -944,8 +986,6 @@ pub fn getAllErrorsAlloc(self: *Module) !AllErrors {
};
}
const InnerError = error{ OutOfMemory, AnalysisFail };
pub fn performAllTheWork(self: *Module) error{OutOfMemory}!void {
while (self.work_queue.readItem()) |work_item| switch (work_item) {
.codegen_decl => |decl| switch (decl.analysis) {
@ -1113,7 +1153,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const file_scope = decl.scope.cast(Scope.File).?;
const tree = try self.getAstTree(file_scope);
const ast_node = tree.root_node.decls()[decl.src_index];
switch (ast_node.id) {
switch (ast_node.tag) {
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", ast_node);
@ -1127,6 +1167,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
var fn_type_scope: Scope.GenZIR = .{
.decl = decl,
.arena = &fn_type_scope_arena.allocator,
.parent = decl.scope,
};
defer fn_type_scope.instructions.deinit(self.gpa);
@ -1140,7 +1181,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.any_type => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement anytype parameter", .{}),
.type_expr => |node| node,
};
param_types[i] = try self.astGenExpr(&fn_type_scope.base, param_type_node);
param_types[i] = try astgen.expr(self, &fn_type_scope.base, param_type_node);
}
if (fn_proto.getTrailer("var_args_token")) |var_args_token| {
return self.failTok(&fn_type_scope.base, var_args_token, "TODO implement var args", .{});
@ -1168,7 +1209,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.Invalid => |tok| return self.failTok(&fn_type_scope.base, tok, "unable to parse return type", .{}),
};
const return_type_inst = try self.astGenExpr(&fn_type_scope.base, return_type_expr);
const return_type_inst = try astgen.expr(self, &fn_type_scope.base, return_type_expr);
const fn_src = tree.token_locs[fn_proto.fn_token].start;
const fn_type_inst = try self.addZIRInst(&fn_type_scope.base, fn_src, zir.Inst.FnType, .{
.return_type = return_type_inst,
@ -1204,12 +1245,32 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
var gen_scope: Scope.GenZIR = .{
.decl = decl,
.arena = &gen_scope_arena.allocator,
.parent = decl.scope,
};
defer gen_scope.instructions.deinit(self.gpa);
// We need an instruction for each parameter, and they must be first in the body.
try gen_scope.instructions.resize(self.gpa, fn_proto.params_len);
var params_scope = &gen_scope.base;
for (fn_proto.params()) |param, i| {
const name_token = param.name_token.?;
const src = tree.token_locs[name_token].start;
const param_name = tree.tokenSlice(name_token);
const arg = try newZIRInst(&gen_scope_arena.allocator, src, zir.Inst.Arg, .{}, .{});
gen_scope.instructions.items[i] = &arg.base;
const sub_scope = try gen_scope_arena.allocator.create(Scope.LocalVar);
sub_scope.* = .{
.parent = params_scope,
.gen_zir = &gen_scope,
.name = param_name,
.inst = &arg.base,
};
params_scope = &sub_scope.base;
}
const body_block = body_node.cast(ast.Node.Block).?;
try self.astGenBlock(&gen_scope.base, body_block);
try astgen.blockExpr(self, params_scope, body_block);
if (!fn_type.fnReturnType().isNoReturn() and (gen_scope.instructions.items.len == 0 or
!gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn()))
@ -1298,465 +1359,6 @@ fn analyzeBodyValueAsType(self: *Module, block_scope: *Scope.Block, body: zir.Mo
unreachable;
}
/// Dispatch a single AST expression node to its ZIR generation routine.
/// Returns the instruction producing the expression's value, or fails the
/// surrounding Decl with a "TODO" error for unimplemented node kinds.
fn astGenExpr(self: *Module, scope: *Scope, ast_node: *ast.Node) InnerError!*zir.Inst {
    switch (ast_node.id) {
        .Identifier => return self.astGenIdent(scope, @fieldParentPtr(ast.Node.Identifier, "base", ast_node)),
        .Asm => return self.astGenAsm(scope, @fieldParentPtr(ast.Node.Asm, "base", ast_node)),
        .StringLiteral => return self.astGenStringLiteral(scope, @fieldParentPtr(ast.Node.StringLiteral, "base", ast_node)),
        .IntegerLiteral => return self.astGenIntegerLiteral(scope, @fieldParentPtr(ast.Node.IntegerLiteral, "base", ast_node)),
        .BuiltinCall => return self.astGenBuiltinCall(scope, @fieldParentPtr(ast.Node.BuiltinCall, "base", ast_node)),
        .Call => return self.astGenCall(scope, @fieldParentPtr(ast.Node.Call, "base", ast_node)),
        .Unreachable => return self.astGenUnreachable(scope, @fieldParentPtr(ast.Node.Unreachable, "base", ast_node)),
        .ControlFlowExpression => return self.astGenControlFlowExpression(scope, @fieldParentPtr(ast.Node.ControlFlowExpression, "base", ast_node)),
        .If => return self.astGenIf(scope, @fieldParentPtr(ast.Node.If, "base", ast_node)),
        .InfixOp => return self.astGenInfixOp(scope, @fieldParentPtr(ast.Node.InfixOp, "base", ast_node)),
        .BoolNot => return self.astGenBoolNot(scope, @fieldParentPtr(ast.Node.BoolNot, "base", ast_node)),
        else => return self.failNode(scope, ast_node, "TODO implement astGenExpr for {}", .{@tagName(ast_node.id)}),
    }
}
/// Lower `!rhs` to a ZIR `boolnot` instruction.
fn astGenBoolNot(self: *Module, scope: *Scope, node: *ast.Node.BoolNot) InnerError!*zir.Inst {
    const rhs_inst = try self.astGenExpr(scope, node.rhs);
    const tree = scope.tree();
    const op_src = tree.token_locs[node.op_token].start;
    return self.addZIRInst(scope, op_src, zir.Inst.BoolNot, .{ .operand = rhs_inst }, .{});
}
/// Lower an infix (binary) operation to ZIR. Only discarding assignment
/// (`_ = rhs`), `+`, and the six comparison operators are implemented;
/// any other operator reports a "TODO" error on the Decl.
fn astGenInfixOp(self: *Module, scope: *Scope, infix_node: *ast.Node.InfixOp) InnerError!*zir.Inst {
    switch (infix_node.op) {
        .Assign => {
            if (infix_node.lhs.id == .Identifier) {
                const ident = @fieldParentPtr(ast.Node.Identifier, "base", infix_node.lhs);
                const tree = scope.tree();
                const ident_name = tree.tokenSlice(ident.token);
                if (std.mem.eql(u8, ident_name, "_")) {
                    // `_ = expr` evaluates the right-hand side for side effects only.
                    return self.astGenExpr(scope, infix_node.rhs);
                } else {
                    return self.failNode(scope, &infix_node.base, "TODO implement infix operator assign", .{});
                }
            } else {
                return self.failNode(scope, &infix_node.base, "TODO implement infix operator assign", .{});
            }
        },
        .Add => {
            // Operands are generated left-to-right, before the `add` itself.
            const lhs = try self.astGenExpr(scope, infix_node.lhs);
            const rhs = try self.astGenExpr(scope, infix_node.rhs);
            const tree = scope.tree();
            const src = tree.token_locs[infix_node.op_token].start;
            return self.addZIRInst(scope, src, zir.Inst.Add, .{ .lhs = lhs, .rhs = rhs }, .{});
        },
        .BangEqual,
        .EqualEqual,
        .GreaterThan,
        .GreaterOrEqual,
        .LessThan,
        .LessOrEqual,
        => {
            const lhs = try self.astGenExpr(scope, infix_node.lhs);
            const rhs = try self.astGenExpr(scope, infix_node.rhs);
            const tree = scope.tree();
            const src = tree.token_locs[infix_node.op_token].start;
            // Map the AST operator onto the std.math comparison it denotes.
            const op: std.math.CompareOperator = switch (infix_node.op) {
                .BangEqual => .neq,
                .EqualEqual => .eq,
                .GreaterThan => .gt,
                .GreaterOrEqual => .gte,
                .LessThan => .lt,
                .LessOrEqual => .lte,
                else => unreachable, // restricted by the enclosing switch prong
            };
            return self.addZIRInst(scope, src, zir.Inst.Cmp, .{
                .lhs = lhs,
                .op = op,
                .rhs = rhs,
            }, .{});
        },
        else => |op| {
            return self.failNode(scope, &infix_node.base, "TODO implement infix operator {}", .{op});
        },
    }
}
/// Lower an `if` expression (without payload captures) to a ZIR block whose
/// body ends in a `condbr`; each branch breaks its result to that block.
/// The condbr's true/false bodies are patched in after generating each branch.
fn astGenIf(self: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir.Inst {
    // Optional (`if (x) |v|`) and error-union payload forms are unimplemented.
    if (if_node.payload) |payload| {
        return self.failNode(scope, payload, "TODO implement astGenIf for optionals", .{});
    }
    if (if_node.@"else") |else_node| {
        if (else_node.payload) |payload| {
            return self.failNode(scope, payload, "TODO implement astGenIf for error unions", .{});
        }
    }
    // Collects the condition instructions plus the condbr itself.
    var block_scope: Scope.GenZIR = .{
        .decl = scope.decl().?,
        .arena = scope.arena(),
        .instructions = .{},
    };
    defer block_scope.instructions.deinit(self.gpa);
    const cond = try self.astGenExpr(&block_scope.base, if_node.condition);
    const tree = scope.tree();
    const if_src = tree.token_locs[if_node.if_token].start;
    const condbr = try self.addZIRInstSpecial(&block_scope.base, if_src, zir.Inst.CondBr, .{
        .condition = cond,
        .true_body = undefined, // populated below
        .false_body = undefined, // populated below
    }, .{});
    // The enclosing block receives the break operands as its result.
    const block = try self.addZIRInstBlock(scope, if_src, .{
        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
    });
    var then_scope: Scope.GenZIR = .{
        .decl = block_scope.decl,
        .arena = block_scope.arena,
        .instructions = .{},
    };
    defer then_scope.instructions.deinit(self.gpa);
    const then_result = try self.astGenExpr(&then_scope.base, if_node.body);
    // A branch that already ends in a noreturn instruction must not get a break.
    if (!then_result.tag.isNoReturn()) {
        const then_src = tree.token_locs[if_node.body.lastToken()].start;
        _ = try self.addZIRInst(&then_scope.base, then_src, zir.Inst.Break, .{
            .block = block,
            .operand = then_result,
        }, .{});
    }
    condbr.positionals.true_body = .{
        .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items),
    };
    var else_scope: Scope.GenZIR = .{
        .decl = block_scope.decl,
        .arena = block_scope.arena,
        .instructions = .{},
    };
    defer else_scope.instructions.deinit(self.gpa);
    if (if_node.@"else") |else_node| {
        const else_result = try self.astGenExpr(&else_scope.base, else_node.body);
        if (!else_result.tag.isNoReturn()) {
            const else_src = tree.token_locs[else_node.body.lastToken()].start;
            _ = try self.addZIRInst(&else_scope.base, else_src, zir.Inst.Break, .{
                .block = block,
                .operand = else_result,
            }, .{});
        }
    } else {
        // TODO Optimization opportunity: we can avoid an allocation and a memcpy here
        // by directly allocating the body for this one instruction.
        const else_src = tree.token_locs[if_node.lastToken()].start;
        _ = try self.addZIRInst(&else_scope.base, else_src, zir.Inst.BreakVoid, .{
            .block = block,
        }, .{});
    }
    condbr.positionals.false_body = .{
        .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
    };
    return &block.base;
}
/// Lower `break`/`continue`/`return`. Only `return` (with or without an
/// operand) is currently implemented.
fn astGenControlFlowExpression(
    self: *Module,
    scope: *Scope,
    cfe: *ast.Node.ControlFlowExpression,
) InnerError!*zir.Inst {
    switch (cfe.kind) {
        .Break => return self.failNode(scope, &cfe.base, "TODO implement astGenExpr for Break", .{}),
        .Continue => return self.failNode(scope, &cfe.base, "TODO implement astGenExpr for Continue", .{}),
        .Return => {},
    }
    const tree = scope.tree();
    const ret_src = tree.token_locs[cfe.ltoken].start;
    const rhs_node = cfe.rhs orelse
        return self.addZIRInst(scope, ret_src, zir.Inst.ReturnVoid, .{}, .{});
    const operand = try self.astGenExpr(scope, rhs_node);
    return self.addZIRInst(scope, ret_src, zir.Inst.Return, .{ .operand = operand }, .{});
}
/// Resolve an identifier to a ZIR instruction. Lookup order: simple
/// primitives (types/true/false/null/undefined), `iN`/`uN` integer type
/// names, namespace declarations, then function parameters by name.
fn astGenIdent(self: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerError!*zir.Inst {
    const tree = scope.tree();
    const ident_name = tree.tokenSlice(ident.token);
    const src = tree.token_locs[ident.token].start;
    if (mem.eql(u8, ident_name, "_")) {
        return self.failNode(scope, &ident.base, "TODO implement '_' identifier", .{});
    }
    if (getSimplePrimitiveValue(ident_name)) |typed_value| {
        return self.addZIRInstConst(scope, src, typed_value);
    }
    // Integer type names: 'i' or 'u' followed by a decimal bit count.
    if (ident_name.len >= 2) integer: {
        const first_c = ident_name[0];
        if (first_c == 'i' or first_c == 'u') {
            const is_signed = first_c == 'i';
            const bit_count = std.fmt.parseInt(u16, ident_name[1..], 10) catch |err| switch (err) {
                error.Overflow => return self.failNode(
                    scope,
                    &ident.base,
                    "primitive integer type '{}' exceeds maximum bit width of 65535",
                    .{ident_name},
                ),
                // Suffix isn't all digits; not an integer type name after all.
                error.InvalidCharacter => break :integer,
            };
            const val = switch (bit_count) {
                8 => if (is_signed) Value.initTag(.i8_type) else Value.initTag(.u8_type),
                16 => if (is_signed) Value.initTag(.i16_type) else Value.initTag(.u16_type),
                32 => if (is_signed) Value.initTag(.i32_type) else Value.initTag(.u32_type),
                64 => if (is_signed) Value.initTag(.i64_type) else Value.initTag(.u64_type),
                else => return self.failNode(scope, &ident.base, "TODO implement arbitrary integer bitwidth types", .{}),
            };
            return self.addZIRInstConst(scope, src, .{
                .ty = Type.initTag(.type),
                .val = val,
            });
        }
    }
    // Declarations visible in the surrounding namespace.
    if (self.lookupDeclName(scope, ident_name)) |decl| {
        return try self.addZIRInst(scope, src, zir.Inst.DeclValInModule, .{ .decl = decl }, .{});
    }
    // Function parameter
    if (scope.decl()) |decl| {
        if (tree.root_node.decls()[decl.src_index].cast(ast.Node.FnProto)) |fn_proto| {
            for (fn_proto.params()) |param, i| {
                const param_name = tree.tokenSlice(param.name_token.?);
                if (mem.eql(u8, param_name, ident_name)) {
                    return try self.addZIRInst(scope, src, zir.Inst.Arg, .{ .index = i }, .{});
                }
            }
        }
    }
    return self.failNode(scope, &ident.base, "TODO implement local variable identifier lookup", .{});
}
/// Unescape a string literal token and emit a ZIR `str` instruction whose
/// bytes live in the scope's arena.
fn astGenStringLiteral(self: *Module, scope: *Scope, str_lit: *ast.Node.StringLiteral) InnerError!*zir.Inst {
    const tree = scope.tree();
    const unparsed_bytes = tree.tokenSlice(str_lit.token);
    const arena = scope.arena();
    var bad_index: usize = undefined;
    const bytes = std.zig.parseStringLiteral(arena, unparsed_bytes, &bad_index) catch |err| switch (err) {
        error.InvalidCharacter => {
            // Point the compile error at the exact offending byte.
            const bad_byte = unparsed_bytes[bad_index];
            const src = tree.token_locs[str_lit.token].start;
            return self.fail(scope, src + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
        },
        else => |e| return e,
    };
    const src = tree.token_locs[str_lit.token].start;
    return self.addZIRInst(scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{});
}
/// Lower an integer literal to a `comptime_int` constant. Recognizes the
/// 0x/0o/0b prefixes; values that do not fit in a u64 are unimplemented.
fn astGenIntegerLiteral(self: *Module, scope: *Scope, int_lit: *ast.Node.IntegerLiteral) InnerError!*zir.Inst {
    const arena = scope.arena();
    const tree = scope.tree();
    const prefixed_bytes = tree.tokenSlice(int_lit.token);
    // Select the radix from the literal's prefix, defaulting to decimal.
    const base = if (mem.startsWith(u8, prefixed_bytes, "0x"))
        16
    else if (mem.startsWith(u8, prefixed_bytes, "0o"))
        8
    else if (mem.startsWith(u8, prefixed_bytes, "0b"))
        2
    else
        @as(u8, 10);
    // Strip the two-character prefix for non-decimal literals.
    const bytes = if (base == 10)
        prefixed_bytes
    else
        prefixed_bytes[2..];
    if (std.fmt.parseInt(u64, bytes, base)) |small_int| {
        const int_payload = try arena.create(Value.Payload.Int_u64);
        int_payload.* = .{ .int = small_int };
        const src = tree.token_locs[int_lit.token].start;
        return self.addZIRInstConst(scope, src, .{
            .ty = Type.initTag(.comptime_int),
            .val = Value.initPayload(&int_payload.base),
        });
    } else |err| {
        // NOTE(review): `err` is unused because Overflow and InvalidCharacter
        // currently share the same TODO error.
        return self.failTok(scope, int_lit.token, "TODO implement int literals that don't fit in a u64", .{});
    }
}
/// Generate ZIR for each statement of an unlabeled block, discarding
/// the statements' result instructions.
fn astGenBlock(self: *Module, scope: *Scope, block_node: *ast.Node.Block) !void {
    const tracy = trace(@src());
    defer tracy.end();
    if (block_node.label) |label|
        return self.failTok(scope, label, "TODO implement labeled blocks", .{});
    const statements = block_node.statements();
    for (statements) |stmt| {
        _ = try self.astGenExpr(scope, stmt);
    }
}
/// Lower an inline assembly expression. Outputs are unimplemented and the
/// result type is currently hard-coded to `void`.
fn astGenAsm(self: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zir.Inst {
    if (asm_node.outputs.len != 0) {
        return self.failNode(scope, &asm_node.base, "TODO implement asm with an output", .{});
    }
    const arena = scope.arena();
    const tree = scope.tree();
    // Parallel arrays: constraint instruction and value instruction per input.
    const inputs = try arena.alloc(*zir.Inst, asm_node.inputs.len);
    const args = try arena.alloc(*zir.Inst, asm_node.inputs.len);
    for (asm_node.inputs) |input, i| {
        // TODO semantically analyze constraints
        inputs[i] = try self.astGenExpr(scope, input.constraint);
        args[i] = try self.astGenExpr(scope, input.expr);
    }
    const src = tree.token_locs[asm_node.asm_token].start;
    // Asm expressions always produce `void` for now.
    const return_type = try self.addZIRInstConst(scope, src, .{
        .ty = Type.initTag(.type),
        .val = Value.initTag(.void_type),
    });
    const asm_inst = try self.addZIRInst(scope, src, zir.Inst.Asm, .{
        .asm_source = try self.astGenExpr(scope, asm_node.template),
        .return_type = return_type,
    }, .{
        .@"volatile" = asm_node.volatile_token != null,
        //.clobbers =  TODO handle clobbers
        .inputs = inputs,
        .args = args,
    });
    return asm_inst;
}
/// Lower a `@builtin(...)` call by comptime-matching the builtin name
/// against the `builtin_name` decl of each ZIR instruction type, then
/// generating one instruction per positional argument.
fn astGenBuiltinCall(self: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
    const tree = scope.tree();
    const builtin_name = tree.tokenSlice(call.builtin_token);
    const src = tree.token_locs[call.builtin_token].start;
    // Comptime scan over every ZIR instruction type that names a builtin.
    inline for (std.meta.declarations(zir.Inst)) |inst| {
        if (inst.data != .Type) continue;
        const T = inst.data.Type;
        if (!@hasDecl(T, "builtin_name")) continue;
        if (std.mem.eql(u8, builtin_name, T.builtin_name)) {
            var value: T = undefined;
            const positionals = @typeInfo(std.meta.fieldInfo(T, "positionals").field_type).Struct;
            if (positionals.fields.len == 0) {
                // NOTE(review): `value` is never initialized on this path; this
                // appears to rely on `positionals`/`kw_args` being zero-sized
                // for zero-positional builtins — confirm.
                return self.addZIRInst(scope, src, T, value.positionals, value.kw_args);
            }
            // A leading []*zir.Inst positional marks a var-args builtin.
            const arg_count: ?usize = if (positionals.fields[0].field_type == []*zir.Inst) null else positionals.fields.len;
            if (arg_count) |some| {
                if (call.params_len != some) {
                    return self.failTok(
                        scope,
                        call.builtin_token,
                        "expected {} parameter{}, found {}",
                        .{ some, if (some == 1) "" else "s", call.params_len },
                    );
                }
                const params = call.params();
                inline for (positionals.fields) |p, i| {
                    @field(value.positionals, p.name) = try self.astGenExpr(scope, params[i]);
                }
            } else {
                return self.failTok(scope, call.builtin_token, "TODO var args builtin '{}'", .{builtin_name});
            }
            return self.addZIRInst(scope, src, T, value.positionals, .{});
        }
    }
    return self.failTok(scope, call.builtin_token, "TODO implement builtin call for '{}'", .{builtin_name});
}
/// Lower a function call: generate the callee, then each argument in
/// source order, then emit a `call` instruction referencing them all.
fn astGenCall(self: *Module, scope: *Scope, call: *ast.Node.Call) InnerError!*zir.Inst {
    const tree = scope.tree();
    const callee = try self.astGenExpr(scope, call.lhs);
    const gen_zir = scope.cast(Scope.GenZIR).?;
    const param_nodes = call.params();
    const arg_insts = try gen_zir.arena.alloc(*zir.Inst, param_nodes.len);
    for (param_nodes) |param_node, i|
        arg_insts[i] = try self.astGenExpr(scope, param_node);
    const call_src = tree.token_locs[call.lhs.firstToken()].start;
    return self.addZIRInst(scope, call_src, zir.Inst.Call, .{
        .func = callee,
        .args = arg_insts,
    }, .{});
}
/// Lower the `unreachable` keyword to a ZIR `unreachable` instruction.
fn astGenUnreachable(self: *Module, scope: *Scope, unreach_node: *ast.Node.Unreachable) InnerError!*zir.Inst {
    const tree = scope.tree();
    const kw_src = tree.token_locs[unreach_node.token].start;
    return self.addZIRInst(scope, kw_src, zir.Inst.Unreachable, .{}, .{});
}
/// Map a primitive identifier to its TypedValue: the named builtin types
/// (typed `type`), plus `null`, `undefined`, `true`, and `false` with their
/// own types. Returns null when `name` is not a simple primitive.
fn getSimplePrimitiveValue(name: []const u8) ?TypedValue {
    // Comptime lookup table covering all fixed-name primitive types.
    const simple_types = std.ComptimeStringMap(Value.Tag, .{
        .{ "u8", .u8_type },
        .{ "i8", .i8_type },
        .{ "isize", .isize_type },
        .{ "usize", .usize_type },
        .{ "c_short", .c_short_type },
        .{ "c_ushort", .c_ushort_type },
        .{ "c_int", .c_int_type },
        .{ "c_uint", .c_uint_type },
        .{ "c_long", .c_long_type },
        .{ "c_ulong", .c_ulong_type },
        .{ "c_longlong", .c_longlong_type },
        .{ "c_ulonglong", .c_ulonglong_type },
        .{ "c_longdouble", .c_longdouble_type },
        .{ "f16", .f16_type },
        .{ "f32", .f32_type },
        .{ "f64", .f64_type },
        .{ "f128", .f128_type },
        .{ "c_void", .c_void_type },
        .{ "bool", .bool_type },
        .{ "void", .void_type },
        .{ "type", .type_type },
        .{ "anyerror", .anyerror_type },
        .{ "comptime_int", .comptime_int_type },
        .{ "comptime_float", .comptime_float_type },
        .{ "noreturn", .noreturn_type },
    });
    if (simple_types.get(name)) |tag| {
        return TypedValue{
            .ty = Type.initTag(.type),
            .val = Value.initTag(tag),
        };
    }
    // The remaining primitives are values, not types, so each carries its
    // own result type.
    if (mem.eql(u8, name, "null")) {
        return TypedValue{
            .ty = Type.initTag(.@"null"),
            .val = Value.initTag(.null_value),
        };
    }
    if (mem.eql(u8, name, "undefined")) {
        return TypedValue{
            .ty = Type.initTag(.@"undefined"),
            .val = Value.initTag(.undef),
        };
    }
    if (mem.eql(u8, name, "true")) {
        return TypedValue{
            .ty = Type.initTag(.bool),
            .val = Value.initTag(.bool_true),
        };
    }
    if (mem.eql(u8, name, "false")) {
        return TypedValue{
            .ty = Type.initTag(.bool),
            .val = Value.initTag(.bool_false),
        };
    }
    return null;
}
fn declareDeclDependency(self: *Module, depender: *Decl, dependee: *Decl) !void {
try depender.dependencies.ensureCapacity(self.gpa, depender.dependencies.items().len + 1);
try dependee.dependants.ensureCapacity(self.gpa, dependee.dependants.items().len + 1);
@ -2368,7 +1970,7 @@ fn newZIRInst(
return inst;
}
fn addZIRInstSpecial(
pub fn addZIRInstSpecial(
self: *Module,
scope: *Scope,
src: usize,
@ -2376,14 +1978,14 @@ fn addZIRInstSpecial(
positionals: std.meta.fieldInfo(T, "positionals").field_type,
kw_args: std.meta.fieldInfo(T, "kw_args").field_type,
) !*T {
const gen_zir = scope.cast(Scope.GenZIR).?;
const gen_zir = scope.getGenZIR();
try gen_zir.instructions.ensureCapacity(self.gpa, gen_zir.instructions.items.len + 1);
const inst = try newZIRInst(gen_zir.arena, src, T, positionals, kw_args);
gen_zir.instructions.appendAssumeCapacity(&inst.base);
return inst;
}
fn addZIRInst(
pub fn addZIRInst(
self: *Module,
scope: *Scope,
src: usize,
@ -2396,13 +1998,13 @@ fn addZIRInst(
}
/// TODO The existence of this function is a workaround for a bug in stage1.
fn addZIRInstConst(self: *Module, scope: *Scope, src: usize, typed_value: TypedValue) !*zir.Inst {
pub fn addZIRInstConst(self: *Module, scope: *Scope, src: usize, typed_value: TypedValue) !*zir.Inst {
const P = std.meta.fieldInfo(zir.Inst.Const, "positionals").field_type;
return self.addZIRInst(scope, src, zir.Inst.Const, P{ .typed_value = typed_value }, .{});
}
/// TODO The existence of this function is a workaround for a bug in stage1.
fn addZIRInstBlock(self: *Module, scope: *Scope, src: usize, body: zir.Module.Body) !*zir.Inst.Block {
pub fn addZIRInstBlock(self: *Module, scope: *Scope, src: usize, body: zir.Module.Body) !*zir.Inst.Block {
const P = std.meta.fieldInfo(zir.Inst.Block, "positionals").field_type;
return self.addZIRInstSpecial(scope, src, zir.Inst.Block, P{ .body = body }, .{});
}
@ -2637,7 +2239,7 @@ fn getNextAnonNameIndex(self: *Module) usize {
return @atomicRmw(usize, &self.next_anon_name_index, .Add, 1, .Monotonic);
}
fn lookupDeclName(self: *Module, scope: *Scope, ident_name: []const u8) ?*Decl {
pub fn lookupDeclName(self: *Module, scope: *Scope, ident_name: []const u8) ?*Decl {
const namespace = scope.namespace();
const name_hash = namespace.fullyQualifiedNameHash(ident_name);
return self.decl_table.get(name_hash);
@ -2658,17 +2260,16 @@ fn analyzeInstCompileError(self: *Module, scope: *Scope, inst: *zir.Inst.Compile
fn analyzeInstArg(self: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
const b = try self.requireRuntimeBlock(scope, inst.base.src);
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const param_index = b.instructions.items.len;
const param_count = fn_ty.fnParamLen();
if (inst.positionals.index >= param_count) {
if (param_index >= param_count) {
return self.fail(scope, inst.base.src, "parameter index {} outside list of length {}", .{
inst.positionals.index,
param_index,
param_count,
});
}
const param_type = fn_ty.fnParamType(inst.positionals.index);
return self.addNewInstArgs(b, inst.base.src, param_type, Inst.Arg, .{
.index = inst.positionals.index,
});
const param_type = fn_ty.fnParamType(param_index);
return self.addNewInstArgs(b, inst.base.src, param_type, Inst.Arg, {});
}
fn analyzeInstBlock(self: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerError!*Inst {
@ -3646,13 +3247,13 @@ fn coerceArrayPtrToSlice(self: *Module, scope: *Scope, dest_type: Type, inst: *I
return self.fail(scope, inst.src, "TODO implement coerceArrayPtrToSlice runtime instruction", .{});
}
fn fail(self: *Module, scope: *Scope, src: usize, comptime format: []const u8, args: anytype) InnerError {
pub fn fail(self: *Module, scope: *Scope, src: usize, comptime format: []const u8, args: anytype) InnerError {
@setCold(true);
const err_msg = try ErrorMsg.create(self.gpa, src, format, args);
return self.failWithOwnedErrorMsg(scope, src, err_msg);
}
fn failTok(
pub fn failTok(
self: *Module,
scope: *Scope,
token_index: ast.TokenIndex,
@ -3664,7 +3265,7 @@ fn failTok(
return self.fail(scope, src, format, args);
}
fn failNode(
pub fn failNode(
self: *Module,
scope: *Scope,
ast_node: *ast.Node,
@ -3705,6 +3306,12 @@ fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, src: usize, err_msg: *Err
gen_zir.decl.generation = self.generation;
self.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
},
.local_var => {
const gen_zir = scope.cast(Scope.LocalVar).?.gen_zir;
gen_zir.decl.analysis = .sema_failure;
gen_zir.decl.generation = self.generation;
self.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
},
.zir_module => {
const zir_module = scope.cast(Scope.ZIRModule).?;
zir_module.status = .loaded_sema_failure;

643
src-self-hosted/astgen.zig Normal file
View File

@ -0,0 +1,643 @@
const std = @import("std");
const mem = std.mem;
const Value = @import("value.zig").Value;
const Type = @import("type.zig").Type;
const TypedValue = @import("TypedValue.zig");
const assert = std.debug.assert;
const zir = @import("zir.zig");
const Module = @import("Module.zig");
const ast = std.zig.ast;
const trace = @import("tracy.zig").trace;
const Scope = Module.Scope;
const InnerError = Module.InnerError;
/// Turn Zig AST into untyped ZIR instructions.
/// Dispatches on the node tag; each prong forwards to a dedicated lowering
/// routine. Unimplemented node kinds fail the Decl with a "TODO" error.
pub fn expr(mod: *Module, scope: *Scope, node: *ast.Node) InnerError!*zir.Inst {
    switch (node.tag) {
        .VarDecl => unreachable, // Handled in `blockExpr`.
        .Identifier => return identifier(mod, scope, node.castTag(.Identifier).?),
        .Asm => return assembly(mod, scope, node.castTag(.Asm).?),
        .StringLiteral => return stringLiteral(mod, scope, node.castTag(.StringLiteral).?),
        .IntegerLiteral => return integerLiteral(mod, scope, node.castTag(.IntegerLiteral).?),
        .BuiltinCall => return builtinCall(mod, scope, node.castTag(.BuiltinCall).?),
        .Call => return callExpr(mod, scope, node.castTag(.Call).?),
        .Unreachable => return unreach(mod, scope, node.castTag(.Unreachable).?),
        .ControlFlowExpression => return controlFlowExpr(mod, scope, node.castTag(.ControlFlowExpression).?),
        .If => return ifExpr(mod, scope, node.castTag(.If).?),
        .Assign => return assign(mod, scope, node.castTag(.Assign).?),
        .Add => return add(mod, scope, node.castTag(.Add).?),
        .BangEqual => return cmp(mod, scope, node.castTag(.BangEqual).?, .neq),
        .EqualEqual => return cmp(mod, scope, node.castTag(.EqualEqual).?, .eq),
        .GreaterThan => return cmp(mod, scope, node.castTag(.GreaterThan).?, .gt),
        .GreaterOrEqual => return cmp(mod, scope, node.castTag(.GreaterOrEqual).?, .gte),
        .LessThan => return cmp(mod, scope, node.castTag(.LessThan).?, .lt),
        .LessOrEqual => return cmp(mod, scope, node.castTag(.LessOrEqual).?, .lte),
        .BoolNot => return boolNot(mod, scope, node.castTag(.BoolNot).?),
        else => return mod.failNode(scope, node, "TODO implement astgen.Expr for {}", .{@tagName(node.tag)}),
    }
}
/// Generate ZIR for every statement of an unlabeled block.
/// Each `const` declaration pushes a `Scope.LocalVar` that shadows the
/// current scope for the remaining statements. The LocalVar scope nodes
/// live in a temporary arena freed when this function returns; the
/// instructions they reference outlive it.
pub fn blockExpr(mod: *Module, parent_scope: *Scope, block_node: *ast.Node.Block) !void {
    const tracy = trace(@src());
    defer tracy.end();
    if (block_node.label) |label| {
        return mod.failTok(parent_scope, label, "TODO implement labeled blocks", .{});
    }
    var block_arena = std.heap.ArenaAllocator.init(mod.gpa);
    defer block_arena.deinit();
    var scope = parent_scope;
    for (block_node.statements()) |statement| {
        switch (statement.tag) {
            .VarDecl => {
                const sub_scope = try block_arena.allocator.create(Scope.LocalVar);
                const var_decl_node = @fieldParentPtr(ast.Node.VarDecl, "base", statement);
                sub_scope.* = try varDecl(mod, scope, var_decl_node);
                // Later statements resolve identifiers through this local first.
                scope = &sub_scope.base;
            },
            else => _ = try expr(mod, scope, statement),
        }
    }
}
/// Lower a `const` declaration into a `Scope.LocalVar` whose `inst` is the
/// initializer's result instruction. Mutable, comptime, aligned, and typed
/// locals, as well as result locations, are not yet implemented.
fn varDecl(mod: *Module, scope: *Scope, node: *ast.Node.VarDecl) InnerError!Scope.LocalVar {
    // TODO implement detection of shadowing
    if (node.getTrailer("comptime_token")) |comptime_token| {
        return mod.failTok(scope, comptime_token, "TODO implement comptime locals", .{});
    }
    if (node.getTrailer("align_node")) |align_node| {
        return mod.failNode(scope, align_node, "TODO implement alignment on locals", .{});
    }
    if (node.getTrailer("type_node")) |type_node| {
        return mod.failNode(scope, type_node, "TODO implement typed locals", .{});
    }
    const tree = scope.tree();
    switch (tree.token_ids[node.mut_token]) {
        .Keyword_const => {},
        .Keyword_var => {
            return mod.failTok(scope, node.mut_token, "TODO implement mutable locals", .{});
        },
        else => unreachable,
    }
    // Depending on the type of AST the initialization expression is, we may need an lvalue
    // or an rvalue as a result location. If it is an rvalue, we can use the instruction as
    // the variable, no memory location needed.
    const init_node = node.getTrailer("init_node").?;
    if (nodeNeedsMemoryLocation(init_node)) {
        return mod.failNode(scope, init_node, "TODO implement result locations", .{});
    }
    const init_inst = try expr(mod, scope, init_node);
    const ident_name = tree.tokenSlice(node.name_token); // TODO support @"aoeu" identifiers
    return Scope.LocalVar{
        .parent = scope,
        .gen_zir = scope.getGenZIR(),
        .name = ident_name,
        .inst = init_inst,
    };
}
/// Lower `!rhs` to a ZIR `boolnot` instruction.
fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
    const operand_inst = try expr(mod, scope, node.rhs);
    const tree = scope.tree();
    const op_src = tree.token_locs[node.op_token].start;
    return mod.addZIRInst(scope, op_src, zir.Inst.BoolNot, .{ .operand = operand_inst }, .{});
}
/// Lower an assignment expression. Currently only discarding assignment
/// (`_ = rhs`) is implemented; every other target reports a "TODO" error.
/// Fixes: deduplicates the two identical failNode arms and uses the file's
/// `mem` alias (as `identifier` does) instead of `std.mem`.
fn assign(mod: *Module, scope: *Scope, infix_node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst {
    if (infix_node.lhs.tag == .Identifier) {
        const ident = @fieldParentPtr(ast.Node.Identifier, "base", infix_node.lhs);
        const tree = scope.tree();
        const ident_name = tree.tokenSlice(ident.token);
        // `_ = expr` evaluates the right-hand side for its side effects only.
        if (mem.eql(u8, ident_name, "_")) {
            return expr(mod, scope, infix_node.rhs);
        }
    }
    // Locals, fields, derefs, and every other assignment target.
    return mod.failNode(scope, &infix_node.base, "TODO implement infix operator assign", .{});
}
/// Lower `lhs + rhs` to a ZIR `add` instruction; operands are generated
/// left-to-right before the add itself.
fn add(mod: *Module, scope: *Scope, infix_node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst {
    const lhs_inst = try expr(mod, scope, infix_node.lhs);
    const rhs_inst = try expr(mod, scope, infix_node.rhs);
    const tree = scope.tree();
    const op_src = tree.token_locs[infix_node.op_token].start;
    return mod.addZIRInst(scope, op_src, zir.Inst.Add, .{ .lhs = lhs_inst, .rhs = rhs_inst }, .{});
}
/// Lower a binary comparison to a ZIR `cmp` instruction using `op`.
/// Operands are generated left-to-right before the comparison itself.
fn cmp(
    mod: *Module,
    scope: *Scope,
    infix_node: *ast.Node.SimpleInfixOp,
    op: std.math.CompareOperator,
) InnerError!*zir.Inst {
    const lhs_inst = try expr(mod, scope, infix_node.lhs);
    const rhs_inst = try expr(mod, scope, infix_node.rhs);
    const tree = scope.tree();
    const op_src = tree.token_locs[infix_node.op_token].start;
    return mod.addZIRInst(scope, op_src, zir.Inst.Cmp, .{
        .lhs = lhs_inst,
        .op = op,
        .rhs = rhs_inst,
    }, .{});
}
/// Lower an `if` expression (without payload captures) to a ZIR block whose
/// body ends in a `condbr`; each branch breaks its result to that block.
/// The condbr's true/false bodies are patched in after generating each branch.
fn ifExpr(mod: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir.Inst {
    // Optional (`if (x) |v|`) and error-union payload forms are unimplemented.
    if (if_node.payload) |payload| {
        return mod.failNode(scope, payload, "TODO implement astgen.IfExpr for optionals", .{});
    }
    if (if_node.@"else") |else_node| {
        if (else_node.payload) |payload| {
            return mod.failNode(scope, payload, "TODO implement astgen.IfExpr for error unions", .{});
        }
    }
    // Collects the condition instructions plus the condbr itself.
    var block_scope: Scope.GenZIR = .{
        .parent = scope,
        .decl = scope.decl().?,
        .arena = scope.arena(),
        .instructions = .{},
    };
    defer block_scope.instructions.deinit(mod.gpa);
    const cond = try expr(mod, &block_scope.base, if_node.condition);
    const tree = scope.tree();
    const if_src = tree.token_locs[if_node.if_token].start;
    const condbr = try mod.addZIRInstSpecial(&block_scope.base, if_src, zir.Inst.CondBr, .{
        .condition = cond,
        .true_body = undefined, // populated below
        .false_body = undefined, // populated below
    }, .{});
    // The enclosing block receives the break operands as its result.
    const block = try mod.addZIRInstBlock(scope, if_src, .{
        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
    });
    var then_scope: Scope.GenZIR = .{
        .parent = scope,
        .decl = block_scope.decl,
        .arena = block_scope.arena,
        .instructions = .{},
    };
    defer then_scope.instructions.deinit(mod.gpa);
    const then_result = try expr(mod, &then_scope.base, if_node.body);
    // A branch that already ends in a noreturn instruction must not get a break.
    if (!then_result.tag.isNoReturn()) {
        const then_src = tree.token_locs[if_node.body.lastToken()].start;
        _ = try mod.addZIRInst(&then_scope.base, then_src, zir.Inst.Break, .{
            .block = block,
            .operand = then_result,
        }, .{});
    }
    condbr.positionals.true_body = .{
        .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items),
    };
    var else_scope: Scope.GenZIR = .{
        .parent = scope,
        .decl = block_scope.decl,
        .arena = block_scope.arena,
        .instructions = .{},
    };
    defer else_scope.instructions.deinit(mod.gpa);
    if (if_node.@"else") |else_node| {
        const else_result = try expr(mod, &else_scope.base, else_node.body);
        if (!else_result.tag.isNoReturn()) {
            const else_src = tree.token_locs[else_node.body.lastToken()].start;
            _ = try mod.addZIRInst(&else_scope.base, else_src, zir.Inst.Break, .{
                .block = block,
                .operand = else_result,
            }, .{});
        }
    } else {
        // TODO Optimization opportunity: we can avoid an allocation and a memcpy here
        // by directly allocating the body for this one instruction.
        const else_src = tree.token_locs[if_node.lastToken()].start;
        _ = try mod.addZIRInst(&else_scope.base, else_src, zir.Inst.BreakVoid, .{
            .block = block,
        }, .{});
    }
    condbr.positionals.false_body = .{
        .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
    };
    return &block.base;
}
/// Lower `break`/`continue`/`return`. Only `return` (with or without an
/// operand) is currently implemented.
fn controlFlowExpr(
    mod: *Module,
    scope: *Scope,
    cfe: *ast.Node.ControlFlowExpression,
) InnerError!*zir.Inst {
    switch (cfe.kind) {
        .Break => return mod.failNode(scope, &cfe.base, "TODO implement astgen.Expr for Break", .{}),
        .Continue => return mod.failNode(scope, &cfe.base, "TODO implement astgen.Expr for Continue", .{}),
        .Return => {},
    }
    const tree = scope.tree();
    const ret_src = tree.token_locs[cfe.ltoken].start;
    const rhs_node = cfe.rhs orelse
        return mod.addZIRInst(scope, ret_src, zir.Inst.ReturnVoid, .{}, .{});
    const operand = try expr(mod, scope, rhs_node);
    return mod.addZIRInst(scope, ret_src, zir.Inst.Return, .{ .operand = operand }, .{});
}
/// Resolve an identifier to a ZIR instruction. Lookup order: simple
/// primitives, `iN`/`uN` integer type names, local variables (walking the
/// scope chain, innermost first), then namespace declarations. Unknown
/// names produce an "undeclared identifier" error.
fn identifier(mod: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerError!*zir.Inst {
    const tracy = trace(@src());
    defer tracy.end();
    const tree = scope.tree();
    // TODO implement @"aoeu" identifiers
    const ident_name = tree.tokenSlice(ident.token);
    const src = tree.token_locs[ident.token].start;
    if (mem.eql(u8, ident_name, "_")) {
        return mod.failNode(scope, &ident.base, "TODO implement '_' identifier", .{});
    }
    if (getSimplePrimitiveValue(ident_name)) |typed_value| {
        return mod.addZIRInstConst(scope, src, typed_value);
    }
    // Integer type names: 'i' or 'u' followed by a decimal bit count.
    if (ident_name.len >= 2) integer: {
        const first_c = ident_name[0];
        if (first_c == 'i' or first_c == 'u') {
            const is_signed = first_c == 'i';
            const bit_count = std.fmt.parseInt(u16, ident_name[1..], 10) catch |err| switch (err) {
                error.Overflow => return mod.failNode(
                    scope,
                    &ident.base,
                    "primitive integer type '{}' exceeds maximum bit width of 65535",
                    .{ident_name},
                ),
                // Suffix isn't all digits; not an integer type name after all.
                error.InvalidCharacter => break :integer,
            };
            const val = switch (bit_count) {
                8 => if (is_signed) Value.initTag(.i8_type) else Value.initTag(.u8_type),
                16 => if (is_signed) Value.initTag(.i16_type) else Value.initTag(.u16_type),
                32 => if (is_signed) Value.initTag(.i32_type) else Value.initTag(.u32_type),
                64 => if (is_signed) Value.initTag(.i64_type) else Value.initTag(.u64_type),
                else => return mod.failNode(scope, &ident.base, "TODO implement arbitrary integer bitwidth types", .{}),
            };
            return mod.addZIRInstConst(scope, src, .{
                .ty = Type.initTag(.type),
                .val = val,
            });
        }
    }
    // Local variables, including function parameters.
    {
        var s = scope;
        while (true) switch (s.tag) {
            .local_var => {
                const local_var = s.cast(Scope.LocalVar).?;
                if (mem.eql(u8, local_var.name, ident_name)) {
                    // A local's value IS its defining instruction; no load needed.
                    return local_var.inst;
                }
                s = local_var.parent;
            },
            .gen_zir => s = s.cast(Scope.GenZIR).?.parent,
            else => break,
        };
    }
    if (mod.lookupDeclName(scope, ident_name)) |decl| {
        return try mod.addZIRInst(scope, src, zir.Inst.DeclValInModule, .{ .decl = decl }, .{});
    }
    return mod.failNode(scope, &ident.base, "use of undeclared identifier '{}'", .{ident_name});
}
/// Lowers a string literal to a ZIR Str instruction, unescaping the raw
/// token bytes into the scope's arena.
fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.StringLiteral) InnerError!*zir.Inst {
    const tree = scope.tree();
    const src = tree.token_locs[str_lit.token].start;
    const arena = scope.arena();
    const raw_bytes = tree.tokenSlice(str_lit.token);
    var bad_index: usize = undefined;
    const bytes = std.zig.parseStringLiteral(arena, raw_bytes, &bad_index) catch |err| switch (err) {
        error.InvalidCharacter => {
            // Point the compile error at the exact offending byte inside the token.
            const bad_byte = raw_bytes[bad_index];
            return mod.fail(scope, src + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
        },
        else => |e| return e,
    };
    return mod.addZIRInst(scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{});
}
/// Lowers an integer literal to a comptime_int constant. Only values
/// that fit in a u64 are supported so far.
fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.IntegerLiteral) InnerError!*zir.Inst {
    const arena = scope.arena();
    const tree = scope.tree();
    const src = tree.token_locs[int_lit.token].start;
    const token_bytes = tree.tokenSlice(int_lit.token);
    // Detect a radix prefix; the digits start after the two prefix characters.
    var base: u8 = 10;
    var digits = token_bytes;
    if (mem.startsWith(u8, token_bytes, "0x")) {
        base = 16;
        digits = token_bytes[2..];
    } else if (mem.startsWith(u8, token_bytes, "0o")) {
        base = 8;
        digits = token_bytes[2..];
    } else if (mem.startsWith(u8, token_bytes, "0b")) {
        base = 2;
        digits = token_bytes[2..];
    }
    const small_int = std.fmt.parseInt(u64, digits, base) catch {
        return mod.failTok(scope, int_lit.token, "TODO implement int literals that don't fit in a u64", .{});
    };
    const int_payload = try arena.create(Value.Payload.Int_u64);
    int_payload.* = .{ .int = small_int };
    return mod.addZIRInstConst(scope, src, .{
        .ty = Type.initTag(.comptime_int),
        .val = Value.initPayload(&int_payload.base),
    });
}
/// Lowers an inline assembly expression to a ZIR Asm instruction.
/// Outputs are not supported yet; the result type is hard-coded to void.
fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zir.Inst {
    if (asm_node.outputs.len != 0) {
        return mod.failNode(scope, &asm_node.base, "TODO implement asm with an output", .{});
    }
    const arena = scope.arena();
    const tree = scope.tree();
    const constraints = try arena.alloc(*zir.Inst, asm_node.inputs.len);
    const operands = try arena.alloc(*zir.Inst, asm_node.inputs.len);
    // Lower each input's constraint, then its operand, in source order.
    for (asm_node.inputs) |input, i| {
        // TODO semantically analyze constraints
        constraints[i] = try expr(mod, scope, input.constraint);
        operands[i] = try expr(mod, scope, input.expr);
    }
    const src = tree.token_locs[asm_node.asm_token].start;
    const return_type = try mod.addZIRInstConst(scope, src, .{
        .ty = Type.initTag(.type),
        .val = Value.initTag(.void_type),
    });
    return try mod.addZIRInst(scope, src, zir.Inst.Asm, .{
        .asm_source = try expr(mod, scope, asm_node.template),
        .return_type = return_type,
    }, .{
        .@"volatile" = asm_node.volatile_token != null,
        //.clobbers = TODO handle clobbers
        .inputs = constraints,
        .args = operands,
    });
}
/// Lowers a builtin call (e.g. `@as(...)`) by matching the builtin name
/// against those ZIR instruction types that declare a `builtin_name`.
/// Argument count is validated against the instruction's positionals.
fn builtinCall(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
    const tree = scope.tree();
    const builtin_name = tree.tokenSlice(call.builtin_token);
    const src = tree.token_locs[call.builtin_token].start;
    // Comptime scan over every declaration in zir.Inst, keeping only
    // instruction types that carry a `builtin_name` marker.
    inline for (std.meta.declarations(zir.Inst)) |inst| {
        if (inst.data != .Type) continue;
        const T = inst.data.Type;
        if (!@hasDecl(T, "builtin_name")) continue;
        if (std.mem.eql(u8, builtin_name, T.builtin_name)) {
            var value: T = undefined;
            const positionals = @typeInfo(std.meta.fieldInfo(T, "positionals").field_type).Struct;
            if (positionals.fields.len == 0) {
                // NOTE(review): `value` is undefined here; this relies on
                // both positionals and kw_args being zero-sized for such
                // builtins — confirm for any T with non-empty kw_args.
                return mod.addZIRInst(scope, src, T, value.positionals, value.kw_args);
            }
            // A leading slice-of-instructions positional marks a var-args builtin.
            const arg_count: ?usize = if (positionals.fields[0].field_type == []*zir.Inst) null else positionals.fields.len;
            if (arg_count) |some| {
                if (call.params_len != some) {
                    return mod.failTok(
                        scope,
                        call.builtin_token,
                        "expected {} parameter{}, found {}",
                        .{ some, if (some == 1) "" else "s", call.params_len },
                    );
                }
                const params = call.params();
                // Lower each call argument into its corresponding positional field.
                inline for (positionals.fields) |p, i| {
                    @field(value.positionals, p.name) = try expr(mod, scope, params[i]);
                }
            } else {
                return mod.failTok(scope, call.builtin_token, "TODO var args builtin '{}'", .{builtin_name});
            }
            return mod.addZIRInst(scope, src, T, value.positionals, .{});
        }
    }
    return mod.failTok(scope, call.builtin_token, "TODO implement builtin call for '{}'", .{builtin_name});
}
/// Lowers a function call: the callee is lowered first, then each
/// argument in source order, then a ZIR Call instruction is emitted.
fn callExpr(mod: *Module, scope: *Scope, node: *ast.Node.Call) InnerError!*zir.Inst {
    const tree = scope.tree();
    const src = tree.token_locs[node.lhs.firstToken()].start;
    const callee = try expr(mod, scope, node.lhs);
    const param_nodes = node.params();
    const args = try scope.getGenZIR().arena.alloc(*zir.Inst, param_nodes.len);
    for (param_nodes) |param_node, i| {
        args[i] = try expr(mod, scope, param_node);
    }
    return mod.addZIRInst(scope, src, zir.Inst.Call, .{
        .func = callee,
        .args = args,
    }, .{});
}
/// Lowers an `unreachable` expression to a ZIR Unreachable instruction.
fn unreach(mod: *Module, scope: *Scope, unreach_node: *ast.Node.Unreachable) InnerError!*zir.Inst {
    const src = scope.tree().token_locs[unreach_node.token].start;
    return mod.addZIRInst(scope, src, zir.Inst.Unreachable, .{}, .{});
}
/// Maps a primitive identifier — a simple type name, `null`,
/// `undefined`, `true`, or `false` — to its TypedValue.
/// Returns null when `name` is not a simple primitive.
fn getSimplePrimitiveValue(name: []const u8) ?TypedValue {
    // All simple type names produce a value of type `type`; the map
    // supplies the corresponding Value tag.
    const type_tags = std.ComptimeStringMap(Value.Tag, .{
        .{ "u8", .u8_type },
        .{ "i8", .i8_type },
        .{ "isize", .isize_type },
        .{ "usize", .usize_type },
        .{ "c_short", .c_short_type },
        .{ "c_ushort", .c_ushort_type },
        .{ "c_int", .c_int_type },
        .{ "c_uint", .c_uint_type },
        .{ "c_long", .c_long_type },
        .{ "c_ulong", .c_ulong_type },
        .{ "c_longlong", .c_longlong_type },
        .{ "c_ulonglong", .c_ulonglong_type },
        .{ "c_longdouble", .c_longdouble_type },
        .{ "f16", .f16_type },
        .{ "f32", .f32_type },
        .{ "f64", .f64_type },
        .{ "f128", .f128_type },
        .{ "c_void", .c_void_type },
        .{ "bool", .bool_type },
        .{ "void", .void_type },
        .{ "type", .type_type },
        .{ "anyerror", .anyerror_type },
        .{ "comptime_int", .comptime_int_type },
        .{ "comptime_float", .comptime_float_type },
        .{ "noreturn", .noreturn_type },
    });
    if (type_tags.get(name)) |tag| return TypedValue{
        .ty = Type.initTag(.type),
        .val = Value.initTag(tag),
    };
    // The remaining primitives each have their own result type.
    if (mem.eql(u8, name, "null")) return TypedValue{
        .ty = Type.initTag(.@"null"),
        .val = Value.initTag(.null_value),
    };
    if (mem.eql(u8, name, "undefined")) return TypedValue{
        .ty = Type.initTag(.@"undefined"),
        .val = Value.initTag(.undef),
    };
    if (mem.eql(u8, name, "true")) return TypedValue{
        .ty = Type.initTag(.bool),
        .val = Value.initTag(.bool_true),
    };
    if (mem.eql(u8, name, "false")) return TypedValue{
        .ty = Type.initTag(.bool),
        .val = Value.initTag(.bool_false),
    };
    return null;
}
/// Reports whether lowering `node` requires a result memory location,
/// i.e. whether its value must be constructed in place rather than held
/// in a temporary. Aggregate initializers do; the listed scalar-producing
/// expressions do not; several node kinds are still TODO.
fn nodeNeedsMemoryLocation(node: *ast.Node) bool {
    return switch (node.tag) {
        // These tags never appear in expression position, so the question
        // does not arise for them.
        .Root,
        .Use,
        .TestDecl,
        .DocComment,
        .SwitchCase,
        .SwitchElse,
        .Else,
        .Payload,
        .PointerPayload,
        .PointerIndexPayload,
        .ContainerField,
        .ErrorTag,
        .FieldInitializer,
        => unreachable,
        // Expressions whose results fit in a simple value: no memory location.
        .ControlFlowExpression,
        .BitNot,
        .BoolNot,
        .VarDecl,
        .Defer,
        .AddressOf,
        .OptionalType,
        .Negation,
        .NegationWrap,
        .Resume,
        .ArrayType,
        .ArrayTypeSentinel,
        .PtrType,
        .SliceType,
        .Suspend,
        .AnyType,
        .ErrorType,
        .FnProto,
        .AnyFrameType,
        .IntegerLiteral,
        .FloatLiteral,
        .EnumLiteral,
        .StringLiteral,
        .MultilineStringLiteral,
        .CharLiteral,
        .BoolLiteral,
        .NullLiteral,
        .UndefinedLiteral,
        .Unreachable,
        .Identifier,
        .ErrorSetDecl,
        .ContainerDecl,
        .Asm,
        .Add,
        .AddWrap,
        .ArrayCat,
        .ArrayMult,
        .Assign,
        .AssignBitAnd,
        .AssignBitOr,
        .AssignBitShiftLeft,
        .AssignBitShiftRight,
        .AssignBitXor,
        .AssignDiv,
        .AssignSub,
        .AssignSubWrap,
        .AssignMod,
        .AssignAdd,
        .AssignAddWrap,
        .AssignMul,
        .AssignMulWrap,
        .BangEqual,
        .BitAnd,
        .BitOr,
        .BitShiftLeft,
        .BitShiftRight,
        .BitXor,
        .BoolAnd,
        .BoolOr,
        .Div,
        .EqualEqual,
        .ErrorUnion,
        .GreaterOrEqual,
        .GreaterThan,
        .LessOrEqual,
        .LessThan,
        .MergeErrorSets,
        .Mod,
        .Mul,
        .MulWrap,
        .Range,
        .Period,
        .Sub,
        .SubWrap,
        => false,
        // Aggregate initializers must be constructed directly in their
        // result location.
        .ArrayInitializer,
        .ArrayInitializerDot,
        .StructInitializer,
        .StructInitializerDot,
        => true,
        // Parentheses are transparent: defer to the inner expression.
        .GroupedExpression => nodeNeedsMemoryLocation(node.castTag(.GroupedExpression).?.expr),
        .UnwrapOptional => @panic("TODO nodeNeedsMemoryLocation for UnwrapOptional"),
        .Catch => @panic("TODO nodeNeedsMemoryLocation for Catch"),
        .Await => @panic("TODO nodeNeedsMemoryLocation for Await"),
        .Try => @panic("TODO nodeNeedsMemoryLocation for Try"),
        .If => @panic("TODO nodeNeedsMemoryLocation for If"),
        .SuffixOp => @panic("TODO nodeNeedsMemoryLocation for SuffixOp"),
        .Call => @panic("TODO nodeNeedsMemoryLocation for Call"),
        .Switch => @panic("TODO nodeNeedsMemoryLocation for Switch"),
        .While => @panic("TODO nodeNeedsMemoryLocation for While"),
        .For => @panic("TODO nodeNeedsMemoryLocation for For"),
        .BuiltinCall => @panic("TODO nodeNeedsMemoryLocation for BuiltinCall"),
        .Comptime => @panic("TODO nodeNeedsMemoryLocation for Comptime"),
        .Nosuspend => @panic("TODO nodeNeedsMemoryLocation for Nosuspend"),
        .Block => @panic("TODO nodeNeedsMemoryLocation for Block"),
    };
}

View File

@ -73,6 +73,7 @@ pub fn generateSymbol(
.code = code,
.err_msg = null,
.args = mc_args,
.arg_index = 0,
.branch_stack = &branch_stack,
.src = src,
};
@ -255,6 +256,7 @@ const Function = struct {
code: *std.ArrayList(u8),
err_msg: ?*ErrorMsg,
args: []MCValue,
arg_index: usize,
src: usize,
/// Whenever there is a runtime branch, we push a Branch onto this stack,
@ -603,7 +605,9 @@ const Function = struct {
}
fn genArg(self: *Function, inst: *ir.Inst.Arg) !MCValue {
return self.args[inst.args.index];
const i = self.arg_index;
self.arg_index += 1;
return self.args[i];
}
fn genBreakpoint(self: *Function, src: usize, comptime arch: std.Target.Cpu.Arch) !MCValue {

View File

@ -101,10 +101,7 @@ pub const Inst = struct {
pub const Arg = struct {
pub const base_tag = Tag.arg;
base: Inst,
args: struct {
index: usize,
},
args: void,
};
pub const Assembly = struct {

View File

@ -1103,11 +1103,11 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
const enum_ident = try transCreateNodeIdentifier(c, name);
const period_tok = try appendToken(c, .Period, ".");
const field_ident = try transCreateNodeIdentifier(c, field_name);
const field_access_node = try c.arena.create(ast.Node.InfixOp);
const field_access_node = try c.arena.create(ast.Node.SimpleInfixOp);
field_access_node.* = .{
.base = .{ .tag = .Period },
.op_token = period_tok,
.lhs = enum_ident,
.op = .Period,
.rhs = field_ident,
};
cast_node.params()[0] = &field_access_node.base;
@ -1219,7 +1219,7 @@ fn transStmt(
.StringLiteralClass => return transStringLiteral(rp, scope, @ptrCast(*const ZigClangStringLiteral, stmt), result_used),
.ParenExprClass => {
const expr = try transExpr(rp, scope, ZigClangParenExpr_getSubExpr(@ptrCast(*const ZigClangParenExpr, stmt)), .used, lrvalue);
if (expr.id == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
if (expr.tag == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
const node = try rp.c.arena.create(ast.Node.GroupedExpression);
node.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
@ -1264,7 +1264,7 @@ fn transStmt(
.OpaqueValueExprClass => {
const source_expr = ZigClangOpaqueValueExpr_getSourceExpr(@ptrCast(*const ZigClangOpaqueValueExpr, stmt)).?;
const expr = try transExpr(rp, scope, source_expr, .used, lrvalue);
if (expr.id == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
if (expr.tag == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
const node = try rp.c.arena.create(ast.Node.GroupedExpression);
node.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
@ -1294,7 +1294,7 @@ fn transBinaryOperator(
const op = ZigClangBinaryOperator_getOpcode(stmt);
const qt = ZigClangBinaryOperator_getType(stmt);
var op_token: ast.TokenIndex = undefined;
var op_id: ast.Node.InfixOp.Op = undefined;
var op_id: ast.Node.Tag = undefined;
switch (op) {
.Assign => return try transCreateNodeAssign(rp, scope, result_used, ZigClangBinaryOperator_getLHS(stmt), ZigClangBinaryOperator_getRHS(stmt)),
.Comma => {
@ -1693,7 +1693,7 @@ fn transBoolExpr(
var res = try transExpr(rp, scope, expr, used, lrvalue);
if (isBoolRes(res)) {
if (!grouped and res.id == .GroupedExpression) {
if (!grouped and res.tag == .GroupedExpression) {
const group = @fieldParentPtr(ast.Node.GroupedExpression, "base", res);
res = group.expr;
// get zig fmt to work properly
@ -1736,26 +1736,23 @@ fn exprIsStringLiteral(expr: *const ZigClangExpr) bool {
}
fn isBoolRes(res: *ast.Node) bool {
switch (res.id) {
.InfixOp => switch (@fieldParentPtr(ast.Node.InfixOp, "base", res).op) {
.BoolOr,
.BoolAnd,
.EqualEqual,
.BangEqual,
.LessThan,
.GreaterThan,
.LessOrEqual,
.GreaterOrEqual,
=> return true,
switch (res.tag) {
.BoolOr,
.BoolAnd,
.EqualEqual,
.BangEqual,
.LessThan,
.GreaterThan,
.LessOrEqual,
.GreaterOrEqual,
.BoolNot,
.BoolLiteral,
=> return true,
else => {},
},
.BoolNot => return true,
.BoolLiteral => return true,
.GroupedExpression => return isBoolRes(@fieldParentPtr(ast.Node.GroupedExpression, "base", res).expr),
else => {},
else => return false,
}
return false;
}
fn finishBoolExpr(
@ -2312,11 +2309,11 @@ fn transInitListExprArray(
&filler_init_node.base
else blk: {
const mul_tok = try appendToken(rp.c, .AsteriskAsterisk, "**");
const mul_node = try rp.c.arena.create(ast.Node.InfixOp);
const mul_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
mul_node.* = .{
.base = .{ .tag = .ArrayMult },
.op_token = mul_tok,
.lhs = &filler_init_node.base,
.op = .ArrayMult,
.rhs = try transCreateNodeInt(rp.c, leftover_count),
};
break :blk &mul_node.base;
@ -2326,11 +2323,11 @@ fn transInitListExprArray(
return rhs_node;
}
const cat_node = try rp.c.arena.create(ast.Node.InfixOp);
const cat_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
cat_node.* = .{
.base = .{ .tag = .ArrayCat },
.op_token = cat_tok,
.lhs = &init_node.base,
.op = .ArrayCat,
.rhs = rhs_node,
};
return &cat_node.base;
@ -2723,11 +2720,11 @@ fn transCase(
const ellips = try appendToken(rp.c, .Ellipsis3, "...");
const rhs_node = try transExpr(rp, scope, rhs, .used, .r_value);
const node = try rp.c.arena.create(ast.Node.InfixOp);
const node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
node.* = .{
.base = .{ .tag = .Range },
.op_token = ellips,
.lhs = lhs_node,
.op = .Range,
.rhs = rhs_node,
};
break :blk &node.base;
@ -3153,7 +3150,7 @@ fn transCreatePreCrement(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangUnaryOperator,
op: ast.Node.InfixOp.Op,
op: ast.Node.Tag,
op_tok_id: std.zig.Token.Id,
bytes: []const u8,
used: ResultUsed,
@ -3227,7 +3224,7 @@ fn transCreatePostCrement(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangUnaryOperator,
op: ast.Node.InfixOp.Op,
op: ast.Node.Tag,
op_tok_id: std.zig.Token.Id,
bytes: []const u8,
used: ResultUsed,
@ -3349,10 +3346,10 @@ fn transCreateCompoundAssign(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangCompoundAssignOperator,
assign_op: ast.Node.InfixOp.Op,
assign_op: ast.Node.Tag,
assign_tok_id: std.zig.Token.Id,
assign_bytes: []const u8,
bin_op: ast.Node.InfixOp.Op,
bin_op: ast.Node.Tag,
bin_tok_id: std.zig.Token.Id,
bin_bytes: []const u8,
used: ResultUsed,
@ -3377,7 +3374,7 @@ fn transCreateCompoundAssign(
// zig: lhs += rhs
if ((is_mod or is_div) and is_signed) {
const op_token = try appendToken(rp.c, .Equal, "=");
const op_node = try rp.c.arena.create(ast.Node.InfixOp);
const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
const builtin = if (is_mod) "@rem" else "@divTrunc";
const builtin_node = try rp.c.createBuiltinCall(builtin, 2);
const lhs_node = try transExpr(rp, scope, lhs, .used, .l_value);
@ -3386,9 +3383,9 @@ fn transCreateCompoundAssign(
builtin_node.params()[1] = try transExpr(rp, scope, rhs, .used, .r_value);
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
op_node.* = .{
.base = .{ .tag = .Assign },
.op_token = op_token,
.lhs = lhs_node,
.op = .Assign,
.rhs = &builtin_node.base,
};
_ = try appendToken(rp.c, .Semicolon, ";");
@ -3452,7 +3449,7 @@ fn transCreateCompoundAssign(
if ((is_mod or is_div) and is_signed) {
const op_token = try appendToken(rp.c, .Equal, "=");
const op_node = try rp.c.arena.create(ast.Node.InfixOp);
const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
const builtin = if (is_mod) "@rem" else "@divTrunc";
const builtin_node = try rp.c.createBuiltinCall(builtin, 2);
builtin_node.params()[0] = try transCreateNodePtrDeref(rp.c, lhs_node);
@ -3461,9 +3458,9 @@ fn transCreateCompoundAssign(
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
_ = try appendToken(rp.c, .Semicolon, ";");
op_node.* = .{
.base = .{ .tag = .Assign },
.op_token = op_token,
.lhs = ref_node,
.op = .Assign,
.rhs = &builtin_node.base,
};
_ = try appendToken(rp.c, .Semicolon, ";");
@ -3716,11 +3713,11 @@ fn maybeSuppressResult(
}
const lhs = try transCreateNodeIdentifier(rp.c, "_");
const op_token = try appendToken(rp.c, .Equal, "=");
const op_node = try rp.c.arena.create(ast.Node.InfixOp);
const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
op_node.* = .{
.base = .{ .tag = .Assign },
.op_token = op_token,
.lhs = lhs,
.op = .Assign,
.rhs = result,
};
return &op_node.base;
@ -4095,11 +4092,11 @@ fn transCreateNodeAssign(
}
fn transCreateNodeFieldAccess(c: *Context, container: *ast.Node, field_name: []const u8) !*ast.Node {
const field_access_node = try c.arena.create(ast.Node.InfixOp);
const field_access_node = try c.arena.create(ast.Node.SimpleInfixOp);
field_access_node.* = .{
.base = .{ .tag = .Period },
.op_token = try appendToken(c, .Period, "."),
.lhs = container,
.op = .Period,
.rhs = try transCreateNodeIdentifier(c, field_name),
};
return &field_access_node.base;
@ -4107,12 +4104,13 @@ fn transCreateNodeFieldAccess(c: *Context, container: *ast.Node, field_name: []c
fn transCreateNodeSimplePrefixOp(
c: *Context,
comptime tag: ast.Node.Id,
comptime tag: ast.Node.Tag,
op_tok_id: std.zig.Token.Id,
bytes: []const u8,
) !*ast.Node.SimplePrefixOp(tag) {
const node = try c.arena.create(ast.Node.SimplePrefixOp(tag));
) !*ast.Node.SimplePrefixOp {
const node = try c.arena.create(ast.Node.SimplePrefixOp);
node.* = .{
.base = .{ .tag = tag },
.op_token = try appendToken(c, op_tok_id, bytes),
.rhs = undefined, // translate and set afterward
};
@ -4123,7 +4121,7 @@ fn transCreateNodeInfixOp(
rp: RestorePoint,
scope: *Scope,
lhs_node: *ast.Node,
op: ast.Node.InfixOp.Op,
op: ast.Node.Tag,
op_token: ast.TokenIndex,
rhs_node: *ast.Node,
used: ResultUsed,
@ -4133,11 +4131,11 @@ fn transCreateNodeInfixOp(
try appendToken(rp.c, .LParen, "(")
else
null;
const node = try rp.c.arena.create(ast.Node.InfixOp);
const node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
node.* = .{
.base = .{ .tag = op },
.op_token = op_token,
.lhs = lhs_node,
.op = op,
.rhs = rhs_node,
};
if (!grouped) return maybeSuppressResult(rp, scope, used, &node.base);
@ -4155,7 +4153,7 @@ fn transCreateNodeBoolInfixOp(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangBinaryOperator,
op: ast.Node.InfixOp.Op,
op: ast.Node.Tag,
used: ResultUsed,
grouped: bool,
) !*ast.Node {
@ -4535,7 +4533,7 @@ fn transCreateNodeShiftOp(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangBinaryOperator,
op: ast.Node.InfixOp.Op,
op: ast.Node.Tag,
op_tok_id: std.zig.Token.Id,
bytes: []const u8,
) !*ast.Node {
@ -4557,11 +4555,11 @@ fn transCreateNodeShiftOp(
cast_node.params()[1] = rhs;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
const node = try rp.c.arena.create(ast.Node.InfixOp);
const node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
node.* = .{
.base = .{ .tag = op },
.op_token = op_token,
.lhs = lhs,
.op = op,
.rhs = &cast_node.base,
};
@ -5338,10 +5336,10 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
.{@tagName(last.id)},
);
_ = try appendToken(c, .Semicolon, ";");
const type_of_arg = if (expr.id != .Block) expr else blk: {
const type_of_arg = if (expr.tag != .Block) expr else blk: {
const blk = @fieldParentPtr(ast.Node.Block, "base", expr);
const blk_last = blk.statements()[blk.statements_len - 1];
std.debug.assert(blk_last.id == .ControlFlowExpression);
std.debug.assert(blk_last.tag == .ControlFlowExpression);
const br = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", blk_last);
break :blk br.rhs.?;
};
@ -5403,11 +5401,11 @@ fn parseCExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_
// suppress result
const lhs = try transCreateNodeIdentifier(c, "_");
const op_token = try appendToken(c, .Equal, "=");
const op_node = try c.arena.create(ast.Node.InfixOp);
const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
op_node.* = .{
.base = .{ .tag = .Assign },
.op_token = op_token,
.lhs = lhs,
.op = .Assign,
.rhs = last,
};
try block_scope.statements.append(&op_node.base);
@ -5786,9 +5784,60 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
}
}
fn nodeIsInfixOp(tag: ast.Node.Tag) bool {
return switch (tag) {
.Add,
.AddWrap,
.ArrayCat,
.ArrayMult,
.Assign,
.AssignBitAnd,
.AssignBitOr,
.AssignBitShiftLeft,
.AssignBitShiftRight,
.AssignBitXor,
.AssignDiv,
.AssignSub,
.AssignSubWrap,
.AssignMod,
.AssignAdd,
.AssignAddWrap,
.AssignMul,
.AssignMulWrap,
.BangEqual,
.BitAnd,
.BitOr,
.BitShiftLeft,
.BitShiftRight,
.BitXor,
.BoolAnd,
.BoolOr,
.Div,
.EqualEqual,
.ErrorUnion,
.GreaterOrEqual,
.GreaterThan,
.LessOrEqual,
.LessThan,
.MergeErrorSets,
.Mod,
.Mul,
.MulWrap,
.Period,
.Range,
.Sub,
.SubWrap,
.UnwrapOptional,
.Catch,
=> true,
else => false,
};
}
fn macroBoolToInt(c: *Context, node: *ast.Node) !*ast.Node {
if (!isBoolRes(node)) {
if (node.id != .InfixOp) return node;
if (!nodeIsInfixOp(node.tag)) return node;
const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
@ -5807,7 +5856,7 @@ fn macroBoolToInt(c: *Context, node: *ast.Node) !*ast.Node {
fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node {
if (isBoolRes(node)) {
if (node.id != .InfixOp) return node;
if (!nodeIsInfixOp(node.tag)) return node;
const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
@ -5820,11 +5869,11 @@ fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node {
const op_token = try appendToken(c, .BangEqual, "!=");
const zero = try transCreateNodeInt(c, 0);
const res = try c.arena.create(ast.Node.InfixOp);
const res = try c.arena.create(ast.Node.SimpleInfixOp);
res.* = .{
.base = .{ .tag = .BangEqual },
.op_token = op_token,
.lhs = node,
.op = .BangEqual,
.rhs = zero,
};
const group_node = try c.arena.create(ast.Node.GroupedExpression);
@ -5841,7 +5890,7 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
while (true) {
const tok = it.next().?;
var op_token: ast.TokenIndex = undefined;
var op_id: ast.Node.InfixOp.Op = undefined;
var op_id: ast.Node.Tag = undefined;
var bool_op = false;
switch (tok.id) {
.Period => {
@ -6048,11 +6097,11 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
const cast_fn = if (bool_op) macroIntToBool else macroBoolToInt;
const lhs_node = try cast_fn(c, node);
const rhs_node = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
const op_node = try c.arena.create(ast.Node.InfixOp);
const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
op_node.* = .{
.base = .{ .tag = op_id },
.op_token = op_token,
.lhs = lhs_node,
.op = op_id,
.rhs = try cast_fn(c, rhs_node),
};
node = &op_node.base;
@ -6105,7 +6154,7 @@ fn tokenSlice(c: *Context, token: ast.TokenIndex) []u8 {
}
fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
switch (node.id) {
switch (node.tag) {
.ContainerDecl,
.AddressOf,
.Await,
@ -6130,10 +6179,9 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
}
},
.InfixOp => {
const infix = node.cast(ast.Node.InfixOp).?;
if (infix.op != .Period)
return null;
.Period => {
const infix = node.castTag(.Period).?;
if (getContainerTypeOf(c, infix.lhs)) |ty_node| {
if (ty_node.cast(ast.Node.ContainerDecl)) |container| {
for (container.fieldsAndDecls()) |field_ref| {
@ -6160,9 +6208,7 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
return getContainer(c, ty);
}
}
} else if (ref.cast(ast.Node.InfixOp)) |infix| {
if (infix.op != .Period)
return null;
} else if (ref.castTag(.Period)) |infix| {
if (getContainerTypeOf(c, infix.lhs)) |ty_node| {
if (ty_node.cast(ast.Node.ContainerDecl)) |container| {
for (container.fieldsAndDecls()) |field_ref| {
@ -6182,7 +6228,7 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
fn getFnProto(c: *Context, ref: *ast.Node) ?*ast.Node.FnProto {
const init = if (ref.cast(ast.Node.VarDecl)) |v| v.getTrailer("init_node").? else return null;
if (getContainerTypeOf(c, init)) |ty_node| {
if (ty_node.cast(ast.Node.OptionalType)) |prefix| {
if (ty_node.castTag(.OptionalType)) |prefix| {
if (prefix.rhs.cast(ast.Node.FnProto)) |fn_proto| {
return fn_proto;
}

View File

@ -34,7 +34,8 @@ pub const Inst = struct {
/// These names are used directly as the instruction names in the text format.
pub const Tag = enum {
/// Function parameter value.
/// Function parameter value. These must be first in a function's main block,
/// in respective order with the parameters.
arg,
/// A labeled block of code, which can return a value.
block,
@ -184,9 +185,7 @@ pub const Inst = struct {
pub const base_tag = Tag.arg;
base: Inst,
positionals: struct {
index: usize,
},
positionals: struct {},
kw_args: struct {},
};
@ -1384,15 +1383,17 @@ const EmitZIR = struct {
for (src_decls.items) |ir_decl| {
switch (ir_decl.analysis) {
.unreferenced => continue,
.complete => {},
.codegen_failure => {}, // We still can emit the ZIR.
.codegen_failure_retryable => {}, // We still can emit the ZIR.
.in_progress => unreachable,
.outdated => unreachable,
.sema_failure,
.sema_failure_retryable,
.codegen_failure,
.dependency_failure,
.codegen_failure_retryable,
=> if (self.old_module.failed_decls.get(ir_decl)) |err_msg| {
const fail_inst = try self.arena.allocator.create(Inst.CompileError);
fail_inst.* = .{
@ -1728,7 +1729,7 @@ const EmitZIR = struct {
.src = inst.src,
.tag = Inst.Arg.base_tag,
},
.positionals = .{ .index = old_inst.args.index },
.positionals = .{},
.kw_args = .{},
};
break :blk &new_inst.base;