mirror of
https://github.com/ziglang/zig.git
synced 2025-12-06 14:23:09 +00:00
std.zig.Ast: improve type safety
This commits adds the following distinct integer types to std.zig.Ast: - OptionalTokenIndex - TokenOffset - OptionalTokenOffset - Node.OptionalIndex - Node.Offset - Node.OptionalOffset The `Node.Index` type has also been converted to a distinct type while `TokenIndex` remains unchanged. `Ast.Node.Data` has also been changed to a (untagged) union to provide safety checks.
This commit is contained in:
parent
6dcd8f4f75
commit
ca6fb30e99
File diff suppressed because it is too large
Load Diff
@ -220,7 +220,7 @@ pub fn main() !void {
|
|||||||
mem.eql(u8, msg, "unused function parameter") or
|
mem.eql(u8, msg, "unused function parameter") or
|
||||||
mem.eql(u8, msg, "unused capture"))
|
mem.eql(u8, msg, "unused capture"))
|
||||||
{
|
{
|
||||||
const ident_token = item.data.token;
|
const ident_token = item.data.token.unwrap().?;
|
||||||
try more_fixups.unused_var_decls.put(gpa, ident_token, {});
|
try more_fixups.unused_var_decls.put(gpa, ident_token, {});
|
||||||
} else {
|
} else {
|
||||||
std.debug.print("found other ZIR error: '{s}'\n", .{msg});
|
std.debug.print("found other ZIR error: '{s}'\n", .{msg});
|
||||||
|
|||||||
@ -98,29 +98,26 @@ const ScanDeclsAction = enum { add, remove };
|
|||||||
fn scanDecls(w: *Walk, members: []const Ast.Node.Index, action: ScanDeclsAction) Error!void {
|
fn scanDecls(w: *Walk, members: []const Ast.Node.Index, action: ScanDeclsAction) Error!void {
|
||||||
const ast = w.ast;
|
const ast = w.ast;
|
||||||
const gpa = w.gpa;
|
const gpa = w.gpa;
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
|
|
||||||
for (members) |member_node| {
|
for (members) |member_node| {
|
||||||
const name_token = switch (node_tags[member_node]) {
|
const name_token = switch (ast.nodeTag(member_node)) {
|
||||||
.global_var_decl,
|
.global_var_decl,
|
||||||
.local_var_decl,
|
.local_var_decl,
|
||||||
.simple_var_decl,
|
.simple_var_decl,
|
||||||
.aligned_var_decl,
|
.aligned_var_decl,
|
||||||
=> main_tokens[member_node] + 1,
|
=> ast.nodeMainToken(member_node) + 1,
|
||||||
|
|
||||||
.fn_proto_simple,
|
.fn_proto_simple,
|
||||||
.fn_proto_multi,
|
.fn_proto_multi,
|
||||||
.fn_proto_one,
|
.fn_proto_one,
|
||||||
.fn_proto,
|
.fn_proto,
|
||||||
.fn_decl,
|
.fn_decl,
|
||||||
=> main_tokens[member_node] + 1,
|
=> ast.nodeMainToken(member_node) + 1,
|
||||||
|
|
||||||
else => continue,
|
else => continue,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert(token_tags[name_token] == .identifier);
|
assert(ast.tokenTag(name_token) == .identifier);
|
||||||
const name_bytes = ast.tokenSlice(name_token);
|
const name_bytes = ast.tokenSlice(name_token);
|
||||||
|
|
||||||
switch (action) {
|
switch (action) {
|
||||||
@ -145,12 +142,10 @@ fn scanDecls(w: *Walk, members: []const Ast.Node.Index, action: ScanDeclsAction)
|
|||||||
|
|
||||||
fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
|
fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
|
||||||
const ast = w.ast;
|
const ast = w.ast;
|
||||||
const datas = ast.nodes.items(.data);
|
switch (ast.nodeTag(decl)) {
|
||||||
switch (ast.nodes.items(.tag)[decl]) {
|
|
||||||
.fn_decl => {
|
.fn_decl => {
|
||||||
const fn_proto = datas[decl].lhs;
|
const fn_proto, const body_node = ast.nodeData(decl).node_and_node;
|
||||||
try walkExpression(w, fn_proto);
|
try walkExpression(w, fn_proto);
|
||||||
const body_node = datas[decl].rhs;
|
|
||||||
if (!isFnBodyGutted(ast, body_node)) {
|
if (!isFnBodyGutted(ast, body_node)) {
|
||||||
w.replace_names.clearRetainingCapacity();
|
w.replace_names.clearRetainingCapacity();
|
||||||
try w.transformations.append(.{ .gut_function = decl });
|
try w.transformations.append(.{ .gut_function = decl });
|
||||||
@ -167,7 +162,7 @@ fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
|
|||||||
|
|
||||||
.@"usingnamespace" => {
|
.@"usingnamespace" => {
|
||||||
try w.transformations.append(.{ .delete_node = decl });
|
try w.transformations.append(.{ .delete_node = decl });
|
||||||
const expr = datas[decl].lhs;
|
const expr = ast.nodeData(decl).node;
|
||||||
try walkExpression(w, expr);
|
try walkExpression(w, expr);
|
||||||
},
|
},
|
||||||
|
|
||||||
@ -179,7 +174,7 @@ fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
|
|||||||
|
|
||||||
.test_decl => {
|
.test_decl => {
|
||||||
try w.transformations.append(.{ .delete_node = decl });
|
try w.transformations.append(.{ .delete_node = decl });
|
||||||
try walkExpression(w, datas[decl].rhs);
|
try walkExpression(w, ast.nodeData(decl).opt_token_and_node[1]);
|
||||||
},
|
},
|
||||||
|
|
||||||
.container_field_init,
|
.container_field_init,
|
||||||
@ -202,14 +197,10 @@ fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
|
|||||||
|
|
||||||
fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
|
fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
|
||||||
const ast = w.ast;
|
const ast = w.ast;
|
||||||
const token_tags = ast.tokens.items(.tag);
|
switch (ast.nodeTag(node)) {
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
const datas = ast.nodes.items(.data);
|
|
||||||
switch (node_tags[node]) {
|
|
||||||
.identifier => {
|
.identifier => {
|
||||||
const name_ident = main_tokens[node];
|
const name_ident = ast.nodeMainToken(node);
|
||||||
assert(token_tags[name_ident] == .identifier);
|
assert(ast.tokenTag(name_ident) == .identifier);
|
||||||
const name_bytes = ast.tokenSlice(name_ident);
|
const name_bytes = ast.tokenSlice(name_ident);
|
||||||
_ = w.unreferenced_globals.swapRemove(name_bytes);
|
_ = w.unreferenced_globals.swapRemove(name_bytes);
|
||||||
if (w.replace_names.get(name_bytes)) |index| {
|
if (w.replace_names.get(name_bytes)) |index| {
|
||||||
@ -239,46 +230,27 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
|
|||||||
},
|
},
|
||||||
|
|
||||||
.@"errdefer" => {
|
.@"errdefer" => {
|
||||||
const expr = datas[node].rhs;
|
const expr = ast.nodeData(node).opt_token_and_node[1];
|
||||||
return walkExpression(w, expr);
|
return walkExpression(w, expr);
|
||||||
},
|
},
|
||||||
|
|
||||||
.@"defer" => {
|
.@"defer",
|
||||||
const expr = datas[node].rhs;
|
.@"comptime",
|
||||||
return walkExpression(w, expr);
|
.@"nosuspend",
|
||||||
},
|
.@"suspend",
|
||||||
.@"comptime", .@"nosuspend" => {
|
=> {
|
||||||
const block = datas[node].lhs;
|
return walkExpression(w, ast.nodeData(node).node);
|
||||||
return walkExpression(w, block);
|
|
||||||
},
|
|
||||||
|
|
||||||
.@"suspend" => {
|
|
||||||
const body = datas[node].lhs;
|
|
||||||
return walkExpression(w, body);
|
|
||||||
},
|
|
||||||
|
|
||||||
.@"catch" => {
|
|
||||||
try walkExpression(w, datas[node].lhs); // target
|
|
||||||
try walkExpression(w, datas[node].rhs); // fallback
|
|
||||||
},
|
},
|
||||||
|
|
||||||
.field_access => {
|
.field_access => {
|
||||||
const field_access = datas[node];
|
try walkExpression(w, ast.nodeData(node).node_and_token[0]);
|
||||||
try walkExpression(w, field_access.lhs);
|
|
||||||
},
|
},
|
||||||
|
|
||||||
.error_union,
|
|
||||||
.switch_range,
|
|
||||||
=> {
|
|
||||||
const infix = datas[node];
|
|
||||||
try walkExpression(w, infix.lhs);
|
|
||||||
return walkExpression(w, infix.rhs);
|
|
||||||
},
|
|
||||||
.for_range => {
|
.for_range => {
|
||||||
const infix = datas[node];
|
const start, const opt_end = ast.nodeData(node).node_and_opt_node;
|
||||||
try walkExpression(w, infix.lhs);
|
try walkExpression(w, start);
|
||||||
if (infix.rhs != 0) {
|
if (opt_end.unwrap()) |end| {
|
||||||
return walkExpression(w, infix.rhs);
|
return walkExpression(w, end);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
@ -328,17 +300,21 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
|
|||||||
.sub,
|
.sub,
|
||||||
.sub_wrap,
|
.sub_wrap,
|
||||||
.sub_sat,
|
.sub_sat,
|
||||||
|
.@"catch",
|
||||||
|
.error_union,
|
||||||
|
.switch_range,
|
||||||
.@"orelse",
|
.@"orelse",
|
||||||
|
.array_access,
|
||||||
=> {
|
=> {
|
||||||
const infix = datas[node];
|
const lhs, const rhs = ast.nodeData(node).node_and_node;
|
||||||
try walkExpression(w, infix.lhs);
|
try walkExpression(w, lhs);
|
||||||
try walkExpression(w, infix.rhs);
|
try walkExpression(w, rhs);
|
||||||
},
|
},
|
||||||
|
|
||||||
.assign_destructure => {
|
.assign_destructure => {
|
||||||
const full = ast.assignDestructure(node);
|
const full = ast.assignDestructure(node);
|
||||||
for (full.ast.variables) |variable_node| {
|
for (full.ast.variables) |variable_node| {
|
||||||
switch (node_tags[variable_node]) {
|
switch (ast.nodeTag(variable_node)) {
|
||||||
.global_var_decl,
|
.global_var_decl,
|
||||||
.local_var_decl,
|
.local_var_decl,
|
||||||
.simple_var_decl,
|
.simple_var_decl,
|
||||||
@ -357,15 +333,12 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
|
|||||||
.negation_wrap,
|
.negation_wrap,
|
||||||
.optional_type,
|
.optional_type,
|
||||||
.address_of,
|
.address_of,
|
||||||
=> {
|
|
||||||
return walkExpression(w, datas[node].lhs);
|
|
||||||
},
|
|
||||||
|
|
||||||
.@"try",
|
.@"try",
|
||||||
.@"resume",
|
.@"resume",
|
||||||
.@"await",
|
.@"await",
|
||||||
|
.deref,
|
||||||
=> {
|
=> {
|
||||||
return walkExpression(w, datas[node].lhs);
|
return walkExpression(w, ast.nodeData(node).node);
|
||||||
},
|
},
|
||||||
|
|
||||||
.array_type,
|
.array_type,
|
||||||
@ -417,51 +390,40 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
|
|||||||
return walkCall(w, ast.fullCall(&buf, node).?);
|
return walkCall(w, ast.fullCall(&buf, node).?);
|
||||||
},
|
},
|
||||||
|
|
||||||
.array_access => {
|
|
||||||
const suffix = datas[node];
|
|
||||||
try walkExpression(w, suffix.lhs);
|
|
||||||
try walkExpression(w, suffix.rhs);
|
|
||||||
},
|
|
||||||
|
|
||||||
.slice_open, .slice, .slice_sentinel => return walkSlice(w, node, ast.fullSlice(node).?),
|
.slice_open, .slice, .slice_sentinel => return walkSlice(w, node, ast.fullSlice(node).?),
|
||||||
|
|
||||||
.deref => {
|
|
||||||
try walkExpression(w, datas[node].lhs);
|
|
||||||
},
|
|
||||||
|
|
||||||
.unwrap_optional => {
|
.unwrap_optional => {
|
||||||
try walkExpression(w, datas[node].lhs);
|
try walkExpression(w, ast.nodeData(node).node_and_token[0]);
|
||||||
},
|
},
|
||||||
|
|
||||||
.@"break" => {
|
.@"break" => {
|
||||||
const label_token = datas[node].lhs;
|
const label_token, const target = ast.nodeData(node).opt_token_and_opt_node;
|
||||||
const target = datas[node].rhs;
|
if (label_token == .none and target == .none) {
|
||||||
if (label_token == 0 and target == 0) {
|
|
||||||
// no expressions
|
// no expressions
|
||||||
} else if (label_token == 0 and target != 0) {
|
} else if (label_token == .none and target != .none) {
|
||||||
try walkExpression(w, target);
|
try walkExpression(w, target.unwrap().?);
|
||||||
} else if (label_token != 0 and target == 0) {
|
} else if (label_token != .none and target == .none) {
|
||||||
try walkIdentifier(w, label_token);
|
try walkIdentifier(w, label_token.unwrap().?);
|
||||||
} else if (label_token != 0 and target != 0) {
|
} else if (label_token != .none and target != .none) {
|
||||||
try walkExpression(w, target);
|
try walkExpression(w, target.unwrap().?);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
.@"continue" => {
|
.@"continue" => {
|
||||||
const label = datas[node].lhs;
|
const opt_label = ast.nodeData(node).opt_token_and_opt_node[0];
|
||||||
if (label != 0) {
|
if (opt_label.unwrap()) |label| {
|
||||||
return walkIdentifier(w, label); // label
|
return walkIdentifier(w, label);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
.@"return" => {
|
.@"return" => {
|
||||||
if (datas[node].lhs != 0) {
|
if (ast.nodeData(node).opt_node.unwrap()) |lhs| {
|
||||||
try walkExpression(w, datas[node].lhs);
|
try walkExpression(w, lhs);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
.grouped_expression => {
|
.grouped_expression => {
|
||||||
try walkExpression(w, datas[node].lhs);
|
try walkExpression(w, ast.nodeData(node).node_and_token[0]);
|
||||||
},
|
},
|
||||||
|
|
||||||
.container_decl,
|
.container_decl,
|
||||||
@ -482,13 +444,13 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
|
|||||||
},
|
},
|
||||||
|
|
||||||
.error_set_decl => {
|
.error_set_decl => {
|
||||||
const error_token = main_tokens[node];
|
const error_token = ast.nodeMainToken(node);
|
||||||
const lbrace = error_token + 1;
|
const lbrace = error_token + 1;
|
||||||
const rbrace = datas[node].rhs;
|
const rbrace = ast.nodeData(node).token;
|
||||||
|
|
||||||
var i = lbrace + 1;
|
var i = lbrace + 1;
|
||||||
while (i < rbrace) : (i += 1) {
|
while (i < rbrace) : (i += 1) {
|
||||||
switch (token_tags[i]) {
|
switch (ast.tokenTag(i)) {
|
||||||
.doc_comment => unreachable, // TODO
|
.doc_comment => unreachable, // TODO
|
||||||
.identifier => try walkIdentifier(w, i),
|
.identifier => try walkIdentifier(w, i),
|
||||||
.comma => {},
|
.comma => {},
|
||||||
@ -517,20 +479,16 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
|
|||||||
},
|
},
|
||||||
|
|
||||||
.anyframe_type => {
|
.anyframe_type => {
|
||||||
if (datas[node].rhs != 0) {
|
_, const child_type = ast.nodeData(node).token_and_node;
|
||||||
return walkExpression(w, datas[node].rhs);
|
return walkExpression(w, child_type);
|
||||||
}
|
|
||||||
},
|
},
|
||||||
|
|
||||||
.@"switch",
|
.@"switch",
|
||||||
.switch_comma,
|
.switch_comma,
|
||||||
=> {
|
=> {
|
||||||
const condition = datas[node].lhs;
|
const full = ast.fullSwitch(node).?;
|
||||||
const extra = ast.extraData(datas[node].rhs, Ast.Node.SubRange);
|
try walkExpression(w, full.ast.condition); // condition expression
|
||||||
const cases = ast.extra_data[extra.start..extra.end];
|
try walkExpressions(w, full.ast.cases);
|
||||||
|
|
||||||
try walkExpression(w, condition); // condition expression
|
|
||||||
try walkExpressions(w, cases);
|
|
||||||
},
|
},
|
||||||
|
|
||||||
.switch_case_one,
|
.switch_case_one,
|
||||||
@ -557,7 +515,7 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
|
|||||||
=> return walkAsm(w, ast.fullAsm(node).?),
|
=> return walkAsm(w, ast.fullAsm(node).?),
|
||||||
|
|
||||||
.enum_literal => {
|
.enum_literal => {
|
||||||
return walkIdentifier(w, main_tokens[node]); // name
|
return walkIdentifier(w, ast.nodeMainToken(node)); // name
|
||||||
},
|
},
|
||||||
|
|
||||||
.fn_decl => unreachable,
|
.fn_decl => unreachable,
|
||||||
@ -579,66 +537,66 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
|
|||||||
fn walkGlobalVarDecl(w: *Walk, decl_node: Ast.Node.Index, var_decl: Ast.full.VarDecl) Error!void {
|
fn walkGlobalVarDecl(w: *Walk, decl_node: Ast.Node.Index, var_decl: Ast.full.VarDecl) Error!void {
|
||||||
_ = decl_node;
|
_ = decl_node;
|
||||||
|
|
||||||
if (var_decl.ast.type_node != 0) {
|
if (var_decl.ast.type_node.unwrap()) |type_node| {
|
||||||
try walkExpression(w, var_decl.ast.type_node);
|
try walkExpression(w, type_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (var_decl.ast.align_node != 0) {
|
if (var_decl.ast.align_node.unwrap()) |align_node| {
|
||||||
try walkExpression(w, var_decl.ast.align_node);
|
try walkExpression(w, align_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (var_decl.ast.addrspace_node != 0) {
|
if (var_decl.ast.addrspace_node.unwrap()) |addrspace_node| {
|
||||||
try walkExpression(w, var_decl.ast.addrspace_node);
|
try walkExpression(w, addrspace_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (var_decl.ast.section_node != 0) {
|
if (var_decl.ast.section_node.unwrap()) |section_node| {
|
||||||
try walkExpression(w, var_decl.ast.section_node);
|
try walkExpression(w, section_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (var_decl.ast.init_node != 0) {
|
if (var_decl.ast.init_node.unwrap()) |init_node| {
|
||||||
if (!isUndefinedIdent(w.ast, var_decl.ast.init_node)) {
|
if (!isUndefinedIdent(w.ast, init_node)) {
|
||||||
try w.transformations.append(.{ .replace_with_undef = var_decl.ast.init_node });
|
try w.transformations.append(.{ .replace_with_undef = init_node });
|
||||||
}
|
}
|
||||||
try walkExpression(w, var_decl.ast.init_node);
|
try walkExpression(w, init_node);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn walkLocalVarDecl(w: *Walk, var_decl: Ast.full.VarDecl) Error!void {
|
fn walkLocalVarDecl(w: *Walk, var_decl: Ast.full.VarDecl) Error!void {
|
||||||
try walkIdentifierNew(w, var_decl.ast.mut_token + 1); // name
|
try walkIdentifierNew(w, var_decl.ast.mut_token + 1); // name
|
||||||
|
|
||||||
if (var_decl.ast.type_node != 0) {
|
if (var_decl.ast.type_node.unwrap()) |type_node| {
|
||||||
try walkExpression(w, var_decl.ast.type_node);
|
try walkExpression(w, type_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (var_decl.ast.align_node != 0) {
|
if (var_decl.ast.align_node.unwrap()) |align_node| {
|
||||||
try walkExpression(w, var_decl.ast.align_node);
|
try walkExpression(w, align_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (var_decl.ast.addrspace_node != 0) {
|
if (var_decl.ast.addrspace_node.unwrap()) |addrspace_node| {
|
||||||
try walkExpression(w, var_decl.ast.addrspace_node);
|
try walkExpression(w, addrspace_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (var_decl.ast.section_node != 0) {
|
if (var_decl.ast.section_node.unwrap()) |section_node| {
|
||||||
try walkExpression(w, var_decl.ast.section_node);
|
try walkExpression(w, section_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (var_decl.ast.init_node != 0) {
|
if (var_decl.ast.init_node.unwrap()) |init_node| {
|
||||||
if (!isUndefinedIdent(w.ast, var_decl.ast.init_node)) {
|
if (!isUndefinedIdent(w.ast, init_node)) {
|
||||||
try w.transformations.append(.{ .replace_with_undef = var_decl.ast.init_node });
|
try w.transformations.append(.{ .replace_with_undef = init_node });
|
||||||
}
|
}
|
||||||
try walkExpression(w, var_decl.ast.init_node);
|
try walkExpression(w, init_node);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn walkContainerField(w: *Walk, field: Ast.full.ContainerField) Error!void {
|
fn walkContainerField(w: *Walk, field: Ast.full.ContainerField) Error!void {
|
||||||
if (field.ast.type_expr != 0) {
|
if (field.ast.type_expr.unwrap()) |type_expr| {
|
||||||
try walkExpression(w, field.ast.type_expr); // type
|
try walkExpression(w, type_expr); // type
|
||||||
}
|
}
|
||||||
if (field.ast.align_expr != 0) {
|
if (field.ast.align_expr.unwrap()) |align_expr| {
|
||||||
try walkExpression(w, field.ast.align_expr); // alignment
|
try walkExpression(w, align_expr); // alignment
|
||||||
}
|
}
|
||||||
if (field.ast.value_expr != 0) {
|
if (field.ast.value_expr.unwrap()) |value_expr| {
|
||||||
try walkExpression(w, field.ast.value_expr); // value
|
try walkExpression(w, value_expr); // value
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -649,18 +607,17 @@ fn walkBlock(
|
|||||||
) Error!void {
|
) Error!void {
|
||||||
_ = block_node;
|
_ = block_node;
|
||||||
const ast = w.ast;
|
const ast = w.ast;
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
|
|
||||||
for (statements) |stmt| {
|
for (statements) |stmt| {
|
||||||
switch (node_tags[stmt]) {
|
switch (ast.nodeTag(stmt)) {
|
||||||
.global_var_decl,
|
.global_var_decl,
|
||||||
.local_var_decl,
|
.local_var_decl,
|
||||||
.simple_var_decl,
|
.simple_var_decl,
|
||||||
.aligned_var_decl,
|
.aligned_var_decl,
|
||||||
=> {
|
=> {
|
||||||
const var_decl = ast.fullVarDecl(stmt).?;
|
const var_decl = ast.fullVarDecl(stmt).?;
|
||||||
if (var_decl.ast.init_node != 0 and
|
if (var_decl.ast.init_node != .none and
|
||||||
isUndefinedIdent(w.ast, var_decl.ast.init_node))
|
isUndefinedIdent(w.ast, var_decl.ast.init_node.unwrap().?))
|
||||||
{
|
{
|
||||||
try w.transformations.append(.{ .delete_var_decl = .{
|
try w.transformations.append(.{ .delete_var_decl = .{
|
||||||
.var_decl_node = stmt,
|
.var_decl_node = stmt,
|
||||||
@ -691,15 +648,15 @@ fn walkBlock(
|
|||||||
|
|
||||||
fn walkArrayType(w: *Walk, array_type: Ast.full.ArrayType) Error!void {
|
fn walkArrayType(w: *Walk, array_type: Ast.full.ArrayType) Error!void {
|
||||||
try walkExpression(w, array_type.ast.elem_count);
|
try walkExpression(w, array_type.ast.elem_count);
|
||||||
if (array_type.ast.sentinel != 0) {
|
if (array_type.ast.sentinel.unwrap()) |sentinel| {
|
||||||
try walkExpression(w, array_type.ast.sentinel);
|
try walkExpression(w, sentinel);
|
||||||
}
|
}
|
||||||
return walkExpression(w, array_type.ast.elem_type);
|
return walkExpression(w, array_type.ast.elem_type);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn walkArrayInit(w: *Walk, array_init: Ast.full.ArrayInit) Error!void {
|
fn walkArrayInit(w: *Walk, array_init: Ast.full.ArrayInit) Error!void {
|
||||||
if (array_init.ast.type_expr != 0) {
|
if (array_init.ast.type_expr.unwrap()) |type_expr| {
|
||||||
try walkExpression(w, array_init.ast.type_expr); // T
|
try walkExpression(w, type_expr); // T
|
||||||
}
|
}
|
||||||
for (array_init.ast.elements) |elem_init| {
|
for (array_init.ast.elements) |elem_init| {
|
||||||
try walkExpression(w, elem_init);
|
try walkExpression(w, elem_init);
|
||||||
@ -712,8 +669,8 @@ fn walkStructInit(
|
|||||||
struct_init: Ast.full.StructInit,
|
struct_init: Ast.full.StructInit,
|
||||||
) Error!void {
|
) Error!void {
|
||||||
_ = struct_node;
|
_ = struct_node;
|
||||||
if (struct_init.ast.type_expr != 0) {
|
if (struct_init.ast.type_expr.unwrap()) |type_expr| {
|
||||||
try walkExpression(w, struct_init.ast.type_expr); // T
|
try walkExpression(w, type_expr); // T
|
||||||
}
|
}
|
||||||
for (struct_init.ast.fields) |field_init| {
|
for (struct_init.ast.fields) |field_init| {
|
||||||
try walkExpression(w, field_init);
|
try walkExpression(w, field_init);
|
||||||
@ -733,18 +690,17 @@ fn walkSlice(
|
|||||||
_ = slice_node;
|
_ = slice_node;
|
||||||
try walkExpression(w, slice.ast.sliced);
|
try walkExpression(w, slice.ast.sliced);
|
||||||
try walkExpression(w, slice.ast.start);
|
try walkExpression(w, slice.ast.start);
|
||||||
if (slice.ast.end != 0) {
|
if (slice.ast.end.unwrap()) |end| {
|
||||||
try walkExpression(w, slice.ast.end);
|
try walkExpression(w, end);
|
||||||
}
|
}
|
||||||
if (slice.ast.sentinel != 0) {
|
if (slice.ast.sentinel.unwrap()) |sentinel| {
|
||||||
try walkExpression(w, slice.ast.sentinel);
|
try walkExpression(w, sentinel);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn walkIdentifier(w: *Walk, name_ident: Ast.TokenIndex) Error!void {
|
fn walkIdentifier(w: *Walk, name_ident: Ast.TokenIndex) Error!void {
|
||||||
const ast = w.ast;
|
const ast = w.ast;
|
||||||
const token_tags = ast.tokens.items(.tag);
|
assert(ast.tokenTag(name_ident) == .identifier);
|
||||||
assert(token_tags[name_ident] == .identifier);
|
|
||||||
const name_bytes = ast.tokenSlice(name_ident);
|
const name_bytes = ast.tokenSlice(name_ident);
|
||||||
_ = w.unreferenced_globals.swapRemove(name_bytes);
|
_ = w.unreferenced_globals.swapRemove(name_bytes);
|
||||||
}
|
}
|
||||||
@ -760,8 +716,8 @@ fn walkContainerDecl(
|
|||||||
container_decl: Ast.full.ContainerDecl,
|
container_decl: Ast.full.ContainerDecl,
|
||||||
) Error!void {
|
) Error!void {
|
||||||
_ = container_decl_node;
|
_ = container_decl_node;
|
||||||
if (container_decl.ast.arg != 0) {
|
if (container_decl.ast.arg.unwrap()) |arg| {
|
||||||
try walkExpression(w, container_decl.ast.arg);
|
try walkExpression(w, arg);
|
||||||
}
|
}
|
||||||
try walkMembers(w, container_decl.ast.members);
|
try walkMembers(w, container_decl.ast.members);
|
||||||
}
|
}
|
||||||
@ -772,14 +728,13 @@ fn walkBuiltinCall(
|
|||||||
params: []const Ast.Node.Index,
|
params: []const Ast.Node.Index,
|
||||||
) Error!void {
|
) Error!void {
|
||||||
const ast = w.ast;
|
const ast = w.ast;
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
const builtin_token = ast.nodeMainToken(call_node);
|
||||||
const builtin_token = main_tokens[call_node];
|
|
||||||
const builtin_name = ast.tokenSlice(builtin_token);
|
const builtin_name = ast.tokenSlice(builtin_token);
|
||||||
const info = BuiltinFn.list.get(builtin_name).?;
|
const info = BuiltinFn.list.get(builtin_name).?;
|
||||||
switch (info.tag) {
|
switch (info.tag) {
|
||||||
.import => {
|
.import => {
|
||||||
const operand_node = params[0];
|
const operand_node = params[0];
|
||||||
const str_lit_token = main_tokens[operand_node];
|
const str_lit_token = ast.nodeMainToken(operand_node);
|
||||||
const token_bytes = ast.tokenSlice(str_lit_token);
|
const token_bytes = ast.tokenSlice(str_lit_token);
|
||||||
if (std.mem.endsWith(u8, token_bytes, ".zig\"")) {
|
if (std.mem.endsWith(u8, token_bytes, ".zig\"")) {
|
||||||
const imported_string = std.zig.string_literal.parseAlloc(w.arena, token_bytes) catch
|
const imported_string = std.zig.string_literal.parseAlloc(w.arena, token_bytes) catch
|
||||||
@ -808,29 +763,30 @@ fn walkFnProto(w: *Walk, fn_proto: Ast.full.FnProto) Error!void {
|
|||||||
{
|
{
|
||||||
var it = fn_proto.iterate(ast);
|
var it = fn_proto.iterate(ast);
|
||||||
while (it.next()) |param| {
|
while (it.next()) |param| {
|
||||||
if (param.type_expr != 0) {
|
if (param.type_expr) |type_expr| {
|
||||||
try walkExpression(w, param.type_expr);
|
try walkExpression(w, type_expr);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (fn_proto.ast.align_expr != 0) {
|
if (fn_proto.ast.align_expr.unwrap()) |align_expr| {
|
||||||
try walkExpression(w, fn_proto.ast.align_expr);
|
try walkExpression(w, align_expr);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (fn_proto.ast.addrspace_expr != 0) {
|
if (fn_proto.ast.addrspace_expr.unwrap()) |addrspace_expr| {
|
||||||
try walkExpression(w, fn_proto.ast.addrspace_expr);
|
try walkExpression(w, addrspace_expr);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (fn_proto.ast.section_expr != 0) {
|
if (fn_proto.ast.section_expr.unwrap()) |section_expr| {
|
||||||
try walkExpression(w, fn_proto.ast.section_expr);
|
try walkExpression(w, section_expr);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (fn_proto.ast.callconv_expr != 0) {
|
if (fn_proto.ast.callconv_expr.unwrap()) |callconv_expr| {
|
||||||
try walkExpression(w, fn_proto.ast.callconv_expr);
|
try walkExpression(w, callconv_expr);
|
||||||
}
|
}
|
||||||
|
|
||||||
try walkExpression(w, fn_proto.ast.return_type);
|
const return_type = fn_proto.ast.return_type.unwrap().?;
|
||||||
|
try walkExpression(w, return_type);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn walkExpressions(w: *Walk, expressions: []const Ast.Node.Index) Error!void {
|
fn walkExpressions(w: *Walk, expressions: []const Ast.Node.Index) Error!void {
|
||||||
@ -847,16 +803,13 @@ fn walkSwitchCase(w: *Walk, switch_case: Ast.full.SwitchCase) Error!void {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn walkWhile(w: *Walk, node_index: Ast.Node.Index, while_node: Ast.full.While) Error!void {
|
fn walkWhile(w: *Walk, node_index: Ast.Node.Index, while_node: Ast.full.While) Error!void {
|
||||||
assert(while_node.ast.cond_expr != 0);
|
|
||||||
assert(while_node.ast.then_expr != 0);
|
|
||||||
|
|
||||||
// Perform these transformations in this priority order:
|
// Perform these transformations in this priority order:
|
||||||
// 1. If the `else` expression is missing or an empty block, replace the condition with `if (true)` if it is not already.
|
// 1. If the `else` expression is missing or an empty block, replace the condition with `if (true)` if it is not already.
|
||||||
// 2. If the `then` block is empty, replace the condition with `if (false)` if it is not already.
|
// 2. If the `then` block is empty, replace the condition with `if (false)` if it is not already.
|
||||||
// 3. If the condition is `if (true)`, replace the `if` expression with the contents of the `then` expression.
|
// 3. If the condition is `if (true)`, replace the `if` expression with the contents of the `then` expression.
|
||||||
// 4. If the condition is `if (false)`, replace the `if` expression with the contents of the `else` expression.
|
// 4. If the condition is `if (false)`, replace the `if` expression with the contents of the `else` expression.
|
||||||
if (!isTrueIdent(w.ast, while_node.ast.cond_expr) and
|
if (!isTrueIdent(w.ast, while_node.ast.cond_expr) and
|
||||||
(while_node.ast.else_expr == 0 or isEmptyBlock(w.ast, while_node.ast.else_expr)))
|
(while_node.ast.else_expr == .none or isEmptyBlock(w.ast, while_node.ast.else_expr.unwrap().?)))
|
||||||
{
|
{
|
||||||
try w.transformations.ensureUnusedCapacity(1);
|
try w.transformations.ensureUnusedCapacity(1);
|
||||||
w.transformations.appendAssumeCapacity(.{ .replace_with_true = while_node.ast.cond_expr });
|
w.transformations.appendAssumeCapacity(.{ .replace_with_true = while_node.ast.cond_expr });
|
||||||
@ -873,45 +826,39 @@ fn walkWhile(w: *Walk, node_index: Ast.Node.Index, while_node: Ast.full.While) E
|
|||||||
try w.transformations.ensureUnusedCapacity(1);
|
try w.transformations.ensureUnusedCapacity(1);
|
||||||
w.transformations.appendAssumeCapacity(.{ .replace_node = .{
|
w.transformations.appendAssumeCapacity(.{ .replace_node = .{
|
||||||
.to_replace = node_index,
|
.to_replace = node_index,
|
||||||
.replacement = while_node.ast.else_expr,
|
.replacement = while_node.ast.else_expr.unwrap().?,
|
||||||
} });
|
} });
|
||||||
}
|
}
|
||||||
|
|
||||||
try walkExpression(w, while_node.ast.cond_expr); // condition
|
try walkExpression(w, while_node.ast.cond_expr); // condition
|
||||||
|
|
||||||
if (while_node.ast.cont_expr != 0) {
|
if (while_node.ast.cont_expr.unwrap()) |cont_expr| {
|
||||||
try walkExpression(w, while_node.ast.cont_expr);
|
try walkExpression(w, cont_expr);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (while_node.ast.then_expr != 0) {
|
|
||||||
try walkExpression(w, while_node.ast.then_expr);
|
try walkExpression(w, while_node.ast.then_expr);
|
||||||
}
|
|
||||||
if (while_node.ast.else_expr != 0) {
|
if (while_node.ast.else_expr.unwrap()) |else_expr| {
|
||||||
try walkExpression(w, while_node.ast.else_expr);
|
try walkExpression(w, else_expr);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn walkFor(w: *Walk, for_node: Ast.full.For) Error!void {
|
fn walkFor(w: *Walk, for_node: Ast.full.For) Error!void {
|
||||||
try walkParamList(w, for_node.ast.inputs);
|
try walkParamList(w, for_node.ast.inputs);
|
||||||
if (for_node.ast.then_expr != 0) {
|
|
||||||
try walkExpression(w, for_node.ast.then_expr);
|
try walkExpression(w, for_node.ast.then_expr);
|
||||||
}
|
if (for_node.ast.else_expr.unwrap()) |else_expr| {
|
||||||
if (for_node.ast.else_expr != 0) {
|
try walkExpression(w, else_expr);
|
||||||
try walkExpression(w, for_node.ast.else_expr);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn walkIf(w: *Walk, node_index: Ast.Node.Index, if_node: Ast.full.If) Error!void {
|
fn walkIf(w: *Walk, node_index: Ast.Node.Index, if_node: Ast.full.If) Error!void {
|
||||||
assert(if_node.ast.cond_expr != 0);
|
|
||||||
assert(if_node.ast.then_expr != 0);
|
|
||||||
|
|
||||||
// Perform these transformations in this priority order:
|
// Perform these transformations in this priority order:
|
||||||
// 1. If the `else` expression is missing or an empty block, replace the condition with `if (true)` if it is not already.
|
// 1. If the `else` expression is missing or an empty block, replace the condition with `if (true)` if it is not already.
|
||||||
// 2. If the `then` block is empty, replace the condition with `if (false)` if it is not already.
|
// 2. If the `then` block is empty, replace the condition with `if (false)` if it is not already.
|
||||||
// 3. If the condition is `if (true)`, replace the `if` expression with the contents of the `then` expression.
|
// 3. If the condition is `if (true)`, replace the `if` expression with the contents of the `then` expression.
|
||||||
// 4. If the condition is `if (false)`, replace the `if` expression with the contents of the `else` expression.
|
// 4. If the condition is `if (false)`, replace the `if` expression with the contents of the `else` expression.
|
||||||
if (!isTrueIdent(w.ast, if_node.ast.cond_expr) and
|
if (!isTrueIdent(w.ast, if_node.ast.cond_expr) and
|
||||||
(if_node.ast.else_expr == 0 or isEmptyBlock(w.ast, if_node.ast.else_expr)))
|
(if_node.ast.else_expr == .none or isEmptyBlock(w.ast, if_node.ast.else_expr.unwrap().?)))
|
||||||
{
|
{
|
||||||
try w.transformations.ensureUnusedCapacity(1);
|
try w.transformations.ensureUnusedCapacity(1);
|
||||||
w.transformations.appendAssumeCapacity(.{ .replace_with_true = if_node.ast.cond_expr });
|
w.transformations.appendAssumeCapacity(.{ .replace_with_true = if_node.ast.cond_expr });
|
||||||
@ -928,17 +875,14 @@ fn walkIf(w: *Walk, node_index: Ast.Node.Index, if_node: Ast.full.If) Error!void
|
|||||||
try w.transformations.ensureUnusedCapacity(1);
|
try w.transformations.ensureUnusedCapacity(1);
|
||||||
w.transformations.appendAssumeCapacity(.{ .replace_node = .{
|
w.transformations.appendAssumeCapacity(.{ .replace_node = .{
|
||||||
.to_replace = node_index,
|
.to_replace = node_index,
|
||||||
.replacement = if_node.ast.else_expr,
|
.replacement = if_node.ast.else_expr.unwrap().?,
|
||||||
} });
|
} });
|
||||||
}
|
}
|
||||||
|
|
||||||
try walkExpression(w, if_node.ast.cond_expr); // condition
|
try walkExpression(w, if_node.ast.cond_expr); // condition
|
||||||
|
|
||||||
if (if_node.ast.then_expr != 0) {
|
|
||||||
try walkExpression(w, if_node.ast.then_expr);
|
try walkExpression(w, if_node.ast.then_expr);
|
||||||
}
|
if (if_node.ast.else_expr.unwrap()) |else_expr| {
|
||||||
if (if_node.ast.else_expr != 0) {
|
try walkExpression(w, else_expr);
|
||||||
try walkExpression(w, if_node.ast.else_expr);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -958,9 +902,8 @@ fn walkParamList(w: *Walk, params: []const Ast.Node.Index) Error!void {
|
|||||||
/// Check if it is already gutted (i.e. its body replaced with `@trap()`).
|
/// Check if it is already gutted (i.e. its body replaced with `@trap()`).
|
||||||
fn isFnBodyGutted(ast: *const Ast, body_node: Ast.Node.Index) bool {
|
fn isFnBodyGutted(ast: *const Ast, body_node: Ast.Node.Index) bool {
|
||||||
// skip over discards
|
// skip over discards
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
var statements_buf: [2]Ast.Node.Index = undefined;
|
var statements_buf: [2]Ast.Node.Index = undefined;
|
||||||
const statements = switch (node_tags[body_node]) {
|
const statements = switch (ast.nodeTag(body_node)) {
|
||||||
.block_two,
|
.block_two,
|
||||||
.block_two_semicolon,
|
.block_two_semicolon,
|
||||||
.block,
|
.block,
|
||||||
@ -988,10 +931,7 @@ const StmtCategory = enum {
|
|||||||
};
|
};
|
||||||
|
|
||||||
fn categorizeStmt(ast: *const Ast, stmt: Ast.Node.Index) StmtCategory {
|
fn categorizeStmt(ast: *const Ast, stmt: Ast.Node.Index) StmtCategory {
|
||||||
const node_tags = ast.nodes.items(.tag);
|
switch (ast.nodeTag(stmt)) {
|
||||||
const datas = ast.nodes.items(.data);
|
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
|
||||||
switch (node_tags[stmt]) {
|
|
||||||
.builtin_call_two,
|
.builtin_call_two,
|
||||||
.builtin_call_two_comma,
|
.builtin_call_two_comma,
|
||||||
.builtin_call,
|
.builtin_call,
|
||||||
@ -999,12 +939,12 @@ fn categorizeStmt(ast: *const Ast, stmt: Ast.Node.Index) StmtCategory {
|
|||||||
=> {
|
=> {
|
||||||
var buf: [2]Ast.Node.Index = undefined;
|
var buf: [2]Ast.Node.Index = undefined;
|
||||||
const params = ast.builtinCallParams(&buf, stmt).?;
|
const params = ast.builtinCallParams(&buf, stmt).?;
|
||||||
return categorizeBuiltinCall(ast, main_tokens[stmt], params);
|
return categorizeBuiltinCall(ast, ast.nodeMainToken(stmt), params);
|
||||||
},
|
},
|
||||||
.assign => {
|
.assign => {
|
||||||
const infix = datas[stmt];
|
const lhs, const rhs = ast.nodeData(stmt).node_and_node;
|
||||||
if (isDiscardIdent(ast, infix.lhs) and node_tags[infix.rhs] == .identifier) {
|
if (isDiscardIdent(ast, lhs) and ast.nodeTag(rhs) == .identifier) {
|
||||||
const name_bytes = ast.tokenSlice(main_tokens[infix.rhs]);
|
const name_bytes = ast.tokenSlice(ast.nodeMainToken(rhs));
|
||||||
if (std.mem.eql(u8, name_bytes, "undefined")) {
|
if (std.mem.eql(u8, name_bytes, "undefined")) {
|
||||||
return .discard_undefined;
|
return .discard_undefined;
|
||||||
} else {
|
} else {
|
||||||
@ -1046,11 +986,9 @@ fn isFalseIdent(ast: *const Ast, node: Ast.Node.Index) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn isMatchingIdent(ast: *const Ast, node: Ast.Node.Index, string: []const u8) bool {
|
fn isMatchingIdent(ast: *const Ast, node: Ast.Node.Index, string: []const u8) bool {
|
||||||
const node_tags = ast.nodes.items(.tag);
|
switch (ast.nodeTag(node)) {
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
|
||||||
switch (node_tags[node]) {
|
|
||||||
.identifier => {
|
.identifier => {
|
||||||
const token_index = main_tokens[node];
|
const token_index = ast.nodeMainToken(node);
|
||||||
const name_bytes = ast.tokenSlice(token_index);
|
const name_bytes = ast.tokenSlice(token_index);
|
||||||
return std.mem.eql(u8, name_bytes, string);
|
return std.mem.eql(u8, name_bytes, string);
|
||||||
},
|
},
|
||||||
@ -1059,11 +997,10 @@ fn isMatchingIdent(ast: *const Ast, node: Ast.Node.Index, string: []const u8) bo
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn isEmptyBlock(ast: *const Ast, node: Ast.Node.Index) bool {
|
fn isEmptyBlock(ast: *const Ast, node: Ast.Node.Index) bool {
|
||||||
const node_tags = ast.nodes.items(.tag);
|
switch (ast.nodeTag(node)) {
|
||||||
const node_data = ast.nodes.items(.data);
|
|
||||||
switch (node_tags[node]) {
|
|
||||||
.block_two => {
|
.block_two => {
|
||||||
return node_data[node].lhs == 0 and node_data[node].rhs == 0;
|
const opt_lhs, const opt_rhs = ast.nodeData(node).opt_node_and_opt_node;
|
||||||
|
return opt_lhs == .none and opt_rhs == .none;
|
||||||
},
|
},
|
||||||
else => return false,
|
else => return false,
|
||||||
}
|
}
|
||||||
|
|||||||
@ -15,8 +15,7 @@ parent: Index,
|
|||||||
pub const ExtraInfo = struct {
|
pub const ExtraInfo = struct {
|
||||||
is_pub: bool,
|
is_pub: bool,
|
||||||
name: []const u8,
|
name: []const u8,
|
||||||
/// This might not be a doc_comment token in which case there are no doc comments.
|
first_doc_comment: Ast.OptionalTokenIndex,
|
||||||
first_doc_comment: Ast.TokenIndex,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const Index = enum(u32) {
|
pub const Index = enum(u32) {
|
||||||
@ -34,16 +33,14 @@ pub fn is_pub(d: *const Decl) bool {
|
|||||||
|
|
||||||
pub fn extra_info(d: *const Decl) ExtraInfo {
|
pub fn extra_info(d: *const Decl) ExtraInfo {
|
||||||
const ast = d.file.get_ast();
|
const ast = d.file.get_ast();
|
||||||
const token_tags = ast.tokens.items(.tag);
|
switch (ast.nodeTag(d.ast_node)) {
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
switch (node_tags[d.ast_node]) {
|
|
||||||
.root => return .{
|
.root => return .{
|
||||||
.name = "",
|
.name = "",
|
||||||
.is_pub = true,
|
.is_pub = true,
|
||||||
.first_doc_comment = if (token_tags[0] == .container_doc_comment)
|
.first_doc_comment = if (ast.tokenTag(0) == .container_doc_comment)
|
||||||
0
|
.fromToken(0)
|
||||||
else
|
else
|
||||||
token_tags.len - 1,
|
.none,
|
||||||
},
|
},
|
||||||
|
|
||||||
.global_var_decl,
|
.global_var_decl,
|
||||||
@ -53,7 +50,7 @@ pub fn extra_info(d: *const Decl) ExtraInfo {
|
|||||||
=> {
|
=> {
|
||||||
const var_decl = ast.fullVarDecl(d.ast_node).?;
|
const var_decl = ast.fullVarDecl(d.ast_node).?;
|
||||||
const name_token = var_decl.ast.mut_token + 1;
|
const name_token = var_decl.ast.mut_token + 1;
|
||||||
assert(token_tags[name_token] == .identifier);
|
assert(ast.tokenTag(name_token) == .identifier);
|
||||||
const ident_name = ast.tokenSlice(name_token);
|
const ident_name = ast.tokenSlice(name_token);
|
||||||
return .{
|
return .{
|
||||||
.name = ident_name,
|
.name = ident_name,
|
||||||
@ -71,7 +68,7 @@ pub fn extra_info(d: *const Decl) ExtraInfo {
|
|||||||
var buf: [1]Ast.Node.Index = undefined;
|
var buf: [1]Ast.Node.Index = undefined;
|
||||||
const fn_proto = ast.fullFnProto(&buf, d.ast_node).?;
|
const fn_proto = ast.fullFnProto(&buf, d.ast_node).?;
|
||||||
const name_token = fn_proto.name_token.?;
|
const name_token = fn_proto.name_token.?;
|
||||||
assert(token_tags[name_token] == .identifier);
|
assert(ast.tokenTag(name_token) == .identifier);
|
||||||
const ident_name = ast.tokenSlice(name_token);
|
const ident_name = ast.tokenSlice(name_token);
|
||||||
return .{
|
return .{
|
||||||
.name = ident_name,
|
.name = ident_name,
|
||||||
@ -89,9 +86,7 @@ pub fn extra_info(d: *const Decl) ExtraInfo {
|
|||||||
|
|
||||||
pub fn value_node(d: *const Decl) ?Ast.Node.Index {
|
pub fn value_node(d: *const Decl) ?Ast.Node.Index {
|
||||||
const ast = d.file.get_ast();
|
const ast = d.file.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
return switch (ast.nodeTag(d.ast_node)) {
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
return switch (node_tags[d.ast_node]) {
|
|
||||||
.fn_proto,
|
.fn_proto,
|
||||||
.fn_proto_multi,
|
.fn_proto_multi,
|
||||||
.fn_proto_one,
|
.fn_proto_one,
|
||||||
@ -106,8 +101,8 @@ pub fn value_node(d: *const Decl) ?Ast.Node.Index {
|
|||||||
.aligned_var_decl,
|
.aligned_var_decl,
|
||||||
=> {
|
=> {
|
||||||
const var_decl = ast.fullVarDecl(d.ast_node).?;
|
const var_decl = ast.fullVarDecl(d.ast_node).?;
|
||||||
if (token_tags[var_decl.ast.mut_token] == .keyword_const)
|
if (ast.tokenTag(var_decl.ast.mut_token) == .keyword_const)
|
||||||
return var_decl.ast.init_node;
|
return var_decl.ast.init_node.unwrap();
|
||||||
|
|
||||||
return null;
|
return null;
|
||||||
},
|
},
|
||||||
@ -148,20 +143,13 @@ pub fn get_child(decl: *const Decl, name: []const u8) ?Decl.Index {
|
|||||||
pub fn get_type_fn_return_type_fn(decl: *const Decl) ?Decl.Index {
|
pub fn get_type_fn_return_type_fn(decl: *const Decl) ?Decl.Index {
|
||||||
if (decl.get_type_fn_return_expr()) |return_expr| {
|
if (decl.get_type_fn_return_expr()) |return_expr| {
|
||||||
const ast = decl.file.get_ast();
|
const ast = decl.file.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
var buffer: [1]Ast.Node.Index = undefined;
|
||||||
|
const call = ast.fullCall(&buffer, return_expr) orelse return null;
|
||||||
switch (node_tags[return_expr]) {
|
const token = ast.nodeMainToken(call.ast.fn_expr);
|
||||||
.call, .call_comma, .call_one, .call_one_comma => {
|
|
||||||
const node_data = ast.nodes.items(.data);
|
|
||||||
const function = node_data[return_expr].lhs;
|
|
||||||
const token = ast.nodes.items(.main_token)[function];
|
|
||||||
const name = ast.tokenSlice(token);
|
const name = ast.tokenSlice(token);
|
||||||
if (decl.lookup(name)) |function_decl| {
|
if (decl.lookup(name)) |function_decl| {
|
||||||
return function_decl;
|
return function_decl;
|
||||||
}
|
}
|
||||||
},
|
|
||||||
else => {},
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@ -171,36 +159,19 @@ pub fn get_type_fn_return_expr(decl: *const Decl) ?Ast.Node.Index {
|
|||||||
switch (decl.categorize()) {
|
switch (decl.categorize()) {
|
||||||
.type_function => {
|
.type_function => {
|
||||||
const ast = decl.file.get_ast();
|
const ast = decl.file.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
const node_data = ast.nodes.items(.data);
|
|
||||||
const body_node = node_data[decl.ast_node].rhs;
|
|
||||||
if (body_node == 0) return null;
|
|
||||||
|
|
||||||
switch (node_tags[body_node]) {
|
const body_node = ast.nodeData(decl.ast_node).node_and_node[1];
|
||||||
.block, .block_semicolon => {
|
|
||||||
const statements = ast.extra_data[node_data[body_node].lhs..node_data[body_node].rhs];
|
var buf: [2]Ast.Node.Index = undefined;
|
||||||
// Look for the return statement
|
const statements = ast.blockStatements(&buf, body_node) orelse return null;
|
||||||
|
|
||||||
for (statements) |stmt| {
|
for (statements) |stmt| {
|
||||||
if (node_tags[stmt] == .@"return") {
|
if (ast.nodeTag(stmt) == .@"return") {
|
||||||
return node_data[stmt].lhs;
|
return ast.nodeData(stmt).node;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
},
|
},
|
||||||
.block_two, .block_two_semicolon => {
|
|
||||||
if (node_tags[node_data[body_node].lhs] == .@"return") {
|
|
||||||
return node_data[node_data[body_node].lhs].lhs;
|
|
||||||
}
|
|
||||||
if (node_data[body_node].rhs != 0 and
|
|
||||||
node_tags[node_data[body_node].rhs] == .@"return")
|
|
||||||
{
|
|
||||||
return node_data[node_data[body_node].rhs].lhs;
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
},
|
|
||||||
else => return null,
|
|
||||||
}
|
|
||||||
},
|
|
||||||
else => return null,
|
else => return null,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -269,16 +240,15 @@ pub fn append_parent_ns(list: *std.ArrayListUnmanaged(u8), parent: Decl.Index) O
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn findFirstDocComment(ast: *const Ast, token: Ast.TokenIndex) Ast.TokenIndex {
|
pub fn findFirstDocComment(ast: *const Ast, token: Ast.TokenIndex) Ast.OptionalTokenIndex {
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
var it = token;
|
var it = token;
|
||||||
while (it > 0) {
|
while (it > 0) {
|
||||||
it -= 1;
|
it -= 1;
|
||||||
if (token_tags[it] != .doc_comment) {
|
if (ast.tokenTag(it) != .doc_comment) {
|
||||||
return it + 1;
|
return .fromToken(it + 1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return it;
|
return .none;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Successively looks up each component.
|
/// Successively looks up each component.
|
||||||
|
|||||||
@ -91,12 +91,10 @@ pub const File = struct {
|
|||||||
|
|
||||||
pub fn categorize_decl(file_index: File.Index, node: Ast.Node.Index) Category {
|
pub fn categorize_decl(file_index: File.Index, node: Ast.Node.Index) Category {
|
||||||
const ast = file_index.get_ast();
|
const ast = file_index.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
switch (ast.nodeTag(node)) {
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
switch (node_tags[node]) {
|
|
||||||
.root => {
|
.root => {
|
||||||
for (ast.rootDecls()) |member| {
|
for (ast.rootDecls()) |member| {
|
||||||
switch (node_tags[member]) {
|
switch (ast.nodeTag(member)) {
|
||||||
.container_field_init,
|
.container_field_init,
|
||||||
.container_field_align,
|
.container_field_align,
|
||||||
.container_field,
|
.container_field,
|
||||||
@ -113,10 +111,12 @@ pub const File = struct {
|
|||||||
.aligned_var_decl,
|
.aligned_var_decl,
|
||||||
=> {
|
=> {
|
||||||
const var_decl = ast.fullVarDecl(node).?;
|
const var_decl = ast.fullVarDecl(node).?;
|
||||||
if (token_tags[var_decl.ast.mut_token] == .keyword_var)
|
if (ast.tokenTag(var_decl.ast.mut_token) == .keyword_var)
|
||||||
return .{ .global_variable = node };
|
return .{ .global_variable = node };
|
||||||
|
const init_node = var_decl.ast.init_node.unwrap() orelse
|
||||||
|
return .{ .global_const = node };
|
||||||
|
|
||||||
return categorize_expr(file_index, var_decl.ast.init_node);
|
return categorize_expr(file_index, init_node);
|
||||||
},
|
},
|
||||||
|
|
||||||
.fn_proto,
|
.fn_proto,
|
||||||
@ -139,7 +139,7 @@ pub const File = struct {
|
|||||||
node: Ast.Node.Index,
|
node: Ast.Node.Index,
|
||||||
full: Ast.full.FnProto,
|
full: Ast.full.FnProto,
|
||||||
) Category {
|
) Category {
|
||||||
return switch (categorize_expr(file_index, full.ast.return_type)) {
|
return switch (categorize_expr(file_index, full.ast.return_type.unwrap().?)) {
|
||||||
.namespace, .container, .error_set, .type_type => .{ .type_function = node },
|
.namespace, .container, .error_set, .type_type => .{ .type_function = node },
|
||||||
else => .{ .function = node },
|
else => .{ .function = node },
|
||||||
};
|
};
|
||||||
@ -155,12 +155,8 @@ pub const File = struct {
|
|||||||
pub fn categorize_expr(file_index: File.Index, node: Ast.Node.Index) Category {
|
pub fn categorize_expr(file_index: File.Index, node: Ast.Node.Index) Category {
|
||||||
const file = file_index.get();
|
const file = file_index.get();
|
||||||
const ast = file_index.get_ast();
|
const ast = file_index.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
//log.debug("categorize_expr tag {s}", .{@tagName(ast.nodeTag(node))});
|
||||||
const node_datas = ast.nodes.items(.data);
|
return switch (ast.nodeTag(node)) {
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
//log.debug("categorize_expr tag {s}", .{@tagName(node_tags[node])});
|
|
||||||
return switch (node_tags[node]) {
|
|
||||||
.container_decl,
|
.container_decl,
|
||||||
.container_decl_trailing,
|
.container_decl_trailing,
|
||||||
.container_decl_arg,
|
.container_decl_arg,
|
||||||
@ -176,11 +172,11 @@ pub const File = struct {
|
|||||||
=> {
|
=> {
|
||||||
var buf: [2]Ast.Node.Index = undefined;
|
var buf: [2]Ast.Node.Index = undefined;
|
||||||
const container_decl = ast.fullContainerDecl(&buf, node).?;
|
const container_decl = ast.fullContainerDecl(&buf, node).?;
|
||||||
if (token_tags[container_decl.ast.main_token] != .keyword_struct) {
|
if (ast.tokenTag(container_decl.ast.main_token) != .keyword_struct) {
|
||||||
return .{ .container = node };
|
return .{ .container = node };
|
||||||
}
|
}
|
||||||
for (container_decl.ast.members) |member| {
|
for (container_decl.ast.members) |member| {
|
||||||
switch (node_tags[member]) {
|
switch (ast.nodeTag(member)) {
|
||||||
.container_field_init,
|
.container_field_init,
|
||||||
.container_field_align,
|
.container_field_align,
|
||||||
.container_field,
|
.container_field,
|
||||||
@ -196,7 +192,7 @@ pub const File = struct {
|
|||||||
=> .{ .error_set = node },
|
=> .{ .error_set = node },
|
||||||
|
|
||||||
.identifier => {
|
.identifier => {
|
||||||
const name_token = ast.nodes.items(.main_token)[node];
|
const name_token = ast.nodeMainToken(node);
|
||||||
const ident_name = ast.tokenSlice(name_token);
|
const ident_name = ast.tokenSlice(name_token);
|
||||||
if (std.mem.eql(u8, ident_name, "type"))
|
if (std.mem.eql(u8, ident_name, "type"))
|
||||||
return .type_type;
|
return .type_type;
|
||||||
@ -217,9 +213,7 @@ pub const File = struct {
|
|||||||
},
|
},
|
||||||
|
|
||||||
.field_access => {
|
.field_access => {
|
||||||
const object_node = node_datas[node].lhs;
|
const object_node, const field_ident = ast.nodeData(node).node_and_token;
|
||||||
const dot_token = main_tokens[node];
|
|
||||||
const field_ident = dot_token + 1;
|
|
||||||
const field_name = ast.tokenSlice(field_ident);
|
const field_name = ast.tokenSlice(field_ident);
|
||||||
|
|
||||||
switch (categorize_expr(file_index, object_node)) {
|
switch (categorize_expr(file_index, object_node)) {
|
||||||
@ -259,9 +253,9 @@ pub const File = struct {
|
|||||||
.@"if",
|
.@"if",
|
||||||
=> {
|
=> {
|
||||||
const if_full = ast.fullIf(node).?;
|
const if_full = ast.fullIf(node).?;
|
||||||
if (if_full.ast.else_expr != 0) {
|
if (if_full.ast.else_expr.unwrap()) |else_expr| {
|
||||||
const then_cat = categorize_expr_deep(file_index, if_full.ast.then_expr);
|
const then_cat = categorize_expr_deep(file_index, if_full.ast.then_expr);
|
||||||
const else_cat = categorize_expr_deep(file_index, if_full.ast.else_expr);
|
const else_cat = categorize_expr_deep(file_index, else_expr);
|
||||||
if (then_cat == .type_type and else_cat == .type_type) {
|
if (then_cat == .type_type and else_cat == .type_type) {
|
||||||
return .type_type;
|
return .type_type;
|
||||||
} else if (then_cat == .error_set and else_cat == .error_set) {
|
} else if (then_cat == .error_set and else_cat == .error_set) {
|
||||||
@ -320,11 +314,10 @@ pub const File = struct {
|
|||||||
params: []const Ast.Node.Index,
|
params: []const Ast.Node.Index,
|
||||||
) Category {
|
) Category {
|
||||||
const ast = file_index.get_ast();
|
const ast = file_index.get_ast();
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
const builtin_token = ast.nodeMainToken(node);
|
||||||
const builtin_token = main_tokens[node];
|
|
||||||
const builtin_name = ast.tokenSlice(builtin_token);
|
const builtin_name = ast.tokenSlice(builtin_token);
|
||||||
if (std.mem.eql(u8, builtin_name, "@import")) {
|
if (std.mem.eql(u8, builtin_name, "@import")) {
|
||||||
const str_lit_token = main_tokens[params[0]];
|
const str_lit_token = ast.nodeMainToken(params[0]);
|
||||||
const str_bytes = ast.tokenSlice(str_lit_token);
|
const str_bytes = ast.tokenSlice(str_lit_token);
|
||||||
const file_path = std.zig.string_literal.parseAlloc(gpa, str_bytes) catch @panic("OOM");
|
const file_path = std.zig.string_literal.parseAlloc(gpa, str_bytes) catch @panic("OOM");
|
||||||
defer gpa.free(file_path);
|
defer gpa.free(file_path);
|
||||||
@ -357,14 +350,12 @@ pub const File = struct {
|
|||||||
|
|
||||||
fn categorize_switch(file_index: File.Index, node: Ast.Node.Index) Category {
|
fn categorize_switch(file_index: File.Index, node: Ast.Node.Index) Category {
|
||||||
const ast = file_index.get_ast();
|
const ast = file_index.get_ast();
|
||||||
const node_datas = ast.nodes.items(.data);
|
const full = ast.fullSwitch(node).?;
|
||||||
const extra = ast.extraData(node_datas[node].rhs, Ast.Node.SubRange);
|
|
||||||
const case_nodes = ast.extra_data[extra.start..extra.end];
|
|
||||||
var all_type_type = true;
|
var all_type_type = true;
|
||||||
var all_error_set = true;
|
var all_error_set = true;
|
||||||
var any_type = false;
|
var any_type = false;
|
||||||
if (case_nodes.len == 0) return .{ .global_const = node };
|
if (full.ast.cases.len == 0) return .{ .global_const = node };
|
||||||
for (case_nodes) |case_node| {
|
for (full.ast.cases) |case_node| {
|
||||||
const case = ast.fullSwitchCase(case_node).?;
|
const case = ast.fullSwitchCase(case_node).?;
|
||||||
switch (categorize_expr_deep(file_index, case.ast.target_expr)) {
|
switch (categorize_expr_deep(file_index, case.ast.target_expr)) {
|
||||||
.type_type => {
|
.type_type => {
|
||||||
@ -410,8 +401,8 @@ pub fn add_file(file_name: []const u8, bytes: []u8) !File.Index {
|
|||||||
const scope = try gpa.create(Scope);
|
const scope = try gpa.create(Scope);
|
||||||
scope.* = .{ .tag = .top };
|
scope.* = .{ .tag = .top };
|
||||||
|
|
||||||
const decl_index = try file_index.add_decl(0, .none);
|
const decl_index = try file_index.add_decl(.root, .none);
|
||||||
try struct_decl(&w, scope, decl_index, 0, ast.containerDeclRoot());
|
try struct_decl(&w, scope, decl_index, .root, ast.containerDeclRoot());
|
||||||
|
|
||||||
const file = file_index.get();
|
const file = file_index.get();
|
||||||
shrinkToFit(&file.ident_decls);
|
shrinkToFit(&file.ident_decls);
|
||||||
@ -505,13 +496,12 @@ pub const Scope = struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn lookup(start_scope: *Scope, ast: *const Ast, name: []const u8) ?Ast.Node.Index {
|
pub fn lookup(start_scope: *Scope, ast: *const Ast, name: []const u8) ?Ast.Node.Index {
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
|
||||||
var it: *Scope = start_scope;
|
var it: *Scope = start_scope;
|
||||||
while (true) switch (it.tag) {
|
while (true) switch (it.tag) {
|
||||||
.top => break,
|
.top => break,
|
||||||
.local => {
|
.local => {
|
||||||
const local: *Local = @alignCast(@fieldParentPtr("base", it));
|
const local: *Local = @alignCast(@fieldParentPtr("base", it));
|
||||||
const name_token = main_tokens[local.var_node] + 1;
|
const name_token = ast.nodeMainToken(local.var_node) + 1;
|
||||||
const ident_name = ast.tokenSlice(name_token);
|
const ident_name = ast.tokenSlice(name_token);
|
||||||
if (std.mem.eql(u8, ident_name, name)) {
|
if (std.mem.eql(u8, ident_name, name)) {
|
||||||
return local.var_node;
|
return local.var_node;
|
||||||
@ -538,8 +528,6 @@ fn struct_decl(
|
|||||||
container_decl: Ast.full.ContainerDecl,
|
container_decl: Ast.full.ContainerDecl,
|
||||||
) Oom!void {
|
) Oom!void {
|
||||||
const ast = w.file.get_ast();
|
const ast = w.file.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
const node_datas = ast.nodes.items(.data);
|
|
||||||
|
|
||||||
const namespace = try gpa.create(Scope.Namespace);
|
const namespace = try gpa.create(Scope.Namespace);
|
||||||
namespace.* = .{
|
namespace.* = .{
|
||||||
@ -549,7 +537,7 @@ fn struct_decl(
|
|||||||
try w.file.get().scopes.putNoClobber(gpa, node, &namespace.base);
|
try w.file.get().scopes.putNoClobber(gpa, node, &namespace.base);
|
||||||
try w.scanDecls(namespace, container_decl.ast.members);
|
try w.scanDecls(namespace, container_decl.ast.members);
|
||||||
|
|
||||||
for (container_decl.ast.members) |member| switch (node_tags[member]) {
|
for (container_decl.ast.members) |member| switch (ast.nodeTag(member)) {
|
||||||
.container_field_init,
|
.container_field_init,
|
||||||
.container_field_align,
|
.container_field_align,
|
||||||
.container_field,
|
.container_field,
|
||||||
@ -569,7 +557,7 @@ fn struct_decl(
|
|||||||
try w.file.get().doctests.put(gpa, member, doctest_node);
|
try w.file.get().doctests.put(gpa, member, doctest_node);
|
||||||
}
|
}
|
||||||
const decl_index = try w.file.add_decl(member, parent_decl);
|
const decl_index = try w.file.add_decl(member, parent_decl);
|
||||||
const body = if (node_tags[member] == .fn_decl) node_datas[member].rhs else 0;
|
const body = if (ast.nodeTag(member) == .fn_decl) ast.nodeData(member).node_and_node[1].toOptional() else .none;
|
||||||
try w.fn_decl(&namespace.base, decl_index, body, full);
|
try w.fn_decl(&namespace.base, decl_index, body, full);
|
||||||
},
|
},
|
||||||
|
|
||||||
@ -584,9 +572,9 @@ fn struct_decl(
|
|||||||
|
|
||||||
.@"comptime",
|
.@"comptime",
|
||||||
.@"usingnamespace",
|
.@"usingnamespace",
|
||||||
=> try w.expr(&namespace.base, parent_decl, node_datas[member].lhs),
|
=> try w.expr(&namespace.base, parent_decl, ast.nodeData(member).node),
|
||||||
|
|
||||||
.test_decl => try w.expr(&namespace.base, parent_decl, node_datas[member].rhs),
|
.test_decl => try w.expr(&namespace.base, parent_decl, ast.nodeData(member).opt_token_and_node[1]),
|
||||||
|
|
||||||
else => unreachable,
|
else => unreachable,
|
||||||
};
|
};
|
||||||
@ -633,13 +621,13 @@ fn fn_decl(
|
|||||||
w: *Walk,
|
w: *Walk,
|
||||||
scope: *Scope,
|
scope: *Scope,
|
||||||
parent_decl: Decl.Index,
|
parent_decl: Decl.Index,
|
||||||
body: Ast.Node.Index,
|
body: Ast.Node.OptionalIndex,
|
||||||
full: Ast.full.FnProto,
|
full: Ast.full.FnProto,
|
||||||
) Oom!void {
|
) Oom!void {
|
||||||
for (full.ast.params) |param| {
|
for (full.ast.params) |param| {
|
||||||
try expr(w, scope, parent_decl, param);
|
try expr(w, scope, parent_decl, param);
|
||||||
}
|
}
|
||||||
try expr(w, scope, parent_decl, full.ast.return_type);
|
try expr(w, scope, parent_decl, full.ast.return_type.unwrap().?);
|
||||||
try maybe_expr(w, scope, parent_decl, full.ast.align_expr);
|
try maybe_expr(w, scope, parent_decl, full.ast.align_expr);
|
||||||
try maybe_expr(w, scope, parent_decl, full.ast.addrspace_expr);
|
try maybe_expr(w, scope, parent_decl, full.ast.addrspace_expr);
|
||||||
try maybe_expr(w, scope, parent_decl, full.ast.section_expr);
|
try maybe_expr(w, scope, parent_decl, full.ast.section_expr);
|
||||||
@ -647,17 +635,13 @@ fn fn_decl(
|
|||||||
try maybe_expr(w, scope, parent_decl, body);
|
try maybe_expr(w, scope, parent_decl, body);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn maybe_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index) Oom!void {
|
fn maybe_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.OptionalIndex) Oom!void {
|
||||||
if (node != 0) return expr(w, scope, parent_decl, node);
|
if (node.unwrap()) |n| return expr(w, scope, parent_decl, n);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index) Oom!void {
|
fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index) Oom!void {
|
||||||
assert(node != 0);
|
|
||||||
const ast = w.file.get_ast();
|
const ast = w.file.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
switch (ast.nodeTag(node)) {
|
||||||
const node_datas = ast.nodes.items(.data);
|
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
|
||||||
switch (node_tags[node]) {
|
|
||||||
.root => unreachable, // Top-level declaration.
|
.root => unreachable, // Top-level declaration.
|
||||||
.@"usingnamespace" => unreachable, // Top-level declaration.
|
.@"usingnamespace" => unreachable, // Top-level declaration.
|
||||||
.test_decl => unreachable, // Top-level declaration.
|
.test_decl => unreachable, // Top-level declaration.
|
||||||
@ -738,8 +722,9 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
|
|||||||
.array_access,
|
.array_access,
|
||||||
.switch_range,
|
.switch_range,
|
||||||
=> {
|
=> {
|
||||||
try expr(w, scope, parent_decl, node_datas[node].lhs);
|
const lhs, const rhs = ast.nodeData(node).node_and_node;
|
||||||
try expr(w, scope, parent_decl, node_datas[node].rhs);
|
try expr(w, scope, parent_decl, lhs);
|
||||||
|
try expr(w, scope, parent_decl, rhs);
|
||||||
},
|
},
|
||||||
|
|
||||||
.assign_destructure => {
|
.assign_destructure => {
|
||||||
@ -752,35 +737,33 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
|
|||||||
.bit_not,
|
.bit_not,
|
||||||
.negation,
|
.negation,
|
||||||
.negation_wrap,
|
.negation_wrap,
|
||||||
.@"return",
|
|
||||||
.deref,
|
.deref,
|
||||||
.address_of,
|
.address_of,
|
||||||
.optional_type,
|
.optional_type,
|
||||||
.unwrap_optional,
|
|
||||||
.grouped_expression,
|
|
||||||
.@"comptime",
|
.@"comptime",
|
||||||
.@"nosuspend",
|
.@"nosuspend",
|
||||||
.@"suspend",
|
.@"suspend",
|
||||||
.@"await",
|
.@"await",
|
||||||
.@"resume",
|
.@"resume",
|
||||||
.@"try",
|
.@"try",
|
||||||
=> try maybe_expr(w, scope, parent_decl, node_datas[node].lhs),
|
=> try expr(w, scope, parent_decl, ast.nodeData(node).node),
|
||||||
|
.unwrap_optional,
|
||||||
|
.grouped_expression,
|
||||||
|
=> try expr(w, scope, parent_decl, ast.nodeData(node).node_and_token[0]),
|
||||||
|
.@"return" => try maybe_expr(w, scope, parent_decl, ast.nodeData(node).opt_node),
|
||||||
|
|
||||||
.anyframe_type,
|
.anyframe_type => try expr(w, scope, parent_decl, ast.nodeData(node).token_and_node[1]),
|
||||||
.@"break",
|
.@"break" => try maybe_expr(w, scope, parent_decl, ast.nodeData(node).opt_token_and_opt_node[1]),
|
||||||
=> try maybe_expr(w, scope, parent_decl, node_datas[node].rhs),
|
|
||||||
|
|
||||||
.identifier => {
|
.identifier => {
|
||||||
const ident_token = main_tokens[node];
|
const ident_token = ast.nodeMainToken(node);
|
||||||
const ident_name = ast.tokenSlice(ident_token);
|
const ident_name = ast.tokenSlice(ident_token);
|
||||||
if (scope.lookup(ast, ident_name)) |var_node| {
|
if (scope.lookup(ast, ident_name)) |var_node| {
|
||||||
try w.file.get().ident_decls.put(gpa, ident_token, var_node);
|
try w.file.get().ident_decls.put(gpa, ident_token, var_node);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
.field_access => {
|
.field_access => {
|
||||||
const object_node = node_datas[node].lhs;
|
const object_node, const field_ident = ast.nodeData(node).node_and_token;
|
||||||
const dot_token = main_tokens[node];
|
|
||||||
const field_ident = dot_token + 1;
|
|
||||||
try w.file.get().token_parents.put(gpa, field_ident, node);
|
try w.file.get().token_parents.put(gpa, field_ident, node);
|
||||||
// This will populate the left-most field object if it is an
|
// This will populate the left-most field object if it is an
|
||||||
// identifier, allowing rendering code to piece together the link.
|
// identifier, allowing rendering code to piece together the link.
|
||||||
@ -857,9 +840,10 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
|
|||||||
.for_simple, .@"for" => {
|
.for_simple, .@"for" => {
|
||||||
const full = ast.fullFor(node).?;
|
const full = ast.fullFor(node).?;
|
||||||
for (full.ast.inputs) |input| {
|
for (full.ast.inputs) |input| {
|
||||||
if (node_tags[input] == .for_range) {
|
if (ast.nodeTag(input) == .for_range) {
|
||||||
try expr(w, scope, parent_decl, node_datas[input].lhs);
|
const start, const end = ast.nodeData(input).node_and_opt_node;
|
||||||
try maybe_expr(w, scope, parent_decl, node_datas[input].rhs);
|
try expr(w, scope, parent_decl, start);
|
||||||
|
try maybe_expr(w, scope, parent_decl, end);
|
||||||
} else {
|
} else {
|
||||||
try expr(w, scope, parent_decl, input);
|
try expr(w, scope, parent_decl, input);
|
||||||
}
|
}
|
||||||
@ -914,17 +898,16 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
|
|||||||
},
|
},
|
||||||
|
|
||||||
.array_type_sentinel => {
|
.array_type_sentinel => {
|
||||||
const extra = ast.extraData(node_datas[node].rhs, Ast.Node.ArrayTypeSentinel);
|
const len_expr, const extra_index = ast.nodeData(node).node_and_extra;
|
||||||
try expr(w, scope, parent_decl, node_datas[node].lhs);
|
const extra = ast.extraData(extra_index, Ast.Node.ArrayTypeSentinel);
|
||||||
|
try expr(w, scope, parent_decl, len_expr);
|
||||||
try expr(w, scope, parent_decl, extra.elem_type);
|
try expr(w, scope, parent_decl, extra.elem_type);
|
||||||
try expr(w, scope, parent_decl, extra.sentinel);
|
try expr(w, scope, parent_decl, extra.sentinel);
|
||||||
},
|
},
|
||||||
.@"switch", .switch_comma => {
|
.@"switch", .switch_comma => {
|
||||||
const operand_node = node_datas[node].lhs;
|
const full = ast.fullSwitch(node).?;
|
||||||
try expr(w, scope, parent_decl, operand_node);
|
try expr(w, scope, parent_decl, full.ast.condition);
|
||||||
const extra = ast.extraData(node_datas[node].rhs, Ast.Node.SubRange);
|
for (full.ast.cases) |case_node| {
|
||||||
const case_nodes = ast.extra_data[extra.start..extra.end];
|
|
||||||
for (case_nodes) |case_node| {
|
|
||||||
const case = ast.fullSwitchCase(case_node).?;
|
const case = ast.fullSwitchCase(case_node).?;
|
||||||
for (case.ast.values) |value_node| {
|
for (case.ast.values) |value_node| {
|
||||||
try expr(w, scope, parent_decl, value_node);
|
try expr(w, scope, parent_decl, value_node);
|
||||||
@ -973,7 +956,7 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
|
|||||||
.fn_proto,
|
.fn_proto,
|
||||||
=> {
|
=> {
|
||||||
var buf: [1]Ast.Node.Index = undefined;
|
var buf: [1]Ast.Node.Index = undefined;
|
||||||
return fn_decl(w, scope, parent_decl, 0, ast.fullFnProto(&buf, node).?);
|
return fn_decl(w, scope, parent_decl, .none, ast.fullFnProto(&buf, node).?);
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -993,8 +976,7 @@ fn builtin_call(
|
|||||||
params: []const Ast.Node.Index,
|
params: []const Ast.Node.Index,
|
||||||
) Oom!void {
|
) Oom!void {
|
||||||
const ast = w.file.get_ast();
|
const ast = w.file.get_ast();
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
const builtin_token = ast.nodeMainToken(node);
|
||||||
const builtin_token = main_tokens[node];
|
|
||||||
const builtin_name = ast.tokenSlice(builtin_token);
|
const builtin_name = ast.tokenSlice(builtin_token);
|
||||||
if (std.mem.eql(u8, builtin_name, "@This")) {
|
if (std.mem.eql(u8, builtin_name, "@This")) {
|
||||||
try w.file.get().node_decls.put(gpa, node, scope.getNamespaceDecl());
|
try w.file.get().node_decls.put(gpa, node, scope.getNamespaceDecl());
|
||||||
@ -1012,13 +994,11 @@ fn block(
|
|||||||
statements: []const Ast.Node.Index,
|
statements: []const Ast.Node.Index,
|
||||||
) Oom!void {
|
) Oom!void {
|
||||||
const ast = w.file.get_ast();
|
const ast = w.file.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
const node_datas = ast.nodes.items(.data);
|
|
||||||
|
|
||||||
var scope = parent_scope;
|
var scope = parent_scope;
|
||||||
|
|
||||||
for (statements) |node| {
|
for (statements) |node| {
|
||||||
switch (node_tags[node]) {
|
switch (ast.nodeTag(node)) {
|
||||||
.global_var_decl,
|
.global_var_decl,
|
||||||
.local_var_decl,
|
.local_var_decl,
|
||||||
.simple_var_decl,
|
.simple_var_decl,
|
||||||
@ -1039,11 +1019,10 @@ fn block(
|
|||||||
log.debug("walk assign_destructure not implemented yet", .{});
|
log.debug("walk assign_destructure not implemented yet", .{});
|
||||||
},
|
},
|
||||||
|
|
||||||
.grouped_expression => try expr(w, scope, parent_decl, node_datas[node].lhs),
|
.grouped_expression => try expr(w, scope, parent_decl, ast.nodeData(node).node_and_token[0]),
|
||||||
|
|
||||||
.@"defer",
|
.@"defer" => try expr(w, scope, parent_decl, ast.nodeData(node).node),
|
||||||
.@"errdefer",
|
.@"errdefer" => try expr(w, scope, parent_decl, ast.nodeData(node).opt_token_and_node[1]),
|
||||||
=> try expr(w, scope, parent_decl, node_datas[node].rhs),
|
|
||||||
|
|
||||||
else => try expr(w, scope, parent_decl, node),
|
else => try expr(w, scope, parent_decl, node),
|
||||||
}
|
}
|
||||||
@ -1059,18 +1038,14 @@ fn while_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, full: Ast.full.W
|
|||||||
|
|
||||||
fn scanDecls(w: *Walk, namespace: *Scope.Namespace, members: []const Ast.Node.Index) Oom!void {
|
fn scanDecls(w: *Walk, namespace: *Scope.Namespace, members: []const Ast.Node.Index) Oom!void {
|
||||||
const ast = w.file.get_ast();
|
const ast = w.file.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
const node_datas = ast.nodes.items(.data);
|
|
||||||
|
|
||||||
for (members) |member_node| {
|
for (members) |member_node| {
|
||||||
const name_token = switch (node_tags[member_node]) {
|
const name_token = switch (ast.nodeTag(member_node)) {
|
||||||
.global_var_decl,
|
.global_var_decl,
|
||||||
.local_var_decl,
|
.local_var_decl,
|
||||||
.simple_var_decl,
|
.simple_var_decl,
|
||||||
.aligned_var_decl,
|
.aligned_var_decl,
|
||||||
=> main_tokens[member_node] + 1,
|
=> ast.nodeMainToken(member_node) + 1,
|
||||||
|
|
||||||
.fn_proto_simple,
|
.fn_proto_simple,
|
||||||
.fn_proto_multi,
|
.fn_proto_multi,
|
||||||
@ -1078,18 +1053,20 @@ fn scanDecls(w: *Walk, namespace: *Scope.Namespace, members: []const Ast.Node.In
|
|||||||
.fn_proto,
|
.fn_proto,
|
||||||
.fn_decl,
|
.fn_decl,
|
||||||
=> blk: {
|
=> blk: {
|
||||||
const ident = main_tokens[member_node] + 1;
|
const ident = ast.nodeMainToken(member_node) + 1;
|
||||||
if (token_tags[ident] != .identifier) continue;
|
if (ast.tokenTag(ident) != .identifier) continue;
|
||||||
break :blk ident;
|
break :blk ident;
|
||||||
},
|
},
|
||||||
|
|
||||||
.test_decl => {
|
.test_decl => {
|
||||||
const ident_token = node_datas[member_node].lhs;
|
const opt_ident_token = ast.nodeData(member_node).opt_token_and_node[0];
|
||||||
const is_doctest = token_tags[ident_token] == .identifier;
|
if (opt_ident_token.unwrap()) |ident_token| {
|
||||||
|
const is_doctest = ast.tokenTag(ident_token) == .identifier;
|
||||||
if (is_doctest) {
|
if (is_doctest) {
|
||||||
const token_bytes = ast.tokenSlice(ident_token);
|
const token_bytes = ast.tokenSlice(ident_token);
|
||||||
try namespace.doctests.put(gpa, token_bytes, member_node);
|
try namespace.doctests.put(gpa, token_bytes, member_node);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
continue;
|
continue;
|
||||||
},
|
},
|
||||||
|
|
||||||
|
|||||||
@ -41,14 +41,10 @@ pub fn fileSourceHtml(
|
|||||||
var field_access_buffer: std.ArrayListUnmanaged(u8) = .empty;
|
var field_access_buffer: std.ArrayListUnmanaged(u8) = .empty;
|
||||||
};
|
};
|
||||||
|
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
const token_starts = ast.tokens.items(.start);
|
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
|
||||||
|
|
||||||
const start_token = ast.firstToken(root_node);
|
const start_token = ast.firstToken(root_node);
|
||||||
const end_token = ast.lastToken(root_node) + 1;
|
const end_token = ast.lastToken(root_node) + 1;
|
||||||
|
|
||||||
var cursor: usize = token_starts[start_token];
|
var cursor: usize = ast.tokenStart(start_token);
|
||||||
|
|
||||||
var indent: usize = 0;
|
var indent: usize = 0;
|
||||||
if (std.mem.lastIndexOf(u8, ast.source[0..cursor], "\n")) |newline_index| {
|
if (std.mem.lastIndexOf(u8, ast.source[0..cursor], "\n")) |newline_index| {
|
||||||
@ -64,8 +60,8 @@ pub fn fileSourceHtml(
|
|||||||
var next_annotate_index: usize = 0;
|
var next_annotate_index: usize = 0;
|
||||||
|
|
||||||
for (
|
for (
|
||||||
token_tags[start_token..end_token],
|
ast.tokens.items(.tag)[start_token..end_token],
|
||||||
token_starts[start_token..end_token],
|
ast.tokens.items(.start)[start_token..end_token],
|
||||||
start_token..,
|
start_token..,
|
||||||
) |tag, start, token_index| {
|
) |tag, start, token_index| {
|
||||||
const between = ast.source[cursor..start];
|
const between = ast.source[cursor..start];
|
||||||
@ -184,7 +180,7 @@ pub fn fileSourceHtml(
|
|||||||
.identifier => i: {
|
.identifier => i: {
|
||||||
if (options.fn_link != .none) {
|
if (options.fn_link != .none) {
|
||||||
const fn_link = options.fn_link.get();
|
const fn_link = options.fn_link.get();
|
||||||
const fn_token = main_tokens[fn_link.ast_node];
|
const fn_token = ast.nodeMainToken(fn_link.ast_node);
|
||||||
if (token_index == fn_token + 1) {
|
if (token_index == fn_token + 1) {
|
||||||
try out.appendSlice(gpa, "<a class=\"tok-fn\" href=\"#");
|
try out.appendSlice(gpa, "<a class=\"tok-fn\" href=\"#");
|
||||||
_ = missing_feature_url_escape;
|
_ = missing_feature_url_escape;
|
||||||
@ -196,7 +192,7 @@ pub fn fileSourceHtml(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (token_index > 0 and token_tags[token_index - 1] == .keyword_fn) {
|
if (token_index > 0 and ast.tokenTag(token_index - 1) == .keyword_fn) {
|
||||||
try out.appendSlice(gpa, "<span class=\"tok-fn\">");
|
try out.appendSlice(gpa, "<span class=\"tok-fn\">");
|
||||||
try appendEscaped(out, slice);
|
try appendEscaped(out, slice);
|
||||||
try out.appendSlice(gpa, "</span>");
|
try out.appendSlice(gpa, "</span>");
|
||||||
@ -358,16 +354,11 @@ fn walkFieldAccesses(
|
|||||||
node: Ast.Node.Index,
|
node: Ast.Node.Index,
|
||||||
) Oom!void {
|
) Oom!void {
|
||||||
const ast = file_index.get_ast();
|
const ast = file_index.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
assert(ast.nodeTag(node) == .field_access);
|
||||||
assert(node_tags[node] == .field_access);
|
const object_node, const field_ident = ast.nodeData(node).node_and_token;
|
||||||
const node_datas = ast.nodes.items(.data);
|
switch (ast.nodeTag(object_node)) {
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
|
||||||
const object_node = node_datas[node].lhs;
|
|
||||||
const dot_token = main_tokens[node];
|
|
||||||
const field_ident = dot_token + 1;
|
|
||||||
switch (node_tags[object_node]) {
|
|
||||||
.identifier => {
|
.identifier => {
|
||||||
const lhs_ident = main_tokens[object_node];
|
const lhs_ident = ast.nodeMainToken(object_node);
|
||||||
try resolveIdentLink(file_index, out, lhs_ident);
|
try resolveIdentLink(file_index, out, lhs_ident);
|
||||||
},
|
},
|
||||||
.field_access => {
|
.field_access => {
|
||||||
|
|||||||
@ -124,7 +124,9 @@ fn query_exec_fallible(query: []const u8, ignore_case: bool) !void {
|
|||||||
@memcpy(g.full_path_search_text_lower.items, g.full_path_search_text.items);
|
@memcpy(g.full_path_search_text_lower.items, g.full_path_search_text.items);
|
||||||
|
|
||||||
const ast = decl.file.get_ast();
|
const ast = decl.file.get_ast();
|
||||||
try collect_docs(&g.doc_search_text, ast, info.first_doc_comment);
|
if (info.first_doc_comment.unwrap()) |first_doc_comment| {
|
||||||
|
try collect_docs(&g.doc_search_text, ast, first_doc_comment);
|
||||||
|
}
|
||||||
|
|
||||||
if (ignore_case) {
|
if (ignore_case) {
|
||||||
ascii_lower(g.full_path_search_text_lower.items);
|
ascii_lower(g.full_path_search_text_lower.items);
|
||||||
@ -227,18 +229,15 @@ const ErrorIdentifier = packed struct(u64) {
|
|||||||
fn hasDocs(ei: ErrorIdentifier) bool {
|
fn hasDocs(ei: ErrorIdentifier) bool {
|
||||||
const decl_index = ei.decl_index;
|
const decl_index = ei.decl_index;
|
||||||
const ast = decl_index.get().file.get_ast();
|
const ast = decl_index.get().file.get_ast();
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
const token_index = ei.token_index;
|
const token_index = ei.token_index;
|
||||||
if (token_index == 0) return false;
|
if (token_index == 0) return false;
|
||||||
return token_tags[token_index - 1] == .doc_comment;
|
return ast.tokenTag(token_index - 1) == .doc_comment;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn html(ei: ErrorIdentifier, base_decl: Decl.Index, out: *std.ArrayListUnmanaged(u8)) Oom!void {
|
fn html(ei: ErrorIdentifier, base_decl: Decl.Index, out: *std.ArrayListUnmanaged(u8)) Oom!void {
|
||||||
const decl_index = ei.decl_index;
|
const decl_index = ei.decl_index;
|
||||||
const ast = decl_index.get().file.get_ast();
|
const ast = decl_index.get().file.get_ast();
|
||||||
const name = ast.tokenSlice(ei.token_index);
|
const name = ast.tokenSlice(ei.token_index);
|
||||||
const first_doc_comment = Decl.findFirstDocComment(ast, ei.token_index);
|
|
||||||
const has_docs = ast.tokens.items(.tag)[first_doc_comment] == .doc_comment;
|
|
||||||
const has_link = base_decl != decl_index;
|
const has_link = base_decl != decl_index;
|
||||||
|
|
||||||
try out.appendSlice(gpa, "<dt>");
|
try out.appendSlice(gpa, "<dt>");
|
||||||
@ -253,7 +252,7 @@ const ErrorIdentifier = packed struct(u64) {
|
|||||||
}
|
}
|
||||||
try out.appendSlice(gpa, "</dt>");
|
try out.appendSlice(gpa, "</dt>");
|
||||||
|
|
||||||
if (has_docs) {
|
if (Decl.findFirstDocComment(ast, ei.token_index).unwrap()) |first_doc_comment| {
|
||||||
try out.appendSlice(gpa, "<dd>");
|
try out.appendSlice(gpa, "<dd>");
|
||||||
try render_docs(out, decl_index, first_doc_comment, false);
|
try render_docs(out, decl_index, first_doc_comment, false);
|
||||||
try out.appendSlice(gpa, "</dd>");
|
try out.appendSlice(gpa, "</dd>");
|
||||||
@ -319,17 +318,16 @@ fn addErrorsFromExpr(
|
|||||||
) Oom!void {
|
) Oom!void {
|
||||||
const decl = decl_index.get();
|
const decl = decl_index.get();
|
||||||
const ast = decl.file.get_ast();
|
const ast = decl.file.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
const node_datas = ast.nodes.items(.data);
|
|
||||||
|
|
||||||
switch (decl.file.categorize_expr(node)) {
|
switch (decl.file.categorize_expr(node)) {
|
||||||
.error_set => |n| switch (node_tags[n]) {
|
.error_set => |n| switch (ast.nodeTag(n)) {
|
||||||
.error_set_decl => {
|
.error_set_decl => {
|
||||||
try addErrorsFromNode(decl_index, out, node);
|
try addErrorsFromNode(decl_index, out, node);
|
||||||
},
|
},
|
||||||
.merge_error_sets => {
|
.merge_error_sets => {
|
||||||
try addErrorsFromExpr(decl_index, out, node_datas[node].lhs);
|
const lhs, const rhs = ast.nodeData(n).node_and_node;
|
||||||
try addErrorsFromExpr(decl_index, out, node_datas[node].rhs);
|
try addErrorsFromExpr(decl_index, out, lhs);
|
||||||
|
try addErrorsFromExpr(decl_index, out, rhs);
|
||||||
},
|
},
|
||||||
else => unreachable,
|
else => unreachable,
|
||||||
},
|
},
|
||||||
@ -347,11 +345,9 @@ fn addErrorsFromNode(
|
|||||||
) Oom!void {
|
) Oom!void {
|
||||||
const decl = decl_index.get();
|
const decl = decl_index.get();
|
||||||
const ast = decl.file.get_ast();
|
const ast = decl.file.get_ast();
|
||||||
const main_tokens = ast.nodes.items(.main_token);
|
const error_token = ast.nodeMainToken(node);
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
const error_token = main_tokens[node];
|
|
||||||
var tok_i = error_token + 2;
|
var tok_i = error_token + 2;
|
||||||
while (true) : (tok_i += 1) switch (token_tags[tok_i]) {
|
while (true) : (tok_i += 1) switch (ast.tokenTag(tok_i)) {
|
||||||
.doc_comment, .comma => {},
|
.doc_comment, .comma => {},
|
||||||
.identifier => {
|
.identifier => {
|
||||||
const name = ast.tokenSlice(tok_i);
|
const name = ast.tokenSlice(tok_i);
|
||||||
@ -391,15 +387,13 @@ fn decl_fields_fallible(decl_index: Decl.Index) ![]Ast.Node.Index {
|
|||||||
|
|
||||||
switch (decl.categorize()) {
|
switch (decl.categorize()) {
|
||||||
.type_function => {
|
.type_function => {
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
|
|
||||||
// If the type function returns a reference to another type function, get the fields from there
|
// If the type function returns a reference to another type function, get the fields from there
|
||||||
if (decl.get_type_fn_return_type_fn()) |function_decl| {
|
if (decl.get_type_fn_return_type_fn()) |function_decl| {
|
||||||
return decl_fields_fallible(function_decl);
|
return decl_fields_fallible(function_decl);
|
||||||
}
|
}
|
||||||
// If the type function returns a container, such as a `struct`, read that container's fields
|
// If the type function returns a container, such as a `struct`, read that container's fields
|
||||||
if (decl.get_type_fn_return_expr()) |return_expr| {
|
if (decl.get_type_fn_return_expr()) |return_expr| {
|
||||||
switch (node_tags[return_expr]) {
|
switch (ast.nodeTag(return_expr)) {
|
||||||
.container_decl, .container_decl_trailing, .container_decl_two, .container_decl_two_trailing, .container_decl_arg, .container_decl_arg_trailing => {
|
.container_decl, .container_decl_trailing, .container_decl_two, .container_decl_two_trailing, .container_decl_arg, .container_decl_arg_trailing => {
|
||||||
return ast_decl_fields_fallible(ast, return_expr);
|
return ast_decl_fields_fallible(ast, return_expr);
|
||||||
},
|
},
|
||||||
@ -420,10 +414,9 @@ fn ast_decl_fields_fallible(ast: *Ast, ast_index: Ast.Node.Index) ![]Ast.Node.In
|
|||||||
var result: std.ArrayListUnmanaged(Ast.Node.Index) = .empty;
|
var result: std.ArrayListUnmanaged(Ast.Node.Index) = .empty;
|
||||||
};
|
};
|
||||||
g.result.clearRetainingCapacity();
|
g.result.clearRetainingCapacity();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
|
||||||
var buf: [2]Ast.Node.Index = undefined;
|
var buf: [2]Ast.Node.Index = undefined;
|
||||||
const container_decl = ast.fullContainerDecl(&buf, ast_index) orelse return &.{};
|
const container_decl = ast.fullContainerDecl(&buf, ast_index) orelse return &.{};
|
||||||
for (container_decl.ast.members) |member_node| switch (node_tags[member_node]) {
|
for (container_decl.ast.members) |member_node| switch (ast.nodeTag(member_node)) {
|
||||||
.container_field_init,
|
.container_field_init,
|
||||||
.container_field_align,
|
.container_field_align,
|
||||||
.container_field,
|
.container_field,
|
||||||
@ -478,9 +471,8 @@ fn decl_field_html_fallible(
|
|||||||
try out.appendSlice(gpa, "</code></pre>");
|
try out.appendSlice(gpa, "</code></pre>");
|
||||||
|
|
||||||
const field = ast.fullContainerField(field_node).?;
|
const field = ast.fullContainerField(field_node).?;
|
||||||
const first_doc_comment = Decl.findFirstDocComment(ast, field.firstToken());
|
|
||||||
|
|
||||||
if (ast.tokens.items(.tag)[first_doc_comment] == .doc_comment) {
|
if (Decl.findFirstDocComment(ast, field.firstToken()).unwrap()) |first_doc_comment| {
|
||||||
try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
|
try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
|
||||||
try render_docs(out, decl_index, first_doc_comment, false);
|
try render_docs(out, decl_index, first_doc_comment, false);
|
||||||
try out.appendSlice(gpa, "</div>");
|
try out.appendSlice(gpa, "</div>");
|
||||||
@ -494,14 +486,13 @@ fn decl_param_html_fallible(
|
|||||||
) !void {
|
) !void {
|
||||||
const decl = decl_index.get();
|
const decl = decl_index.get();
|
||||||
const ast = decl.file.get_ast();
|
const ast = decl.file.get_ast();
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
const colon = ast.firstToken(param_node) - 1;
|
const colon = ast.firstToken(param_node) - 1;
|
||||||
const name_token = colon - 1;
|
const name_token = colon - 1;
|
||||||
const first_doc_comment = f: {
|
const first_doc_comment = f: {
|
||||||
var it = ast.firstToken(param_node);
|
var it = ast.firstToken(param_node);
|
||||||
while (it > 0) {
|
while (it > 0) {
|
||||||
it -= 1;
|
it -= 1;
|
||||||
switch (token_tags[it]) {
|
switch (ast.tokenTag(it)) {
|
||||||
.doc_comment, .colon, .identifier, .keyword_comptime, .keyword_noalias => {},
|
.doc_comment, .colon, .identifier, .keyword_comptime, .keyword_noalias => {},
|
||||||
else => break,
|
else => break,
|
||||||
}
|
}
|
||||||
@ -516,7 +507,7 @@ fn decl_param_html_fallible(
|
|||||||
try fileSourceHtml(decl.file, out, param_node, .{});
|
try fileSourceHtml(decl.file, out, param_node, .{});
|
||||||
try out.appendSlice(gpa, "</code></pre>");
|
try out.appendSlice(gpa, "</code></pre>");
|
||||||
|
|
||||||
if (ast.tokens.items(.tag)[first_doc_comment] == .doc_comment) {
|
if (ast.tokenTag(first_doc_comment) == .doc_comment) {
|
||||||
try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
|
try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
|
||||||
try render_docs(out, decl_index, first_doc_comment, false);
|
try render_docs(out, decl_index, first_doc_comment, false);
|
||||||
try out.appendSlice(gpa, "</div>");
|
try out.appendSlice(gpa, "</div>");
|
||||||
@ -526,10 +517,8 @@ fn decl_param_html_fallible(
|
|||||||
export fn decl_fn_proto_html(decl_index: Decl.Index, linkify_fn_name: bool) String {
|
export fn decl_fn_proto_html(decl_index: Decl.Index, linkify_fn_name: bool) String {
|
||||||
const decl = decl_index.get();
|
const decl = decl_index.get();
|
||||||
const ast = decl.file.get_ast();
|
const ast = decl.file.get_ast();
|
||||||
const node_tags = ast.nodes.items(.tag);
|
const proto_node = switch (ast.nodeTag(decl.ast_node)) {
|
||||||
const node_datas = ast.nodes.items(.data);
|
.fn_decl => ast.nodeData(decl.ast_node).node_and_node[0],
|
||||||
const proto_node = switch (node_tags[decl.ast_node]) {
|
|
||||||
.fn_decl => node_datas[decl.ast_node].lhs,
|
|
||||||
|
|
||||||
.fn_proto,
|
.fn_proto,
|
||||||
.fn_proto_one,
|
.fn_proto_one,
|
||||||
@ -586,17 +575,16 @@ export fn decl_parent(decl_index: Decl.Index) Decl.Index {
|
|||||||
return decl.parent;
|
return decl.parent;
|
||||||
}
|
}
|
||||||
|
|
||||||
export fn fn_error_set(decl_index: Decl.Index) Ast.Node.Index {
|
export fn fn_error_set(decl_index: Decl.Index) Ast.Node.OptionalIndex {
|
||||||
const decl = decl_index.get();
|
const decl = decl_index.get();
|
||||||
const ast = decl.file.get_ast();
|
const ast = decl.file.get_ast();
|
||||||
var buf: [1]Ast.Node.Index = undefined;
|
var buf: [1]Ast.Node.Index = undefined;
|
||||||
const full = ast.fullFnProto(&buf, decl.ast_node).?;
|
const full = ast.fullFnProto(&buf, decl.ast_node).?;
|
||||||
const node_tags = ast.nodes.items(.tag);
|
const return_type = full.ast.return_type.unwrap().?;
|
||||||
const node_datas = ast.nodes.items(.data);
|
return switch (ast.nodeTag(return_type)) {
|
||||||
return switch (node_tags[full.ast.return_type]) {
|
.error_set_decl => return_type.toOptional(),
|
||||||
.error_set_decl => full.ast.return_type,
|
.error_union => ast.nodeData(return_type).node_and_node[0].toOptional(),
|
||||||
.error_union => node_datas[full.ast.return_type].lhs,
|
else => .none,
|
||||||
else => 0,
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -609,21 +597,19 @@ export fn decl_file_path(decl_index: Decl.Index) String {
|
|||||||
export fn decl_category_name(decl_index: Decl.Index) String {
|
export fn decl_category_name(decl_index: Decl.Index) String {
|
||||||
const decl = decl_index.get();
|
const decl = decl_index.get();
|
||||||
const ast = decl.file.get_ast();
|
const ast = decl.file.get_ast();
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
const name = switch (decl.categorize()) {
|
const name = switch (decl.categorize()) {
|
||||||
.namespace, .container => |node| {
|
.namespace, .container => |node| {
|
||||||
const node_tags = ast.nodes.items(.tag);
|
if (ast.nodeTag(decl.ast_node) == .root)
|
||||||
if (node_tags[decl.ast_node] == .root)
|
|
||||||
return String.init("struct");
|
return String.init("struct");
|
||||||
string_result.clearRetainingCapacity();
|
string_result.clearRetainingCapacity();
|
||||||
var buf: [2]Ast.Node.Index = undefined;
|
var buf: [2]Ast.Node.Index = undefined;
|
||||||
const container_decl = ast.fullContainerDecl(&buf, node).?;
|
const container_decl = ast.fullContainerDecl(&buf, node).?;
|
||||||
if (container_decl.layout_token) |t| {
|
if (container_decl.layout_token) |t| {
|
||||||
if (token_tags[t] == .keyword_extern) {
|
if (ast.tokenTag(t) == .keyword_extern) {
|
||||||
string_result.appendSlice(gpa, "extern ") catch @panic("OOM");
|
string_result.appendSlice(gpa, "extern ") catch @panic("OOM");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
const main_token_tag = token_tags[container_decl.ast.main_token];
|
const main_token_tag = ast.tokenTag(container_decl.ast.main_token);
|
||||||
string_result.appendSlice(gpa, main_token_tag.lexeme().?) catch @panic("OOM");
|
string_result.appendSlice(gpa, main_token_tag.lexeme().?) catch @panic("OOM");
|
||||||
return String.init(string_result.items);
|
return String.init(string_result.items);
|
||||||
},
|
},
|
||||||
@ -656,7 +642,9 @@ export fn decl_name(decl_index: Decl.Index) String {
|
|||||||
export fn decl_docs_html(decl_index: Decl.Index, short: bool) String {
|
export fn decl_docs_html(decl_index: Decl.Index, short: bool) String {
|
||||||
const decl = decl_index.get();
|
const decl = decl_index.get();
|
||||||
string_result.clearRetainingCapacity();
|
string_result.clearRetainingCapacity();
|
||||||
render_docs(&string_result, decl_index, decl.extra_info().first_doc_comment, short) catch @panic("OOM");
|
if (decl.extra_info().first_doc_comment.unwrap()) |first_doc_comment| {
|
||||||
|
render_docs(&string_result, decl_index, first_doc_comment, short) catch @panic("OOM");
|
||||||
|
}
|
||||||
return String.init(string_result.items);
|
return String.init(string_result.items);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -665,10 +653,9 @@ fn collect_docs(
|
|||||||
ast: *const Ast,
|
ast: *const Ast,
|
||||||
first_doc_comment: Ast.TokenIndex,
|
first_doc_comment: Ast.TokenIndex,
|
||||||
) Oom!void {
|
) Oom!void {
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
list.clearRetainingCapacity();
|
list.clearRetainingCapacity();
|
||||||
var it = first_doc_comment;
|
var it = first_doc_comment;
|
||||||
while (true) : (it += 1) switch (token_tags[it]) {
|
while (true) : (it += 1) switch (ast.tokenTag(it)) {
|
||||||
.doc_comment, .container_doc_comment => {
|
.doc_comment, .container_doc_comment => {
|
||||||
// It is tempting to trim this string but think carefully about how
|
// It is tempting to trim this string but think carefully about how
|
||||||
// that will affect the markdown parser.
|
// that will affect the markdown parser.
|
||||||
@ -687,12 +674,11 @@ fn render_docs(
|
|||||||
) Oom!void {
|
) Oom!void {
|
||||||
const decl = decl_index.get();
|
const decl = decl_index.get();
|
||||||
const ast = decl.file.get_ast();
|
const ast = decl.file.get_ast();
|
||||||
const token_tags = ast.tokens.items(.tag);
|
|
||||||
|
|
||||||
var parser = try markdown.Parser.init(gpa);
|
var parser = try markdown.Parser.init(gpa);
|
||||||
defer parser.deinit();
|
defer parser.deinit();
|
||||||
var it = first_doc_comment;
|
var it = first_doc_comment;
|
||||||
while (true) : (it += 1) switch (token_tags[it]) {
|
while (true) : (it += 1) switch (ast.tokenTag(it)) {
|
||||||
.doc_comment, .container_doc_comment => {
|
.doc_comment, .container_doc_comment => {
|
||||||
const line = ast.tokenSlice(it)[3..];
|
const line = ast.tokenSlice(it)[3..];
|
||||||
if (short and line.len == 0) break;
|
if (short and line.len == 0) break;
|
||||||
@ -767,9 +753,9 @@ export fn decl_type_html(decl_index: Decl.Index) String {
|
|||||||
t: {
|
t: {
|
||||||
// If there is an explicit type, use it.
|
// If there is an explicit type, use it.
|
||||||
if (ast.fullVarDecl(decl.ast_node)) |var_decl| {
|
if (ast.fullVarDecl(decl.ast_node)) |var_decl| {
|
||||||
if (var_decl.ast.type_node != 0) {
|
if (var_decl.ast.type_node.unwrap()) |type_node| {
|
||||||
string_result.appendSlice(gpa, "<code>") catch @panic("OOM");
|
string_result.appendSlice(gpa, "<code>") catch @panic("OOM");
|
||||||
fileSourceHtml(decl.file, &string_result, var_decl.ast.type_node, .{
|
fileSourceHtml(decl.file, &string_result, type_node, .{
|
||||||
.skip_comments = true,
|
.skip_comments = true,
|
||||||
.collapse_whitespace = true,
|
.collapse_whitespace = true,
|
||||||
}) catch |e| {
|
}) catch |e| {
|
||||||
|
|||||||
1821
lib/std/zig/Ast.zig
1821
lib/std/zig/Ast.zig
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -92,27 +92,26 @@ fn containerDecl(
|
|||||||
full: Ast.full.ContainerDecl,
|
full: Ast.full.ContainerDecl,
|
||||||
) !void {
|
) !void {
|
||||||
const tree = astrl.tree;
|
const tree = astrl.tree;
|
||||||
const token_tags = tree.tokens.items(.tag);
|
switch (tree.tokenTag(full.ast.main_token)) {
|
||||||
switch (token_tags[full.ast.main_token]) {
|
|
||||||
.keyword_struct => {
|
.keyword_struct => {
|
||||||
if (full.ast.arg != 0) {
|
if (full.ast.arg.unwrap()) |arg| {
|
||||||
_ = try astrl.expr(full.ast.arg, block, ResultInfo.type_only);
|
_ = try astrl.expr(arg, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
for (full.ast.members) |member_node| {
|
for (full.ast.members) |member_node| {
|
||||||
_ = try astrl.expr(member_node, block, ResultInfo.none);
|
_ = try astrl.expr(member_node, block, ResultInfo.none);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
.keyword_union => {
|
.keyword_union => {
|
||||||
if (full.ast.arg != 0) {
|
if (full.ast.arg.unwrap()) |arg| {
|
||||||
_ = try astrl.expr(full.ast.arg, block, ResultInfo.type_only);
|
_ = try astrl.expr(arg, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
for (full.ast.members) |member_node| {
|
for (full.ast.members) |member_node| {
|
||||||
_ = try astrl.expr(member_node, block, ResultInfo.none);
|
_ = try astrl.expr(member_node, block, ResultInfo.none);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
.keyword_enum => {
|
.keyword_enum => {
|
||||||
if (full.ast.arg != 0) {
|
if (full.ast.arg.unwrap()) |arg| {
|
||||||
_ = try astrl.expr(full.ast.arg, block, ResultInfo.type_only);
|
_ = try astrl.expr(arg, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
for (full.ast.members) |member_node| {
|
for (full.ast.members) |member_node| {
|
||||||
_ = try astrl.expr(member_node, block, ResultInfo.none);
|
_ = try astrl.expr(member_node, block, ResultInfo.none);
|
||||||
@ -130,10 +129,7 @@ fn containerDecl(
|
|||||||
/// Returns true if `rl` provides a result pointer and the expression consumes it.
|
/// Returns true if `rl` provides a result pointer and the expression consumes it.
|
||||||
fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultInfo) Allocator.Error!bool {
|
fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultInfo) Allocator.Error!bool {
|
||||||
const tree = astrl.tree;
|
const tree = astrl.tree;
|
||||||
const token_tags = tree.tokens.items(.tag);
|
switch (tree.nodeTag(node)) {
|
||||||
const node_datas = tree.nodes.items(.data);
|
|
||||||
const node_tags = tree.nodes.items(.tag);
|
|
||||||
switch (node_tags[node]) {
|
|
||||||
.root,
|
.root,
|
||||||
.switch_case_one,
|
.switch_case_one,
|
||||||
.switch_case_inline_one,
|
.switch_case_inline_one,
|
||||||
@ -145,8 +141,12 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
.asm_input,
|
.asm_input,
|
||||||
=> unreachable,
|
=> unreachable,
|
||||||
|
|
||||||
.@"errdefer", .@"defer" => {
|
.@"errdefer" => {
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
|
_ = try astrl.expr(tree.nodeData(node).opt_token_and_node[1], block, ResultInfo.none);
|
||||||
|
return false;
|
||||||
|
},
|
||||||
|
.@"defer" => {
|
||||||
|
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
|
|
||||||
@ -155,21 +155,22 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
.container_field,
|
.container_field,
|
||||||
=> {
|
=> {
|
||||||
const full = tree.fullContainerField(node).?;
|
const full = tree.fullContainerField(node).?;
|
||||||
_ = try astrl.expr(full.ast.type_expr, block, ResultInfo.type_only);
|
const type_expr = full.ast.type_expr.unwrap().?;
|
||||||
if (full.ast.align_expr != 0) {
|
_ = try astrl.expr(type_expr, block, ResultInfo.type_only);
|
||||||
_ = try astrl.expr(full.ast.align_expr, block, ResultInfo.type_only);
|
if (full.ast.align_expr.unwrap()) |align_expr| {
|
||||||
|
_ = try astrl.expr(align_expr, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
if (full.ast.value_expr != 0) {
|
if (full.ast.value_expr.unwrap()) |value_expr| {
|
||||||
_ = try astrl.expr(full.ast.value_expr, block, ResultInfo.type_only);
|
_ = try astrl.expr(value_expr, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.@"usingnamespace" => {
|
.@"usingnamespace" => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
|
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.test_decl => {
|
.test_decl => {
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
|
_ = try astrl.expr(tree.nodeData(node).opt_token_and_node[1], block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.global_var_decl,
|
.global_var_decl,
|
||||||
@ -178,17 +179,17 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
.aligned_var_decl,
|
.aligned_var_decl,
|
||||||
=> {
|
=> {
|
||||||
const full = tree.fullVarDecl(node).?;
|
const full = tree.fullVarDecl(node).?;
|
||||||
const init_ri = if (full.ast.type_node != 0) init_ri: {
|
const init_ri = if (full.ast.type_node.unwrap()) |type_node| init_ri: {
|
||||||
_ = try astrl.expr(full.ast.type_node, block, ResultInfo.type_only);
|
_ = try astrl.expr(type_node, block, ResultInfo.type_only);
|
||||||
break :init_ri ResultInfo.typed_ptr;
|
break :init_ri ResultInfo.typed_ptr;
|
||||||
} else ResultInfo.inferred_ptr;
|
} else ResultInfo.inferred_ptr;
|
||||||
if (full.ast.init_node == 0) {
|
const init_node = full.ast.init_node.unwrap() orelse {
|
||||||
// No init node, so we're done.
|
// No init node, so we're done.
|
||||||
return false;
|
return false;
|
||||||
}
|
};
|
||||||
switch (token_tags[full.ast.mut_token]) {
|
switch (tree.tokenTag(full.ast.mut_token)) {
|
||||||
.keyword_const => {
|
.keyword_const => {
|
||||||
const init_consumes_rl = try astrl.expr(full.ast.init_node, block, init_ri);
|
const init_consumes_rl = try astrl.expr(init_node, block, init_ri);
|
||||||
if (init_consumes_rl) {
|
if (init_consumes_rl) {
|
||||||
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
||||||
}
|
}
|
||||||
@ -197,7 +198,7 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
.keyword_var => {
|
.keyword_var => {
|
||||||
// We'll create an alloc either way, so don't care if the
|
// We'll create an alloc either way, so don't care if the
|
||||||
// result pointer is consumed.
|
// result pointer is consumed.
|
||||||
_ = try astrl.expr(full.ast.init_node, block, init_ri);
|
_ = try astrl.expr(init_node, block, init_ri);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
else => unreachable,
|
else => unreachable,
|
||||||
@ -213,8 +214,9 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.assign => {
|
.assign => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const lhs, const rhs = tree.nodeData(node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.typed_ptr);
|
_ = try astrl.expr(lhs, block, ResultInfo.none);
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.typed_ptr);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.assign_shl,
|
.assign_shl,
|
||||||
@ -235,13 +237,15 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
.assign_mul_wrap,
|
.assign_mul_wrap,
|
||||||
.assign_mul_sat,
|
.assign_mul_sat,
|
||||||
=> {
|
=> {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const lhs, const rhs = tree.nodeData(node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
|
_ = try astrl.expr(lhs, block, ResultInfo.none);
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.shl, .shr => {
|
.shl, .shr => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const lhs, const rhs = tree.nodeData(node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
|
_ = try astrl.expr(lhs, block, ResultInfo.none);
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.add,
|
.add,
|
||||||
@ -267,33 +271,38 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
.less_or_equal,
|
.less_or_equal,
|
||||||
.array_cat,
|
.array_cat,
|
||||||
=> {
|
=> {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const lhs, const rhs = tree.nodeData(node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
|
_ = try astrl.expr(lhs, block, ResultInfo.none);
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
|
|
||||||
.array_mult => {
|
.array_mult => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const lhs, const rhs = tree.nodeData(node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
|
_ = try astrl.expr(lhs, block, ResultInfo.none);
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.error_union, .merge_error_sets => {
|
.error_union, .merge_error_sets => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const lhs, const rhs = tree.nodeData(node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
|
_ = try astrl.expr(lhs, block, ResultInfo.none);
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.bool_and,
|
.bool_and,
|
||||||
.bool_or,
|
.bool_or,
|
||||||
=> {
|
=> {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
|
const lhs, const rhs = tree.nodeData(node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
|
_ = try astrl.expr(lhs, block, ResultInfo.type_only);
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.bool_not => {
|
.bool_not => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
|
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.bit_not, .negation, .negation_wrap => {
|
.bit_not, .negation, .negation_wrap => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
|
|
||||||
@ -338,7 +347,7 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
for (full.ast.params) |param_node| {
|
for (full.ast.params) |param_node| {
|
||||||
_ = try astrl.expr(param_node, block, ResultInfo.type_only);
|
_ = try astrl.expr(param_node, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
return switch (node_tags[node]) {
|
return switch (tree.nodeTag(node)) {
|
||||||
.call_one,
|
.call_one,
|
||||||
.call_one_comma,
|
.call_one_comma,
|
||||||
.call,
|
.call,
|
||||||
@ -354,8 +363,8 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
},
|
},
|
||||||
|
|
||||||
.@"return" => {
|
.@"return" => {
|
||||||
if (node_datas[node].lhs != 0) {
|
if (tree.nodeData(node).opt_node.unwrap()) |lhs| {
|
||||||
const ret_val_consumes_rl = try astrl.expr(node_datas[node].lhs, block, ResultInfo.typed_ptr);
|
const ret_val_consumes_rl = try astrl.expr(lhs, block, ResultInfo.typed_ptr);
|
||||||
if (ret_val_consumes_rl) {
|
if (ret_val_consumes_rl) {
|
||||||
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
||||||
}
|
}
|
||||||
@ -364,7 +373,8 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
},
|
},
|
||||||
|
|
||||||
.field_access => {
|
.field_access => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const lhs, _ = tree.nodeData(node).node_and_token;
|
||||||
|
_ = try astrl.expr(lhs, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
|
|
||||||
@ -376,15 +386,15 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
_ = try astrl.expr(full.ast.cond_expr, block, ResultInfo.type_only); // bool
|
_ = try astrl.expr(full.ast.cond_expr, block, ResultInfo.type_only); // bool
|
||||||
}
|
}
|
||||||
|
|
||||||
if (full.ast.else_expr == 0) {
|
if (full.ast.else_expr.unwrap()) |else_expr| {
|
||||||
_ = try astrl.expr(full.ast.then_expr, block, ResultInfo.none);
|
|
||||||
return false;
|
|
||||||
} else {
|
|
||||||
const then_uses_rl = try astrl.expr(full.ast.then_expr, block, ri);
|
const then_uses_rl = try astrl.expr(full.ast.then_expr, block, ri);
|
||||||
const else_uses_rl = try astrl.expr(full.ast.else_expr, block, ri);
|
const else_uses_rl = try astrl.expr(else_expr, block, ri);
|
||||||
const uses_rl = then_uses_rl or else_uses_rl;
|
const uses_rl = then_uses_rl or else_uses_rl;
|
||||||
if (uses_rl) try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
if (uses_rl) try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
||||||
return uses_rl;
|
return uses_rl;
|
||||||
|
} else {
|
||||||
|
_ = try astrl.expr(full.ast.then_expr, block, ResultInfo.none);
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
@ -405,12 +415,12 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
.ri = ri,
|
.ri = ri,
|
||||||
.consumes_res_ptr = false,
|
.consumes_res_ptr = false,
|
||||||
};
|
};
|
||||||
if (full.ast.cont_expr != 0) {
|
if (full.ast.cont_expr.unwrap()) |cont_expr| {
|
||||||
_ = try astrl.expr(full.ast.cont_expr, &new_block, ResultInfo.none);
|
_ = try astrl.expr(cont_expr, &new_block, ResultInfo.none);
|
||||||
}
|
}
|
||||||
_ = try astrl.expr(full.ast.then_expr, &new_block, ResultInfo.none);
|
_ = try astrl.expr(full.ast.then_expr, &new_block, ResultInfo.none);
|
||||||
const else_consumes_rl = if (full.ast.else_expr != 0) else_rl: {
|
const else_consumes_rl = if (full.ast.else_expr.unwrap()) |else_expr| else_rl: {
|
||||||
break :else_rl try astrl.expr(full.ast.else_expr, block, ri);
|
break :else_rl try astrl.expr(else_expr, block, ri);
|
||||||
} else false;
|
} else false;
|
||||||
if (new_block.consumes_res_ptr or else_consumes_rl) {
|
if (new_block.consumes_res_ptr or else_consumes_rl) {
|
||||||
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
||||||
@ -426,10 +436,11 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
break :label try astrl.identString(label_token);
|
break :label try astrl.identString(label_token);
|
||||||
} else null;
|
} else null;
|
||||||
for (full.ast.inputs) |input| {
|
for (full.ast.inputs) |input| {
|
||||||
if (node_tags[input] == .for_range) {
|
if (tree.nodeTag(input) == .for_range) {
|
||||||
_ = try astrl.expr(node_datas[input].lhs, block, ResultInfo.type_only);
|
const lhs, const opt_rhs = tree.nodeData(input).node_and_opt_node;
|
||||||
if (node_datas[input].rhs != 0) {
|
_ = try astrl.expr(lhs, block, ResultInfo.type_only);
|
||||||
_ = try astrl.expr(node_datas[input].rhs, block, ResultInfo.type_only);
|
if (opt_rhs.unwrap()) |rhs| {
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
_ = try astrl.expr(input, block, ResultInfo.none);
|
_ = try astrl.expr(input, block, ResultInfo.none);
|
||||||
@ -443,8 +454,8 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
.consumes_res_ptr = false,
|
.consumes_res_ptr = false,
|
||||||
};
|
};
|
||||||
_ = try astrl.expr(full.ast.then_expr, &new_block, ResultInfo.none);
|
_ = try astrl.expr(full.ast.then_expr, &new_block, ResultInfo.none);
|
||||||
const else_consumes_rl = if (full.ast.else_expr != 0) else_rl: {
|
const else_consumes_rl = if (full.ast.else_expr.unwrap()) |else_expr| else_rl: {
|
||||||
break :else_rl try astrl.expr(full.ast.else_expr, block, ri);
|
break :else_rl try astrl.expr(else_expr, block, ri);
|
||||||
} else false;
|
} else false;
|
||||||
if (new_block.consumes_res_ptr or else_consumes_rl) {
|
if (new_block.consumes_res_ptr or else_consumes_rl) {
|
||||||
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
||||||
@ -455,45 +466,49 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
},
|
},
|
||||||
|
|
||||||
.slice_open => {
|
.slice_open => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const sliced, const start = tree.nodeData(node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
|
_ = try astrl.expr(sliced, block, ResultInfo.none);
|
||||||
|
_ = try astrl.expr(start, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.slice => {
|
.slice => {
|
||||||
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.Slice);
|
const sliced, const extra_index = tree.nodeData(node).node_and_extra;
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const extra = tree.extraData(extra_index, Ast.Node.Slice);
|
||||||
|
_ = try astrl.expr(sliced, block, ResultInfo.none);
|
||||||
_ = try astrl.expr(extra.start, block, ResultInfo.type_only);
|
_ = try astrl.expr(extra.start, block, ResultInfo.type_only);
|
||||||
_ = try astrl.expr(extra.end, block, ResultInfo.type_only);
|
_ = try astrl.expr(extra.end, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.slice_sentinel => {
|
.slice_sentinel => {
|
||||||
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SliceSentinel);
|
const sliced, const extra_index = tree.nodeData(node).node_and_extra;
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const extra = tree.extraData(extra_index, Ast.Node.SliceSentinel);
|
||||||
|
_ = try astrl.expr(sliced, block, ResultInfo.none);
|
||||||
_ = try astrl.expr(extra.start, block, ResultInfo.type_only);
|
_ = try astrl.expr(extra.start, block, ResultInfo.type_only);
|
||||||
if (extra.end != 0) {
|
if (extra.end.unwrap()) |end| {
|
||||||
_ = try astrl.expr(extra.end, block, ResultInfo.type_only);
|
_ = try astrl.expr(end, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
_ = try astrl.expr(extra.sentinel, block, ResultInfo.none);
|
_ = try astrl.expr(extra.sentinel, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.deref => {
|
.deref => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.address_of => {
|
.address_of => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.optional_type => {
|
.optional_type => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
|
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.grouped_expression,
|
|
||||||
.@"try",
|
.@"try",
|
||||||
.@"await",
|
.@"await",
|
||||||
.@"nosuspend",
|
.@"nosuspend",
|
||||||
|
=> return astrl.expr(tree.nodeData(node).node, block, ri),
|
||||||
|
.grouped_expression,
|
||||||
.unwrap_optional,
|
.unwrap_optional,
|
||||||
=> return astrl.expr(node_datas[node].lhs, block, ri),
|
=> return astrl.expr(tree.nodeData(node).node_and_token[0], block, ri),
|
||||||
|
|
||||||
.block_two,
|
.block_two,
|
||||||
.block_two_semicolon,
|
.block_two_semicolon,
|
||||||
@ -505,12 +520,14 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
return astrl.blockExpr(block, ri, node, statements);
|
return astrl.blockExpr(block, ri, node, statements);
|
||||||
},
|
},
|
||||||
.anyframe_type => {
|
.anyframe_type => {
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
|
_, const child_type = tree.nodeData(node).token_and_node;
|
||||||
|
_ = try astrl.expr(child_type, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.@"catch", .@"orelse" => {
|
.@"catch", .@"orelse" => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const lhs, const rhs = tree.nodeData(node).node_and_node;
|
||||||
const rhs_consumes_rl = try astrl.expr(node_datas[node].rhs, block, ri);
|
_ = try astrl.expr(lhs, block, ResultInfo.none);
|
||||||
|
const rhs_consumes_rl = try astrl.expr(rhs, block, ri);
|
||||||
if (rhs_consumes_rl) {
|
if (rhs_consumes_rl) {
|
||||||
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
|
||||||
}
|
}
|
||||||
@ -524,19 +541,19 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
=> {
|
=> {
|
||||||
const full = tree.fullPtrType(node).?;
|
const full = tree.fullPtrType(node).?;
|
||||||
_ = try astrl.expr(full.ast.child_type, block, ResultInfo.type_only);
|
_ = try astrl.expr(full.ast.child_type, block, ResultInfo.type_only);
|
||||||
if (full.ast.sentinel != 0) {
|
if (full.ast.sentinel.unwrap()) |sentinel| {
|
||||||
_ = try astrl.expr(full.ast.sentinel, block, ResultInfo.type_only);
|
_ = try astrl.expr(sentinel, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
if (full.ast.addrspace_node != 0) {
|
if (full.ast.addrspace_node.unwrap()) |addrspace_node| {
|
||||||
_ = try astrl.expr(full.ast.addrspace_node, block, ResultInfo.type_only);
|
_ = try astrl.expr(addrspace_node, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
if (full.ast.align_node != 0) {
|
if (full.ast.align_node.unwrap()) |align_node| {
|
||||||
_ = try astrl.expr(full.ast.align_node, block, ResultInfo.type_only);
|
_ = try astrl.expr(align_node, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
if (full.ast.bit_range_start != 0) {
|
if (full.ast.bit_range_start.unwrap()) |bit_range_start| {
|
||||||
assert(full.ast.bit_range_end != 0);
|
const bit_range_end = full.ast.bit_range_end.unwrap().?;
|
||||||
_ = try astrl.expr(full.ast.bit_range_start, block, ResultInfo.type_only);
|
_ = try astrl.expr(bit_range_start, block, ResultInfo.type_only);
|
||||||
_ = try astrl.expr(full.ast.bit_range_end, block, ResultInfo.type_only);
|
_ = try astrl.expr(bit_range_end, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
@ -560,63 +577,66 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
},
|
},
|
||||||
|
|
||||||
.@"break" => {
|
.@"break" => {
|
||||||
if (node_datas[node].rhs == 0) {
|
const opt_label, const opt_rhs = tree.nodeData(node).opt_token_and_opt_node;
|
||||||
|
const rhs = opt_rhs.unwrap() orelse {
|
||||||
// Breaks with void are not interesting
|
// Breaks with void are not interesting
|
||||||
return false;
|
return false;
|
||||||
}
|
};
|
||||||
|
|
||||||
var opt_cur_block = block;
|
var opt_cur_block = block;
|
||||||
if (node_datas[node].lhs == 0) {
|
if (opt_label.unwrap()) |label_token| {
|
||||||
// No label - we're breaking from a loop.
|
const break_label = try astrl.identString(label_token);
|
||||||
while (opt_cur_block) |cur_block| : (opt_cur_block = cur_block.parent) {
|
|
||||||
if (cur_block.is_loop) break;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
const break_label = try astrl.identString(node_datas[node].lhs);
|
|
||||||
while (opt_cur_block) |cur_block| : (opt_cur_block = cur_block.parent) {
|
while (opt_cur_block) |cur_block| : (opt_cur_block = cur_block.parent) {
|
||||||
const block_label = cur_block.label orelse continue;
|
const block_label = cur_block.label orelse continue;
|
||||||
if (std.mem.eql(u8, block_label, break_label)) break;
|
if (std.mem.eql(u8, block_label, break_label)) break;
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
// No label - we're breaking from a loop.
|
||||||
|
while (opt_cur_block) |cur_block| : (opt_cur_block = cur_block.parent) {
|
||||||
|
if (cur_block.is_loop) break;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (opt_cur_block) |target_block| {
|
if (opt_cur_block) |target_block| {
|
||||||
const consumes_break_rl = try astrl.expr(node_datas[node].rhs, block, target_block.ri);
|
const consumes_break_rl = try astrl.expr(rhs, block, target_block.ri);
|
||||||
if (consumes_break_rl) target_block.consumes_res_ptr = true;
|
if (consumes_break_rl) target_block.consumes_res_ptr = true;
|
||||||
} else {
|
} else {
|
||||||
// No corresponding scope to break from - AstGen will emit an error.
|
// No corresponding scope to break from - AstGen will emit an error.
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
|
_ = try astrl.expr(rhs, block, ResultInfo.none);
|
||||||
}
|
}
|
||||||
|
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
|
|
||||||
.array_type => {
|
.array_type => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
|
const lhs, const rhs = tree.nodeData(node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
|
_ = try astrl.expr(lhs, block, ResultInfo.type_only);
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.array_type_sentinel => {
|
.array_type_sentinel => {
|
||||||
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.ArrayTypeSentinel);
|
const len_expr, const extra_index = tree.nodeData(node).node_and_extra;
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
|
const extra = tree.extraData(extra_index, Ast.Node.ArrayTypeSentinel);
|
||||||
|
_ = try astrl.expr(len_expr, block, ResultInfo.type_only);
|
||||||
_ = try astrl.expr(extra.elem_type, block, ResultInfo.type_only);
|
_ = try astrl.expr(extra.elem_type, block, ResultInfo.type_only);
|
||||||
_ = try astrl.expr(extra.sentinel, block, ResultInfo.type_only);
|
_ = try astrl.expr(extra.sentinel, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.array_access => {
|
.array_access => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
const lhs, const rhs = tree.nodeData(node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
|
_ = try astrl.expr(lhs, block, ResultInfo.none);
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.@"comptime" => {
|
.@"comptime" => {
|
||||||
// AstGen will emit an error if the scope is already comptime, so we can assume it is
|
// AstGen will emit an error if the scope is already comptime, so we can assume it is
|
||||||
// not. This means the result location is not forwarded.
|
// not. This means the result location is not forwarded.
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.@"switch", .switch_comma => {
|
.@"switch", .switch_comma => {
|
||||||
const operand_node = node_datas[node].lhs;
|
const operand_node, const extra_index = tree.nodeData(node).node_and_extra;
|
||||||
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SubRange);
|
const case_nodes = tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Ast.Node.Index);
|
||||||
const case_nodes = tree.extra_data[extra.start..extra.end];
|
|
||||||
|
|
||||||
_ = try astrl.expr(operand_node, block, ResultInfo.none);
|
_ = try astrl.expr(operand_node, block, ResultInfo.none);
|
||||||
|
|
||||||
@ -624,9 +644,10 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
for (case_nodes) |case_node| {
|
for (case_nodes) |case_node| {
|
||||||
const case = tree.fullSwitchCase(case_node).?;
|
const case = tree.fullSwitchCase(case_node).?;
|
||||||
for (case.ast.values) |item_node| {
|
for (case.ast.values) |item_node| {
|
||||||
if (node_tags[item_node] == .switch_range) {
|
if (tree.nodeTag(item_node) == .switch_range) {
|
||||||
_ = try astrl.expr(node_datas[item_node].lhs, block, ResultInfo.none);
|
const lhs, const rhs = tree.nodeData(item_node).node_and_node;
|
||||||
_ = try astrl.expr(node_datas[item_node].rhs, block, ResultInfo.none);
|
_ = try astrl.expr(lhs, block, ResultInfo.none);
|
||||||
|
_ = try astrl.expr(rhs, block, ResultInfo.none);
|
||||||
} else {
|
} else {
|
||||||
_ = try astrl.expr(item_node, block, ResultInfo.none);
|
_ = try astrl.expr(item_node, block, ResultInfo.none);
|
||||||
}
|
}
|
||||||
@ -641,11 +662,11 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
return any_prong_consumed_rl;
|
return any_prong_consumed_rl;
|
||||||
},
|
},
|
||||||
.@"suspend" => {
|
.@"suspend" => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
.@"resume" => {
|
.@"resume" => {
|
||||||
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
|
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
|
|
||||||
@ -661,9 +682,9 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
var buf: [2]Ast.Node.Index = undefined;
|
var buf: [2]Ast.Node.Index = undefined;
|
||||||
const full = tree.fullArrayInit(&buf, node).?;
|
const full = tree.fullArrayInit(&buf, node).?;
|
||||||
|
|
||||||
if (full.ast.type_expr != 0) {
|
if (full.ast.type_expr.unwrap()) |type_expr| {
|
||||||
// Explicitly typed init does not participate in RLS
|
// Explicitly typed init does not participate in RLS
|
||||||
_ = try astrl.expr(full.ast.type_expr, block, ResultInfo.none);
|
_ = try astrl.expr(type_expr, block, ResultInfo.none);
|
||||||
for (full.ast.elements) |elem_init| {
|
for (full.ast.elements) |elem_init| {
|
||||||
_ = try astrl.expr(elem_init, block, ResultInfo.type_only);
|
_ = try astrl.expr(elem_init, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
@ -698,9 +719,9 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
var buf: [2]Ast.Node.Index = undefined;
|
var buf: [2]Ast.Node.Index = undefined;
|
||||||
const full = tree.fullStructInit(&buf, node).?;
|
const full = tree.fullStructInit(&buf, node).?;
|
||||||
|
|
||||||
if (full.ast.type_expr != 0) {
|
if (full.ast.type_expr.unwrap()) |type_expr| {
|
||||||
// Explicitly typed init does not participate in RLS
|
// Explicitly typed init does not participate in RLS
|
||||||
_ = try astrl.expr(full.ast.type_expr, block, ResultInfo.none);
|
_ = try astrl.expr(type_expr, block, ResultInfo.none);
|
||||||
for (full.ast.fields) |field_init| {
|
for (full.ast.fields) |field_init| {
|
||||||
_ = try astrl.expr(field_init, block, ResultInfo.type_only);
|
_ = try astrl.expr(field_init, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
@ -728,33 +749,35 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
.fn_proto_one,
|
.fn_proto_one,
|
||||||
.fn_proto,
|
.fn_proto,
|
||||||
.fn_decl,
|
.fn_decl,
|
||||||
=> {
|
=> |tag| {
|
||||||
var buf: [1]Ast.Node.Index = undefined;
|
var buf: [1]Ast.Node.Index = undefined;
|
||||||
const full = tree.fullFnProto(&buf, node).?;
|
const full = tree.fullFnProto(&buf, node).?;
|
||||||
const body_node = if (node_tags[node] == .fn_decl) node_datas[node].rhs else 0;
|
const body_node = if (tag == .fn_decl) tree.nodeData(node).node_and_node[1].toOptional() else .none;
|
||||||
{
|
{
|
||||||
var it = full.iterate(tree);
|
var it = full.iterate(tree);
|
||||||
while (it.next()) |param| {
|
while (it.next()) |param| {
|
||||||
if (param.anytype_ellipsis3 == null) {
|
if (param.anytype_ellipsis3 == null) {
|
||||||
_ = try astrl.expr(param.type_expr, block, ResultInfo.type_only);
|
const type_expr = param.type_expr.?;
|
||||||
|
_ = try astrl.expr(type_expr, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (full.ast.align_expr != 0) {
|
if (full.ast.align_expr.unwrap()) |align_expr| {
|
||||||
_ = try astrl.expr(full.ast.align_expr, block, ResultInfo.type_only);
|
_ = try astrl.expr(align_expr, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
if (full.ast.addrspace_expr != 0) {
|
if (full.ast.addrspace_expr.unwrap()) |addrspace_expr| {
|
||||||
_ = try astrl.expr(full.ast.addrspace_expr, block, ResultInfo.type_only);
|
_ = try astrl.expr(addrspace_expr, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
if (full.ast.section_expr != 0) {
|
if (full.ast.section_expr.unwrap()) |section_expr| {
|
||||||
_ = try astrl.expr(full.ast.section_expr, block, ResultInfo.type_only);
|
_ = try astrl.expr(section_expr, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
if (full.ast.callconv_expr != 0) {
|
if (full.ast.callconv_expr.unwrap()) |callconv_expr| {
|
||||||
_ = try astrl.expr(full.ast.callconv_expr, block, ResultInfo.type_only);
|
_ = try astrl.expr(callconv_expr, block, ResultInfo.type_only);
|
||||||
}
|
}
|
||||||
_ = try astrl.expr(full.ast.return_type, block, ResultInfo.type_only);
|
const return_type = full.ast.return_type.unwrap().?;
|
||||||
if (body_node != 0) {
|
_ = try astrl.expr(return_type, block, ResultInfo.type_only);
|
||||||
_ = try astrl.expr(body_node, block, ResultInfo.none);
|
if (body_node.unwrap()) |body| {
|
||||||
|
_ = try astrl.expr(body, block, ResultInfo.none);
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
@ -763,8 +786,7 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
|
|||||||
|
|
||||||
fn identString(astrl: *AstRlAnnotate, token: Ast.TokenIndex) ![]const u8 {
|
fn identString(astrl: *AstRlAnnotate, token: Ast.TokenIndex) ![]const u8 {
|
||||||
const tree = astrl.tree;
|
const tree = astrl.tree;
|
||||||
const token_tags = tree.tokens.items(.tag);
|
assert(tree.tokenTag(token) == .identifier);
|
||||||
assert(token_tags[token] == .identifier);
|
|
||||||
const ident_name = tree.tokenSlice(token);
|
const ident_name = tree.tokenSlice(token);
|
||||||
if (!std.mem.startsWith(u8, ident_name, "@")) {
|
if (!std.mem.startsWith(u8, ident_name, "@")) {
|
||||||
return ident_name;
|
return ident_name;
|
||||||
@ -777,13 +799,9 @@ fn identString(astrl: *AstRlAnnotate, token: Ast.TokenIndex) ![]const u8 {
|
|||||||
|
|
||||||
fn blockExpr(astrl: *AstRlAnnotate, parent_block: ?*Block, ri: ResultInfo, node: Ast.Node.Index, statements: []const Ast.Node.Index) !bool {
|
fn blockExpr(astrl: *AstRlAnnotate, parent_block: ?*Block, ri: ResultInfo, node: Ast.Node.Index, statements: []const Ast.Node.Index) !bool {
|
||||||
const tree = astrl.tree;
|
const tree = astrl.tree;
|
||||||
const token_tags = tree.tokens.items(.tag);
|
|
||||||
const main_tokens = tree.nodes.items(.main_token);
|
|
||||||
|
|
||||||
const lbrace = main_tokens[node];
|
const lbrace = tree.nodeMainToken(node);
|
||||||
if (token_tags[lbrace - 1] == .colon and
|
if (tree.isTokenPrecededByTags(lbrace, &.{ .identifier, .colon })) {
|
||||||
token_tags[lbrace - 2] == .identifier)
|
|
||||||
{
|
|
||||||
// Labeled block
|
// Labeled block
|
||||||
var new_block: Block = .{
|
var new_block: Block = .{
|
||||||
.parent = parent_block,
|
.parent = parent_block,
|
||||||
@ -812,8 +830,7 @@ fn builtinCall(astrl: *AstRlAnnotate, block: ?*Block, ri: ResultInfo, node: Ast.
|
|||||||
_ = ri; // Currently, no builtin consumes its result location.
|
_ = ri; // Currently, no builtin consumes its result location.
|
||||||
|
|
||||||
const tree = astrl.tree;
|
const tree = astrl.tree;
|
||||||
const main_tokens = tree.nodes.items(.main_token);
|
const builtin_token = tree.nodeMainToken(node);
|
||||||
const builtin_token = main_tokens[node];
|
|
||||||
const builtin_name = tree.tokenSlice(builtin_token);
|
const builtin_name = tree.tokenSlice(builtin_token);
|
||||||
const info = BuiltinFn.list.get(builtin_name) orelse return false;
|
const info = BuiltinFn.list.get(builtin_name) orelse return false;
|
||||||
if (info.param_count) |expected| {
|
if (info.param_count) |expected| {
|
||||||
|
|||||||
@ -481,13 +481,13 @@ pub const Wip = struct {
|
|||||||
const item = zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
|
const item = zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
|
||||||
extra_index = item.end;
|
extra_index = item.end;
|
||||||
const err_span = blk: {
|
const err_span = blk: {
|
||||||
if (item.data.node != 0) {
|
if (item.data.node.unwrap()) |node| {
|
||||||
break :blk tree.nodeToSpan(item.data.node);
|
break :blk tree.nodeToSpan(node);
|
||||||
}
|
} else if (item.data.token.unwrap()) |token| {
|
||||||
const token_starts = tree.tokens.items(.start);
|
const start = tree.tokenStart(token) + item.data.byte_offset;
|
||||||
const start = token_starts[item.data.token] + item.data.byte_offset;
|
const end = start + @as(u32, @intCast(tree.tokenSlice(token).len)) - item.data.byte_offset;
|
||||||
const end = start + @as(u32, @intCast(tree.tokenSlice(item.data.token).len)) - item.data.byte_offset;
|
|
||||||
break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
|
break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
|
||||||
|
} else unreachable;
|
||||||
};
|
};
|
||||||
const err_loc = std.zig.findLineColumn(source, err_span.main);
|
const err_loc = std.zig.findLineColumn(source, err_span.main);
|
||||||
|
|
||||||
@ -516,13 +516,13 @@ pub const Wip = struct {
|
|||||||
const note_item = zir.extraData(Zir.Inst.CompileErrors.Item, body_elem);
|
const note_item = zir.extraData(Zir.Inst.CompileErrors.Item, body_elem);
|
||||||
const msg = zir.nullTerminatedString(note_item.data.msg);
|
const msg = zir.nullTerminatedString(note_item.data.msg);
|
||||||
const span = blk: {
|
const span = blk: {
|
||||||
if (note_item.data.node != 0) {
|
if (note_item.data.node.unwrap()) |node| {
|
||||||
break :blk tree.nodeToSpan(note_item.data.node);
|
break :blk tree.nodeToSpan(node);
|
||||||
}
|
} else if (note_item.data.token.unwrap()) |token| {
|
||||||
const token_starts = tree.tokens.items(.start);
|
const start = tree.tokenStart(token) + note_item.data.byte_offset;
|
||||||
const start = token_starts[note_item.data.token] + note_item.data.byte_offset;
|
const end = start + @as(u32, @intCast(tree.tokenSlice(token).len)) - item.data.byte_offset;
|
||||||
const end = start + @as(u32, @intCast(tree.tokenSlice(note_item.data.token).len)) - item.data.byte_offset;
|
|
||||||
break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
|
break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
|
||||||
|
} else unreachable;
|
||||||
};
|
};
|
||||||
const loc = std.zig.findLineColumn(source, span.main);
|
const loc = std.zig.findLineColumn(source, span.main);
|
||||||
|
|
||||||
@ -560,13 +560,14 @@ pub const Wip = struct {
|
|||||||
|
|
||||||
for (zoir.compile_errors) |err| {
|
for (zoir.compile_errors) |err| {
|
||||||
const err_span: std.zig.Ast.Span = span: {
|
const err_span: std.zig.Ast.Span = span: {
|
||||||
if (err.token == std.zig.Zoir.CompileError.invalid_token) {
|
if (err.token.unwrap()) |token| {
|
||||||
break :span tree.nodeToSpan(err.node_or_offset);
|
const token_start = tree.tokenStart(token);
|
||||||
}
|
|
||||||
const token_start = tree.tokens.items(.start)[err.token];
|
|
||||||
const start = token_start + err.node_or_offset;
|
const start = token_start + err.node_or_offset;
|
||||||
const end = token_start + @as(u32, @intCast(tree.tokenSlice(err.token).len));
|
const end = token_start + @as(u32, @intCast(tree.tokenSlice(token).len));
|
||||||
break :span .{ .start = start, .end = end, .main = start };
|
break :span .{ .start = start, .end = end, .main = start };
|
||||||
|
} else {
|
||||||
|
break :span tree.nodeToSpan(@enumFromInt(err.node_or_offset));
|
||||||
|
}
|
||||||
};
|
};
|
||||||
const err_loc = std.zig.findLineColumn(source, err_span.main);
|
const err_loc = std.zig.findLineColumn(source, err_span.main);
|
||||||
|
|
||||||
@ -588,13 +589,14 @@ pub const Wip = struct {
|
|||||||
for (notes_start.., err.first_note.., 0..err.note_count) |eb_note_idx, zoir_note_idx, _| {
|
for (notes_start.., err.first_note.., 0..err.note_count) |eb_note_idx, zoir_note_idx, _| {
|
||||||
const note = zoir.error_notes[zoir_note_idx];
|
const note = zoir.error_notes[zoir_note_idx];
|
||||||
const note_span: std.zig.Ast.Span = span: {
|
const note_span: std.zig.Ast.Span = span: {
|
||||||
if (note.token == std.zig.Zoir.CompileError.invalid_token) {
|
if (note.token.unwrap()) |token| {
|
||||||
break :span tree.nodeToSpan(note.node_or_offset);
|
const token_start = tree.tokenStart(token);
|
||||||
}
|
|
||||||
const token_start = tree.tokens.items(.start)[note.token];
|
|
||||||
const start = token_start + note.node_or_offset;
|
const start = token_start + note.node_or_offset;
|
||||||
const end = token_start + @as(u32, @intCast(tree.tokenSlice(note.token).len));
|
const end = token_start + @as(u32, @intCast(tree.tokenSlice(token).len));
|
||||||
break :span .{ .start = start, .end = end, .main = start };
|
break :span .{ .start = start, .end = end, .main = start };
|
||||||
|
} else {
|
||||||
|
break :span tree.nodeToSpan(@enumFromInt(note.node_or_offset));
|
||||||
|
}
|
||||||
};
|
};
|
||||||
const note_loc = std.zig.findLineColumn(source, note_span.main);
|
const note_loc = std.zig.findLineColumn(source, note_span.main);
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@ -80,9 +80,18 @@ pub fn extraData(code: Zir, comptime T: type, index: usize) ExtraData(T) {
|
|||||||
Inst.Declaration.Name,
|
Inst.Declaration.Name,
|
||||||
std.zig.SimpleComptimeReason,
|
std.zig.SimpleComptimeReason,
|
||||||
NullTerminatedString,
|
NullTerminatedString,
|
||||||
|
// Ast.TokenIndex is missing because it is a u32.
|
||||||
|
Ast.OptionalTokenIndex,
|
||||||
|
Ast.Node.Index,
|
||||||
|
Ast.Node.OptionalIndex,
|
||||||
=> @enumFromInt(code.extra[i]),
|
=> @enumFromInt(code.extra[i]),
|
||||||
|
|
||||||
i32,
|
Ast.TokenOffset,
|
||||||
|
Ast.OptionalTokenOffset,
|
||||||
|
Ast.Node.Offset,
|
||||||
|
Ast.Node.OptionalOffset,
|
||||||
|
=> @enumFromInt(@as(i32, @bitCast(code.extra[i]))),
|
||||||
|
|
||||||
Inst.Call.Flags,
|
Inst.Call.Flags,
|
||||||
Inst.BuiltinCall.Flags,
|
Inst.BuiltinCall.Flags,
|
||||||
Inst.SwitchBlock.Bits,
|
Inst.SwitchBlock.Bits,
|
||||||
@ -1904,22 +1913,22 @@ pub const Inst = struct {
|
|||||||
/// `small` is `fields_len: u16`.
|
/// `small` is `fields_len: u16`.
|
||||||
tuple_decl,
|
tuple_decl,
|
||||||
/// Implements the `@This` builtin.
|
/// Implements the `@This` builtin.
|
||||||
/// `operand` is `src_node: i32`.
|
/// `operand` is `src_node: Ast.Node.Offset`.
|
||||||
this,
|
this,
|
||||||
/// Implements the `@returnAddress` builtin.
|
/// Implements the `@returnAddress` builtin.
|
||||||
/// `operand` is `src_node: i32`.
|
/// `operand` is `src_node: Ast.Node.Offset`.
|
||||||
ret_addr,
|
ret_addr,
|
||||||
/// Implements the `@src` builtin.
|
/// Implements the `@src` builtin.
|
||||||
/// `operand` is payload index to `LineColumn`.
|
/// `operand` is payload index to `LineColumn`.
|
||||||
builtin_src,
|
builtin_src,
|
||||||
/// Implements the `@errorReturnTrace` builtin.
|
/// Implements the `@errorReturnTrace` builtin.
|
||||||
/// `operand` is `src_node: i32`.
|
/// `operand` is `src_node: Ast.Node.Offset`.
|
||||||
error_return_trace,
|
error_return_trace,
|
||||||
/// Implements the `@frame` builtin.
|
/// Implements the `@frame` builtin.
|
||||||
/// `operand` is `src_node: i32`.
|
/// `operand` is `src_node: Ast.Node.Offset`.
|
||||||
frame,
|
frame,
|
||||||
/// Implements the `@frameAddress` builtin.
|
/// Implements the `@frameAddress` builtin.
|
||||||
/// `operand` is `src_node: i32`.
|
/// `operand` is `src_node: Ast.Node.Offset`.
|
||||||
frame_address,
|
frame_address,
|
||||||
/// Same as `alloc` from `Tag` but may contain an alignment instruction.
|
/// Same as `alloc` from `Tag` but may contain an alignment instruction.
|
||||||
/// `operand` is payload index to `AllocExtended`.
|
/// `operand` is payload index to `AllocExtended`.
|
||||||
@ -2004,9 +2013,9 @@ pub const Inst = struct {
|
|||||||
/// `operand` is payload index to `UnNode`.
|
/// `operand` is payload index to `UnNode`.
|
||||||
await_nosuspend,
|
await_nosuspend,
|
||||||
/// Implements `@breakpoint`.
|
/// Implements `@breakpoint`.
|
||||||
/// `operand` is `src_node: i32`.
|
/// `operand` is `src_node: Ast.Node.Offset`.
|
||||||
breakpoint,
|
breakpoint,
|
||||||
/// Implement builtin `@disableInstrumentation`. `operand` is `src_node: i32`.
|
/// Implement builtin `@disableInstrumentation`. `operand` is `src_node: Ast.Node.Offset`.
|
||||||
disable_instrumentation,
|
disable_instrumentation,
|
||||||
/// Implement builtin `@disableIntrinsics`. `operand` is `src_node: i32`.
|
/// Implement builtin `@disableIntrinsics`. `operand` is `src_node: i32`.
|
||||||
disable_intrinsics,
|
disable_intrinsics,
|
||||||
@ -2040,7 +2049,7 @@ pub const Inst = struct {
|
|||||||
/// `operand` is payload index to `UnNode`.
|
/// `operand` is payload index to `UnNode`.
|
||||||
c_va_end,
|
c_va_end,
|
||||||
/// Implement builtin `@cVaStart`.
|
/// Implement builtin `@cVaStart`.
|
||||||
/// `operand` is `src_node: i32`.
|
/// `operand` is `src_node: Ast.Node.Offset`.
|
||||||
c_va_start,
|
c_va_start,
|
||||||
/// Implements the following builtins:
|
/// Implements the following builtins:
|
||||||
/// `@ptrCast`, `@alignCast`, `@addrSpaceCast`, `@constCast`, `@volatileCast`.
|
/// `@ptrCast`, `@alignCast`, `@addrSpaceCast`, `@constCast`, `@volatileCast`.
|
||||||
@ -2067,7 +2076,7 @@ pub const Inst = struct {
|
|||||||
/// `operand` is payload index to `UnNode`.
|
/// `operand` is payload index to `UnNode`.
|
||||||
work_group_id,
|
work_group_id,
|
||||||
/// Implements the `@inComptime` builtin.
|
/// Implements the `@inComptime` builtin.
|
||||||
/// `operand` is `src_node: i32`.
|
/// `operand` is `src_node: Ast.Node.Offset`.
|
||||||
in_comptime,
|
in_comptime,
|
||||||
/// Restores the error return index to its last saved state in a given
|
/// Restores the error return index to its last saved state in a given
|
||||||
/// block. If the block is `.none`, restores to the state from the point
|
/// block. If the block is `.none`, restores to the state from the point
|
||||||
@ -2077,7 +2086,7 @@ pub const Inst = struct {
|
|||||||
/// `small` is undefined.
|
/// `small` is undefined.
|
||||||
restore_err_ret_index,
|
restore_err_ret_index,
|
||||||
/// Retrieves a value from the current type declaration scope's closure.
|
/// Retrieves a value from the current type declaration scope's closure.
|
||||||
/// `operand` is `src_node: i32`.
|
/// `operand` is `src_node: Ast.Node.Offset`.
|
||||||
/// `small` is closure index.
|
/// `small` is closure index.
|
||||||
closure_get,
|
closure_get,
|
||||||
/// Used as a placeholder instruction which is just a dummy index for Sema to replace
|
/// Used as a placeholder instruction which is just a dummy index for Sema to replace
|
||||||
@ -2091,7 +2100,7 @@ pub const Inst = struct {
|
|||||||
/// Uses the `pl_node` union field with payload `FieldParentPtr`.
|
/// Uses the `pl_node` union field with payload `FieldParentPtr`.
|
||||||
field_parent_ptr,
|
field_parent_ptr,
|
||||||
/// Get a type or value from `std.builtin`.
|
/// Get a type or value from `std.builtin`.
|
||||||
/// `operand` is `src_node: i32`.
|
/// `operand` is `src_node: Ast.Node.Offset`.
|
||||||
/// `small` is an `Inst.BuiltinValue`.
|
/// `small` is an `Inst.BuiltinValue`.
|
||||||
builtin_value,
|
builtin_value,
|
||||||
/// Provide a `@branchHint` for the current block.
|
/// Provide a `@branchHint` for the current block.
|
||||||
@ -2286,28 +2295,28 @@ pub const Inst = struct {
|
|||||||
/// Used for unary operators, with an AST node source location.
|
/// Used for unary operators, with an AST node source location.
|
||||||
un_node: struct {
|
un_node: struct {
|
||||||
/// Offset from Decl AST node index.
|
/// Offset from Decl AST node index.
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
/// The meaning of this operand depends on the corresponding `Tag`.
|
/// The meaning of this operand depends on the corresponding `Tag`.
|
||||||
operand: Ref,
|
operand: Ref,
|
||||||
},
|
},
|
||||||
/// Used for unary operators, with a token source location.
|
/// Used for unary operators, with a token source location.
|
||||||
un_tok: struct {
|
un_tok: struct {
|
||||||
/// Offset from Decl AST token index.
|
/// Offset from Decl AST token index.
|
||||||
src_tok: Ast.TokenIndex,
|
src_tok: Ast.TokenOffset,
|
||||||
/// The meaning of this operand depends on the corresponding `Tag`.
|
/// The meaning of this operand depends on the corresponding `Tag`.
|
||||||
operand: Ref,
|
operand: Ref,
|
||||||
},
|
},
|
||||||
pl_node: struct {
|
pl_node: struct {
|
||||||
/// Offset from Decl AST node index.
|
/// Offset from Decl AST node index.
|
||||||
/// `Tag` determines which kind of AST node this points to.
|
/// `Tag` determines which kind of AST node this points to.
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
/// index into extra.
|
/// index into extra.
|
||||||
/// `Tag` determines what lives there.
|
/// `Tag` determines what lives there.
|
||||||
payload_index: u32,
|
payload_index: u32,
|
||||||
},
|
},
|
||||||
pl_tok: struct {
|
pl_tok: struct {
|
||||||
/// Offset from Decl AST token index.
|
/// Offset from Decl AST token index.
|
||||||
src_tok: Ast.TokenIndex,
|
src_tok: Ast.TokenOffset,
|
||||||
/// index into extra.
|
/// index into extra.
|
||||||
/// `Tag` determines what lives there.
|
/// `Tag` determines what lives there.
|
||||||
payload_index: u32,
|
payload_index: u32,
|
||||||
@ -2328,16 +2337,16 @@ pub const Inst = struct {
|
|||||||
/// Offset into `string_bytes`. Null-terminated.
|
/// Offset into `string_bytes`. Null-terminated.
|
||||||
start: NullTerminatedString,
|
start: NullTerminatedString,
|
||||||
/// Offset from Decl AST token index.
|
/// Offset from Decl AST token index.
|
||||||
src_tok: u32,
|
src_tok: Ast.TokenOffset,
|
||||||
|
|
||||||
pub fn get(self: @This(), code: Zir) [:0]const u8 {
|
pub fn get(self: @This(), code: Zir) [:0]const u8 {
|
||||||
return code.nullTerminatedString(self.start);
|
return code.nullTerminatedString(self.start);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
/// Offset from Decl AST token index.
|
/// Offset from Decl AST token index.
|
||||||
tok: Ast.TokenIndex,
|
tok: Ast.TokenOffset,
|
||||||
/// Offset from Decl AST node index.
|
/// Offset from Decl AST node index.
|
||||||
node: i32,
|
node: Ast.Node.Offset,
|
||||||
int: u64,
|
int: u64,
|
||||||
float: f64,
|
float: f64,
|
||||||
ptr_type: struct {
|
ptr_type: struct {
|
||||||
@ -2358,14 +2367,14 @@ pub const Inst = struct {
|
|||||||
int_type: struct {
|
int_type: struct {
|
||||||
/// Offset from Decl AST node index.
|
/// Offset from Decl AST node index.
|
||||||
/// `Tag` determines which kind of AST node this points to.
|
/// `Tag` determines which kind of AST node this points to.
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
signedness: std.builtin.Signedness,
|
signedness: std.builtin.Signedness,
|
||||||
bit_count: u16,
|
bit_count: u16,
|
||||||
},
|
},
|
||||||
@"unreachable": struct {
|
@"unreachable": struct {
|
||||||
/// Offset from Decl AST node index.
|
/// Offset from Decl AST node index.
|
||||||
/// `Tag` determines which kind of AST node this points to.
|
/// `Tag` determines which kind of AST node this points to.
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
},
|
},
|
||||||
@"break": struct {
|
@"break": struct {
|
||||||
operand: Ref,
|
operand: Ref,
|
||||||
@ -2377,7 +2386,7 @@ pub const Inst = struct {
|
|||||||
/// with an AST node source location.
|
/// with an AST node source location.
|
||||||
inst_node: struct {
|
inst_node: struct {
|
||||||
/// Offset from Decl AST node index.
|
/// Offset from Decl AST node index.
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
/// The meaning of this operand depends on the corresponding `Tag`.
|
/// The meaning of this operand depends on the corresponding `Tag`.
|
||||||
inst: Index,
|
inst: Index,
|
||||||
},
|
},
|
||||||
@ -2456,9 +2465,7 @@ pub const Inst = struct {
|
|||||||
};
|
};
|
||||||
|
|
||||||
pub const Break = struct {
|
pub const Break = struct {
|
||||||
pub const no_src_node = std.math.maxInt(i32);
|
operand_src_node: Ast.Node.OptionalOffset,
|
||||||
|
|
||||||
operand_src_node: i32,
|
|
||||||
block_inst: Index,
|
block_inst: Index,
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -2467,7 +2474,7 @@ pub const Inst = struct {
|
|||||||
/// 1. Input for every inputs_len
|
/// 1. Input for every inputs_len
|
||||||
/// 2. clobber: NullTerminatedString // index into string_bytes (null terminated) for every clobbers_len.
|
/// 2. clobber: NullTerminatedString // index into string_bytes (null terminated) for every clobbers_len.
|
||||||
pub const Asm = struct {
|
pub const Asm = struct {
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
// null-terminated string index
|
// null-terminated string index
|
||||||
asm_source: NullTerminatedString,
|
asm_source: NullTerminatedString,
|
||||||
/// 1 bit for each outputs_len: whether it uses `-> T` or not.
|
/// 1 bit for each outputs_len: whether it uses `-> T` or not.
|
||||||
@ -2582,7 +2589,7 @@ pub const Inst = struct {
|
|||||||
|
|
||||||
/// Trailing: operand: Ref, // for each `operands_len` (stored in `small`).
|
/// Trailing: operand: Ref, // for each `operands_len` (stored in `small`).
|
||||||
pub const NodeMultiOp = struct {
|
pub const NodeMultiOp = struct {
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// This data is stored inside extra, with trailing operands according to `body_len`.
|
/// This data is stored inside extra, with trailing operands according to `body_len`.
|
||||||
@ -3033,7 +3040,7 @@ pub const Inst = struct {
|
|||||||
/// Trailing:
|
/// Trailing:
|
||||||
/// 0. operand: Ref // for each `operands_len`
|
/// 0. operand: Ref // for each `operands_len`
|
||||||
pub const TypeOfPeer = struct {
|
pub const TypeOfPeer = struct {
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
body_len: u32,
|
body_len: u32,
|
||||||
body_index: u32,
|
body_index: u32,
|
||||||
};
|
};
|
||||||
@ -3084,7 +3091,7 @@ pub const Inst = struct {
|
|||||||
/// 4. host_size: Ref // if `has_bit_range` flag is set
|
/// 4. host_size: Ref // if `has_bit_range` flag is set
|
||||||
pub const PtrType = struct {
|
pub const PtrType = struct {
|
||||||
elem_type: Ref,
|
elem_type: Ref,
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const ArrayTypeSentinel = struct {
|
pub const ArrayTypeSentinel = struct {
|
||||||
@ -3116,7 +3123,7 @@ pub const Inst = struct {
|
|||||||
start: Ref,
|
start: Ref,
|
||||||
len: Ref,
|
len: Ref,
|
||||||
sentinel: Ref,
|
sentinel: Ref,
|
||||||
start_src_node_offset: i32,
|
start_src_node_offset: Ast.Node.Offset,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// The meaning of these operands depends on the corresponding `Tag`.
|
/// The meaning of these operands depends on the corresponding `Tag`.
|
||||||
@ -3126,13 +3133,13 @@ pub const Inst = struct {
|
|||||||
};
|
};
|
||||||
|
|
||||||
pub const BinNode = struct {
|
pub const BinNode = struct {
|
||||||
node: i32,
|
node: Ast.Node.Offset,
|
||||||
lhs: Ref,
|
lhs: Ref,
|
||||||
rhs: Ref,
|
rhs: Ref,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const UnNode = struct {
|
pub const UnNode = struct {
|
||||||
node: i32,
|
node: Ast.Node.Offset,
|
||||||
operand: Ref,
|
operand: Ref,
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -3186,7 +3193,7 @@ pub const Inst = struct {
|
|||||||
pub const SwitchBlockErrUnion = struct {
|
pub const SwitchBlockErrUnion = struct {
|
||||||
operand: Ref,
|
operand: Ref,
|
||||||
bits: Bits,
|
bits: Bits,
|
||||||
main_src_node_offset: i32,
|
main_src_node_offset: Ast.Node.Offset,
|
||||||
|
|
||||||
pub const Bits = packed struct(u32) {
|
pub const Bits = packed struct(u32) {
|
||||||
/// If true, one or more prongs have multiple items.
|
/// If true, one or more prongs have multiple items.
|
||||||
@ -3592,7 +3599,7 @@ pub const Inst = struct {
|
|||||||
/// init: Inst.Ref, // `.none` for non-`comptime` fields
|
/// init: Inst.Ref, // `.none` for non-`comptime` fields
|
||||||
/// }
|
/// }
|
||||||
pub const TupleDecl = struct {
|
pub const TupleDecl = struct {
|
||||||
src_node: i32, // relative
|
src_node: Ast.Node.Offset,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Trailing:
|
/// Trailing:
|
||||||
@ -3666,7 +3673,7 @@ pub const Inst = struct {
|
|||||||
};
|
};
|
||||||
|
|
||||||
pub const Cmpxchg = struct {
|
pub const Cmpxchg = struct {
|
||||||
node: i32,
|
node: Ast.Node.Offset,
|
||||||
ptr: Ref,
|
ptr: Ref,
|
||||||
expected_value: Ref,
|
expected_value: Ref,
|
||||||
new_value: Ref,
|
new_value: Ref,
|
||||||
@ -3706,7 +3713,7 @@ pub const Inst = struct {
|
|||||||
};
|
};
|
||||||
|
|
||||||
pub const FieldParentPtr = struct {
|
pub const FieldParentPtr = struct {
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
parent_ptr_type: Ref,
|
parent_ptr_type: Ref,
|
||||||
field_name: Ref,
|
field_name: Ref,
|
||||||
field_ptr: Ref,
|
field_ptr: Ref,
|
||||||
@ -3720,7 +3727,7 @@ pub const Inst = struct {
|
|||||||
};
|
};
|
||||||
|
|
||||||
pub const Select = struct {
|
pub const Select = struct {
|
||||||
node: i32,
|
node: Ast.Node.Offset,
|
||||||
elem_type: Ref,
|
elem_type: Ref,
|
||||||
pred: Ref,
|
pred: Ref,
|
||||||
a: Ref,
|
a: Ref,
|
||||||
@ -3728,7 +3735,7 @@ pub const Inst = struct {
|
|||||||
};
|
};
|
||||||
|
|
||||||
pub const AsyncCall = struct {
|
pub const AsyncCall = struct {
|
||||||
node: i32,
|
node: Ast.Node.Offset,
|
||||||
frame_buffer: Ref,
|
frame_buffer: Ref,
|
||||||
result_ptr: Ref,
|
result_ptr: Ref,
|
||||||
fn_ptr: Ref,
|
fn_ptr: Ref,
|
||||||
@ -3753,7 +3760,7 @@ pub const Inst = struct {
|
|||||||
/// 0. type_inst: Ref, // if small 0b000X is set
|
/// 0. type_inst: Ref, // if small 0b000X is set
|
||||||
/// 1. align_inst: Ref, // if small 0b00X0 is set
|
/// 1. align_inst: Ref, // if small 0b00X0 is set
|
||||||
pub const AllocExtended = struct {
|
pub const AllocExtended = struct {
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
|
|
||||||
pub const Small = packed struct {
|
pub const Small = packed struct {
|
||||||
has_type: bool,
|
has_type: bool,
|
||||||
@ -3778,9 +3785,9 @@ pub const Inst = struct {
|
|||||||
pub const Item = struct {
|
pub const Item = struct {
|
||||||
/// null terminated string index
|
/// null terminated string index
|
||||||
msg: NullTerminatedString,
|
msg: NullTerminatedString,
|
||||||
node: Ast.Node.Index,
|
node: Ast.Node.OptionalIndex,
|
||||||
/// If node is 0 then this will be populated.
|
/// If node is .none then this will be populated.
|
||||||
token: Ast.TokenIndex,
|
token: Ast.OptionalTokenIndex,
|
||||||
/// Can be used in combination with `token`.
|
/// Can be used in combination with `token`.
|
||||||
byte_offset: u32,
|
byte_offset: u32,
|
||||||
/// 0 or a payload index of a `Block`, each is a payload
|
/// 0 or a payload index of a `Block`, each is a payload
|
||||||
@ -3818,7 +3825,7 @@ pub const Inst = struct {
|
|||||||
};
|
};
|
||||||
|
|
||||||
pub const Src = struct {
|
pub const Src = struct {
|
||||||
node: i32,
|
node: Ast.Node.Offset,
|
||||||
line: u32,
|
line: u32,
|
||||||
column: u32,
|
column: u32,
|
||||||
};
|
};
|
||||||
@ -3833,7 +3840,7 @@ pub const Inst = struct {
|
|||||||
/// The value being destructured.
|
/// The value being destructured.
|
||||||
operand: Ref,
|
operand: Ref,
|
||||||
/// The `destructure_assign` node.
|
/// The `destructure_assign` node.
|
||||||
destructure_node: i32,
|
destructure_node: Ast.Node.Offset,
|
||||||
/// The expected field count.
|
/// The expected field count.
|
||||||
expect_len: u32,
|
expect_len: u32,
|
||||||
};
|
};
|
||||||
@ -3848,7 +3855,7 @@ pub const Inst = struct {
|
|||||||
};
|
};
|
||||||
|
|
||||||
pub const RestoreErrRetIndex = struct {
|
pub const RestoreErrRetIndex = struct {
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
/// If `.none`, restore the trace to its state upon function entry.
|
/// If `.none`, restore the trace to its state upon function entry.
|
||||||
block: Ref,
|
block: Ref,
|
||||||
/// If `.none`, restore unconditionally.
|
/// If `.none`, restore unconditionally.
|
||||||
|
|||||||
@ -228,8 +228,8 @@ pub const NullTerminatedString = enum(u32) {
|
|||||||
|
|
||||||
pub const CompileError = extern struct {
|
pub const CompileError = extern struct {
|
||||||
msg: NullTerminatedString,
|
msg: NullTerminatedString,
|
||||||
token: Ast.TokenIndex,
|
token: Ast.OptionalTokenIndex,
|
||||||
/// If `token == invalid_token`, this is an `Ast.Node.Index`.
|
/// If `token == .none`, this is an `Ast.Node.Index`.
|
||||||
/// Otherwise, this is a byte offset into `token`.
|
/// Otherwise, this is a byte offset into `token`.
|
||||||
node_or_offset: u32,
|
node_or_offset: u32,
|
||||||
|
|
||||||
@ -243,14 +243,12 @@ pub const CompileError = extern struct {
|
|||||||
|
|
||||||
pub const Note = extern struct {
|
pub const Note = extern struct {
|
||||||
msg: NullTerminatedString,
|
msg: NullTerminatedString,
|
||||||
token: Ast.TokenIndex,
|
token: Ast.OptionalTokenIndex,
|
||||||
/// If `token == invalid_token`, this is an `Ast.Node.Index`.
|
/// If `token == .none`, this is an `Ast.Node.Index`.
|
||||||
/// Otherwise, this is a byte offset into `token`.
|
/// Otherwise, this is a byte offset into `token`.
|
||||||
node_or_offset: u32,
|
node_or_offset: u32,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const invalid_token: Ast.TokenIndex = std.math.maxInt(Ast.TokenIndex);
|
|
||||||
|
|
||||||
comptime {
|
comptime {
|
||||||
assert(std.meta.hasUniqueRepresentation(CompileError));
|
assert(std.meta.hasUniqueRepresentation(CompileError));
|
||||||
assert(std.meta.hasUniqueRepresentation(Note));
|
assert(std.meta.hasUniqueRepresentation(Note));
|
||||||
|
|||||||
@ -48,7 +48,7 @@ pub fn generate(gpa: Allocator, tree: Ast, options: Options) Allocator.Error!Zoi
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (tree.errors.len == 0) {
|
if (tree.errors.len == 0) {
|
||||||
const root_ast_node = tree.nodes.items(.data)[0].lhs;
|
const root_ast_node = tree.rootDecls()[0];
|
||||||
try zg.nodes.append(gpa, undefined); // index 0; root node
|
try zg.nodes.append(gpa, undefined); // index 0; root node
|
||||||
try zg.expr(root_ast_node, .root);
|
try zg.expr(root_ast_node, .root);
|
||||||
} else {
|
} else {
|
||||||
@ -97,11 +97,8 @@ pub fn generate(gpa: Allocator, tree: Ast, options: Options) Allocator.Error!Zoi
|
|||||||
fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator.Error!void {
|
fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator.Error!void {
|
||||||
const gpa = zg.gpa;
|
const gpa = zg.gpa;
|
||||||
const tree = zg.tree;
|
const tree = zg.tree;
|
||||||
const node_tags = tree.nodes.items(.tag);
|
|
||||||
const node_datas = tree.nodes.items(.data);
|
|
||||||
const main_tokens = tree.nodes.items(.main_token);
|
|
||||||
|
|
||||||
switch (node_tags[node]) {
|
switch (tree.nodeTag(node)) {
|
||||||
.root => unreachable,
|
.root => unreachable,
|
||||||
.@"usingnamespace" => unreachable,
|
.@"usingnamespace" => unreachable,
|
||||||
.test_decl => unreachable,
|
.test_decl => unreachable,
|
||||||
@ -173,7 +170,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
|
|||||||
.bool_not,
|
.bool_not,
|
||||||
.bit_not,
|
.bit_not,
|
||||||
.negation_wrap,
|
.negation_wrap,
|
||||||
=> try zg.addErrorTok(main_tokens[node], "operator '{s}' is not allowed in ZON", .{tree.tokenSlice(main_tokens[node])}),
|
=> try zg.addErrorTok(tree.nodeMainToken(node), "operator '{s}' is not allowed in ZON", .{tree.tokenSlice(tree.nodeMainToken(node))}),
|
||||||
|
|
||||||
.error_union,
|
.error_union,
|
||||||
.merge_error_sets,
|
.merge_error_sets,
|
||||||
@ -251,8 +248,8 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
|
|||||||
.slice_sentinel,
|
.slice_sentinel,
|
||||||
=> try zg.addErrorNode(node, "slice operator is not allowed in ZON", .{}),
|
=> try zg.addErrorNode(node, "slice operator is not allowed in ZON", .{}),
|
||||||
|
|
||||||
.deref, .address_of => try zg.addErrorTok(main_tokens[node], "pointers are not available in ZON", .{}),
|
.deref, .address_of => try zg.addErrorTok(tree.nodeMainToken(node), "pointers are not available in ZON", .{}),
|
||||||
.unwrap_optional => try zg.addErrorTok(main_tokens[node], "optionals are not available in ZON", .{}),
|
.unwrap_optional => try zg.addErrorTok(tree.nodeMainToken(node), "optionals are not available in ZON", .{}),
|
||||||
.error_value => try zg.addErrorNode(node, "errors are not available in ZON", .{}),
|
.error_value => try zg.addErrorNode(node, "errors are not available in ZON", .{}),
|
||||||
|
|
||||||
.array_access => try zg.addErrorNode(node, "array indexing is not allowed in ZON", .{}),
|
.array_access => try zg.addErrorNode(node, "array indexing is not allowed in ZON", .{}),
|
||||||
@ -262,12 +259,9 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
|
|||||||
.block,
|
.block,
|
||||||
.block_semicolon,
|
.block_semicolon,
|
||||||
=> {
|
=> {
|
||||||
const size = switch (node_tags[node]) {
|
var buffer: [2]Ast.Node.Index = undefined;
|
||||||
.block_two, .block_two_semicolon => @intFromBool(node_datas[node].lhs != 0) + @intFromBool(node_datas[node].rhs != 0),
|
const statements = tree.blockStatements(&buffer, node).?;
|
||||||
.block, .block_semicolon => node_datas[node].rhs - node_datas[node].lhs,
|
if (statements.len == 0) {
|
||||||
else => unreachable,
|
|
||||||
};
|
|
||||||
if (size == 0) {
|
|
||||||
try zg.addErrorNodeNotes(node, "void literals are not available in ZON", .{}, &.{
|
try zg.addErrorNodeNotes(node, "void literals are not available in ZON", .{}, &.{
|
||||||
try zg.errNoteNode(node, "void union payloads can be represented by enum literals", .{}),
|
try zg.errNoteNode(node, "void union payloads can be represented by enum literals", .{}),
|
||||||
});
|
});
|
||||||
@ -288,9 +282,9 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
|
|||||||
var buf: [2]Ast.Node.Index = undefined;
|
var buf: [2]Ast.Node.Index = undefined;
|
||||||
|
|
||||||
const type_node = if (tree.fullArrayInit(&buf, node)) |full|
|
const type_node = if (tree.fullArrayInit(&buf, node)) |full|
|
||||||
full.ast.type_expr
|
full.ast.type_expr.unwrap().?
|
||||||
else if (tree.fullStructInit(&buf, node)) |full|
|
else if (tree.fullStructInit(&buf, node)) |full|
|
||||||
full.ast.type_expr
|
full.ast.type_expr.unwrap().?
|
||||||
else
|
else
|
||||||
unreachable;
|
unreachable;
|
||||||
|
|
||||||
@ -300,18 +294,18 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
|
|||||||
},
|
},
|
||||||
|
|
||||||
.grouped_expression => {
|
.grouped_expression => {
|
||||||
try zg.addErrorTokNotes(main_tokens[node], "expression grouping is not allowed in ZON", .{}, &.{
|
try zg.addErrorTokNotes(tree.nodeMainToken(node), "expression grouping is not allowed in ZON", .{}, &.{
|
||||||
try zg.errNoteTok(main_tokens[node], "these parentheses are always redundant", .{}),
|
try zg.errNoteTok(tree.nodeMainToken(node), "these parentheses are always redundant", .{}),
|
||||||
});
|
});
|
||||||
return zg.expr(node_datas[node].lhs, dest_node);
|
return zg.expr(tree.nodeData(node).node_and_token[0], dest_node);
|
||||||
},
|
},
|
||||||
|
|
||||||
.negation => {
|
.negation => {
|
||||||
const child_node = node_datas[node].lhs;
|
const child_node = tree.nodeData(node).node;
|
||||||
switch (node_tags[child_node]) {
|
switch (tree.nodeTag(child_node)) {
|
||||||
.number_literal => return zg.numberLiteral(child_node, node, dest_node, .negative),
|
.number_literal => return zg.numberLiteral(child_node, node, dest_node, .negative),
|
||||||
.identifier => {
|
.identifier => {
|
||||||
const child_ident = tree.tokenSlice(main_tokens[child_node]);
|
const child_ident = tree.tokenSlice(tree.nodeMainToken(child_node));
|
||||||
if (mem.eql(u8, child_ident, "inf")) {
|
if (mem.eql(u8, child_ident, "inf")) {
|
||||||
zg.setNode(dest_node, .{
|
zg.setNode(dest_node, .{
|
||||||
.tag = .neg_inf,
|
.tag = .neg_inf,
|
||||||
@ -323,7 +317,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
|
|||||||
},
|
},
|
||||||
else => {},
|
else => {},
|
||||||
}
|
}
|
||||||
try zg.addErrorTok(main_tokens[node], "expected number or 'inf' after '-'", .{});
|
try zg.addErrorTok(tree.nodeMainToken(node), "expected number or 'inf' after '-'", .{});
|
||||||
},
|
},
|
||||||
.number_literal => try zg.numberLiteral(node, node, dest_node, .positive),
|
.number_literal => try zg.numberLiteral(node, node, dest_node, .positive),
|
||||||
.char_literal => try zg.charLiteral(node, dest_node),
|
.char_literal => try zg.charLiteral(node, dest_node),
|
||||||
@ -331,7 +325,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
|
|||||||
.identifier => try zg.identifier(node, dest_node),
|
.identifier => try zg.identifier(node, dest_node),
|
||||||
|
|
||||||
.enum_literal => {
|
.enum_literal => {
|
||||||
const str_index = zg.identAsString(main_tokens[node]) catch |err| switch (err) {
|
const str_index = zg.identAsString(tree.nodeMainToken(node)) catch |err| switch (err) {
|
||||||
error.BadString => undefined, // doesn't matter, there's an error
|
error.BadString => undefined, // doesn't matter, there's an error
|
||||||
error.OutOfMemory => |e| return e,
|
error.OutOfMemory => |e| return e,
|
||||||
};
|
};
|
||||||
@ -369,7 +363,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
|
|||||||
var buf: [2]Ast.Node.Index = undefined;
|
var buf: [2]Ast.Node.Index = undefined;
|
||||||
const full = tree.fullArrayInit(&buf, node).?;
|
const full = tree.fullArrayInit(&buf, node).?;
|
||||||
assert(full.ast.elements.len != 0); // Otherwise it would be a struct init
|
assert(full.ast.elements.len != 0); // Otherwise it would be a struct init
|
||||||
assert(full.ast.type_expr == 0); // The tag was `array_init_dot_*`
|
assert(full.ast.type_expr == .none); // The tag was `array_init_dot_*`
|
||||||
|
|
||||||
const first_elem: u32 = @intCast(zg.nodes.len);
|
const first_elem: u32 = @intCast(zg.nodes.len);
|
||||||
try zg.nodes.resize(gpa, zg.nodes.len + full.ast.elements.len);
|
try zg.nodes.resize(gpa, zg.nodes.len + full.ast.elements.len);
|
||||||
@ -398,7 +392,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
|
|||||||
=> {
|
=> {
|
||||||
var buf: [2]Ast.Node.Index = undefined;
|
var buf: [2]Ast.Node.Index = undefined;
|
||||||
const full = tree.fullStructInit(&buf, node).?;
|
const full = tree.fullStructInit(&buf, node).?;
|
||||||
assert(full.ast.type_expr == 0); // The tag was `struct_init_dot_*`
|
assert(full.ast.type_expr == .none); // The tag was `struct_init_dot_*`
|
||||||
|
|
||||||
if (full.ast.fields.len == 0) {
|
if (full.ast.fields.len == 0) {
|
||||||
zg.setNode(dest_node, .{
|
zg.setNode(dest_node, .{
|
||||||
@ -460,7 +454,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
|
|||||||
|
|
||||||
fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 {
|
fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 {
|
||||||
const tree = zg.tree;
|
const tree = zg.tree;
|
||||||
assert(tree.tokens.items(.tag)[ident_token] == .identifier);
|
assert(tree.tokenTag(ident_token) == .identifier);
|
||||||
const ident_name = tree.tokenSlice(ident_token);
|
const ident_name = tree.tokenSlice(ident_token);
|
||||||
if (!mem.startsWith(u8, ident_name, "@")) {
|
if (!mem.startsWith(u8, ident_name, "@")) {
|
||||||
const start = zg.string_bytes.items.len;
|
const start = zg.string_bytes.items.len;
|
||||||
@ -493,19 +487,16 @@ fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 {
|
|||||||
|
|
||||||
/// Estimates the size of a string node without parsing it.
|
/// Estimates the size of a string node without parsing it.
|
||||||
pub fn strLitSizeHint(tree: Ast, node: Ast.Node.Index) usize {
|
pub fn strLitSizeHint(tree: Ast, node: Ast.Node.Index) usize {
|
||||||
switch (tree.nodes.items(.tag)[node]) {
|
switch (tree.nodeTag(node)) {
|
||||||
// Parsed string literals are typically around the size of the raw strings.
|
// Parsed string literals are typically around the size of the raw strings.
|
||||||
.string_literal => {
|
.string_literal => {
|
||||||
const token = tree.nodes.items(.main_token)[node];
|
const token = tree.nodeMainToken(node);
|
||||||
const raw_string = tree.tokenSlice(token);
|
const raw_string = tree.tokenSlice(token);
|
||||||
return raw_string.len;
|
return raw_string.len;
|
||||||
},
|
},
|
||||||
// Multiline string literal lengths can be computed exactly.
|
// Multiline string literal lengths can be computed exactly.
|
||||||
.multiline_string_literal => {
|
.multiline_string_literal => {
|
||||||
const first_tok, const last_tok = bounds: {
|
const first_tok, const last_tok = tree.nodeData(node).token_and_token;
|
||||||
const node_data = tree.nodes.items(.data)[node];
|
|
||||||
break :bounds .{ node_data.lhs, node_data.rhs };
|
|
||||||
};
|
|
||||||
|
|
||||||
var size = tree.tokenSlice(first_tok)[2..].len;
|
var size = tree.tokenSlice(first_tok)[2..].len;
|
||||||
for (first_tok + 1..last_tok + 1) |tok_idx| {
|
for (first_tok + 1..last_tok + 1) |tok_idx| {
|
||||||
@@ -524,17 +515,14 @@ pub fn parseStrLit(
     node: Ast.Node.Index,
     writer: anytype,
 ) error{OutOfMemory}!std.zig.string_literal.Result {
-    switch (tree.nodes.items(.tag)[node]) {
+    switch (tree.nodeTag(node)) {
         .string_literal => {
-            const token = tree.nodes.items(.main_token)[node];
+            const token = tree.nodeMainToken(node);
             const raw_string = tree.tokenSlice(token);
             return std.zig.string_literal.parseWrite(writer, raw_string);
         },
         .multiline_string_literal => {
-            const first_tok, const last_tok = bounds: {
-                const node_data = tree.nodes.items(.data)[node];
-                break :bounds .{ node_data.lhs, node_data.rhs };
-            };
+            const first_tok, const last_tok = tree.nodeData(node).token_and_token;

             // First line: do not append a newline.
             {
@@ -572,7 +560,7 @@ fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) !StringLiteralResult {
     switch (try parseStrLit(zg.tree, str_node, zg.string_bytes.writer(zg.gpa))) {
         .success => {},
         .failure => |err| {
-            const token = zg.tree.nodes.items(.main_token)[str_node];
+            const token = zg.tree.nodeMainToken(str_node);
             const raw_string = zg.tree.tokenSlice(token);
             try zg.lowerStrLitError(err, token, raw_string, 0);
             return error.BadString;
@@ -620,7 +608,7 @@ fn identAsString(zg: *ZonGen, ident_token: Ast.TokenIndex) !Zoir.NullTerminatedS

 fn numberLiteral(zg: *ZonGen, num_node: Ast.Node.Index, src_node: Ast.Node.Index, dest_node: Zoir.Node.Index, sign: enum { negative, positive }) !void {
     const tree = zg.tree;
-    const num_token = tree.nodes.items(.main_token)[num_node];
+    const num_token = tree.nodeMainToken(num_node);
     const num_bytes = tree.tokenSlice(num_token);

     switch (std.zig.parseNumberLiteral(num_bytes)) {
@@ -724,8 +712,8 @@ fn setBigIntLiteralNode(zg: *ZonGen, dest_node: Zoir.Node.Index, src_node: Ast.N

 fn charLiteral(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) !void {
     const tree = zg.tree;
-    assert(tree.nodes.items(.tag)[node] == .char_literal);
-    const main_token = tree.nodes.items(.main_token)[node];
+    assert(tree.nodeTag(node) == .char_literal);
+    const main_token = tree.nodeMainToken(node);
     const slice = tree.tokenSlice(main_token);
     switch (std.zig.parseCharLiteral(slice)) {
         .success => |codepoint| zg.setNode(dest_node, .{
@@ -739,8 +727,8 @@ fn charLiteral(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) !v

 fn identifier(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) !void {
     const tree = zg.tree;
-    assert(tree.nodes.items(.tag)[node] == .identifier);
-    const main_token = tree.nodes.items(.main_token)[node];
+    assert(tree.nodeTag(node) == .identifier);
+    const main_token = tree.nodeMainToken(node);
     const ident = tree.tokenSlice(main_token);

     const tag: Zoir.Node.Repr.Tag = t: {
@@ -823,8 +811,8 @@ fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, a

     return .{
         .msg = @enumFromInt(message_idx),
-        .token = Zoir.CompileError.invalid_token,
-        .node_or_offset = node,
+        .token = .none,
+        .node_or_offset = @intFromEnum(node),
     };
 }

@@ -836,33 +824,33 @@ fn errNoteTok(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, arg

     return .{
         .msg = @enumFromInt(message_idx),
-        .token = tok,
+        .token = .fromToken(tok),
         .node_or_offset = 0,
     };
 }

 fn addErrorNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, args: anytype) Allocator.Error!void {
-    return zg.addErrorInner(Zoir.CompileError.invalid_token, node, format, args, &.{});
+    return zg.addErrorInner(.none, @intFromEnum(node), format, args, &.{});
 }
 fn addErrorTok(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, args: anytype) Allocator.Error!void {
-    return zg.addErrorInner(tok, 0, format, args, &.{});
+    return zg.addErrorInner(.fromToken(tok), 0, format, args, &.{});
 }
 fn addErrorNodeNotes(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, args: anytype, notes: []const Zoir.CompileError.Note) Allocator.Error!void {
-    return zg.addErrorInner(Zoir.CompileError.invalid_token, node, format, args, notes);
+    return zg.addErrorInner(.none, @intFromEnum(node), format, args, notes);
 }
 fn addErrorTokNotes(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, args: anytype, notes: []const Zoir.CompileError.Note) Allocator.Error!void {
-    return zg.addErrorInner(tok, 0, format, args, notes);
+    return zg.addErrorInner(.fromToken(tok), 0, format, args, notes);
 }
 fn addErrorTokOff(zg: *ZonGen, tok: Ast.TokenIndex, offset: u32, comptime format: []const u8, args: anytype) Allocator.Error!void {
-    return zg.addErrorInner(tok, offset, format, args, &.{});
+    return zg.addErrorInner(.fromToken(tok), offset, format, args, &.{});
 }
 fn addErrorTokNotesOff(zg: *ZonGen, tok: Ast.TokenIndex, offset: u32, comptime format: []const u8, args: anytype, notes: []const Zoir.CompileError.Note) Allocator.Error!void {
-    return zg.addErrorInner(tok, offset, format, args, notes);
+    return zg.addErrorInner(.fromToken(tok), offset, format, args, notes);
 }

 fn addErrorInner(
     zg: *ZonGen,
-    token: Ast.TokenIndex,
+    token: Ast.OptionalTokenIndex,
     node_or_offset: u32,
     comptime format: []const u8,
     args: anytype,
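ZonGen's error plumbing now carries `Ast.OptionalTokenIndex` instead of an `invalid_token` sentinel: `.none` marks absence, `.fromToken(tok)` wraps a real token, and `unwrap()` recovers it. A sketch of both directions of the conversion, using only the operations visible in the hunks above; the helper names are illustrative:

    const Ast = @import("std").zig.Ast;

    // ?TokenIndex -> OptionalTokenIndex, mirroring the addError* wrappers.
    fn toOptionalToken(tok: ?Ast.TokenIndex) Ast.OptionalTokenIndex {
        return if (tok) |t| .fromToken(t) else .none;
    }

    // OptionalTokenIndex -> ?TokenIndex, mirroring the unwrap() call sites.
    fn toMaybeToken(opt: Ast.OptionalTokenIndex) ?Ast.TokenIndex {
        return opt.unwrap();
    }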
(File diff suppressed because it is too large.)
@@ -196,16 +196,15 @@ pub const Error = union(enum) {
         return .{ .err = self, .status = status };
     }

-    fn zoirErrorLocation(ast: Ast, maybe_token: Ast.TokenIndex, node_or_offset: u32) Ast.Location {
-        if (maybe_token == Zoir.CompileError.invalid_token) {
-            const main_tokens = ast.nodes.items(.main_token);
-            const ast_node = node_or_offset;
-            const token = main_tokens[ast_node];
-            return ast.tokenLocation(0, token);
-        } else {
-            var location = ast.tokenLocation(0, maybe_token);
+    fn zoirErrorLocation(ast: Ast, maybe_token: Ast.OptionalTokenIndex, node_or_offset: u32) Ast.Location {
+        if (maybe_token.unwrap()) |token| {
+            var location = ast.tokenLocation(0, token);
             location.column += node_or_offset;
             return location;
+        } else {
+            const ast_node: Ast.Node.Index = @enumFromInt(node_or_offset);
+            const token = ast.nodeMainToken(ast_node);
+            return ast.tokenLocation(0, token);
         }
     }
 };
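Note the dual role of `node_or_offset` in `zoirErrorLocation`: when a token is present it is a column offset, otherwise it is a raw node index that must be re-typed explicitly now that `Node.Index` is a distinct enum. A sketch of just that round-trip, with illustrative helper names:

    const Ast = @import("std").zig.Ast;

    // u32 payload -> distinct Node.Index, as in the else branch above.
    fn nodeFromRaw(raw: u32) Ast.Node.Index {
        return @enumFromInt(raw);
    }

    // Node.Index -> u32 payload, as in ZonGen's addErrorNode.
    fn rawFromNode(node: Ast.Node.Index) u32 {
        return @intFromEnum(node);
    }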
@@ -632,7 +631,7 @@ const Parser = struct {
         switch (try ZonGen.parseStrLit(self.ast, ast_node, buf.writer(self.gpa))) {
             .success => {},
             .failure => |err| {
-                const token = self.ast.nodes.items(.main_token)[ast_node];
+                const token = self.ast.nodeMainToken(ast_node);
                 const raw_string = self.ast.tokenSlice(token);
                 return self.failTokenFmt(token, @intCast(err.offset()), "{s}", .{err.fmt(raw_string)});
             },
@@ -1005,8 +1004,7 @@ const Parser = struct {
         args: anytype,
     ) error{ OutOfMemory, ParseZon } {
         @branchHint(.cold);
-        const main_tokens = self.ast.nodes.items(.main_token);
-        const token = main_tokens[node.getAstNode(self.zoir)];
+        const token = self.ast.nodeMainToken(node.getAstNode(self.zoir));
         return self.failTokenFmt(token, 0, fmt, args);
     }

@@ -1025,8 +1023,7 @@ const Parser = struct {
         message: []const u8,
     ) error{ParseZon} {
         @branchHint(.cold);
-        const main_tokens = self.ast.nodes.items(.main_token);
-        const token = main_tokens[node.getAstNode(self.zoir)];
+        const token = self.ast.nodeMainToken(node.getAstNode(self.zoir));
         return self.failToken(.{
             .token = token,
             .offset = 0,
@@ -1059,10 +1056,7 @@ const Parser = struct {
             const struct_init = self.ast.fullStructInit(&buf, node.getAstNode(self.zoir)).?;
             const field_node = struct_init.ast.fields[f];
             break :b self.ast.firstToken(field_node) - 2;
-        } else b: {
-            const main_tokens = self.ast.nodes.items(.main_token);
-            break :b main_tokens[node.getAstNode(self.zoir)];
-        };
+        } else self.ast.nodeMainToken(node.getAstNode(self.zoir));
         switch (@typeInfo(T)) {
             inline .@"struct", .@"union", .@"enum" => |info| {
                 const note: Error.TypeCheckFailure.Note = if (info.fields.len == 0) b: {
@@ -30,7 +30,7 @@
 arena: std.heap.ArenaAllocator,
 location: Location,
 location_tok: std.zig.Ast.TokenIndex,
-hash_tok: std.zig.Ast.TokenIndex,
+hash_tok: std.zig.Ast.OptionalTokenIndex,
 name_tok: std.zig.Ast.TokenIndex,
 lazy_status: LazyStatus,
 parent_package_root: Cache.Path,
@@ -317,8 +317,8 @@ pub fn run(f: *Fetch) RunError!void {
             f.location_tok,
             try eb.addString("expected path relative to build root; found absolute path"),
         );
-        if (f.hash_tok != 0) return f.fail(
-            f.hash_tok,
+        if (f.hash_tok.unwrap()) |hash_tok| return f.fail(
+            hash_tok,
             try eb.addString("path-based dependencies are not hashed"),
         );
         // Packages fetched by URL may not use relative paths to escape outside the
@@ -555,17 +555,18 @@ fn runResource(
     // job is done.

     if (remote_hash) |declared_hash| {
+        const hash_tok = f.hash_tok.unwrap().?;
         if (declared_hash.isOld()) {
             const actual_hex = Package.multiHashHexDigest(f.computed_hash.digest);
             if (!std.mem.eql(u8, declared_hash.toSlice(), &actual_hex)) {
-                return f.fail(f.hash_tok, try eb.printString(
+                return f.fail(hash_tok, try eb.printString(
                     "hash mismatch: manifest declares {s} but the fetched package has {s}",
                     .{ declared_hash.toSlice(), actual_hex },
                 ));
             }
         } else {
             if (!computed_package_hash.eql(&declared_hash)) {
-                return f.fail(f.hash_tok, try eb.printString(
+                return f.fail(hash_tok, try eb.printString(
                     "hash mismatch: manifest declares {s} but the fetched package has {s}",
                     .{ declared_hash.toSlice(), computed_package_hash.toSlice() },
                 ));
@@ -813,15 +814,14 @@ fn srcLoc(
 ) Allocator.Error!ErrorBundle.SourceLocationIndex {
     const ast = f.parent_manifest_ast orelse return .none;
     const eb = &f.error_bundle;
-    const token_starts = ast.tokens.items(.start);
     const start_loc = ast.tokenLocation(0, tok);
     const src_path = try eb.printString("{}" ++ fs.path.sep_str ++ Manifest.basename, .{f.parent_package_root});
     const msg_off = 0;
     return eb.addSourceLocation(.{
         .src_path = src_path,
-        .span_start = token_starts[tok],
-        .span_end = @intCast(token_starts[tok] + ast.tokenSlice(tok).len),
-        .span_main = token_starts[tok] + msg_off,
+        .span_start = ast.tokenStart(tok),
+        .span_end = @intCast(ast.tokenStart(tok) + ast.tokenSlice(tok).len),
+        .span_main = ast.tokenStart(tok) + msg_off,
         .line = @intCast(start_loc.line),
         .column = @intCast(start_loc.column),
         .source_line = try eb.addString(ast.source[start_loc.line_start..start_loc.line_end]),
@@ -2322,7 +2322,7 @@ const TestFetchBuilder = struct {
             .arena = std.heap.ArenaAllocator.init(allocator),
             .location = .{ .path_or_url = path_or_url },
             .location_tok = 0,
-            .hash_tok = 0,
+            .hash_tok = .none,
             .name_tok = 0,
             .lazy_status = .eager,
             .parent_package_root = Cache.Path{ .root_dir = Cache.Directory{ .handle = cache_dir, .path = null } },
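With `hash_tok` an `Ast.OptionalTokenIndex`, the old `!= 0` sentinel tests in Fetch become payload captures, and the unwrapped token is only in scope inside the branch, so an error can no longer be reported against a token that was never set. A sketch of that call-site shape; `failTok` is an illustrative stand-in for `Fetch.fail`:

    const Ast = @import("std").zig.Ast;

    fn failTok(tok: Ast.TokenIndex, msg: []const u8) error{Reported} {
        _ = tok;
        _ = msg;
        return error.Reported;
    }

    fn checkNotHashed(hash_tok: Ast.OptionalTokenIndex) error{Reported}!void {
        if (hash_tok.unwrap()) |tok|
            return failTok(tok, "path-based dependencies are not hashed");
    }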
@@ -17,8 +17,8 @@ pub const Dependency = struct {
     location_tok: Ast.TokenIndex,
     location_node: Ast.Node.Index,
     hash: ?[]const u8,
-    hash_tok: Ast.TokenIndex,
-    hash_node: Ast.Node.Index,
+    hash_tok: Ast.OptionalTokenIndex,
+    hash_node: Ast.Node.OptionalIndex,
     node: Ast.Node.Index,
     name_tok: Ast.TokenIndex,
     lazy: bool,
@@ -40,7 +40,7 @@ id: u32,
 version: std.SemanticVersion,
 version_node: Ast.Node.Index,
 dependencies: std.StringArrayHashMapUnmanaged(Dependency),
-dependencies_node: Ast.Node.Index,
+dependencies_node: Ast.Node.OptionalIndex,
 paths: std.StringArrayHashMapUnmanaged(void),
 minimum_zig_version: ?std.SemanticVersion,

@@ -58,10 +58,7 @@ pub const ParseOptions = struct {
 pub const Error = Allocator.Error;

 pub fn parse(gpa: Allocator, ast: Ast, options: ParseOptions) Error!Manifest {
-    const node_tags = ast.nodes.items(.tag);
-    const node_datas = ast.nodes.items(.data);
-    assert(node_tags[0] == .root);
-    const main_node_index = node_datas[0].lhs;
+    const main_node_index = ast.nodeData(.root).node;

     var arena_instance = std.heap.ArenaAllocator.init(gpa);
     errdefer arena_instance.deinit();
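`parse` also shows the new way to reach the root: instead of asserting `node_tags[0] == .root` and reading `node_datas[0].lhs`, the root node is named directly and its payload is read through the union field for a single node, as in the hunk above. A sketch, assuming a parsed `ast: std.zig.Ast`; the helper name is illustrative:

    const Ast = @import("std").zig.Ast;

    // The root node's data payload names the main container node.
    fn mainNode(ast: Ast) Ast.Node.Index {
        return ast.nodeData(.root).node;
    }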
@@ -75,9 +72,9 @@ pub fn parse(gpa: Allocator, ast: Ast, options: ParseOptions) Error!Manifest {
         .name = undefined,
         .id = 0,
         .version = undefined,
-        .version_node = 0,
+        .version_node = undefined,
         .dependencies = .{},
-        .dependencies_node = 0,
+        .dependencies_node = .none,
         .paths = .{},
         .allow_missing_paths_field = options.allow_missing_paths_field,
         .allow_name_string = options.allow_name_string,
@@ -121,8 +118,6 @@ pub fn copyErrorsIntoBundle(
     src_path: u32,
     eb: *std.zig.ErrorBundle.Wip,
 ) Allocator.Error!void {
-    const token_starts = ast.tokens.items(.start);
-
     for (man.errors) |msg| {
         const start_loc = ast.tokenLocation(0, msg.tok);

@@ -130,9 +125,9 @@ pub fn copyErrorsIntoBundle(
             .msg = try eb.addString(msg.msg),
             .src_loc = try eb.addSourceLocation(.{
                 .src_path = src_path,
-                .span_start = token_starts[msg.tok],
-                .span_end = @intCast(token_starts[msg.tok] + ast.tokenSlice(msg.tok).len),
-                .span_main = token_starts[msg.tok] + msg.off,
+                .span_start = ast.tokenStart(msg.tok),
+                .span_end = @intCast(ast.tokenStart(msg.tok) + ast.tokenSlice(msg.tok).len),
+                .span_main = ast.tokenStart(msg.tok) + msg.off,
                 .line = @intCast(start_loc.line),
                 .column = @intCast(start_loc.column),
                 .source_line = try eb.addString(ast.source[start_loc.line_start..start_loc.line_end]),
@@ -153,7 +148,7 @@ const Parse = struct {
     version: std.SemanticVersion,
     version_node: Ast.Node.Index,
     dependencies: std.StringArrayHashMapUnmanaged(Dependency),
-    dependencies_node: Ast.Node.Index,
+    dependencies_node: Ast.Node.OptionalIndex,
     paths: std.StringArrayHashMapUnmanaged(void),
     allow_missing_paths_field: bool,
     allow_name_string: bool,
@@ -164,8 +159,7 @@ const Parse = struct {

     fn parseRoot(p: *Parse, node: Ast.Node.Index) !void {
         const ast = p.ast;
-        const main_tokens = ast.nodes.items(.main_token);
-        const main_token = main_tokens[node];
+        const main_token = ast.nodeMainToken(node);

         var buf: [2]Ast.Node.Index = undefined;
         const struct_init = ast.fullStructInit(&buf, node) orelse {
@@ -184,7 +178,7 @@ const Parse = struct {
             // things manually provides an opportunity to do any additional verification
             // that is desirable on a per-field basis.
             if (mem.eql(u8, field_name, "dependencies")) {
-                p.dependencies_node = field_init;
+                p.dependencies_node = field_init.toOptional();
                 try parseDependencies(p, field_init);
             } else if (mem.eql(u8, field_name, "paths")) {
                 have_included_paths = true;
@@ -198,17 +192,17 @@ const Parse = struct {
                 p.version_node = field_init;
                 const version_text = try parseString(p, field_init);
                 if (version_text.len > max_version_len) {
-                    try appendError(p, main_tokens[field_init], "version string length {d} exceeds maximum of {d}", .{ version_text.len, max_version_len });
+                    try appendError(p, ast.nodeMainToken(field_init), "version string length {d} exceeds maximum of {d}", .{ version_text.len, max_version_len });
                 }
                 p.version = std.SemanticVersion.parse(version_text) catch |err| v: {
-                    try appendError(p, main_tokens[field_init], "unable to parse semantic version: {s}", .{@errorName(err)});
+                    try appendError(p, ast.nodeMainToken(field_init), "unable to parse semantic version: {s}", .{@errorName(err)});
                     break :v undefined;
                 };
                 have_version = true;
             } else if (mem.eql(u8, field_name, "minimum_zig_version")) {
                 const version_text = try parseString(p, field_init);
                 p.minimum_zig_version = std.SemanticVersion.parse(version_text) catch |err| v: {
-                    try appendError(p, main_tokens[field_init], "unable to parse semantic version: {s}", .{@errorName(err)});
+                    try appendError(p, ast.nodeMainToken(field_init), "unable to parse semantic version: {s}", .{@errorName(err)});
                     break :v null;
                 };
             } else {
@@ -251,11 +245,10 @@ const Parse = struct {

     fn parseDependencies(p: *Parse, node: Ast.Node.Index) !void {
         const ast = p.ast;
-        const main_tokens = ast.nodes.items(.main_token);

         var buf: [2]Ast.Node.Index = undefined;
         const struct_init = ast.fullStructInit(&buf, node) orelse {
-            const tok = main_tokens[node];
+            const tok = ast.nodeMainToken(node);
             return fail(p, tok, "expected dependencies expression to be a struct", .{});
         };

@@ -269,23 +262,22 @@ const Parse = struct {

     fn parseDependency(p: *Parse, node: Ast.Node.Index) !Dependency {
         const ast = p.ast;
-        const main_tokens = ast.nodes.items(.main_token);

         var buf: [2]Ast.Node.Index = undefined;
         const struct_init = ast.fullStructInit(&buf, node) orelse {
-            const tok = main_tokens[node];
+            const tok = ast.nodeMainToken(node);
             return fail(p, tok, "expected dependency expression to be a struct", .{});
         };

         var dep: Dependency = .{
             .location = undefined,
-            .location_tok = 0,
+            .location_tok = undefined,
             .location_node = undefined,
             .hash = null,
-            .hash_tok = 0,
-            .hash_node = undefined,
+            .hash_tok = .none,
+            .hash_node = .none,
             .node = node,
-            .name_tok = 0,
+            .name_tok = undefined,
             .lazy = false,
         };
         var has_location = false;
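The `Dependency` initializer puts the two optional wrappers side by side: fields that may legitimately be absent start as `.none`, while fields that are always assigned before use become `undefined` rather than a fake `0` index. A sketch of the conversions used when the hash field is later filled in; `Dep` is an illustrative stand-in for the real struct:

    const Ast = @import("std").zig.Ast;

    const Dep = struct {
        hash_tok: Ast.OptionalTokenIndex = .none,
        hash_node: Ast.Node.OptionalIndex = .none,
    };

    // Filling the optional fields once the manifest field is seen.
    fn setHash(dep: *Dep, ast: Ast, field_init: Ast.Node.Index) void {
        dep.hash_tok = .fromToken(ast.nodeMainToken(field_init));
        dep.hash_node = field_init.toOptional(); // Node.Index -> Node.OptionalIndex
    }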
@@ -299,7 +291,7 @@ const Parse = struct {
             // that is desirable on a per-field basis.
             if (mem.eql(u8, field_name, "url")) {
                 if (has_location) {
-                    return fail(p, main_tokens[field_init], "dependency should specify only one of 'url' and 'path' fields.", .{});
+                    return fail(p, ast.nodeMainToken(field_init), "dependency should specify only one of 'url' and 'path' fields.", .{});
                 }
                 dep.location = .{
                     .url = parseString(p, field_init) catch |err| switch (err) {
@@ -308,11 +300,11 @@ const Parse = struct {
                     },
                 };
                 has_location = true;
-                dep.location_tok = main_tokens[field_init];
+                dep.location_tok = ast.nodeMainToken(field_init);
                 dep.location_node = field_init;
             } else if (mem.eql(u8, field_name, "path")) {
                 if (has_location) {
-                    return fail(p, main_tokens[field_init], "dependency should specify only one of 'url' and 'path' fields.", .{});
+                    return fail(p, ast.nodeMainToken(field_init), "dependency should specify only one of 'url' and 'path' fields.", .{});
                 }
                 dep.location = .{
                     .path = parseString(p, field_init) catch |err| switch (err) {
@@ -321,15 +313,15 @@ const Parse = struct {
                     },
                 };
                 has_location = true;
-                dep.location_tok = main_tokens[field_init];
+                dep.location_tok = ast.nodeMainToken(field_init);
                 dep.location_node = field_init;
             } else if (mem.eql(u8, field_name, "hash")) {
                 dep.hash = parseHash(p, field_init) catch |err| switch (err) {
                     error.ParseFailure => continue,
                     else => |e| return e,
                 };
-                dep.hash_tok = main_tokens[field_init];
-                dep.hash_node = field_init;
+                dep.hash_tok = .fromToken(ast.nodeMainToken(field_init));
+                dep.hash_node = field_init.toOptional();
             } else if (mem.eql(u8, field_name, "lazy")) {
                 dep.lazy = parseBool(p, field_init) catch |err| switch (err) {
                     error.ParseFailure => continue,
@@ -342,7 +334,7 @@ const Parse = struct {
         }

         if (!has_location) {
-            try appendError(p, main_tokens[node], "dependency requires location field, one of 'url' or 'path'.", .{});
+            try appendError(p, ast.nodeMainToken(node), "dependency requires location field, one of 'url' or 'path'.", .{});
         }

         return dep;
@@ -350,11 +342,10 @@ const Parse = struct {

     fn parseIncludedPaths(p: *Parse, node: Ast.Node.Index) !void {
         const ast = p.ast;
-        const main_tokens = ast.nodes.items(.main_token);

         var buf: [2]Ast.Node.Index = undefined;
         const array_init = ast.fullArrayInit(&buf, node) orelse {
-            const tok = main_tokens[node];
+            const tok = ast.nodeMainToken(node);
             return fail(p, tok, "expected paths expression to be a list of strings", .{});
         };

@@ -369,12 +360,10 @@ const Parse = struct {

     fn parseBool(p: *Parse, node: Ast.Node.Index) !bool {
         const ast = p.ast;
-        const node_tags = ast.nodes.items(.tag);
-        const main_tokens = ast.nodes.items(.main_token);
-        if (node_tags[node] != .identifier) {
-            return fail(p, main_tokens[node], "expected identifier", .{});
+        if (ast.nodeTag(node) != .identifier) {
+            return fail(p, ast.nodeMainToken(node), "expected identifier", .{});
         }
-        const ident_token = main_tokens[node];
+        const ident_token = ast.nodeMainToken(node);
         const token_bytes = ast.tokenSlice(ident_token);
         if (mem.eql(u8, token_bytes, "true")) {
             return true;
@@ -387,10 +376,8 @@ const Parse = struct {

     fn parseFingerprint(p: *Parse, node: Ast.Node.Index) !Package.Fingerprint {
         const ast = p.ast;
-        const node_tags = ast.nodes.items(.tag);
-        const main_tokens = ast.nodes.items(.main_token);
-        const main_token = main_tokens[node];
-        if (node_tags[node] != .number_literal) {
+        const main_token = ast.nodeMainToken(node);
+        if (ast.nodeTag(node) != .number_literal) {
             return fail(p, main_token, "expected integer literal", .{});
         }
         const token_bytes = ast.tokenSlice(main_token);
@@ -406,11 +393,9 @@ const Parse = struct {

     fn parseName(p: *Parse, node: Ast.Node.Index) ![]const u8 {
         const ast = p.ast;
-        const node_tags = ast.nodes.items(.tag);
-        const main_tokens = ast.nodes.items(.main_token);
-        const main_token = main_tokens[node];
+        const main_token = ast.nodeMainToken(node);

-        if (p.allow_name_string and node_tags[node] == .string_literal) {
+        if (p.allow_name_string and ast.nodeTag(node) == .string_literal) {
             const name = try parseString(p, node);
             if (!std.zig.isValidId(name))
                 return fail(p, main_token, "name must be a valid bare zig identifier (hint: switch from string to enum literal)", .{});
@@ -423,7 +408,7 @@ const Parse = struct {
             return name;
         }

-        if (node_tags[node] != .enum_literal)
+        if (ast.nodeTag(node) != .enum_literal)
             return fail(p, main_token, "expected enum literal", .{});

         const ident_name = ast.tokenSlice(main_token);
@@ -440,12 +425,10 @@ const Parse = struct {

     fn parseString(p: *Parse, node: Ast.Node.Index) ![]const u8 {
         const ast = p.ast;
-        const node_tags = ast.nodes.items(.tag);
-        const main_tokens = ast.nodes.items(.main_token);
-        if (node_tags[node] != .string_literal) {
-            return fail(p, main_tokens[node], "expected string literal", .{});
+        if (ast.nodeTag(node) != .string_literal) {
+            return fail(p, ast.nodeMainToken(node), "expected string literal", .{});
         }
-        const str_lit_token = main_tokens[node];
+        const str_lit_token = ast.nodeMainToken(node);
         const token_bytes = ast.tokenSlice(str_lit_token);
         p.buf.clearRetainingCapacity();
         try parseStrLit(p, str_lit_token, &p.buf, token_bytes, 0);
@@ -455,8 +438,7 @@ const Parse = struct {

     fn parseHash(p: *Parse, node: Ast.Node.Index) ![]const u8 {
         const ast = p.ast;
-        const main_tokens = ast.nodes.items(.main_token);
-        const tok = main_tokens[node];
+        const tok = ast.nodeMainToken(node);
         const h = try parseString(p, node);

         if (h.len > Package.Hash.max_len) {
@@ -469,8 +451,7 @@ const Parse = struct {
     /// TODO: try to DRY this with AstGen.identifierTokenString
     fn identifierTokenString(p: *Parse, token: Ast.TokenIndex) InnerError![]const u8 {
         const ast = p.ast;
-        const token_tags = ast.tokens.items(.tag);
-        assert(token_tags[token] == .identifier);
+        assert(ast.tokenTag(token) == .identifier);
         const ident_name = ast.tokenSlice(token);
         if (!mem.startsWith(u8, ident_name, "@")) {
             return ident_name;
 src/Sema.zig | 197
@@ -407,18 +407,18 @@ pub const Block = struct {
         return block.comptime_reason != null;
     }

-    fn builtinCallArgSrc(block: *Block, builtin_call_node: i32, arg_index: u32) LazySrcLoc {
+    fn builtinCallArgSrc(block: *Block, builtin_call_node: std.zig.Ast.Node.Offset, arg_index: u32) LazySrcLoc {
         return block.src(.{ .node_offset_builtin_call_arg = .{
             .builtin_call_node = builtin_call_node,
             .arg_index = arg_index,
         } });
     }

-    pub fn nodeOffset(block: Block, node_offset: i32) LazySrcLoc {
+    pub fn nodeOffset(block: Block, node_offset: std.zig.Ast.Node.Offset) LazySrcLoc {
         return block.src(LazySrcLoc.Offset.nodeOffset(node_offset));
     }

-    fn tokenOffset(block: Block, tok_offset: u32) LazySrcLoc {
+    fn tokenOffset(block: Block, tok_offset: std.zig.Ast.TokenOffset) LazySrcLoc {
         return block.src(.{ .token_offset = tok_offset });
     }

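`Block`'s helpers now take the distinct offset types instead of bare `i32`/`u32`, so a token offset can no longer be passed where a node offset is expected, and the zero offset is spelled `.zero` rather than `0`. A sketch of the new spelling; the function is illustrative:

    const Ast = @import("std").zig.Ast;

    // A bare 0 no longer coerces to the distinct offset type; the named
    // constant is required.
    fn selfReferenceOffset() Ast.Node.Offset {
        return .zero;
    }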
@@ -1860,7 +1860,7 @@ fn analyzeBodyInner(
             if (!block.isComptime()) break :blk try sema.zirTry(block, inst);
             const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
             const src = block.nodeOffset(inst_data.src_node);
-            const operand_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
+            const operand_src = block.src(.{ .node_offset_try_operand = inst_data.src_node });
             const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
             const inline_body = sema.code.bodySlice(extra.end, extra.data.body_len);
             const err_union = try sema.resolveInst(extra.data.operand);
@@ -1883,7 +1883,7 @@ fn analyzeBodyInner(
             if (!block.isComptime()) break :blk try sema.zirTryPtr(block, inst);
             const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
             const src = block.nodeOffset(inst_data.src_node);
-            const operand_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
+            const operand_src = block.src(.{ .node_offset_try_operand = inst_data.src_node });
             const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
             const inline_body = sema.code.bodySlice(extra.end, extra.data.body_len);
             const operand = try sema.resolveInst(extra.data.operand);
@@ -2166,7 +2166,7 @@ pub fn setupErrorReturnTrace(sema: *Sema, block: *Block, last_arg_index: usize)
         const addrs_ptr = try err_trace_block.addTy(.alloc, try pt.singleMutPtrType(addr_arr_ty));

         // var st: StackTrace = undefined;
-        const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
+        const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
         try stack_trace_ty.resolveFields(pt);
         const st_ptr = try err_trace_block.addTy(.alloc, try pt.singleMutPtrType(stack_trace_ty));

@@ -2901,7 +2901,7 @@ fn zirStructDecl(
     const tracked_inst = try block.trackZir(inst);
     const src: LazySrcLoc = .{
         .base_node_inst = tracked_inst,
-        .offset = LazySrcLoc.Offset.nodeOffset(0),
+        .offset = LazySrcLoc.Offset.nodeOffset(.zero),
     };

     var extra_index = extra.end;
@@ -3114,7 +3114,7 @@ fn zirEnumDecl(
     var extra_index: usize = extra.end;

     const tracked_inst = try block.trackZir(inst);
-    const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
+    const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };

     const tag_type_ref = if (small.has_tag_type) blk: {
         const tag_type_ref: Zir.Inst.Ref = @enumFromInt(sema.code.extra[extra_index]);
@@ -3277,7 +3277,7 @@ fn zirUnionDecl(
     var extra_index: usize = extra.end;

     const tracked_inst = try block.trackZir(inst);
-    const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
+    const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };

     extra_index += @intFromBool(small.has_tag_type);
     const captures_len = if (small.has_captures_len) blk: {
@@ -3402,7 +3402,7 @@ fn zirOpaqueDecl(
     var extra_index: usize = extra.end;

     const tracked_inst = try block.trackZir(inst);
-    const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
+    const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };

     const captures_len = if (small.has_captures_len) blk: {
         const captures_len = sema.code.extra[extra_index];
@@ -3835,7 +3835,7 @@ fn zirMakePtrConst(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro
     if (try elem_ty.comptimeOnlySema(pt)) {
         // The value was initialized through RLS, so we didn't detect the runtime condition earlier.
         // TODO: source location of runtime control flow
-        const init_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
+        const init_src = block.src(.{ .node_offset_var_decl_init = inst_data.src_node });
         return sema.fail(block, init_src, "value with comptime-only type '{}' depends on runtime control flow", .{elem_ty.fmt(pt)});
     }

@@ -6690,8 +6690,8 @@ fn zirBreak(sema: *Sema, start_block: *Block, inst: Zir.Inst.Index) CompileError
     if (block.label) |label| {
         if (label.zir_block == zir_block) {
             const br_ref = try start_block.addBr(label.merges.block_inst, operand);
-            const src_loc = if (extra.operand_src_node != Zir.Inst.Break.no_src_node)
-                start_block.nodeOffset(extra.operand_src_node)
+            const src_loc = if (extra.operand_src_node.unwrap()) |operand_src_node|
+                start_block.nodeOffset(operand_src_node)
             else
                 null;
             try label.merges.src_locs.append(sema.gpa, src_loc);
@@ -6715,8 +6715,7 @@ fn zirSwitchContinue(sema: *Sema, start_block: *Block, inst: Zir.Inst.Index) Com

     const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].@"break";
     const extra = sema.code.extraData(Zir.Inst.Break, inst_data.payload_index).data;
-    assert(extra.operand_src_node != Zir.Inst.Break.no_src_node);
-    const operand_src = start_block.nodeOffset(extra.operand_src_node);
+    const operand_src = start_block.nodeOffset(extra.operand_src_node.unwrap().?);
     const uncoerced_operand = try sema.resolveInst(inst_data.operand);
     const switch_inst = extra.block_inst;

@@ -7048,7 +7047,7 @@ pub fn analyzeSaveErrRetIndex(sema: *Sema, block: *Block) SemaError!Air.Inst.Ref

     if (!block.ownerModule().error_tracing) return .none;

-    const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
+    const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
     try stack_trace_ty.resolveFields(pt);
     const field_name = try zcu.intern_pool.getOrPutString(gpa, pt.tid, "index", .no_embedded_nulls);
     const field_index = sema.structFieldIndex(block, stack_trace_ty, field_name, LazySrcLoc.unneeded) catch |err| switch (err) {
@@ -7346,7 +7345,7 @@ fn checkCallArgumentCount(
         if (maybe_func_inst) |func_inst| {
             try sema.errNote(.{
                 .base_node_inst = func_inst,
-                .offset = LazySrcLoc.Offset.nodeOffset(0),
+                .offset = LazySrcLoc.Offset.nodeOffset(.zero),
             }, msg, "function declared here", .{});
         }
         break :msg msg;
@@ -7418,7 +7417,7 @@ const CallArgsInfo = union(enum) {
     /// The list of resolved (but uncoerced) arguments is known ahead of time, but
     /// originated from a usage of the @call builtin at the given node offset.
     call_builtin: struct {
-        call_node_offset: i32,
+        call_node_offset: std.zig.Ast.Node.Offset,
         args: []const Air.Inst.Ref,
     },

@@ -7436,7 +7435,7 @@ const CallArgsInfo = union(enum) {
        /// analyzing arguments.
        call_inst: Zir.Inst.Index,
        /// The node offset of `call_inst`.
-        call_node_offset: i32,
+        call_node_offset: std.zig.Ast.Node.Offset,
        /// The number of arguments to this call, not including `bound_arg`.
        num_args: u32,
        /// The ZIR corresponding to all function arguments (other than `bound_arg`, if it
@@ -7599,7 +7598,7 @@ fn analyzeCall(
     const maybe_func_inst = try sema.funcDeclSrcInst(callee);
     const func_ret_ty_src: LazySrcLoc = if (maybe_func_inst) |fn_decl_inst| .{
         .base_node_inst = fn_decl_inst,
-        .offset = .{ .node_offset_fn_type_ret_ty = 0 },
+        .offset = .{ .node_offset_fn_type_ret_ty = .zero },
     } else func_src;

     const func_ty_info = zcu.typeToFunc(func_ty).?;
@@ -7613,7 +7612,7 @@ fn analyzeCall(
             errdefer msg.destroy(gpa);
             if (maybe_func_inst) |func_inst| try sema.errNote(.{
                 .base_node_inst = func_inst,
-                .offset = .nodeOffset(0),
+                .offset = .nodeOffset(.zero),
             }, msg, "function declared here", .{});
             break :msg msg;
         });
@@ -9574,7 +9573,7 @@ const Section = union(enum) {
 fn funcCommon(
     sema: *Sema,
     block: *Block,
-    src_node_offset: i32,
+    src_node_offset: std.zig.Ast.Node.Offset,
     func_inst: Zir.Inst.Index,
     cc: std.builtin.CallingConvention,
     /// this might be Type.generic_poison
@@ -9948,7 +9947,7 @@ fn finishFunc(
     if (!is_generic and sema.wantErrorReturnTracing(return_type)) {
         // Make sure that StackTrace's fields are resolved so that the backend can
         // lower this fn type.
-        const unresolved_stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
+        const unresolved_stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
         try unresolved_stack_trace_ty.resolveFields(pt);
     }

@@ -12599,7 +12598,7 @@ fn analyzeSwitchRuntimeBlock(
     union_originally: bool,
     maybe_union_ty: Type,
     err_set: bool,
-    switch_node_offset: i32,
+    switch_node_offset: std.zig.Ast.Node.Offset,
     special_prong_src: LazySrcLoc,
     seen_enum_fields: []?LazySrcLoc,
     seen_errors: SwitchErrorSet,
@@ -13219,7 +13218,7 @@ fn resolveSwitchComptimeLoop(
     maybe_ptr_operand_ty: Type,
     cond_ty: Type,
     init_cond_val: Value,
-    switch_node_offset: i32,
+    switch_node_offset: std.zig.Ast.Node.Offset,
     special: SpecialProng,
     case_vals: std.ArrayListUnmanaged(Air.Inst.Ref),
     scalar_cases_len: u32,
@@ -13255,7 +13254,7 @@ fn resolveSwitchComptimeLoop(
                 const extra = sema.code.extraData(Zir.Inst.Break, break_inst.data.@"break".payload_index).data;
                 if (extra.block_inst != spa.switch_block_inst) return error.ComptimeBreak;
                 // This is a `switch_continue` targeting this block. Change the operand and start over.
-                const src = child_block.nodeOffset(extra.operand_src_node);
+                const src = child_block.nodeOffset(extra.operand_src_node.unwrap().?);
                 const new_operand_uncoerced = try sema.resolveInst(break_inst.data.@"break".operand);
                 const new_operand = try sema.coerce(child_block, maybe_ptr_operand_ty, new_operand_uncoerced, src);

@@ -13287,7 +13286,7 @@ fn resolveSwitchComptime(
     cond_operand: Air.Inst.Ref,
     operand_val: Value,
     operand_ty: Type,
-    switch_node_offset: i32,
+    switch_node_offset: std.zig.Ast.Node.Offset,
     special: SpecialProng,
     case_vals: std.ArrayListUnmanaged(Air.Inst.Ref),
     scalar_cases_len: u32,
@@ -13837,7 +13836,7 @@ fn validateSwitchNoRange(
     block: *Block,
     ranges_len: u32,
     operand_ty: Type,
-    src_node_offset: i32,
+    src_node_offset: std.zig.Ast.Node.Offset,
 ) CompileError!void {
     if (ranges_len == 0)
         return;
@@ -14158,14 +14157,24 @@ fn zirShl(
     const pt = sema.pt;
     const zcu = pt.zcu;
     const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
-    const src = block.nodeOffset(inst_data.src_node);
-    const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
-    const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
     const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
     const lhs = try sema.resolveInst(extra.lhs);
     const rhs = try sema.resolveInst(extra.rhs);
     const lhs_ty = sema.typeOf(lhs);
     const rhs_ty = sema.typeOf(rhs);

+    const src = block.nodeOffset(inst_data.src_node);
+    const lhs_src = switch (air_tag) {
+        .shl, .shl_sat => block.src(.{ .node_offset_bin_lhs = inst_data.src_node }),
+        .shl_exact => block.builtinCallArgSrc(inst_data.src_node, 0),
+        else => unreachable,
+    };
+    const rhs_src = switch (air_tag) {
+        .shl, .shl_sat => block.src(.{ .node_offset_bin_rhs = inst_data.src_node }),
+        .shl_exact => block.builtinCallArgSrc(inst_data.src_node, 1),
+        else => unreachable,
+    };
+
     try sema.checkVectorizableBinaryOperands(block, src, lhs_ty, rhs_ty, lhs_src, rhs_src);

     const scalar_ty = lhs_ty.scalarType(zcu);
@@ -14329,14 +14338,24 @@ fn zirShr(
     const pt = sema.pt;
     const zcu = pt.zcu;
     const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
-    const src = block.nodeOffset(inst_data.src_node);
-    const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
-    const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
     const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
     const lhs = try sema.resolveInst(extra.lhs);
     const rhs = try sema.resolveInst(extra.rhs);
     const lhs_ty = sema.typeOf(lhs);
     const rhs_ty = sema.typeOf(rhs);

+    const src = block.nodeOffset(inst_data.src_node);
+    const lhs_src = switch (air_tag) {
+        .shr => block.src(.{ .node_offset_bin_lhs = inst_data.src_node }),
+        .shr_exact => block.builtinCallArgSrc(inst_data.src_node, 0),
+        else => unreachable,
+    };
+    const rhs_src = switch (air_tag) {
+        .shr => block.src(.{ .node_offset_bin_rhs = inst_data.src_node }),
+        .shr_exact => block.builtinCallArgSrc(inst_data.src_node, 1),
+        else => unreachable,
+    };
+
     try sema.checkVectorizableBinaryOperands(block, src, lhs_ty, rhs_ty, lhs_src, rhs_src);
     const scalar_ty = lhs_ty.scalarType(zcu);

@ -14560,7 +14579,7 @@ fn zirBitNot(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
|
|||||||
fn analyzeTupleCat(
|
fn analyzeTupleCat(
|
||||||
sema: *Sema,
|
sema: *Sema,
|
||||||
block: *Block,
|
block: *Block,
|
||||||
src_node: i32,
|
src_node: std.zig.Ast.Node.Offset,
|
||||||
lhs: Air.Inst.Ref,
|
lhs: Air.Inst.Ref,
|
||||||
rhs: Air.Inst.Ref,
|
rhs: Air.Inst.Ref,
|
||||||
) CompileError!Air.Inst.Ref {
|
) CompileError!Air.Inst.Ref {
|
||||||
@ -15005,7 +15024,7 @@ fn getArrayCatInfo(sema: *Sema, block: *Block, src: LazySrcLoc, operand: Air.Ins
|
|||||||
fn analyzeTupleMul(
|
fn analyzeTupleMul(
|
||||||
sema: *Sema,
|
sema: *Sema,
|
||||||
block: *Block,
|
block: *Block,
|
||||||
src_node: i32,
|
src_node: std.zig.Ast.Node.Offset,
|
||||||
operand: Air.Inst.Ref,
|
operand: Air.Inst.Ref,
|
||||||
factor: usize,
|
factor: usize,
|
||||||
) CompileError!Air.Inst.Ref {
|
) CompileError!Air.Inst.Ref {
|
||||||
@ -15494,8 +15513,8 @@ fn zirDivExact(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
|||||||
const zcu = pt.zcu;
|
const zcu = pt.zcu;
|
||||||
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
||||||
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
||||||
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
|
const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
|
||||||
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
|
const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
|
||||||
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
||||||
const lhs = try sema.resolveInst(extra.lhs);
|
const lhs = try sema.resolveInst(extra.lhs);
|
||||||
const rhs = try sema.resolveInst(extra.rhs);
|
const rhs = try sema.resolveInst(extra.rhs);
|
||||||
@ -15660,8 +15679,8 @@ fn zirDivFloor(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
|||||||
const zcu = pt.zcu;
|
const zcu = pt.zcu;
|
||||||
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
||||||
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
||||||
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
|
const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
|
||||||
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
|
const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
|
||||||
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
||||||
const lhs = try sema.resolveInst(extra.lhs);
|
const lhs = try sema.resolveInst(extra.lhs);
|
||||||
const rhs = try sema.resolveInst(extra.rhs);
|
const rhs = try sema.resolveInst(extra.rhs);
|
||||||
@ -15771,8 +15790,8 @@ fn zirDivTrunc(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
|||||||
const zcu = pt.zcu;
|
const zcu = pt.zcu;
|
||||||
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
||||||
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
||||||
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
|
const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
|
||||||
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
|
const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
|
||||||
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
||||||
const lhs = try sema.resolveInst(extra.lhs);
|
const lhs = try sema.resolveInst(extra.lhs);
|
||||||
const rhs = try sema.resolveInst(extra.rhs);
|
const rhs = try sema.resolveInst(extra.rhs);
|
||||||
@ -16201,8 +16220,8 @@ fn zirMod(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Ins
|
|||||||
const zcu = pt.zcu;
|
const zcu = pt.zcu;
|
||||||
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
||||||
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
||||||
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
|
const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
|
||||||
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
|
const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
|
||||||
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
||||||
const lhs = try sema.resolveInst(extra.lhs);
|
const lhs = try sema.resolveInst(extra.lhs);
|
||||||
const rhs = try sema.resolveInst(extra.rhs);
|
const rhs = try sema.resolveInst(extra.rhs);
|
||||||
@ -16297,8 +16316,8 @@ fn zirRem(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Ins
|
|||||||
const zcu = pt.zcu;
|
const zcu = pt.zcu;
|
||||||
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
||||||
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
||||||
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
|
const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
|
||||||
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
|
const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
|
||||||
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
||||||
const lhs = try sema.resolveInst(extra.lhs);
|
const lhs = try sema.resolveInst(extra.lhs);
|
||||||
const rhs = try sema.resolveInst(extra.rhs);
|
const rhs = try sema.resolveInst(extra.rhs);
|
||||||
@ -17867,7 +17886,7 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
|
|||||||
const ip = &zcu.intern_pool;
|
const ip = &zcu.intern_pool;
|
||||||
const captures = Type.fromInterned(zcu.namespacePtr(block.namespace).owner_type).getCaptures(zcu);
|
const captures = Type.fromInterned(zcu.namespacePtr(block.namespace).owner_type).getCaptures(zcu);
|
||||||
|
|
||||||
const src_node: i32 = @bitCast(extended.operand);
|
const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
|
||||||
const src = block.nodeOffset(src_node);
|
const src = block.nodeOffset(src_node);
|
||||||
|
|
||||||
const capture_ty = switch (captures.get(ip)[extended.small].unwrap()) {
|
const capture_ty = switch (captures.get(ip)[extended.small].unwrap()) {
|
||||||
@ -17891,8 +17910,8 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
|
|||||||
});
|
});
|
||||||
break :name null;
|
break :name null;
|
||||||
};
|
};
|
||||||
const node: std.zig.Ast.Node.Index = @bitCast(src_node + @as(i32, @bitCast(src_base_node)));
|
const node = src_node.toAbsolute(src_base_node);
|
||||||
const token = tree.nodes.items(.main_token)[node];
|
const token = tree.nodeMainToken(node);
|
||||||
break :name tree.tokenSlice(token);
|
break :name tree.tokenSlice(token);
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -17919,8 +17938,8 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
|
|||||||
});
|
});
|
||||||
break :name null;
|
break :name null;
|
||||||
};
|
};
|
||||||
const node: std.zig.Ast.Node.Index = @bitCast(src_node + @as(i32, @bitCast(src_base_node)));
|
const node = src_node.toAbsolute(src_base_node);
|
||||||
const token = tree.nodes.items(.main_token)[node];
|
const token = tree.nodeMainToken(node);
|
||||||
break :name tree.tokenSlice(token);
|
break :name tree.tokenSlice(token);
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -17930,7 +17949,7 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
|
|||||||
try sema.errMsg(src, "variable not accessible from inner function", .{});
|
try sema.errMsg(src, "variable not accessible from inner function", .{});
|
||||||
errdefer msg.destroy(sema.gpa);
|
errdefer msg.destroy(sema.gpa);
|
||||||
|
|
||||||
try sema.errNote(block.nodeOffset(0), msg, "crossed function definition here", .{});
|
try sema.errNote(block.nodeOffset(.zero), msg, "crossed function definition here", .{});
|
||||||
|
|
||||||
// TODO add "declared here" note
|
// TODO add "declared here" note
|
||||||
break :msg msg;
|
break :msg msg;
|
||||||
@ -17962,7 +17981,8 @@ fn zirFrameAddress(
|
|||||||
block: *Block,
|
block: *Block,
|
||||||
extended: Zir.Inst.Extended.InstData,
|
extended: Zir.Inst.Extended.InstData,
|
||||||
) CompileError!Air.Inst.Ref {
|
) CompileError!Air.Inst.Ref {
|
||||||
const src = block.nodeOffset(@bitCast(extended.operand));
|
const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
|
||||||
|
const src = block.nodeOffset(src_node);
|
||||||
try sema.requireRuntimeBlock(block, src, null);
|
try sema.requireRuntimeBlock(block, src, null);
|
||||||
return try block.addNoOp(.frame_addr);
|
return try block.addNoOp(.frame_addr);
|
||||||
}
|
}
|
||||||
@ -18059,7 +18079,7 @@ fn zirBuiltinSrc(
|
|||||||
} });
|
} });
|
||||||
};
|
};
|
||||||
|
|
||||||
const src_loc_ty = try sema.getBuiltinType(block.nodeOffset(0), .SourceLocation);
|
const src_loc_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .SourceLocation);
|
||||||
const fields = .{
|
const fields = .{
|
||||||
// module: [:0]const u8,
|
// module: [:0]const u8,
|
||||||
module_name_val,
|
module_name_val,
|
||||||
@ -19528,7 +19548,7 @@ fn zirCondbr(
|
|||||||
fn zirTry(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
|
fn zirTry(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
|
||||||
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
||||||
const src = parent_block.nodeOffset(inst_data.src_node);
|
const src = parent_block.nodeOffset(inst_data.src_node);
|
||||||
const operand_src = parent_block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
|
const operand_src = parent_block.src(.{ .node_offset_try_operand = inst_data.src_node });
|
||||||
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
|
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
|
||||||
const body = sema.code.bodySlice(extra.end, extra.data.body_len);
|
const body = sema.code.bodySlice(extra.end, extra.data.body_len);
|
||||||
const err_union = try sema.resolveInst(extra.data.operand);
|
const err_union = try sema.resolveInst(extra.data.operand);
|
||||||
@ -19587,7 +19607,7 @@ fn zirTry(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!
|
|||||||
fn zirTryPtr(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
|
fn zirTryPtr(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
|
||||||
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
||||||
const src = parent_block.nodeOffset(inst_data.src_node);
|
const src = parent_block.nodeOffset(inst_data.src_node);
|
||||||
const operand_src = parent_block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
|
const operand_src = parent_block.src(.{ .node_offset_try_operand = inst_data.src_node });
|
||||||
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
|
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
|
||||||
const body = sema.code.bodySlice(extra.end, extra.data.body_len);
|
const body = sema.code.bodySlice(extra.end, extra.data.body_len);
|
||||||
const operand = try sema.resolveInst(extra.data.operand);
|
const operand = try sema.resolveInst(extra.data.operand);
|
||||||
@ -19790,7 +19810,7 @@ fn zirRetImplicit(
|
|||||||
}
|
}
|
||||||
|
|
||||||
const operand = try sema.resolveInst(inst_data.operand);
|
const operand = try sema.resolveInst(inst_data.operand);
|
||||||
const ret_ty_src = block.src(.{ .node_offset_fn_type_ret_ty = 0 });
|
const ret_ty_src = block.src(.{ .node_offset_fn_type_ret_ty = .zero });
|
||||||
const base_tag = sema.fn_ret_ty.baseZigTypeTag(zcu);
|
const base_tag = sema.fn_ret_ty.baseZigTypeTag(zcu);
|
||||||
if (base_tag == .noreturn) {
|
if (base_tag == .noreturn) {
|
||||||
const msg = msg: {
|
const msg = msg: {
|
||||||
@ -21277,7 +21297,7 @@ fn getErrorReturnTrace(sema: *Sema, block: *Block) CompileError!Air.Inst.Ref {
|
|||||||
const pt = sema.pt;
|
const pt = sema.pt;
|
||||||
const zcu = pt.zcu;
|
const zcu = pt.zcu;
|
||||||
const ip = &zcu.intern_pool;
|
const ip = &zcu.intern_pool;
|
||||||
const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
|
const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
|
||||||
try stack_trace_ty.resolveFields(pt);
|
try stack_trace_ty.resolveFields(pt);
|
||||||
const ptr_stack_trace_ty = try pt.singleMutPtrType(stack_trace_ty);
|
const ptr_stack_trace_ty = try pt.singleMutPtrType(stack_trace_ty);
|
||||||
const opt_ptr_stack_trace_ty = try pt.optionalType(ptr_stack_trace_ty.toIntern());
|
const opt_ptr_stack_trace_ty = try pt.optionalType(ptr_stack_trace_ty.toIntern());
|
||||||
@ -21299,7 +21319,8 @@ fn zirFrame(
|
|||||||
block: *Block,
|
block: *Block,
|
||||||
extended: Zir.Inst.Extended.InstData,
|
extended: Zir.Inst.Extended.InstData,
|
||||||
) CompileError!Air.Inst.Ref {
|
) CompileError!Air.Inst.Ref {
|
||||||
const src = block.nodeOffset(@bitCast(extended.operand));
|
const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
|
||||||
|
const src = block.nodeOffset(src_node);
|
||||||
return sema.failWithUseOfAsync(block, src);
|
return sema.failWithUseOfAsync(block, src);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -21553,13 +21574,13 @@ fn zirReify(
|
|||||||
const tracked_inst = try block.trackZir(inst);
|
const tracked_inst = try block.trackZir(inst);
|
||||||
const src: LazySrcLoc = .{
|
const src: LazySrcLoc = .{
|
||||||
.base_node_inst = tracked_inst,
|
.base_node_inst = tracked_inst,
|
||||||
.offset = LazySrcLoc.Offset.nodeOffset(0),
|
.offset = LazySrcLoc.Offset.nodeOffset(.zero),
|
||||||
};
|
};
|
||||||
const operand_src: LazySrcLoc = .{
|
const operand_src: LazySrcLoc = .{
|
||||||
.base_node_inst = tracked_inst,
|
.base_node_inst = tracked_inst,
|
||||||
.offset = .{
|
.offset = .{
|
||||||
.node_offset_builtin_call_arg = .{
|
.node_offset_builtin_call_arg = .{
|
||||||
.builtin_call_node = 0, // `tracked_inst` is precisely the `reify` instruction, so offset is 0
|
.builtin_call_node = .zero, // `tracked_inst` is precisely the `reify` instruction, so offset is 0
|
||||||
.arg_index = 0,
|
.arg_index = 0,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -22867,7 +22888,8 @@ fn zirCVaEnd(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) C
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn zirCVaStart(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
|
fn zirCVaStart(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
|
||||||
const src = block.nodeOffset(@bitCast(extended.operand));
|
const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
|
||||||
|
const src = block.nodeOffset(src_node);
|
||||||
|
|
||||||
const va_list_ty = try sema.getBuiltinType(src, .VaList);
|
const va_list_ty = try sema.getBuiltinType(src, .VaList);
|
||||||
try sema.requireRuntimeBlock(block, src, null);
|
try sema.requireRuntimeBlock(block, src, null);
|
||||||
@ -24272,12 +24294,12 @@ fn zirOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
|||||||
fn bitOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!u64 {
|
fn bitOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!u64 {
|
||||||
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
||||||
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
|
||||||
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
|
const ty_src = block.builtinCallArgSrc(inst_data.src_node, 0);
|
||||||
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
|
const field_name_src = block.builtinCallArgSrc(inst_data.src_node, 1);
|
||||||
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
||||||
|
|
||||||
const ty = try sema.resolveType(block, lhs_src, extra.lhs);
|
const ty = try sema.resolveType(block, ty_src, extra.lhs);
|
||||||
const field_name = try sema.resolveConstStringIntern(block, rhs_src, extra.rhs, .{ .simple = .field_name });
|
const field_name = try sema.resolveConstStringIntern(block, field_name_src, extra.rhs, .{ .simple = .field_name });
|
||||||
|
|
||||||
const pt = sema.pt;
|
const pt = sema.pt;
|
||||||
const zcu = pt.zcu;
|
const zcu = pt.zcu;
|
||||||
@ -24285,15 +24307,15 @@ fn bitOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!u6
|
|||||||
try ty.resolveLayout(pt);
|
try ty.resolveLayout(pt);
|
||||||
switch (ty.zigTypeTag(zcu)) {
|
switch (ty.zigTypeTag(zcu)) {
|
||||||
.@"struct" => {},
|
.@"struct" => {},
|
||||||
else => return sema.fail(block, lhs_src, "expected struct type, found '{}'", .{ty.fmt(pt)}),
|
else => return sema.fail(block, ty_src, "expected struct type, found '{}'", .{ty.fmt(pt)}),
|
||||||
}
|
}
|
||||||
|
|
||||||
const field_index = if (ty.isTuple(zcu)) blk: {
|
const field_index = if (ty.isTuple(zcu)) blk: {
|
||||||
if (field_name.eqlSlice("len", ip)) {
|
if (field_name.eqlSlice("len", ip)) {
|
||||||
return sema.fail(block, src, "no offset available for 'len' field of tuple", .{});
|
return sema.fail(block, src, "no offset available for 'len' field of tuple", .{});
|
||||||
}
|
}
|
||||||
break :blk try sema.tupleFieldIndex(block, ty, field_name, rhs_src);
|
break :blk try sema.tupleFieldIndex(block, ty, field_name, field_name_src);
|
||||||
} else try sema.structFieldIndex(block, ty, field_name, rhs_src);
|
} else try sema.structFieldIndex(block, ty, field_name, field_name_src);
|
||||||
|
|
||||||
if (ty.structFieldIsComptime(field_index, zcu)) {
|
if (ty.structFieldIsComptime(field_index, zcu)) {
|
||||||
return sema.fail(block, src, "no offset available for comptime field", .{});
|
return sema.fail(block, src, "no offset available for comptime field", .{});
|
||||||
@ -25077,7 +25099,7 @@ fn zirShuffle(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
|
|||||||
fn analyzeShuffle(
|
fn analyzeShuffle(
|
||||||
sema: *Sema,
|
sema: *Sema,
|
||||||
block: *Block,
|
block: *Block,
|
||||||
src_node: i32,
|
src_node: std.zig.Ast.Node.Offset,
|
||||||
elem_ty: Type,
|
elem_ty: Type,
|
||||||
a_arg: Air.Inst.Ref,
|
a_arg: Air.Inst.Ref,
|
||||||
b_arg: Air.Inst.Ref,
|
b_arg: Air.Inst.Ref,
|
||||||
@ -27004,7 +27026,8 @@ fn zirBuiltinValue(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstD
|
|||||||
const gpa = zcu.gpa;
|
const gpa = zcu.gpa;
|
||||||
const ip = &zcu.intern_pool;
|
const ip = &zcu.intern_pool;
|
||||||
|
|
||||||
const src = block.nodeOffset(@bitCast(extended.operand));
|
const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
|
||||||
|
const src = block.nodeOffset(src_node);
|
||||||
const value: Zir.Inst.BuiltinValue = @enumFromInt(extended.small);
|
const value: Zir.Inst.BuiltinValue = @enumFromInt(extended.small);
|
||||||
|
|
||||||
const ty = switch (value) {
|
const ty = switch (value) {
|
||||||
@ -29479,7 +29502,7 @@ const CoerceOpts = struct {
|
|||||||
return .{
|
return .{
|
||||||
.base_node_inst = func_inst,
|
.base_node_inst = func_inst,
|
||||||
.offset = .{ .fn_proto_param_type = .{
|
.offset = .{ .fn_proto_param_type = .{
|
||||||
.fn_proto_node_offset = 0,
|
.fn_proto_node_offset = .zero,
|
||||||
.param_index = info.param_i,
|
.param_index = info.param_i,
|
||||||
} },
|
} },
|
||||||
};
|
};
|
||||||
@ -30084,7 +30107,7 @@ fn coerceExtra(
|
|||||||
|
|
||||||
const ret_ty_src: LazySrcLoc = .{
|
const ret_ty_src: LazySrcLoc = .{
|
||||||
.base_node_inst = ip.getNav(zcu.funcInfo(sema.func_index).owner_nav).srcInst(ip),
|
.base_node_inst = ip.getNav(zcu.funcInfo(sema.func_index).owner_nav).srcInst(ip),
|
||||||
.offset = .{ .node_offset_fn_type_ret_ty = 0 },
|
.offset = .{ .node_offset_fn_type_ret_ty = .zero },
|
||||||
};
|
};
|
||||||
try sema.errNote(ret_ty_src, msg, "'noreturn' declared here", .{});
|
try sema.errNote(ret_ty_src, msg, "'noreturn' declared here", .{});
|
||||||
break :msg msg;
|
break :msg msg;
|
||||||
@ -30124,7 +30147,7 @@ fn coerceExtra(
|
|||||||
{
|
{
|
||||||
const ret_ty_src: LazySrcLoc = .{
|
const ret_ty_src: LazySrcLoc = .{
|
||||||
.base_node_inst = ip.getNav(zcu.funcInfo(sema.func_index).owner_nav).srcInst(ip),
|
.base_node_inst = ip.getNav(zcu.funcInfo(sema.func_index).owner_nav).srcInst(ip),
|
||||||
.offset = .{ .node_offset_fn_type_ret_ty = 0 },
|
.offset = .{ .node_offset_fn_type_ret_ty = .zero },
|
||||||
};
|
};
|
||||||
if (inst_ty.isError(zcu) and !dest_ty.isError(zcu)) {
|
if (inst_ty.isError(zcu) and !dest_ty.isError(zcu)) {
|
||||||
try sema.errNote(ret_ty_src, msg, "function cannot return an error", .{});
|
try sema.errNote(ret_ty_src, msg, "function cannot return an error", .{});
|
||||||
@ -32325,7 +32348,7 @@ pub fn ensureNavResolved(sema: *Sema, src: LazySrcLoc, nav_index: InternPool.Nav
|
|||||||
if (zcu.analysis_in_progress.contains(anal_unit)) {
|
if (zcu.analysis_in_progress.contains(anal_unit)) {
|
||||||
return sema.failWithOwnedErrorMsg(null, try sema.errMsg(.{
|
return sema.failWithOwnedErrorMsg(null, try sema.errMsg(.{
|
||||||
.base_node_inst = nav.analysis.?.zir_index,
|
.base_node_inst = nav.analysis.?.zir_index,
|
||||||
.offset = LazySrcLoc.Offset.nodeOffset(0),
|
.offset = LazySrcLoc.Offset.nodeOffset(.zero),
|
||||||
}, "dependency loop detected", .{}));
|
}, "dependency loop detected", .{}));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -33942,7 +33965,7 @@ const PeerTypeCandidateSrc = union(enum) {
|
|||||||
/// index i in this slice
|
/// index i in this slice
|
||||||
override: []const ?LazySrcLoc,
|
override: []const ?LazySrcLoc,
|
||||||
/// resolvePeerTypes originates from a @TypeOf(...) call
|
/// resolvePeerTypes originates from a @TypeOf(...) call
|
||||||
typeof_builtin_call_node_offset: i32,
|
typeof_builtin_call_node_offset: std.zig.Ast.Node.Offset,
|
||||||
|
|
||||||
pub fn resolve(
|
pub fn resolve(
|
||||||
self: PeerTypeCandidateSrc,
|
self: PeerTypeCandidateSrc,
|
||||||
@ -35545,7 +35568,7 @@ fn backingIntType(
|
|||||||
|
|
||||||
const backing_int_src: LazySrcLoc = .{
|
const backing_int_src: LazySrcLoc = .{
|
||||||
.base_node_inst = struct_type.zir_index,
|
.base_node_inst = struct_type.zir_index,
|
||||||
.offset = .{ .node_offset_container_tag = 0 },
|
.offset = .{ .node_offset_container_tag = .zero },
|
||||||
};
|
};
|
||||||
block.comptime_reason = .{ .reason = .{
|
block.comptime_reason = .{ .reason = .{
|
||||||
.src = backing_int_src,
|
.src = backing_int_src,
|
||||||
@ -35566,7 +35589,7 @@ fn backingIntType(
|
|||||||
struct_type.setBackingIntType(ip, backing_int_ty.toIntern());
|
struct_type.setBackingIntType(ip, backing_int_ty.toIntern());
|
||||||
} else {
|
} else {
|
||||||
if (fields_bit_sum > std.math.maxInt(u16)) {
|
if (fields_bit_sum > std.math.maxInt(u16)) {
|
||||||
return sema.fail(&block, block.nodeOffset(0), "size of packed struct '{d}' exceeds maximum bit width of 65535", .{fields_bit_sum});
|
return sema.fail(&block, block.nodeOffset(.zero), "size of packed struct '{d}' exceeds maximum bit width of 65535", .{fields_bit_sum});
|
||||||
}
|
}
|
||||||
const backing_int_ty = try pt.intType(.unsigned, @intCast(fields_bit_sum));
|
const backing_int_ty = try pt.intType(.unsigned, @intCast(fields_bit_sum));
|
||||||
struct_type.setBackingIntType(ip, backing_int_ty.toIntern());
|
struct_type.setBackingIntType(ip, backing_int_ty.toIntern());
|
||||||
@ -36167,7 +36190,7 @@ fn structFields(
|
|||||||
.comptime_reason = .{ .reason = .{
|
.comptime_reason = .{ .reason = .{
|
||||||
.src = .{
|
.src = .{
|
||||||
.base_node_inst = struct_type.zir_index,
|
.base_node_inst = struct_type.zir_index,
|
||||||
.offset = .nodeOffset(0),
|
.offset = .nodeOffset(.zero),
|
||||||
},
|
},
|
||||||
.r = .{ .simple = .struct_fields },
|
.r = .{ .simple = .struct_fields },
|
||||||
} },
|
} },
|
||||||
@ -36508,7 +36531,7 @@ fn unionFields(
|
|||||||
|
|
||||||
const src: LazySrcLoc = .{
|
const src: LazySrcLoc = .{
|
||||||
.base_node_inst = union_type.zir_index,
|
.base_node_inst = union_type.zir_index,
|
||||||
.offset = .nodeOffset(0),
|
.offset = .nodeOffset(.zero),
|
||||||
};
|
};
|
||||||
|
|
||||||
var block_scope: Block = .{
|
var block_scope: Block = .{
|
||||||
@ -36537,7 +36560,7 @@ fn unionFields(
|
|||||||
if (tag_type_ref != .none) {
|
if (tag_type_ref != .none) {
|
||||||
const tag_ty_src: LazySrcLoc = .{
|
const tag_ty_src: LazySrcLoc = .{
|
||||||
.base_node_inst = union_type.zir_index,
|
.base_node_inst = union_type.zir_index,
|
||||||
.offset = .{ .node_offset_container_tag = 0 },
|
.offset = .{ .node_offset_container_tag = .zero },
|
||||||
};
|
};
|
||||||
const provided_ty = try sema.resolveType(&block_scope, tag_ty_src, tag_type_ref);
|
const provided_ty = try sema.resolveType(&block_scope, tag_ty_src, tag_type_ref);
|
||||||
if (small.auto_enum_tag) {
|
if (small.auto_enum_tag) {
|
||||||
@ -38512,7 +38535,7 @@ pub fn resolveDeclaredEnum(
|
|||||||
const zcu = pt.zcu;
|
const zcu = pt.zcu;
|
||||||
const gpa = zcu.gpa;
|
const gpa = zcu.gpa;
|
||||||
|
|
||||||
const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
|
const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };
|
||||||
|
|
||||||
var arena: std.heap.ArenaAllocator = .init(gpa);
|
var arena: std.heap.ArenaAllocator = .init(gpa);
|
||||||
defer arena.deinit();
|
defer arena.deinit();
|
||||||
@ -38599,7 +38622,7 @@ fn resolveDeclaredEnumInner(
|
|||||||
|
|
||||||
const bit_bags_count = std.math.divCeil(usize, fields_len, 32) catch unreachable;
|
const bit_bags_count = std.math.divCeil(usize, fields_len, 32) catch unreachable;
|
||||||
|
|
||||||
const tag_ty_src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = .{ .node_offset_container_tag = 0 } };
|
const tag_ty_src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = .{ .node_offset_container_tag = .zero } };
|
||||||
|
|
||||||
const int_tag_ty = ty: {
|
const int_tag_ty = ty: {
|
||||||
if (body.len != 0) {
|
if (body.len != 0) {
|
||||||
@ -38752,9 +38775,9 @@ pub fn resolveNavPtrModifiers(
|
|||||||
const gpa = zcu.gpa;
|
const gpa = zcu.gpa;
|
||||||
const ip = &zcu.intern_pool;
|
const ip = &zcu.intern_pool;
|
||||||
|
|
||||||
const align_src = block.src(.{ .node_offset_var_decl_align = 0 });
|
const align_src = block.src(.{ .node_offset_var_decl_align = .zero });
|
||||||
const section_src = block.src(.{ .node_offset_var_decl_section = 0 });
|
const section_src = block.src(.{ .node_offset_var_decl_section = .zero });
|
||||||
const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = 0 });
|
const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = .zero });
|
||||||
|
|
||||||
const alignment: InternPool.Alignment = a: {
|
const alignment: InternPool.Alignment = a: {
|
||||||
const align_body = zir_decl.align_body orelse break :a .none;
|
const align_body = zir_decl.align_body orelse break :a .none;
|
||||||
@ -38827,7 +38850,7 @@ pub fn analyzeMemoizedState(sema: *Sema, block: *Block, simple_src: LazySrcLoc,
|
|||||||
|
|
||||||
const src: LazySrcLoc = .{
|
const src: LazySrcLoc = .{
|
||||||
.base_node_inst = ip.getNav(nav).srcInst(ip),
|
.base_node_inst = ip.getNav(nav).srcInst(ip),
|
||||||
.offset = .nodeOffset(0),
|
.offset = .nodeOffset(.zero),
|
||||||
};
|
};
|
||||||
|
|
||||||
const result = try sema.analyzeNavVal(block, src, nav);
|
const result = try sema.analyzeNavVal(block, src, nav);
|
||||||
|
|||||||
@ -3505,7 +3505,7 @@ pub fn srcLocOrNull(ty: Type, zcu: *Zcu) ?Zcu.LazySrcLoc {
|
|||||||
},
|
},
|
||||||
else => return null,
|
else => return null,
|
||||||
},
|
},
|
||||||
.offset = Zcu.LazySrcLoc.Offset.nodeOffset(0),
|
.offset = Zcu.LazySrcLoc.Offset.nodeOffset(.zero),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
573
src/Zcu.zig
573
src/Zcu.zig
File diff suppressed because it is too large
Load Diff
@ -841,7 +841,7 @@ fn analyzeComptimeUnit(pt: Zcu.PerThread, cu_id: InternPool.ComptimeUnit.Id) Zcu
|
|||||||
.comptime_reason = .{ .reason = .{
|
.comptime_reason = .{ .reason = .{
|
||||||
.src = .{
|
.src = .{
|
||||||
.base_node_inst = comptime_unit.zir_index,
|
.base_node_inst = comptime_unit.zir_index,
|
||||||
.offset = .{ .token_offset = 0 },
|
.offset = .{ .token_offset = .zero },
|
||||||
},
|
},
|
||||||
.r = .{ .simple = .comptime_keyword },
|
.r = .{ .simple = .comptime_keyword },
|
||||||
} },
|
} },
|
||||||
@ -1042,11 +1042,11 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr
|
|||||||
const zir_decl = zir.getDeclaration(inst_resolved.inst);
|
const zir_decl = zir.getDeclaration(inst_resolved.inst);
|
||||||
assert(old_nav.is_usingnamespace == (zir_decl.kind == .@"usingnamespace"));
|
assert(old_nav.is_usingnamespace == (zir_decl.kind == .@"usingnamespace"));
|
||||||
|
|
||||||
const ty_src = block.src(.{ .node_offset_var_decl_ty = 0 });
|
const ty_src = block.src(.{ .node_offset_var_decl_ty = .zero });
|
||||||
const init_src = block.src(.{ .node_offset_var_decl_init = 0 });
|
const init_src = block.src(.{ .node_offset_var_decl_init = .zero });
|
||||||
const align_src = block.src(.{ .node_offset_var_decl_align = 0 });
|
const align_src = block.src(.{ .node_offset_var_decl_align = .zero });
|
||||||
const section_src = block.src(.{ .node_offset_var_decl_section = 0 });
|
const section_src = block.src(.{ .node_offset_var_decl_section = .zero });
|
||||||
const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = 0 });
|
const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = .zero });
|
||||||
|
|
||||||
block.comptime_reason = .{ .reason = .{
|
block.comptime_reason = .{ .reason = .{
|
||||||
.src = init_src,
|
.src = init_src,
|
||||||
@ -1135,7 +1135,7 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr
|
|||||||
break :l zir.nullTerminatedString(zir_decl.lib_name);
|
break :l zir.nullTerminatedString(zir_decl.lib_name);
|
||||||
} else null;
|
} else null;
|
||||||
if (lib_name) |l| {
|
if (lib_name) |l| {
|
||||||
const lib_name_src = block.src(.{ .node_offset_lib_name = 0 });
|
const lib_name_src = block.src(.{ .node_offset_lib_name = .zero });
|
||||||
try sema.handleExternLibName(&block, lib_name_src, l);
|
try sema.handleExternLibName(&block, lib_name_src, l);
|
||||||
}
|
}
|
||||||
break :val .fromInterned(try pt.getExtern(.{
|
break :val .fromInterned(try pt.getExtern(.{
|
||||||
@ -1233,7 +1233,7 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (zir_decl.linkage == .@"export") {
|
if (zir_decl.linkage == .@"export") {
|
||||||
const export_src = block.src(.{ .token_offset = @intFromBool(zir_decl.is_pub) });
|
const export_src = block.src(.{ .token_offset = @enumFromInt(@intFromBool(zir_decl.is_pub)) });
|
||||||
const name_slice = zir.nullTerminatedString(zir_decl.name);
|
const name_slice = zir.nullTerminatedString(zir_decl.name);
|
||||||
const name_ip = try ip.getOrPutString(gpa, pt.tid, name_slice, .no_embedded_nulls);
|
const name_ip = try ip.getOrPutString(gpa, pt.tid, name_slice, .no_embedded_nulls);
|
||||||
try sema.analyzeExport(&block, export_src, .{ .name = name_ip }, nav_id);
|
try sema.analyzeExport(&block, export_src, .{ .name = name_ip }, nav_id);
|
||||||
@ -1414,7 +1414,7 @@ fn analyzeNavType(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileEr
|
|||||||
const zir_decl = zir.getDeclaration(inst_resolved.inst);
|
const zir_decl = zir.getDeclaration(inst_resolved.inst);
|
||||||
assert(old_nav.is_usingnamespace == (zir_decl.kind == .@"usingnamespace"));
|
assert(old_nav.is_usingnamespace == (zir_decl.kind == .@"usingnamespace"));
|
||||||
|
|
||||||
const ty_src = block.src(.{ .node_offset_var_decl_ty = 0 });
|
const ty_src = block.src(.{ .node_offset_var_decl_ty = .zero });
|
||||||
|
|
||||||
block.comptime_reason = .{ .reason = .{
|
block.comptime_reason = .{ .reason = .{
|
||||||
.src = ty_src,
|
.src = ty_src,
|
||||||
@ -2743,7 +2743,7 @@ fn analyzeFnBodyInner(pt: Zcu.PerThread, func_index: InternPool.Index) Zcu.SemaE
|
|||||||
if (sema.fn_ret_ty_ies) |ies| {
|
if (sema.fn_ret_ty_ies) |ies| {
|
||||||
sema.resolveInferredErrorSetPtr(&inner_block, .{
|
sema.resolveInferredErrorSetPtr(&inner_block, .{
|
||||||
.base_node_inst = inner_block.src_base_inst,
|
.base_node_inst = inner_block.src_base_inst,
|
||||||
.offset = Zcu.LazySrcLoc.Offset.nodeOffset(0),
|
.offset = Zcu.LazySrcLoc.Offset.nodeOffset(.zero),
|
||||||
}, ies) catch |err| switch (err) {
|
}, ies) catch |err| switch (err) {
|
||||||
error.ComptimeReturn => unreachable,
|
error.ComptimeReturn => unreachable,
|
||||||
error.ComptimeBreak => unreachable,
|
error.ComptimeBreak => unreachable,
|
||||||
@ -2762,7 +2762,7 @@ fn analyzeFnBodyInner(pt: Zcu.PerThread, func_index: InternPool.Index) Zcu.SemaE
|
|||||||
// result in circular dependency errors.
|
// result in circular dependency errors.
|
||||||
// TODO: this can go away once we fix backends having to resolve `StackTrace`.
|
// TODO: this can go away once we fix backends having to resolve `StackTrace`.
|
||||||
// The codegen timing guarantees that the parameter types will be populated.
|
// The codegen timing guarantees that the parameter types will be populated.
|
||||||
sema.resolveFnTypes(fn_ty, inner_block.nodeOffset(0)) catch |err| switch (err) {
|
sema.resolveFnTypes(fn_ty, inner_block.nodeOffset(.zero)) catch |err| switch (err) {
|
||||||
error.ComptimeReturn => unreachable,
|
error.ComptimeReturn => unreachable,
|
||||||
error.ComptimeBreak => unreachable,
|
error.ComptimeBreak => unreachable,
|
||||||
else => |e| return e,
|
else => |e| return e,
|
||||||
|
|||||||
18
src/main.zig
18
src/main.zig
@ -5224,7 +5224,7 @@ fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
|
|||||||
.arena = std.heap.ArenaAllocator.init(gpa),
|
.arena = std.heap.ArenaAllocator.init(gpa),
|
||||||
.location = .{ .relative_path = build_mod.root },
|
.location = .{ .relative_path = build_mod.root },
|
||||||
.location_tok = 0,
|
.location_tok = 0,
|
||||||
.hash_tok = 0,
|
.hash_tok = .none,
|
||||||
.name_tok = 0,
|
.name_tok = 0,
|
||||||
.lazy_status = .eager,
|
.lazy_status = .eager,
|
||||||
.parent_package_root = build_mod.root,
|
.parent_package_root = build_mod.root,
|
||||||
@ -6285,8 +6285,10 @@ fn cmdAstCheck(
|
|||||||
file.tree.?.tokens.len * (@sizeOf(std.zig.Token.Tag) + @sizeOf(Ast.ByteOffset));
|
file.tree.?.tokens.len * (@sizeOf(std.zig.Token.Tag) + @sizeOf(Ast.ByteOffset));
|
||||||
const tree_bytes = @sizeOf(Ast) + file.tree.?.nodes.len *
|
const tree_bytes = @sizeOf(Ast) + file.tree.?.nodes.len *
|
||||||
(@sizeOf(Ast.Node.Tag) +
|
(@sizeOf(Ast.Node.Tag) +
|
||||||
@sizeOf(Ast.Node.Data) +
|
@sizeOf(Ast.TokenIndex) +
|
||||||
@sizeOf(Ast.TokenIndex));
|
// Here we don't use @sizeOf(Ast.Node.Data) because it would include
|
||||||
|
// the debug safety tag but we want to measure release size.
|
||||||
|
8);
|
||||||
const instruction_bytes = file.zir.?.instructions.len *
|
const instruction_bytes = file.zir.?.instructions.len *
|
||||||
// Here we don't use @sizeOf(Zir.Inst.Data) because it would include
|
// Here we don't use @sizeOf(Zir.Inst.Data) because it would include
|
||||||
// the debug safety tag but we want to measure release size.
|
// the debug safety tag but we want to measure release size.
|
||||||
@ -7126,7 +7128,7 @@ fn cmdFetch(
|
|||||||
.arena = std.heap.ArenaAllocator.init(gpa),
|
.arena = std.heap.ArenaAllocator.init(gpa),
|
||||||
.location = .{ .path_or_url = path_or_url },
|
.location = .{ .path_or_url = path_or_url },
|
||||||
.location_tok = 0,
|
.location_tok = 0,
|
||||||
.hash_tok = 0,
|
.hash_tok = .none,
|
||||||
.name_tok = 0,
|
.name_tok = 0,
|
||||||
.lazy_status = .eager,
|
.lazy_status = .eager,
|
||||||
.parent_package_root = undefined,
|
.parent_package_root = undefined,
|
||||||
@ -7282,8 +7284,8 @@ fn cmdFetch(
|
|||||||
|
|
||||||
warn("overwriting existing dependency named '{s}'", .{name});
|
warn("overwriting existing dependency named '{s}'", .{name});
|
||||||
try fixups.replace_nodes_with_string.put(gpa, dep.location_node, location_replace);
|
try fixups.replace_nodes_with_string.put(gpa, dep.location_node, location_replace);
|
||||||
if (dep.hash_node != 0) {
|
if (dep.hash_node.unwrap()) |hash_node| {
|
||||||
try fixups.replace_nodes_with_string.put(gpa, dep.hash_node, hash_replace);
|
try fixups.replace_nodes_with_string.put(gpa, hash_node, hash_replace);
|
||||||
} else {
|
} else {
|
||||||
// https://github.com/ziglang/zig/issues/21690
|
// https://github.com/ziglang/zig/issues/21690
|
||||||
}
|
}
|
||||||
@ -7292,9 +7294,9 @@ fn cmdFetch(
|
|||||||
const deps = manifest.dependencies.values();
|
const deps = manifest.dependencies.values();
|
||||||
const last_dep_node = deps[deps.len - 1].node;
|
const last_dep_node = deps[deps.len - 1].node;
|
||||||
try fixups.append_string_after_node.put(gpa, last_dep_node, new_node_text);
|
try fixups.append_string_after_node.put(gpa, last_dep_node, new_node_text);
|
||||||
} else if (manifest.dependencies_node != 0) {
|
} else if (manifest.dependencies_node.unwrap()) |dependencies_node| {
|
||||||
// Add fixup for replacing the entire dependencies struct.
|
// Add fixup for replacing the entire dependencies struct.
|
||||||
try fixups.replace_nodes_with_string.put(gpa, manifest.dependencies_node, dependencies_init);
|
try fixups.replace_nodes_with_string.put(gpa, dependencies_node, dependencies_init);
|
||||||
} else {
|
} else {
|
||||||
// Add fixup for adding dependencies struct.
|
// Add fixup for adding dependencies struct.
|
||||||
try fixups.append_string_after_node.put(gpa, manifest.version_node, dependencies_text);
|
try fixups.append_string_after_node.put(gpa, manifest.version_node, dependencies_text);
|
||||||
|
|||||||
@ -24,7 +24,7 @@ pub fn renderAsTextToFile(
|
|||||||
.file = scope_file,
|
.file = scope_file,
|
||||||
.code = scope_file.zir.?,
|
.code = scope_file.zir.?,
|
||||||
.indent = 0,
|
.indent = 0,
|
||||||
.parent_decl_node = 0,
|
.parent_decl_node = .root,
|
||||||
.recurse_decls = true,
|
.recurse_decls = true,
|
||||||
.recurse_blocks = true,
|
.recurse_blocks = true,
|
||||||
};
|
};
|
||||||
@ -185,10 +185,6 @@ const Writer = struct {
|
|||||||
}
|
}
|
||||||
} = .{},
|
} = .{},
|
||||||
|
|
||||||
fn relativeToNodeIndex(self: *Writer, offset: i32) Ast.Node.Index {
|
|
||||||
return @bitCast(offset + @as(i32, @bitCast(self.parent_decl_node)));
|
|
||||||
}
|
|
||||||
|
|
||||||
fn writeInstToStream(
|
fn writeInstToStream(
|
||||||
self: *Writer,
|
self: *Writer,
|
||||||
stream: anytype,
|
stream: anytype,
|
||||||
@ -595,7 +591,7 @@ const Writer = struct {
|
|||||||
const prev_parent_decl_node = self.parent_decl_node;
|
const prev_parent_decl_node = self.parent_decl_node;
|
||||||
self.parent_decl_node = inst_data.node;
|
self.parent_decl_node = inst_data.node;
|
||||||
defer self.parent_decl_node = prev_parent_decl_node;
|
defer self.parent_decl_node = prev_parent_decl_node;
|
||||||
try self.writeSrcNode(stream, 0);
|
try self.writeSrcNode(stream, .zero);
|
||||||
},
|
},
|
||||||
|
|
||||||
.builtin_extern,
|
.builtin_extern,
|
||||||
@ -631,7 +627,8 @@ const Writer = struct {
|
|||||||
|
|
||||||
fn writeExtNode(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
fn writeExtNode(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||||
try stream.writeAll(")) ");
|
try stream.writeAll(")) ");
|
||||||
try self.writeSrcNode(stream, @bitCast(extended.operand));
|
const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
|
||||||
|
try self.writeSrcNode(stream, src_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeArrayInitElemType(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
|
fn writeArrayInitElemType(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
|
||||||
@ -1579,7 +1576,7 @@ const Writer = struct {
|
|||||||
try stream.writeByteNTimes(' ', self.indent);
|
try stream.writeByteNTimes(' ', self.indent);
|
||||||
try stream.writeAll("}) ");
|
try stream.writeAll("}) ");
|
||||||
}
|
}
|
||||||
try self.writeSrcNode(stream, 0);
|
try self.writeSrcNode(stream, .zero);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeUnionDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
fn writeUnionDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||||
@ -1659,7 +1656,7 @@ const Writer = struct {
|
|||||||
|
|
||||||
if (fields_len == 0) {
|
if (fields_len == 0) {
|
||||||
try stream.writeAll("}) ");
|
try stream.writeAll("}) ");
|
||||||
try self.writeSrcNode(stream, 0);
|
try self.writeSrcNode(stream, .zero);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
try stream.writeAll(", ");
|
try stream.writeAll(", ");
|
||||||
@ -1730,7 +1727,7 @@ const Writer = struct {
|
|||||||
self.indent -= 2;
|
self.indent -= 2;
|
||||||
try stream.writeByteNTimes(' ', self.indent);
|
try stream.writeByteNTimes(' ', self.indent);
|
||||||
try stream.writeAll("}) ");
|
try stream.writeAll("}) ");
|
||||||
try self.writeSrcNode(stream, 0);
|
try self.writeSrcNode(stream, .zero);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeEnumDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
fn writeEnumDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||||
@ -1849,7 +1846,7 @@ const Writer = struct {
|
|||||||
try stream.writeByteNTimes(' ', self.indent);
|
try stream.writeByteNTimes(' ', self.indent);
|
||||||
try stream.writeAll("}) ");
|
try stream.writeAll("}) ");
|
||||||
}
|
}
|
||||||
try self.writeSrcNode(stream, 0);
|
try self.writeSrcNode(stream, .zero);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeOpaqueDecl(
|
fn writeOpaqueDecl(
|
||||||
@ -1893,7 +1890,7 @@ const Writer = struct {
|
|||||||
try stream.writeByteNTimes(' ', self.indent);
|
try stream.writeByteNTimes(' ', self.indent);
|
||||||
try stream.writeAll("}) ");
|
try stream.writeAll("}) ");
|
||||||
}
|
}
|
||||||
try self.writeSrcNode(stream, 0);
|
try self.writeSrcNode(stream, .zero);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeTupleDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
fn writeTupleDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||||
@ -2539,7 +2536,7 @@ const Writer = struct {
|
|||||||
ret_ty_body: []const Zir.Inst.Index,
|
ret_ty_body: []const Zir.Inst.Index,
|
||||||
ret_ty_is_generic: bool,
|
ret_ty_is_generic: bool,
|
||||||
body: []const Zir.Inst.Index,
|
body: []const Zir.Inst.Index,
|
||||||
src_node: i32,
|
src_node: Ast.Node.Offset,
|
||||||
src_locs: Zir.Inst.Func.SrcLocs,
|
src_locs: Zir.Inst.Func.SrcLocs,
|
||||||
noalias_bits: u32,
|
noalias_bits: u32,
|
||||||
) !void {
|
) !void {
|
||||||
@ -2647,18 +2644,20 @@ const Writer = struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try stream.writeAll(") ");
|
try stream.writeAll(") ");
|
||||||
try self.writeSrcNode(stream, 0);
|
try self.writeSrcNode(stream, .zero);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeClosureGet(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
fn writeClosureGet(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||||
try stream.print("{d})) ", .{extended.small});
|
try stream.print("{d})) ", .{extended.small});
|
||||||
try self.writeSrcNode(stream, @bitCast(extended.operand));
|
const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
|
||||||
|
try self.writeSrcNode(stream, src_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeBuiltinValue(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
fn writeBuiltinValue(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||||
const val: Zir.Inst.BuiltinValue = @enumFromInt(extended.small);
|
const val: Zir.Inst.BuiltinValue = @enumFromInt(extended.small);
|
||||||
try stream.print("{s})) ", .{@tagName(val)});
|
try stream.print("{s})) ", .{@tagName(val)});
|
||||||
try self.writeSrcNode(stream, @bitCast(extended.operand));
|
const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
|
||||||
|
try self.writeSrcNode(stream, src_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeInplaceArithResultTy(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
fn writeInplaceArithResultTy(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||||
@ -2760,9 +2759,9 @@ const Writer = struct {
|
|||||||
try stream.writeAll(name);
|
try stream.writeAll(name);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeSrcNode(self: *Writer, stream: anytype, src_node: i32) !void {
|
fn writeSrcNode(self: *Writer, stream: anytype, src_node: Ast.Node.Offset) !void {
|
||||||
const tree = self.file.tree orelse return;
|
const tree = self.file.tree orelse return;
|
||||||
const abs_node = self.relativeToNodeIndex(src_node);
|
const abs_node = src_node.toAbsolute(self.parent_decl_node);
|
||||||
const src_span = tree.nodeToSpan(abs_node);
|
const src_span = tree.nodeToSpan(abs_node);
|
||||||
const start = self.line_col_cursor.find(tree.source, src_span.start);
|
const start = self.line_col_cursor.find(tree.source, src_span.start);
|
||||||
const end = self.line_col_cursor.find(tree.source, src_span.end);
|
const end = self.line_col_cursor.find(tree.source, src_span.end);
|
||||||
@ -2772,10 +2771,10 @@ const Writer = struct {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeSrcTok(self: *Writer, stream: anytype, src_tok: u32) !void {
|
fn writeSrcTok(self: *Writer, stream: anytype, src_tok: Ast.TokenOffset) !void {
|
||||||
const tree = self.file.tree orelse return;
|
const tree = self.file.tree orelse return;
|
||||||
const abs_tok = tree.firstToken(self.parent_decl_node) + src_tok;
|
const abs_tok = src_tok.toAbsolute(tree.firstToken(self.parent_decl_node));
|
||||||
const span_start = tree.tokens.items(.start)[abs_tok];
|
const span_start = tree.tokenStart(abs_tok);
|
||||||
const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(abs_tok).len));
|
const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(abs_tok).len));
|
||||||
const start = self.line_col_cursor.find(tree.source, span_start);
|
const start = self.line_col_cursor.find(tree.source, span_start);
|
||||||
const end = self.line_col_cursor.find(tree.source, span_end);
|
const end = self.line_col_cursor.find(tree.source, span_end);
|
||||||
@ -2785,9 +2784,9 @@ const Writer = struct {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeSrcTokAbs(self: *Writer, stream: anytype, src_tok: u32) !void {
|
fn writeSrcTokAbs(self: *Writer, stream: anytype, src_tok: Ast.TokenIndex) !void {
|
||||||
const tree = self.file.tree orelse return;
|
const tree = self.file.tree orelse return;
|
||||||
const span_start = tree.tokens.items(.start)[src_tok];
|
const span_start = tree.tokenStart(src_tok);
|
||||||
const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(src_tok).len));
|
const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(src_tok).len));
|
||||||
const start = self.line_col_cursor.find(tree.source, span_start);
|
const start = self.line_col_cursor.find(tree.source, span_start);
|
||||||
const end = self.line_col_cursor.find(tree.source, span_end);
|
const end = self.line_col_cursor.find(tree.source, span_end);
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user