stage2: finish implementation of LazySrcLoc

Andrew Kelley 2021-03-31 23:00:00 -07:00
parent b27d052676
commit c9e31febf8
5 changed files with 284 additions and 56 deletions

View File

@@ -1,35 +0,0 @@
this is my WIP branch scratch pad, to be deleted before merging into master
Merge TODO list:
* finish implementing SrcLoc byteOffset function
* audit all the .unneeded src locations
* audit the calls to toSrcLocWithDecl in codegen, specifically when there are inlined
function calls from other files.
Performance optimizations to look into:
* don't store end index for blocks; rely on last instruction being noreturn
* look into not storing the field name of field access as a string in ZIR
instructions. Alternatively, look into introducing interning to string_bytes
(local to the owner Decl), or into allowing field access based on a token/node
so that it references source code bytes directly. Another idea: null-terminated
string variants, which avoid having to store the length.
- Look into this for enum literals too
* make ret_type and ret_ptr instructions be implied indexes; no need to have
tags associated with them.
* use a smaller encoding for the auto generated return void at the end of
function ZIR.
* enum literals can use small strings (see the sketch just after this list)
* string literals can use small strings
* don't need the Sema coercion on condbr condition, it's done with result locations
* make astgen lower for loops using pointer arithmetic, because it's faster; if the
programmer wants an index capture, that will just be a convenience variable that
zig sets up independently.
* in astgen, if a decl_val would refer to a const variable or to a function, there could
be a special zir.Inst.Ref form that refers to a decl as the operand. This would elide
all of those decl_val instructions from the ZIR.
* don't have an explicit dbg_stmt zir instruction - instead merge it with
var decl and assignment instructions, etc.
- make it set sema.src where appropriate
* look into not emitting redundant dbg stmts to TZIR
* make decl references in ZIR be u32 indexes to the Decl dependencies array hash map
instead of duplicating *Decl entries in zir.Code.
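
Not part of this commit: a minimal sketch of the "small strings" idea from the list above,
using a hypothetical StringRef type local to the owner Decl. Strings of up to 4 bytes are
packed directly into the payload; longer strings fall back to an index into string_bytes.

const std = @import("std");

/// Hypothetical reference to a string owned by a Decl: short strings are stored
/// inline, longer ones point into the owner's string_bytes buffer.
const StringRef = union(enum) {
    /// Strings of 4 bytes or fewer, zero-padded.
    small: [4]u8,
    /// Byte offset into string_bytes for longer strings.
    index: u32,

    fn init(bytes: []const u8, string_bytes_offset: u32) StringRef {
        if (bytes.len <= 4) {
            var buf = [_]u8{0} ** 4;
            std.mem.copy(u8, &buf, bytes);
            return StringRef{ .small = buf };
        }
        return StringRef{ .index = string_bytes_offset };
    }
};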

View File

@@ -3476,6 +3476,8 @@ fn asmExpr(
const asm_source = try expr(gz, scope, .{ .ty = .const_slice_u8_type }, full.ast.template);
if (full.outputs.len != 0) {
// when implementing this be sure to add test coverage for the asm return type
// not resolving into a type (the node_offset_asm_ret_ty field of LazySrcLoc)
return mod.failTok(scope, full.ast.asm_token, "TODO implement asm with an output", .{});
}

View File

@@ -1525,7 +1525,6 @@ pub const SrcLoc = struct {
.node_offset_for_cond,
.node_offset_builtin_call_arg0,
.node_offset_builtin_call_arg1,
.node_offset_builtin_call_argn,
.node_offset_array_access_index,
.node_offset_slice_sentinel,
.node_offset_call_func,
@@ -1620,15 +1619,129 @@ pub const SrcLoc = struct {
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_builtin_call_arg1 => @panic("TODO"),
.node_offset_builtin_call_argn => unreachable, // Handled specially in `Sema`.
.node_offset_array_access_index => @panic("TODO"),
.node_offset_slice_sentinel => @panic("TODO"),
.node_offset_call_func => @panic("TODO"),
.node_offset_field_name => @panic("TODO"),
.node_offset_deref_ptr => @panic("TODO"),
.node_offset_asm_source => @panic("TODO"),
.node_offset_asm_ret_ty => @panic("TODO"),
.node_offset_builtin_call_arg1 => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
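// builtin_call_two(_comma) stores up to two args directly in lhs/rhs; the general
// builtin_call forms store their args in extra_data starting at lhs, so arg index 1
// lives at lhs + 1.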
const param = switch (node_tags[node]) {
.builtin_call_two, .builtin_call_two_comma => node_datas[node].rhs,
.builtin_call, .builtin_call_comma => tree.extra_data[node_datas[node].lhs + 1],
else => unreachable,
};
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[param];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_array_access_index => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const main_tokens = tree.nodes.items(.main_token);
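// For an array_access node, rhs is the index expression.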
const tok_index = main_tokens[node_datas[node].rhs];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_slice_sentinel => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const full = switch (node_tags[node]) {
.slice_open => tree.sliceOpen(node),
.slice => tree.slice(node),
.slice_sentinel => tree.sliceSentinel(node),
else => unreachable,
};
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[full.ast.sentinel];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_call_func => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
var params: [1]ast.Node.Index = undefined;
const full = switch (node_tags[node]) {
.call_one,
.call_one_comma,
.async_call_one,
.async_call_one_comma,
=> tree.callOne(&params, node),
.call,
.call_comma,
.async_call,
.async_call_comma,
=> tree.callFull(node),
else => unreachable,
};
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[full.ast.fn_expr];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_field_name => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
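// For a field_access node, rhs is the token index of the field name identifier.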
const tok_index = node_datas[node].rhs;
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_deref_ptr => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const tok_index = node_datas[node].lhs;
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_asm_source => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const full = switch (node_tags[node]) {
.asm_simple => tree.asmSimple(node),
.@"asm" => tree.asmFull(node),
else => unreachable,
};
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[full.ast.template];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_asm_ret_ty => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const full = switch (node_tags[node]) {
.asm_simple => tree.asmSimple(node),
.@"asm" => tree.asmFull(node),
else => unreachable,
};
const main_tokens = tree.nodes.items(.main_token);
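// The return type of an asm expression is written as part of its first output
// operand, so point the source location there.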
const tok_index = main_tokens[full.outputs[0]];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_for_cond, .node_offset_if_cond => |node_off| {
const decl = src_loc.container.decl;
@@ -1672,11 +1785,116 @@ pub const SrcLoc = struct {
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_switch_operand => @panic("TODO"),
.node_offset_switch_special_prong => @panic("TODO"),
.node_offset_switch_range => @panic("TODO"),
.node_offset_fn_type_cc => @panic("TODO"),
.node_offset_fn_type_ret_ty => @panic("TODO"),
.node_offset_switch_operand => |node_off| {
const decl = src_loc.container.decl;
const node = decl.relativeToNodeIndex(node_off);
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const src_node = node_datas[node].lhs;
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[src_node];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_switch_special_prong => |node_off| {
const decl = src_loc.container.decl;
const switch_node = decl.relativeToNodeIndex(node_off);
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
const extra = tree.extraData(node_datas[switch_node].rhs, ast.Node.SubRange);
const case_nodes = tree.extra_data[extra.start..extra.end];
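// The special prong is the `else` prong (no items) or a lone `_` item.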
for (case_nodes) |case_node| {
const case = switch (node_tags[case_node]) {
.switch_case_one => tree.switchCaseOne(case_node),
.switch_case => tree.switchCase(case_node),
else => unreachable,
};
const is_special = (case.ast.values.len == 0) or
(case.ast.values.len == 1 and
node_tags[case.ast.values[0]] == .identifier and
mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"));
if (!is_special) continue;
const tok_index = main_tokens[case_node];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
} else unreachable;
},
.node_offset_switch_range => |node_off| {
const decl = src_loc.container.decl;
const switch_node = decl.relativeToNodeIndex(node_off);
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
const extra = tree.extraData(node_datas[switch_node].rhs, ast.Node.SubRange);
const case_nodes = tree.extra_data[extra.start..extra.end];
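// Skip the special prong (`else`/`_`) and look for a switch_range item among
// the remaining prongs.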
for (case_nodes) |case_node| {
const case = switch (node_tags[case_node]) {
.switch_case_one => tree.switchCaseOne(case_node),
.switch_case => tree.switchCase(case_node),
else => unreachable,
};
const is_special = (case.ast.values.len == 0) or
(case.ast.values.len == 1 and
node_tags[case.ast.values[0]] == .identifier and
mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"));
if (is_special) continue;
for (case.ast.values) |item_node| {
if (node_tags[item_node] == .switch_range) {
const tok_index = main_tokens[item_node];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
}
}
} else unreachable;
},
.node_offset_fn_type_cc => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
var params: [1]ast.Node.Index = undefined;
const full = switch (node_tags[node]) {
.fn_proto_simple => tree.fnProtoSimple(&params, node),
.fn_proto_multi => tree.fnProtoMulti(node),
.fn_proto_one => tree.fnProtoOne(&params, node),
.fn_proto => tree.fnProto(node),
else => unreachable,
};
const main_tokens = tree.nodes.items(.main_token);
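// callconv_expr is the expression inside the prototype's `callconv(...)` clause.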
const tok_index = main_tokens[full.ast.callconv_expr];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_fn_type_ret_ty => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.container.file_scope.base.tree();
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
var params: [1]ast.Node.Index = undefined;
const full = switch (node_tags[node]) {
.fn_proto_simple => tree.fnProtoSimple(&params, node),
.fn_proto_multi => tree.fnProtoMulti(node),
.fn_proto_one => tree.fnProtoOne(&params, node),
.fn_proto => tree.fnProto(node),
else => unreachable,
};
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[full.ast.return_type];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
}
}
};
@@ -1739,9 +1957,6 @@ pub const LazySrcLoc = union(enum) {
node_offset_builtin_call_arg0: i32,
/// Same as `node_offset_builtin_call_arg0` except arg index 1.
node_offset_builtin_call_arg1: i32,
/// Same as `node_offset_builtin_call_arg0` except the arg index is contextually
/// determined.
node_offset_builtin_call_argn: i32,
/// The source location points to the index expression of an array access
/// expression, found by taking this AST node index offset from the containing
/// Decl AST node, which points to an array access AST node. Next, navigate
@@ -1852,7 +2067,6 @@ pub const LazySrcLoc = union(enum) {
.node_offset_for_cond,
.node_offset_builtin_call_arg0,
.node_offset_builtin_call_arg1,
.node_offset_builtin_call_argn,
.node_offset_array_access_index,
.node_offset_slice_sentinel,
.node_offset_call_func,
@@ -1895,7 +2109,6 @@ pub const LazySrcLoc = union(enum) {
.node_offset_for_cond,
.node_offset_builtin_call_arg0,
.node_offset_builtin_call_arg1,
.node_offset_builtin_call_argn,
.node_offset_array_access_index,
.node_offset_slice_sentinel,
.node_offset_call_func,
@@ -2393,7 +2606,7 @@ fn astgenAndSemaFn(
.src_decl = decl,
.instructions = .{},
.inlining = null,
.is_comptime = false,
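// Function type resolution (return type and callconv expressions) happens at
// comptime; Sema.rootAsType asserts this.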
.is_comptime = true,
};
defer block_scope.instructions.deinit(mod.gpa);

View File

@@ -76,6 +76,7 @@ pub fn rootAsRef(sema: *Sema, root_block: *Scope.Block) !zir.Inst.Ref {
/// Assumes that `root_block` ends with `break_inline`.
pub fn rootAsType(sema: *Sema, root_block: *Scope.Block) !Type {
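// Callers must run type resolution in a comptime block; astgenAndSemaFn now
// creates its block scope with is_comptime = true.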
assert(root_block.is_comptime);
const zir_inst_ref = try sema.rootAsRef(root_block);
// Source location is unneeded because resolveConstValue must have already
// been successfully called when coercing the value to a type, from the

View File

@@ -39,6 +39,21 @@ pub fn addCases(ctx: *TestContext) !void {
\\}
\\fn unused() void {}
, "yo!" ++ std.cstr.line_sep);
// Comptime return type and calling convention expected.
case.addError(
\\var x: i32 = 1234;
\\export fn main() x {
\\ return 0;
\\}
\\export fn foo() callconv(y) c_int {
\\ return 0;
\\}
\\var y: i32 = 1234;
, &.{
":2:18: error: unable to resolve comptime value",
":5:26: error: unable to resolve comptime value",
});
}
{
@@ -375,6 +390,38 @@ pub fn addCases(ctx: *TestContext) !void {
":6:14: error: duplicate switch value",
":4:9: note: previous value here",
});
// Ranges not allowed for some kinds of switches.
case.addError(
\\export fn main() c_int {
\\ const A: type = i32;
\\ const b: c_int = switch (A) {
\\ i32 => 1,
\\ bool => 2,
\\ f16...f64 => 3,
\\ else => 4,
\\ };
\\}
, &.{
":3:30: error: ranges not allowed when switching on type 'type'",
":6:12: note: range here",
});
// Switch expression has unreachable else prong.
case.addError(
\\export fn main() c_int {
\\ var a: u2 = 0;
\\ const b: i32 = switch (a) {
\\ 0 => 10,
\\ 1 => 20,
\\ 2 => 30,
\\ 3 => 40,
\\ else => 50,
\\ };
\\}
, &.{
":8:14: error: unreachable else prong; all cases already handled",
});
}
//{
// var case = ctx.exeFromCompiledC("optionals", .{});