mirror of https://github.com/ziglang/zig.git (synced 2025-12-06 06:13:07 +00:00)
stage2: finish source location reworkings in the branch

* remove the LazySrcLoc.todo tag
* finish updating Sema and AstGen, remove the last of the `@panic("TODO")`.
This commit is contained in:
parent e8143f6cbe
commit b27d052676
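
Before the diff, here is a minimal, self-contained sketch of the lazy source location pattern the commit message refers to. This is not code from the commit: the LazySrcLoc, SrcLoc, and Decl shapes below are simplified stand-ins for the branch's real types, kept just small enough to show why a "lazy" tag only becomes a usable location once a Decl is attached.

const std = @import("std");

const Decl = struct { name: []const u8 };

const SrcLoc = struct {
    decl: *const Decl,
    lazy: LazySrcLoc,
};

const LazySrcLoc = union(enum) {
    /// The location is known not to be needed; resolving it is a bug.
    unneeded,
    /// Absolute byte offset into the file; needs no extra context.
    byte_abs: u32,
    /// AST node offset relative to the containing Decl's node; only
    /// resolvable once a Decl is known.
    node_offset: i32,

    /// Upgrade to a resolved location by pairing the tag with its Decl.
    fn toSrcLoc(lazy: LazySrcLoc, decl: *const Decl) SrcLoc {
        return switch (lazy) {
            .unneeded => unreachable,
            .byte_abs, .node_offset => .{ .decl = decl, .lazy = lazy },
        };
    }
};

test "upgrade a lazy location once the Decl is known" {
    const decl = Decl{ .name = "example" };
    const loc = LazySrcLoc.toSrcLoc(.{ .byte_abs = 7 }, &decl);
    std.debug.assert(loc.lazy.byte_abs == 7);
}

The commit's work, visible in the hunks below, is replacing the temporary `.todo` escape hatch with real tags of this kind (for example `node_offset_fn_type_cc` and `node_offset_fn_type_ret_ty`).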
@@ -1,9 +1,6 @@
this is my WIP branch scratch pad, to be deleted before merging into master

Merge TODO list:
* remove the LazySrcLoc.todo tag
* update astgen.zig
* finish updating Sema.zig
* finish implementing SrcLoc byteOffset function
* audit all the .unneeded src locations
* audit the calls in codegen toSrcLocWithDecl specifically if there is inlined function

@@ -1252,6 +1252,7 @@ pub const Tree = struct {
buffer[0] = data.lhs;
const params = if (data.lhs == 0) buffer[0..0] else buffer[0..1];
return tree.fullFnProto(.{
.proto_node = node,
.fn_token = tree.nodes.items(.main_token)[node],
.return_type = data.rhs,
.params = params,
@@ -1267,6 +1268,7 @@ pub const Tree = struct {
const params_range = tree.extraData(data.lhs, Node.SubRange);
const params = tree.extra_data[params_range.start..params_range.end];
return tree.fullFnProto(.{
.proto_node = node,
.fn_token = tree.nodes.items(.main_token)[node],
.return_type = data.rhs,
.params = params,
@@ -1283,6 +1285,7 @@ pub const Tree = struct {
buffer[0] = extra.param;
const params = if (extra.param == 0) buffer[0..0] else buffer[0..1];
return tree.fullFnProto(.{
.proto_node = node,
.fn_token = tree.nodes.items(.main_token)[node],
.return_type = data.rhs,
.params = params,
@@ -1298,6 +1301,7 @@ pub const Tree = struct {
const extra = tree.extraData(data.lhs, Node.FnProto);
const params = tree.extra_data[extra.params_start..extra.params_end];
return tree.fullFnProto(.{
.proto_node = node,
.fn_token = tree.nodes.items(.main_token)[node],
.return_type = data.rhs,
.params = params,
@@ -2120,6 +2124,7 @@ pub const full = struct {
ast: Ast,

pub const Ast = struct {
proto_node: Node.Index,
fn_token: TokenIndex,
return_type: Node.Index,
params: []const Node.Index,

@@ -133,10 +133,6 @@ pub const ResultLoc = union(enum) {
/// The result instruction from the expression must be ignored.
/// Always an instruction with tag `alloc_inferred`.
inferred_ptr: zir.Inst.Ref,
/// The expression must store its result into this pointer, which is a typed pointer that
/// has been bitcasted to whatever the expression's type is.
/// The result instruction from the expression must be ignored.
bitcasted_ptr: zir.Inst.Ref,
/// There is a pointer for the expression to store its result into, however, its type
/// is inferred based on peer type resolution for a `zir.Inst.Block`.
/// The result instruction from the expression must be ignored.
@@ -172,7 +168,7 @@ pub const ResultLoc = union(enum) {
.tag = .break_void,
.elide_store_to_block_ptr_instructions = false,
},
.inferred_ptr, .bitcasted_ptr, .block_ptr => {
.inferred_ptr, .block_ptr => {
if (block_scope.rvalue_rl_count == block_scope.break_count) {
// Neither prong of the if consumed the result location, so we can
// use break instructions to create an rvalue.
@@ -388,7 +384,7 @@ fn lvalExpr(gz: *GenZir, scope: *Scope, node: ast.Node.Index) InnerError!zir.Ins
}

/// Turn Zig AST into untyped ZIR istructions.
/// When `rl` is discard, ptr, inferred_ptr, bitcasted_ptr, or inferred_ptr, the
/// When `rl` is discard, ptr, inferred_ptr, or inferred_ptr, the
/// result instruction can be used to inspect whether it is isNoReturn() but that is it,
/// it must otherwise not be used.
pub fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!zir.Inst.Ref {
@@ -1155,7 +1151,6 @@ fn blockExprStmts(
.asm_volatile,
.bit_and,
.bitcast,
.bitcast_ref,
.bitcast_result_ptr,
.bit_or,
.block,
@@ -1804,7 +1799,7 @@ fn orelseCatchExpr(
// TODO handle catch
const operand_rl: ResultLoc = switch (block_scope.break_result_loc) {
.ref => .ref,
.discard, .none, .block_ptr, .inferred_ptr, .bitcasted_ptr => .none,
.discard, .none, .block_ptr, .inferred_ptr => .none,
.ty => |elem_ty| blk: {
const wrapped_ty = try block_scope.addUnNode(.optional_type, elem_ty, node);
break :blk .{ .ty = wrapped_ty };
@@ -3519,7 +3514,6 @@ fn as(
gz: *GenZir,
scope: *Scope,
rl: ResultLoc,
builtin_token: ast.TokenIndex,
node: ast.Node.Index,
lhs: ast.Node.Index,
rhs: ast.Node.Index,
@@ -3538,13 +3532,9 @@ fn as(
return asRlPtr(gz, scope, rl, block_scope.rl_ptr, rhs, dest_type);
},

.bitcasted_ptr => |bitcasted_ptr| {
// TODO here we should be able to resolve the inference; we now have a type for the result.
return gz.astgen.mod.failTok(scope, builtin_token, "TODO implement @as with result location @bitCast", .{});
},
.inferred_ptr => |result_alloc| {
// TODO here we should be able to resolve the inference; we now have a type for the result.
return gz.astgen.mod.failTok(scope, builtin_token, "TODO implement @as with inferred-type result location pointer", .{});
return gz.astgen.mod.failNode(scope, node, "TODO implement @as with inferred-type result location pointer", .{});
},
}
}
@@ -3599,47 +3589,32 @@ fn bitCast(
gz: *GenZir,
scope: *Scope,
rl: ResultLoc,
builtin_token: ast.TokenIndex,
node: ast.Node.Index,
lhs: ast.Node.Index,
rhs: ast.Node.Index,
) InnerError!zir.Inst.Ref {
if (true) @panic("TODO update for zir-memory-layout");
const mod = gz.astgen.mod;
const dest_type = try typeExpr(gz, scope, lhs);
switch (rl) {
.none => {
.none, .discard, .ty => {
const operand = try expr(gz, scope, .none, rhs);
return addZIRBinOp(mod, scope, src, .bitcast, dest_type, operand);
},
.discard => {
const operand = try expr(gz, scope, .none, rhs);
const result = try addZIRBinOp(mod, scope, src, .bitcast, dest_type, operand);
_ = try addZIRUnOp(mod, scope, result.src, .ensure_result_non_error, result);
return result;
},
.ref => {
const operand = try expr(gz, scope, .ref, rhs);
const result = try addZIRBinOp(mod, scope, src, .bitcast_ref, dest_type, operand);
return result;
},
.ty => |result_ty| {
const result = try expr(gz, scope, .none, rhs);
const bitcasted = try addZIRBinOp(mod, scope, src, .bitcast, dest_type, result);
return addZIRBinOp(mod, scope, src, .as, result_ty, bitcasted);
const result = try gz.addPlNode(.bitcast, node, zir.Inst.Bin{
.lhs = dest_type,
.rhs = operand,
});
return rvalue(gz, scope, rl, result, node);
},
.ref => unreachable, // `@bitCast` is not allowed as an r-value.
.ptr => |result_ptr| {
const casted_result_ptr = try addZIRUnOp(mod, scope, src, .bitcast_result_ptr, result_ptr);
return expr(gz, scope, .{ .bitcasted_ptr = casted_result_ptr.castTag(.bitcast_result_ptr).? }, rhs);
},
.bitcasted_ptr => |bitcasted_ptr| {
return mod.failTok(scope, builtin_token, "TODO implement @bitCast with result location another @bitCast", .{});
const casted_result_ptr = try gz.addUnNode(.bitcast_result_ptr, result_ptr, node);
return expr(gz, scope, .{ .ptr = casted_result_ptr }, rhs);
},
.block_ptr => |block_ptr| {
return mod.failTok(scope, builtin_token, "TODO implement @bitCast with result location inferred peer types", .{});
return mod.failNode(scope, node, "TODO implement @bitCast with result location inferred peer types", .{});
},
.inferred_ptr => |result_alloc| {
// TODO here we should be able to resolve the inference; we now have a type for the result.
return mod.failTok(scope, builtin_token, "TODO implement @bitCast with inferred-type result location pointer", .{});
return mod.failNode(scope, node, "TODO implement @bitCast with inferred-type result location pointer", .{});
},
}
}
@@ -3648,12 +3623,11 @@ fn typeOf(
gz: *GenZir,
scope: *Scope,
rl: ResultLoc,
builtin_token: ast.TokenIndex,
node: ast.Node.Index,
params: []const ast.Node.Index,
) InnerError!zir.Inst.Ref {
if (params.len < 1) {
return gz.astgen.mod.failTok(scope, builtin_token, "expected at least 1 argument, found 0", .{});
return gz.astgen.mod.failNode(scope, node, "expected at least 1 argument, found 0", .{});
}
if (params.len == 1) {
const result = try gz.addUnNode(.typeof, try expr(gz, scope, .none, params[0]), node);
@@ -3693,14 +3667,14 @@ fn builtinCall(
// Also, some builtins have a variable number of parameters.

const info = BuiltinFn.list.get(builtin_name) orelse {
return mod.failTok(scope, builtin_token, "invalid builtin function: '{s}'", .{
return mod.failNode(scope, node, "invalid builtin function: '{s}'", .{
builtin_name,
});
};
if (info.param_count) |expected| {
if (expected != params.len) {
const s = if (expected == 1) "" else "s";
return mod.failTok(scope, builtin_token, "expected {d} parameter{s}, found {d}", .{
return mod.failNode(scope, node, "expected {d} parameter{s}, found {d}", .{
expected, s, params.len,
});
}
@@ -3788,9 +3762,9 @@ fn builtinCall(
});
return rvalue(gz, scope, rl, result, node);
},
.as => return as(gz, scope, rl, builtin_token, node, params[0], params[1]),
.bit_cast => return bitCast(gz, scope, rl, builtin_token, node, params[0], params[1]),
.TypeOf => return typeOf(gz, scope, rl, builtin_token, node, params),
.as => return as(gz, scope, rl, node, params[0], params[1]),
.bit_cast => return bitCast(gz, scope, rl, node, params[0], params[1]),
.TypeOf => return typeOf(gz, scope, rl, node, params),

.add_with_overflow,
.align_cast,
@@ -3875,7 +3849,7 @@ fn builtinCall(
.type_info,
.type_name,
.union_init,
=> return mod.failTok(scope, builtin_token, "TODO: implement builtin function {s}", .{
=> return mod.failNode(scope, node, "TODO: implement builtin function {s}", .{
builtin_name,
}),

@@ -3884,7 +3858,7 @@ fn builtinCall(
.Frame,
.frame_address,
.frame_size,
=> return mod.failTok(scope, builtin_token, "async and related features are not yet supported", .{}),
=> return mod.failNode(scope, node, "async and related features are not yet supported", .{}),
}
}

@@ -4286,9 +4260,6 @@ fn rvalue(
});
return result;
},
.bitcasted_ptr => |bitcasted_ptr| {
return gz.astgen.mod.failNode(scope, src_node, "TODO implement rvalue .bitcasted_ptr", .{});
},
.inferred_ptr => |alloc| {
_ = try gz.addBin(.store_to_inferred_ptr, alloc, result);
return result;

@@ -1016,11 +1016,6 @@ pub const Scope = struct {
gz.break_result_loc = .{ .block_ptr = gz };
},

.bitcasted_ptr => |ptr| {
gz.rl_ptr = ptr;
gz.break_result_loc = .{ .block_ptr = gz };
},

.block_ptr => |parent_block_scope| {
gz.rl_ty_inst = parent_block_scope.rl_ty_inst;
gz.rl_ptr = parent_block_scope.rl_ptr;
@@ -1052,10 +1047,12 @@ pub const Scope = struct {
}

pub fn addFnTypeCc(gz: *GenZir, tag: zir.Inst.Tag, args: struct {
src_node: ast.Node.Index,
param_types: []const zir.Inst.Ref,
ret_ty: zir.Inst.Ref,
cc: zir.Inst.Ref,
}) !zir.Inst.Ref {
assert(args.src_node != 0);
assert(args.ret_ty != .none);
assert(args.cc != .none);
const gpa = gz.astgen.mod.gpa;
@@ -1065,6 +1062,7 @@ pub const Scope = struct {
@typeInfo(zir.Inst.FnTypeCc).Struct.fields.len + args.param_types.len);

const payload_index = gz.astgen.addExtraAssumeCapacity(zir.Inst.FnTypeCc{
.return_type = args.ret_ty,
.cc = args.cc,
.param_types_len = @intCast(u32, args.param_types.len),
});
@@ -1073,8 +1071,8 @@ pub const Scope = struct {
const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
gz.astgen.instructions.appendAssumeCapacity(.{
.tag = tag,
.data = .{ .fn_type = .{
.return_type = args.ret_ty,
.data = .{ .pl_node = .{
.src_node = gz.astgen.decl.nodeIndexToRelative(args.src_node),
.payload_index = payload_index,
} },
});
@@ -1082,29 +1080,30 @@ pub const Scope = struct {
return gz.astgen.indexToRef(new_index);
}

pub fn addFnType(
gz: *GenZir,
tag: zir.Inst.Tag,
pub fn addFnType(gz: *GenZir, tag: zir.Inst.Tag, args: struct {
src_node: ast.Node.Index,
ret_ty: zir.Inst.Ref,
param_types: []const zir.Inst.Ref,
) !zir.Inst.Ref {
assert(ret_ty != .none);
}) !zir.Inst.Ref {
assert(args.src_node != 0);
assert(args.ret_ty != .none);
const gpa = gz.astgen.mod.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(zir.Inst.FnType).Struct.fields.len + param_types.len);
@typeInfo(zir.Inst.FnType).Struct.fields.len + args.param_types.len);

const payload_index = gz.astgen.addExtraAssumeCapacity(zir.Inst.FnType{
.param_types_len = @intCast(u32, param_types.len),
.return_type = args.ret_ty,
.param_types_len = @intCast(u32, args.param_types.len),
});
gz.astgen.appendRefsAssumeCapacity(param_types);
gz.astgen.appendRefsAssumeCapacity(args.param_types);

const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
gz.astgen.instructions.appendAssumeCapacity(.{
.tag = tag,
.data = .{ .fn_type = .{
.return_type = ret_ty,
.data = .{ .pl_node = .{
.src_node = gz.astgen.decl.nodeIndexToRelative(args.src_node),
.payload_index = payload_index,
} },
});
@@ -1513,7 +1512,6 @@ pub const SrcLoc = struct {
pub fn fileScope(src_loc: SrcLoc) *Scope.File {
return switch (src_loc.lazy) {
.unneeded => unreachable,
.todo => unreachable,

.byte_abs,
.token_abs,
@@ -1542,6 +1540,8 @@ pub const SrcLoc = struct {
.node_offset_switch_operand,
.node_offset_switch_special_prong,
.node_offset_switch_range,
.node_offset_fn_type_cc,
.node_offset_fn_type_ret_ty,
=> src_loc.container.decl.container.file_scope,
};
}
@@ -1549,7 +1549,6 @@ pub const SrcLoc = struct {
pub fn byteOffset(src_loc: SrcLoc) !u32 {
switch (src_loc.lazy) {
.unneeded => unreachable,
.todo => unreachable,

.byte_abs => |byte_index| return byte_index,

@@ -1676,6 +1675,8 @@ pub const SrcLoc = struct {
.node_offset_switch_operand => @panic("TODO"),
.node_offset_switch_special_prong => @panic("TODO"),
.node_offset_switch_range => @panic("TODO"),
.node_offset_fn_type_cc => @panic("TODO"),
.node_offset_fn_type_ret_ty => @panic("TODO"),
}
}
};
@@ -1695,11 +1696,6 @@ pub const LazySrcLoc = union(enum) {
/// look into using reverse-continue with a memory watchpoint to see where the
/// value is being set to this tag.
unneeded,
/// Same as `unneeded`, except the code setting up this tag knew that actually
/// the source location was needed, and I wanted to get other stuff compiling
/// and working before coming back to messing with source locations.
/// TODO delete this tag before merging the zir-memory-layout branch.
todo,
/// The source location points to a byte offset within a source file,
/// offset from 0. The source file is determined contextually.
/// Inside a `SrcLoc`, the `file_scope` union field will be active.
@@ -1824,12 +1820,23 @@ pub const LazySrcLoc = union(enum) {
/// range nodes. The error applies to all of them.
/// The Decl is determined contextually.
node_offset_switch_range: i32,
/// The source location points to the calling convention of a function type
/// expression, found by taking this AST node index offset from the containing
/// Decl AST node, which points to a function type AST node. Next, nagivate to
/// the calling convention node.
/// The Decl is determined contextually.
node_offset_fn_type_cc: i32,
/// The source location points to the return type of a function type
/// expression, found by taking this AST node index offset from the containing
/// Decl AST node, which points to a function type AST node. Next, nagivate to
/// the return type node.
/// The Decl is determined contextually.
node_offset_fn_type_ret_ty: i32,

/// Upgrade to a `SrcLoc` based on the `Decl` or file in the provided scope.
pub fn toSrcLoc(lazy: LazySrcLoc, scope: *Scope) SrcLoc {
return switch (lazy) {
.unneeded,
.todo,
.byte_abs,
.token_abs,
.node_abs,
@@ -1860,6 +1867,8 @@ pub const LazySrcLoc = union(enum) {
.node_offset_switch_operand,
.node_offset_switch_special_prong,
.node_offset_switch_range,
.node_offset_fn_type_cc,
.node_offset_fn_type_ret_ty,
=> .{
.container = .{ .decl = scope.srcDecl().? },
.lazy = lazy,
@@ -1871,7 +1880,6 @@ pub const LazySrcLoc = union(enum) {
pub fn toSrcLocWithDecl(lazy: LazySrcLoc, decl: *Decl) SrcLoc {
return switch (lazy) {
.unneeded,
.todo,
.byte_abs,
.token_abs,
.node_abs,
@@ -1902,6 +1910,8 @@ pub const LazySrcLoc = union(enum) {
.node_offset_switch_operand,
.node_offset_switch_special_prong,
.node_offset_switch_range,
.node_offset_fn_type_cc,
.node_offset_fn_type_ret_ty,
=> .{
.container = .{ .decl = decl },
.lazy = lazy,
@@ -2340,13 +2350,18 @@ fn astgenAndSemaFn(
const fn_type_inst: zir.Inst.Ref = if (cc != .none) fn_type: {
const tag: zir.Inst.Tag = if (is_var_args) .fn_type_cc_var_args else .fn_type_cc;
break :fn_type try fn_type_scope.addFnTypeCc(tag, .{
.src_node = fn_proto.ast.proto_node,
.ret_ty = return_type_inst,
.param_types = param_types,
.cc = cc,
});
} else fn_type: {
const tag: zir.Inst.Tag = if (is_var_args) .fn_type_var_args else .fn_type;
break :fn_type try fn_type_scope.addFnType(tag, return_type_inst, param_types);
break :fn_type try fn_type_scope.addFnType(tag, .{
.src_node = fn_proto.ast.proto_node,
.ret_ty = return_type_inst,
.param_types = param_types,
});
};
_ = try fn_type_scope.addBreak(.break_inline, 0, fn_type_inst);
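
Between the AstGen/Module hunks above and the Sema hunks below, the connecting idea is that a `pl_node`-style instruction carries a decl-relative AST node offset, from which Sema can build LazySrcLoc values on demand (as in `inst_data.src()` and `.{ .node_offset_fn_type_cc = inst_data.src_node }`). The following is a rough, hypothetical miniature of that handshake, with simplified names rather than the branch's real definitions:

const std = @import("std");

const LazySrcLoc = union(enum) {
    node_offset: i32,
    node_offset_fn_type_cc: i32,
};

/// Miniature of the `pl_node` data: an AST node stored relative to the
/// containing Decl's node, plus an index into the flat `extra` array.
const PlNode = struct {
    src_node: i32,
    payload_index: u32,

    /// The generic "point at the whole node" location, as in `inst_data.src()`.
    fn src(data: PlNode) LazySrcLoc {
        return .{ .node_offset = data.src_node };
    }
};

test "derive lazy locations from a pl_node" {
    const inst_data = PlNode{ .src_node = -3, .payload_index = 0 };
    const whole_node = inst_data.src();
    const cc_src: LazySrcLoc = .{ .node_offset_fn_type_cc = inst_data.src_node };
    std.debug.assert(whole_node.node_offset == -3);
    std.debug.assert(cc_src.node_offset_fn_type_cc == -3);
}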
96 src/Sema.zig
@@ -148,7 +148,6 @@ pub fn analyzeBody(
.bit_not => try sema.zirBitNot(block, inst),
.bit_or => try sema.zirBitwise(block, inst, .bit_or),
.bitcast => try sema.zirBitcast(block, inst),
.bitcast_ref => try sema.zirBitcastRef(block, inst),
.bitcast_result_ptr => try sema.zirBitcastResultPtr(block, inst),
.block => try sema.zirBlock(block, inst),
.bool_not => try sema.zirBoolNot(block, inst),
@@ -498,12 +497,6 @@ fn zirConst(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*
return sema.mod.constInst(sema.arena, .unneeded, typed_value_copy);
}

fn zirBitcastRef(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return sema.mod.fail(&block.base, sema.src, "TODO implement zir_sema.zirBitcastRef", .{});
}

fn zirBitcastResultPtr(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -942,7 +935,7 @@ fn zirLoop(sema: *Sema, parent_block: *Scope.Block, inst: zir.Inst.Index) InnerE
try child_block.instructions.append(sema.gpa, &loop_inst.base);
loop_inst.body = .{ .instructions = try sema.arena.dupe(*Inst, loop_block.instructions.items) };

return sema.analyzeBlockBody(parent_block, &child_block, merges);
return sema.analyzeBlockBody(parent_block, src, &child_block, merges);
}

fn zirBlock(sema: *Sema, parent_block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
@@ -992,12 +985,13 @@ fn zirBlock(sema: *Sema, parent_block: *Scope.Block, inst: zir.Inst.Index) Inner

_ = try sema.analyzeBody(&child_block, body);

return sema.analyzeBlockBody(parent_block, &child_block, merges);
return sema.analyzeBlockBody(parent_block, src, &child_block, merges);
}

fn analyzeBlockBody(
sema: *Sema,
parent_block: *Scope.Block,
src: LazySrcLoc,
child_block: *Scope.Block,
merges: *Scope.Block.Merges,
) InnerError!*Inst {
@@ -1034,7 +1028,7 @@ fn analyzeBlockBody(
// Need to set the type and emit the Block instruction. This allows machine code generation
// to emit a jump instruction to after the block when it encounters the break.
try parent_block.instructions.append(sema.gpa, &merges.block_inst.base);
const resolved_ty = try sema.resolvePeerTypes(parent_block, .todo, merges.results.items);
const resolved_ty = try sema.resolvePeerTypes(parent_block, src, merges.results.items);
merges.block_inst.base.ty = resolved_ty;
merges.block_inst.body = .{
.instructions = try sema.arena.dupe(*Inst, child_block.instructions.items),
@@ -1048,7 +1042,7 @@ fn analyzeBlockBody(
}
var coerce_block = parent_block.makeSubBlock();
defer coerce_block.instructions.deinit(sema.gpa);
const coerced_operand = try sema.coerce(&coerce_block, resolved_ty, br.operand, .todo);
const coerced_operand = try sema.coerce(&coerce_block, resolved_ty, br.operand, br.operand.src);
// If no instructions were produced, such as in the case of a coercion of a
// constant value to a new type, we can simply point the br operand to it.
if (coerce_block.instructions.items.len == 0) {
@@ -1334,7 +1328,7 @@ fn analyzeCall(
// the block_inst above.
_ = try inline_sema.root(&child_block);

const result = try inline_sema.analyzeBlockBody(block, &child_block, merges);
const result = try inline_sema.analyzeBlockBody(block, call_src, &child_block, merges);

sema.branch_quota = inline_sema.branch_quota;
sema.branch_count = inline_sema.branch_count;
@@ -1845,15 +1839,16 @@ fn zirFnType(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index, var_args: b
const tracy = trace(@src());
defer tracy.end();

const inst_data = sema.code.instructions.items(.data)[inst].fn_type;
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const extra = sema.code.extraData(zir.Inst.FnType, inst_data.payload_index);
const param_types = sema.code.refSlice(extra.end, extra.data.param_types_len);

return sema.fnTypeCommon(
block,
.unneeded,
inst_data.src_node,
param_types,
inst_data.return_type,
extra.data.return_type,
.Unspecified,
var_args,
);
@@ -1863,21 +1858,23 @@ fn zirFnTypeCc(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index, var_args:
const tracy = trace(@src());
defer tracy.end();

const inst_data = sema.code.instructions.items(.data)[inst].fn_type;
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const cc_src: LazySrcLoc = .{ .node_offset_fn_type_cc = inst_data.src_node };
const extra = sema.code.extraData(zir.Inst.FnTypeCc, inst_data.payload_index);
const param_types = sema.code.refSlice(extra.end, extra.data.param_types_len);

const cc_tv = try sema.resolveInstConst(block, .todo, extra.data.cc);
const cc_tv = try sema.resolveInstConst(block, cc_src, extra.data.cc);
// TODO once we're capable of importing and analyzing decls from
// std.builtin, this needs to change
const cc_str = cc_tv.val.castTag(.enum_literal).?.data;
const cc = std.meta.stringToEnum(std.builtin.CallingConvention, cc_str) orelse
return sema.mod.fail(&block.base, .todo, "Unknown calling convention {s}", .{cc_str});
return sema.mod.fail(&block.base, cc_src, "Unknown calling convention {s}", .{cc_str});
return sema.fnTypeCommon(
block,
.unneeded,
inst_data.src_node,
param_types,
inst_data.return_type,
extra.data.return_type,
cc,
var_args,
);
@@ -1886,13 +1883,15 @@ fn zirFnTypeCc(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index, var_args:
fn fnTypeCommon(
sema: *Sema,
block: *Scope.Block,
src: LazySrcLoc,
src_node_offset: i32,
zir_param_types: []const zir.Inst.Ref,
zir_return_type: zir.Inst.Ref,
cc: std.builtin.CallingConvention,
var_args: bool,
) InnerError!*Inst {
const return_type = try sema.resolveType(block, src, zir_return_type);
const src: LazySrcLoc = .{ .node_offset = src_node_offset };
const ret_ty_src: LazySrcLoc = .{ .node_offset_fn_type_ret_ty = src_node_offset };
const return_type = try sema.resolveType(block, ret_ty_src, zir_return_type);

// Hot path for some common function types.
if (zir_param_types.len == 0 and !var_args) {
@@ -1915,12 +1914,11 @@ fn fnTypeCommon(

const param_types = try sema.arena.alloc(Type, zir_param_types.len);
for (zir_param_types) |param_type, i| {
const resolved = try sema.resolveType(block, src, param_type);
// TODO skip for comptime params
if (!resolved.isValidVarType(false)) {
return sema.mod.fail(&block.base, .todo, "parameter of type '{}' must be declared comptime", .{resolved});
}
param_types[i] = resolved;
// TODO make a compile error from `resolveType` report the source location
// of the specific parameter. Will need to take a similar strategy as
// `resolveSwitchItemVal` to avoid resolving the source location unless
// we actually need to report an error.
param_types[i] = try sema.resolveType(block, src, param_type);
}

const fn_ty = try Type.Tag.function.create(sema.arena, .{
@@ -2082,9 +2080,14 @@ fn zirBitcast(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError
const tracy = trace(@src());
defer tracy.end();

const bin_inst = sema.code.instructions.items(.data)[inst].bin;
const dest_type = try sema.resolveType(block, .todo, bin_inst.lhs);
const operand = try sema.resolveInst(bin_inst.rhs);
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const dest_ty_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
const operand_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = inst_data.src_node };
const extra = sema.code.extraData(zir.Inst.Bin, inst_data.payload_index).data;

const dest_type = try sema.resolveType(block, dest_ty_src, extra.lhs);
const operand = try sema.resolveInst(extra.rhs);
return sema.bitcast(block, dest_type, operand);
}

@@ -2234,7 +2237,12 @@ fn zirSwitchCapture(
const tracy = trace(@src());
defer tracy.end();

@panic("TODO implement Sema for zirSwitchCapture");
const zir_datas = sema.code.instructions.items(.data);
const capture_info = zir_datas[inst].switch_capture;
const switch_info = zir_datas[capture_info.switch_inst].pl_node;
const src = switch_info.src();

return sema.mod.fail(&block.base, src, "TODO implement Sema for zirSwitchCapture", .{});
}

fn zirSwitchCaptureElse(
@@ -2246,7 +2254,12 @@ fn zirSwitchCaptureElse(
const tracy = trace(@src());
defer tracy.end();

@panic("TODO implement Sema for zirSwitchCaptureElse");
const zir_datas = sema.code.instructions.items(.data);
const capture_info = zir_datas[inst].switch_capture;
const switch_info = zir_datas[capture_info.switch_inst].pl_node;
const src = switch_info.src();

return sema.mod.fail(&block.base, src, "TODO implement Sema for zirSwitchCaptureElse", .{});
}

fn zirSwitchBlock(
@@ -2631,8 +2644,9 @@ fn analyzeSwitch(
const body = sema.code.extra[extra_index..][0..body_len];
extra_index += body_len;

const item = try sema.resolveInst(item_ref);
const item_val = try sema.resolveConstValue(block, item.src, item);
// Validation above ensured these will succeed.
const item = sema.resolveInst(item_ref) catch unreachable;
const item_val = sema.resolveConstValue(block, .unneeded, item) catch unreachable;
if (operand_val.eql(item_val)) {
return sema.resolveBody(block, body);
}
@@ -2652,8 +2666,9 @@ fn analyzeSwitch(
const body = sema.code.extra[extra_index + 2 * ranges_len ..][0..body_len];

for (items) |item_ref| {
const item = try sema.resolveInst(item_ref);
const item_val = try sema.resolveConstValue(block, item.src, item);
// Validation above ensured these will succeed.
const item = sema.resolveInst(item_ref) catch unreachable;
const item_val = sema.resolveConstValue(block, item.src, item) catch unreachable;
if (operand_val.eql(item_val)) {
return sema.resolveBody(block, body);
}
@@ -2666,8 +2681,9 @@ fn analyzeSwitch(
const item_last = @intToEnum(zir.Inst.Ref, sema.code.extra[extra_index]);
extra_index += 1;

const first_tv = try sema.resolveInstConst(block, .todo, item_first);
const last_tv = try sema.resolveInstConst(block, .todo, item_last);
// Validation above ensured these will succeed.
const first_tv = sema.resolveInstConst(block, .unneeded, item_first) catch unreachable;
const last_tv = sema.resolveInstConst(block, .unneeded, item_last) catch unreachable;
if (Value.compare(operand_val, .gte, first_tv.val) and
Value.compare(operand_val, .lte, last_tv.val))
{
@@ -2876,7 +2892,7 @@ fn analyzeSwitch(
};

_ = try child_block.addSwitchBr(src, operand, cases, final_else_body);
return sema.analyzeBlockBody(block, &child_block, merges);
return sema.analyzeBlockBody(block, src, &child_block, merges);
}

fn resolveSwitchItemVal(

@@ -1487,7 +1487,7 @@ fn buildOutputType(
for (diags.arch.?.allCpuModels()) |cpu| {
help_text.writer().print(" {s}\n", .{cpu.name}) catch break :help;
}
std.log.info("Available CPUs for architecture '{s}': {s}", .{
std.log.info("Available CPUs for architecture '{s}':\n{s}", .{
@tagName(diags.arch.?), help_text.items,
});
}
@@ -1499,7 +1499,7 @@ fn buildOutputType(
for (diags.arch.?.allFeaturesList()) |feature| {
help_text.writer().print(" {s}: {s}\n", .{ feature.name, feature.description }) catch break :help;
}
std.log.info("Available CPU features for architecture '{s}': {s}", .{
std.log.info("Available CPU features for architecture '{s}':\n{s}", .{
@tagName(diags.arch.?), help_text.items,
});
}

63 src/zir.zig
@@ -168,15 +168,12 @@ pub const Inst = struct {
asm_volatile,
/// Bitwise AND. `&`
bit_and,
/// TODO delete this instruction, it has no purpose.
/// Bitcast a value to a different type.
/// Uses the pl_node field with payload `Bin`.
bitcast,
/// An arbitrary typed pointer is pointer-casted to a new Pointer.
/// The destination type is given by LHS. The cast is to be evaluated
/// as if it were a bit-cast operation from the operand pointer element type to the
/// provided destination type.
bitcast_ref,
/// A typed result location pointer is bitcasted to a new result location pointer.
/// The new result location pointer has an inferred type.
/// Uses the un_node field.
bitcast_result_ptr,
/// Bitwise NOT. `~`
/// Uses `un_node`.
@@ -338,12 +335,12 @@ pub const Inst = struct {
/// Payload is `Bin` with lhs as the dest type, rhs the operand.
floatcast,
/// Returns a function type, assuming unspecified calling convention.
/// Uses the `fn_type` union field. `payload_index` points to a `FnType`.
/// Uses the `pl_node` union field. `payload_index` points to a `FnType`.
fn_type,
/// Same as `fn_type` but the function is variadic.
fn_type_var_args,
/// Returns a function type, with a calling convention instruction operand.
/// Uses the `fn_type` union field. `payload_index` points to a `FnTypeCc`.
/// Uses the `pl_node` union field. `payload_index` points to a `FnTypeCc`.
fn_type_cc,
/// Same as `fn_type_cc` but the function is variadic.
fn_type_cc_var_args,
@@ -662,7 +659,6 @@ pub const Inst = struct {
.asm_volatile,
.bit_and,
.bitcast,
.bitcast_ref,
.bitcast_result_ptr,
.bit_or,
.block,
@@ -1212,12 +1208,6 @@ pub const Inst = struct {
/// Index into extra. See `PtrType`.
payload_index: u32,
},
fn_type: struct {
return_type: Ref,
/// For `fn_type` this points to a `FnType` in `extra`.
/// For `fn_type_cc` this points to `FnTypeCc` in `extra`.
payload_index: u32,
},
int_type: struct {
/// Offset from Decl AST node index.
/// `Tag` determines which kind of AST node this points to.
@@ -1289,6 +1279,7 @@ pub const Inst = struct {
/// according to `param_types_len`.
/// Each param type is a `Ref`.
pub const FnTypeCc = struct {
return_type: Ref,
cc: Ref,
param_types_len: u32,
};
@@ -1297,6 +1288,7 @@ pub const Inst = struct {
/// according to `param_types_len`.
/// Each param type is a `Ref`.
pub const FnType = struct {
return_type: Ref,
param_types_len: u32,
};

@@ -1640,7 +1632,6 @@ const Writer = struct {
=> try self.writeSwitchCapture(stream, inst),

.bitcast,
.bitcast_ref,
.bitcast_result_ptr,
.store_to_inferred_ptr,
=> try stream.writeAll("TODO)"),
@@ -2044,11 +2035,26 @@ const Writer = struct {
stream: anytype,
inst: Inst.Index,
var_args: bool,
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const inst_data = self.code.instructions.items(.data)[inst].fn_type;
) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const extra = self.code.extraData(Inst.FnType, inst_data.payload_index);
const param_types = self.code.refSlice(extra.end, extra.data.param_types_len);
return self.writeFnTypeCommon(stream, param_types, inst_data.return_type, var_args, .none);
return self.writeFnTypeCommon(stream, param_types, extra.data.return_type, var_args, .none, src);
}

fn writeFnTypeCc(
self: *Writer,
stream: anytype,
inst: Inst.Index,
var_args: bool,
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const extra = self.code.extraData(Inst.FnTypeCc, inst_data.payload_index);
const param_types = self.code.refSlice(extra.end, extra.data.param_types_len);
const cc = extra.data.cc;
return self.writeFnTypeCommon(stream, param_types, extra.data.return_type, var_args, cc, src);
}

fn writeBoolBr(self: *Writer, stream: anytype, inst: Inst.Index) !void {
@@ -2064,19 +2070,6 @@ const Writer = struct {
try stream.writeAll("})");
}

fn writeFnTypeCc(
self: *Writer,
stream: anytype,
inst: Inst.Index,
var_args: bool,
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const inst_data = self.code.instructions.items(.data)[inst].fn_type;
const extra = self.code.extraData(Inst.FnTypeCc, inst_data.payload_index);
const param_types = self.code.refSlice(extra.end, extra.data.param_types_len);
const cc = extra.data.cc;
return self.writeFnTypeCommon(stream, param_types, inst_data.return_type, var_args, cc);
}

fn writeIntType(self: *Writer, stream: anytype, inst: Inst.Index) !void {
const int_type = self.code.instructions.items(.data)[inst].int_type;
const prefix: u8 = switch (int_type.signedness) {
@@ -2110,7 +2103,8 @@ const Writer = struct {
ret_ty: Inst.Ref,
var_args: bool,
cc: Inst.Ref,
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
src: LazySrcLoc,
) !void {
try stream.writeAll("[");
for (param_types) |param_type, i| {
if (i != 0) try stream.writeAll(", ");
@@ -2120,7 +2114,8 @@ const Writer = struct {
try self.writeInstRef(stream, ret_ty);
try self.writeOptionalInstRef(stream, ", cc=", cc);
try self.writeFlag(stream, ", var_args", var_args);
try stream.writeAll(")");
try stream.writeAll(") ");
try self.writeSrc(stream, src);
}

fn writeSmallStr(
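
The src/zir.zig hunks above drop the dedicated `fn_type` data field in favor of the generic `pl_node` + `extra` encoding, where the instruction stores only a 32-bit payload index and the variable-length payload lives in a flat array of u32s. The following is a rough, self-contained sketch of how such a flat `extra` array can be written and read back. The helper names are hypothetical, and it is written in the branch-era Zig style (two-argument `@intCast`), not guaranteed to match the compiler's real helpers.

const std = @import("std");

/// Stand-in for zir.Inst.FnType: a fixed-size header in `extra`, followed by
/// `param_types_len` trailing u32 entries.
const FnType = struct {
    return_type: u32,
    param_types_len: u32,
};

/// Append a FnType payload plus its trailing param types; return the index
/// the instruction's `payload_index` would store.
fn addFnType(extra: *std.ArrayList(u32), return_type: u32, param_types: []const u32) !u32 {
    const payload_index = @intCast(u32, extra.items.len);
    try extra.append(return_type);
    try extra.append(@intCast(u32, param_types.len));
    try extra.appendSlice(param_types);
    return payload_index;
}

/// Decode the fixed-size header back out of `extra`.
fn getFnType(extra: []const u32, payload_index: u32) FnType {
    return .{
        .return_type = extra[payload_index],
        .param_types_len = extra[payload_index + 1],
    };
}

test "round-trip a fn_type payload through a flat extra array" {
    var extra = std.ArrayList(u32).init(std.testing.allocator);
    defer extra.deinit();

    const idx = try addFnType(&extra, 42, &[_]u32{ 1, 2, 3 });
    const decoded = getFnType(extra.items, idx);
    const params = extra.items[idx + 2 ..][0..decoded.param_types_len];

    std.debug.assert(decoded.return_type == 42);
    std.debug.assert(params.len == 3 and params[2] == 3);
}

This mirrors the shape of the `extraData`/`refSlice` calls in the Sema and Writer hunks: decode the header at `payload_index`, then slice the trailing entries using the recorded length.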