stage2: result location types for function call arguments

* AstGen: restore the param_type ZIR instruction and pass it as the result
  location type for function call arguments. This does not solve the
  problem for generic function parameters, but it catches stage2 up to
  stage1, which also does not solve the problem for generic function
  parameters.
  - Most of the enhancements in this commit will still be needed for a
    future, more sophisticated improvement that handles generic function
    types.
  - In Sema, handling of `as` coercion recognizes the `var_args_param`
    Type Tag and passes the operand through without performing any coercion.
  - That was the last available ZIR tag; all 256 ZIR tags are now in use.
* AstGen: array init and struct init expressions use the anon form even
  when the result location has a type. This prevents the type system from
  incorrectly believing, for example, that a tuple is actually an array
  when the result location is the param_type of a function with an
  `anytype` parameter (see the sketch after this list).
* Sema: add a missing coercion in `unionInit` so that the initializer is
  coerced to the corresponding union field type.
* `Value.fieldValue` now takes a type and no longer takes an allocator.
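
A minimal user-level sketch (not part of this commit's diff) of the behavior
these changes target. The declarations `Color`, `takesStruct`, and
`takesAnytype` below are invented purely for illustration:

    const std = @import("std");
    const expect = std.testing.expect;

    const Color = struct { r: u8, g: u8, b: u8 };

    fn takesStruct(c: Color) u16 {
        return @as(u16, c.r) + c.g + c.b;
    }

    fn takesAnytype(x: anytype) usize {
        // With the anon init form, a `.{ ... }` argument arrives here as a
        // tuple / anonymous struct rather than being reinterpreted as an array.
        return @typeInfo(@TypeOf(x)).Struct.fields.len;
    }

    test "call arguments get result location types from the callee" {
        // The integer literals are typed as u8 at the init site because the
        // argument's result location type comes from the `Color` parameter
        // (via the param_type ZIR instruction).
        try expect(takesStruct(.{ .r = 1, .g = 2, .b = 3 }) == 6);

        // An `anytype` parameter has no concrete type (param_type yields
        // `var_args_param`), so the anonymous tuple is passed through as-is.
        try expect(takesAnytype(.{ 1, 2, 3 }) == 3);
    }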

closes #11293

After this commit, stage2 passes all the parser tests.
Andrew Kelley 2022-03-25 18:27:10 -07:00
parent bcd7eb012a
commit bae35bdf2d
7 changed files with 127 additions and 19 deletions


@@ -1318,13 +1318,13 @@ fn arrayInitExpr(
return arrayInitExprRlNone(gz, scope, node, array_init.ast.elements, .array_init_anon);
}
},
.ty, .coerced_ty => |ty_inst| {
.ty, .coerced_ty => {
if (types.array != .none) {
const result = try arrayInitExprRlTy(gz, scope, node, array_init.ast.elements, types.elem, types.sentinel, false);
return rvalue(gz, rl, result, node);
} else {
const elem_type = try gz.addUnNode(.elem_type, ty_inst, node);
return arrayInitExprRlTy(gz, scope, node, array_init.ast.elements, elem_type, types.sentinel, false);
const result = try arrayInitExprRlNone(gz, scope, node, array_init.ast.elements, .array_init_anon);
return rvalue(gz, rl, result, node);
}
},
.ptr => |ptr_inst| {
@@ -1559,7 +1559,7 @@ fn structInitExpr(
_ = try gz.addUnNode(.validate_struct_init_ty, ty_inst, node);
return structInitExprRlTy(gz, scope, node, struct_init, ty_inst, .struct_init_ref);
} else {
return structInitExprRlNone(gz, scope, node, struct_init, .struct_init_anon_ref);
return structInitExprRlNone(gz, scope, node, struct_init, .none, .struct_init_anon_ref);
}
},
.none => {
@@ -1568,12 +1568,13 @@ fn structInitExpr(
_ = try gz.addUnNode(.validate_struct_init_ty, ty_inst, node);
return structInitExprRlTy(gz, scope, node, struct_init, ty_inst, .struct_init);
} else {
return structInitExprRlNone(gz, scope, node, struct_init, .struct_init_anon);
return structInitExprRlNone(gz, scope, node, struct_init, .none, .struct_init_anon);
}
},
.ty, .coerced_ty => |ty_inst| {
if (struct_init.ast.type_expr == 0) {
return structInitExprRlTy(gz, scope, node, struct_init, ty_inst, .struct_init);
const result = try structInitExprRlNone(gz, scope, node, struct_init, ty_inst, .struct_init_anon);
return rvalue(gz, rl, result, node);
}
const inner_ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr);
_ = try gz.addUnNode(.validate_struct_init_ty, inner_ty_inst, node);
@@ -1586,7 +1587,7 @@ fn structInitExpr(
// We treat this case differently so that we don't get a crash when
// analyzing field_base_ptr against an alloc_inferred_mut.
// See corresponding logic in arrayInitExpr.
const result = try structInitExprRlNone(gz, scope, node, struct_init, .struct_init_anon);
const result = try structInitExprRlNone(gz, scope, node, struct_init, .none, .struct_init_anon);
return rvalue(gz, rl, result, node);
} else {
return structInitExprRlPtr(gz, scope, rl, node, struct_init, ptr_inst);
@@ -1596,7 +1597,7 @@ fn structInitExpr(
// This condition is here for the same reason as the above condition in `inferred_ptr`.
// See corresponding logic in arrayInitExpr.
if (struct_init.ast.type_expr == 0 and astgen.isInferred(block_gz.rl_ptr)) {
const result = try structInitExprRlNone(gz, scope, node, struct_init, .struct_init_anon);
const result = try structInitExprRlNone(gz, scope, node, struct_init, .none, .struct_init_anon);
return rvalue(gz, rl, result, node);
}
@@ -1610,6 +1611,7 @@ fn structInitExprRlNone(
scope: *Scope,
node: Ast.Node.Index,
struct_init: Ast.full.StructInit,
ty_inst: Zir.Inst.Ref,
tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
@@ -1624,9 +1626,16 @@ fn structInitExprRlNone(
for (struct_init.ast.fields) |field_init| {
const name_token = tree.firstToken(field_init) - 2;
const str_index = try astgen.identAsString(name_token);
const sub_rl: ResultLoc = if (ty_inst != .none)
ResultLoc{ .ty = try gz.addPlNode(.field_type, field_init, Zir.Inst.FieldType{
.container_type = ty_inst,
.name_start = str_index,
}) }
else
.none;
setExtra(astgen, extra_index, Zir.Inst.StructInitAnon.Item{
.field_name = str_index,
.init = try expr(gz, scope, .none, field_init),
.init = try expr(gz, scope, sub_rl, field_init),
});
extra_index += field_size;
}
@@ -2350,6 +2359,7 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: Ast.Node.Index) Inner
.closure_get,
.array_base_ptr,
.field_base_ptr,
.param_type,
=> break :b false,
// ZIR instructions that are always `noreturn`.
@@ -7846,10 +7856,15 @@ fn callExpr(
});
var extra_index = try reserveExtra(astgen, call.ast.params.len);
for (call.ast.params) |param_node| {
// Parameters are always temporary values, they have no
// meaningful result location. Sema will coerce them.
const arg_ref = try expr(gz, scope, .none, param_node);
for (call.ast.params) |param_node, i| {
const param_type = try gz.add(.{
.tag = .param_type,
.data = .{ .param_type = .{
.callee = callee,
.param_index = @intCast(u32, i),
} },
});
const arg_ref = try expr(gz, scope, .{ .coerced_ty = param_type }, param_node);
astgen.extra.items[extra_index] = @enumToInt(arg_ref);
extra_index += 1;
}


@@ -738,6 +738,7 @@ fn analyzeBodyInner(
.optional_payload_unsafe => try sema.zirOptionalPayload(block, inst, false),
.optional_payload_unsafe_ptr => try sema.zirOptionalPayloadPtr(block, inst, false),
.optional_type => try sema.zirOptionalType(block, inst),
.param_type => try sema.zirParamType(block, inst),
.ptr_type => try sema.zirPtrType(block, inst),
.ptr_type_simple => try sema.zirPtrTypeSimple(block, inst),
.ref => try sema.zirRef(block, inst),
@@ -3638,6 +3639,39 @@ fn zirStoreNode(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!v
return sema.storePtr(block, src, ptr, operand);
}
fn zirParamType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const callee_src = sema.src;
const inst_data = sema.code.instructions.items(.data)[inst].param_type;
const callee = sema.resolveInst(inst_data.callee);
const callee_ty = sema.typeOf(callee);
var param_index = inst_data.param_index;
const fn_ty = if (callee_ty.tag() == .bound_fn) fn_ty: {
const bound_fn_val = try sema.resolveConstValue(block, callee_src, callee);
const bound_fn = bound_fn_val.castTag(.bound_fn).?.data;
const fn_ty = sema.typeOf(bound_fn.func_inst);
param_index += 1;
break :fn_ty fn_ty;
} else callee_ty;
const fn_info = if (fn_ty.zigTypeTag() == .Pointer)
fn_ty.childType().fnInfo()
else
fn_ty.fnInfo();
if (param_index >= fn_info.param_types.len) {
assert(fn_info.is_var_args);
return sema.addType(Type.initTag(.var_args_param));
}
if (fn_info.param_types[param_index].tag() == .generic_poison) {
return sema.addType(Type.initTag(.var_args_param));
}
return sema.addType(fn_info.param_types[param_index]);
}
fn zirStr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const tracy = trace(@src());
defer tracy.end();
@@ -6613,6 +6647,7 @@ fn analyzeAs(
) CompileError!Air.Inst.Ref {
const dest_ty = try sema.resolveType(block, src, zir_dest_type);
const operand = sema.resolveInst(zir_operand);
if (dest_ty.tag() == .var_args_param) return operand;
return sema.coerce(block, dest_ty, operand, src);
}
@@ -12140,7 +12175,7 @@ fn zirUnionInit(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
fn unionInit(
sema: *Sema,
block: *Block,
init: Air.Inst.Ref,
uncasted_init: Air.Inst.Ref,
init_src: LazySrcLoc,
union_ty: Type,
union_ty_src: LazySrcLoc,
@@ -12148,6 +12183,8 @@ fn unionInit(
field_src: LazySrcLoc,
) CompileError!Air.Inst.Ref {
const field_index = try sema.unionFieldIndex(block, union_ty, field_name, field_src);
const field = union_ty.unionFields().values()[field_index];
const init = try sema.coerce(block, field.ty, uncasted_init, init_src);
if (try sema.resolveMaybeUndefVal(block, init_src, init)) |init_val| {
const tag_val = try Value.Tag.enum_field_index.create(sema.arena, field_index);
@@ -12620,6 +12657,7 @@ fn zirFieldType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
const ty_src = inst_data.src();
const field_src = inst_data.src();
const aggregate_ty = try sema.resolveType(block, ty_src, extra.container_type);
if (aggregate_ty.tag() == .var_args_param) return sema.addType(aggregate_ty);
const field_name = sema.code.nullTerminatedString(extra.name_start);
return sema.fieldType(block, aggregate_ty, field_name, field_src, ty_src);
}
@@ -18964,7 +19002,7 @@ fn beginComptimePtrLoad(
if (coerce_in_mem_ok) {
deref.pointee = TypedValue{
.ty = field_ty,
.val = try tv.val.fieldValue(sema.arena, field_index),
.val = tv.val.fieldValue(tv.ty, field_index),
};
break :blk deref;
}


@@ -464,6 +464,14 @@ pub const Inst = struct {
/// Merge two error sets into one, `E1 || E2`.
/// Uses the `pl_node` field with payload `Bin`.
merge_error_sets,
/// Given a reference to a function and a parameter index, returns the
/// type of the parameter. The only usage of this instruction is for the
/// result location of parameters of function calls. In the case of a function's
/// parameter type being `anytype`, it is the type coercion's job to detect this
/// scenario and skip the coercion, so that semantic analysis of this instruction
/// is not in a position where it must create an invalid type.
/// Uses the `param_type` union field.
param_type,
/// Turns an R-Value into a const L-Value. In other words, it takes a value,
/// stores it in a memory location, and returns a const pointer to it. If the value
/// is `comptime`, the memory location is global static constant data. Otherwise,
@@ -1077,6 +1085,7 @@ pub const Inst = struct {
.mul,
.mulwrap,
.mul_sat,
.param_type,
.ref,
.shl,
.shl_sat,
@@ -1266,6 +1275,7 @@ pub const Inst = struct {
.mulwrap = .pl_node,
.mul_sat = .pl_node,
.param_type = .param_type,
.param = .pl_tok,
.param_comptime = .pl_tok,
.param_anytype = .str_tok,
@@ -2213,6 +2223,10 @@ pub const Inst = struct {
/// Points to a `Block`.
payload_index: u32,
},
param_type: struct {
callee: Ref,
param_index: u32,
},
@"unreachable": struct {
/// Offset from Decl AST node index.
/// `Tag` determines which kind of AST node this points to.
@@ -2288,6 +2302,7 @@ pub const Inst = struct {
ptr_type,
int_type,
bool_br,
param_type,
@"unreachable",
@"break",
switch_capture,


@@ -252,6 +252,7 @@ const Writer = struct {
=> try self.writeBoolBr(stream, inst),
.array_type_sentinel => try self.writeArrayTypeSentinel(stream, inst),
.param_type => try self.writeParamType(stream, inst),
.ptr_type_simple => try self.writePtrTypeSimple(stream, inst),
.ptr_type => try self.writePtrType(stream, inst),
.int => try self.writeInt(stream, inst),
@@ -558,6 +559,16 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}
fn writeParamType(
self: *Writer,
stream: anytype,
inst: Zir.Inst.Index,
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const inst_data = self.code.instructions.items(.data)[inst].param_type;
try self.writeInstRef(stream, inst_data.callee);
try stream.print(", {d})", .{inst_data.param_index});
}
fn writePtrTypeSimple(
self: *Writer,
stream: anytype,


@@ -3724,6 +3724,8 @@ pub const Type = extern union {
.single_const_pointer_to_comptime_int => Type.initTag(.comptime_int),
.pointer => ty.castTag(.pointer).?.data.pointee_type,
.var_args_param => ty,
else => unreachable,
};
}


@@ -2659,8 +2659,7 @@ pub const Value = extern union {
};
}
pub fn fieldValue(val: Value, allocator: Allocator, index: usize) error{OutOfMemory}!Value {
_ = allocator;
pub fn fieldValue(val: Value, ty: Type, index: usize) Value {
switch (val.tag()) {
.aggregate => {
const field_values = val.castTag(.aggregate).?.data;
@@ -2671,8 +2670,16 @@ pub const Value = extern union {
// TODO assert the tag is correct
return payload.val;
},
// Structs which have only one possible value need to consist of members which have only one possible value.
.the_only_possible_value => return val,
.the_only_possible_value => return ty.onePossibleValue().?,
.empty_struct_value => {
if (ty.isTupleOrAnonStruct()) {
const tuple = ty.tupleFields();
return tuple.values[index];
}
unreachable;
},
else => unreachable,
}


@@ -98,3 +98,23 @@ test "comptime call with bound function as parameter" {
var inst: S = undefined;
try expectEqual(?i32, S.ReturnType(inst.call_me_maybe));
}
test "result location of function call argument through runtime condition and struct init" {
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
const E = enum { a, b };
const S = struct {
e: E,
};
const namespace = struct {
fn foo(s: S) !void {
try expect(s.e == .b);
}
};
var runtime = true;
try namespace.foo(.{
.e = if (!runtime) .a else .b,
});
}