AstGen: add tuple-aware elem_type_index

Veikka Tuominen 2022-05-28 22:04:52 +03:00
parent c7b778992e
commit 0e8307789a
5 changed files with 141 additions and 29 deletions
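
In short, the new `elem_type_index` ZIR instruction lets AstGen give each element of a typed array or tuple initializer its own result type, so anonymous sub-initializers can coerce element by element. Below is a minimal sketch of the kind of code this change targets; the test wrapper and its name are illustrative, while `E`, `S`, and the conditional initializer come from the behavior test added at the end of this diff.

const E = enum { a, b };
const S = struct { e: E };

test "typed array init with anonymous element initializers" {
    var a = false;
    // Each branch is an anonymous struct initializer; the per-element
    // result type added by this change lets both branches coerce to `S`.
    _ = &[_]S{
        if (a) .{ .e = .a } else .{ .e = .b },
    };
}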

src/AstGen.zig

@@ -1273,8 +1273,14 @@ fn arrayInitExpr(
assert(array_init.ast.elements.len != 0); // Otherwise it would be struct init.
const array_ty: Zir.Inst.Ref = inst: {
if (array_init.ast.type_expr == 0) break :inst .none;
const types: struct {
array: Zir.Inst.Ref,
elem: Zir.Inst.Ref,
} = inst: {
if (array_init.ast.type_expr == 0) break :inst .{
.array = .none,
.elem = .none,
};
infer: {
const array_type: Ast.full.ArrayType = switch (node_tags[array_init.ast.type_expr]) {
@@ -1289,10 +1295,14 @@ fn arrayInitExpr(
const len_inst = try gz.addInt(array_init.ast.elements.len);
const elem_type = try typeExpr(gz, scope, array_type.ast.elem_type);
if (array_type.ast.sentinel == 0) {
break :inst try gz.addBin(.array_type, len_inst, elem_type);
const array_type_inst = try gz.addBin(.array_type, len_inst, elem_type);
break :inst .{
.array = array_type_inst,
.elem = elem_type,
};
} else {
const sentinel = try comptimeExpr(gz, scope, .{ .ty = elem_type }, array_type.ast.sentinel);
break :inst try gz.addPlNode(
const array_type_inst = try gz.addPlNode(
.array_type_sentinel,
array_init.ast.type_expr,
Zir.Inst.ArrayTypeSentinel{
@@ -1301,12 +1311,19 @@ fn arrayInitExpr(
.sentinel = sentinel,
},
);
break :inst .{
.array = array_type_inst,
.elem = elem_type,
};
}
}
}
const array_type_inst = try typeExpr(gz, scope, array_init.ast.type_expr);
_ = try gz.addUnNode(.validate_array_init_ty, array_type_inst, node);
break :inst array_type_inst;
break :inst .{
.array = array_type_inst,
.elem = .none,
};
};
switch (rl) {
@@ -1318,40 +1335,40 @@ fn arrayInitExpr(
return Zir.Inst.Ref.void_value;
},
.ref => {
const tag: Zir.Inst.Tag = if (array_ty != .none) .array_init_ref else .array_init_anon_ref;
return arrayInitExprInner(gz, scope, node, array_init.ast.elements, array_ty, tag);
const tag: Zir.Inst.Tag = if (types.array != .none) .array_init_ref else .array_init_anon_ref;
return arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, tag);
},
.none => {
const tag: Zir.Inst.Tag = if (array_ty != .none) .array_init else .array_init_anon;
return arrayInitExprInner(gz, scope, node, array_init.ast.elements, array_ty, tag);
const tag: Zir.Inst.Tag = if (types.array != .none) .array_init else .array_init_anon;
return arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, tag);
},
.ty, .coerced_ty => {
const tag: Zir.Inst.Tag = if (array_ty != .none) .array_init else .array_init_anon;
const result = try arrayInitExprInner(gz, scope, node, array_init.ast.elements, array_ty, tag);
const tag: Zir.Inst.Tag = if (types.array != .none) .array_init else .array_init_anon;
const result = try arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, tag);
return rvalue(gz, rl, result, node);
},
.ptr => |ptr_inst| {
return arrayInitExprRlPtr(gz, scope, rl, node, ptr_inst, array_init.ast.elements, array_ty);
return arrayInitExprRlPtr(gz, scope, rl, node, ptr_inst, array_init.ast.elements, types.array);
},
.inferred_ptr => |ptr_inst| {
if (array_ty == .none) {
if (types.array == .none) {
// We treat this case differently so that we don't get a crash when
// analyzing array_base_ptr against an alloc_inferred_mut.
// See corresponding logic in structInitExpr.
const result = try arrayInitExprRlNone(gz, scope, node, array_init.ast.elements, .array_init_anon);
return rvalue(gz, rl, result, node);
} else {
return arrayInitExprRlPtr(gz, scope, rl, node, ptr_inst, array_init.ast.elements, array_ty);
return arrayInitExprRlPtr(gz, scope, rl, node, ptr_inst, array_init.ast.elements, types.array);
}
},
.block_ptr => |block_gz| {
// This condition is here for the same reason as the above condition in `inferred_ptr`.
// See corresponding logic in structInitExpr.
if (array_ty == .none and astgen.isInferred(block_gz.rl_ptr)) {
if (types.array == .none and astgen.isInferred(block_gz.rl_ptr)) {
const result = try arrayInitExprRlNone(gz, scope, node, array_init.ast.elements, .array_init_anon);
return rvalue(gz, rl, result, node);
}
return arrayInitExprRlPtr(gz, scope, rl, node, block_gz.rl_ptr, array_init.ast.elements, array_ty);
return arrayInitExprRlPtr(gz, scope, rl, node, block_gz.rl_ptr, array_init.ast.elements, types.array);
},
}
}
@@ -1384,6 +1401,7 @@ fn arrayInitExprInner(
node: Ast.Node.Index,
elements: []const Ast.Node.Index,
array_ty_inst: Zir.Inst.Ref,
elem_ty: Zir.Inst.Ref,
tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
@@ -1398,8 +1416,21 @@ fn arrayInitExprInner(
extra_index += 1;
}
for (elements) |elem_init| {
const elem_ref = try expr(gz, scope, .none, elem_init);
for (elements) |elem_init, i| {
const rl = if (elem_ty != .none)
ResultLoc{ .coerced_ty = elem_ty }
else if (array_ty_inst != .none and nodeMayNeedMemoryLocation(astgen.tree, elem_init, true)) rl: {
const ty_expr = try gz.add(.{
.tag = .elem_type_index,
.data = .{ .bin = .{
.lhs = array_ty_inst,
.rhs = @intToEnum(Zir.Inst.Ref, i),
} },
});
break :rl ResultLoc{ .coerced_ty = ty_expr };
} else ResultLoc{ .none = {} };
const elem_ref = try expr(gz, scope, rl, elem_init);
astgen.extra.items[extra_index] = @enumToInt(elem_ref);
extra_index += 1;
}
@@ -2192,6 +2223,7 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: Ast.Node.Index) Inner
.array_mul,
.array_type,
.array_type_sentinel,
.elem_type_index,
.vector_type,
.indexable_ptr_len,
.anyframe_type,

src/Sema.zig

@@ -726,6 +726,7 @@ fn analyzeBodyInner(
.elem_ptr_imm => try sema.zirElemPtrImm(block, inst),
.elem_val => try sema.zirElemVal(block, inst),
.elem_val_node => try sema.zirElemValNode(block, inst),
.elem_type_index => try sema.zirElemTypeIndex(block, inst),
.enum_literal => try sema.zirEnumLiteral(block, inst),
.enum_to_int => try sema.zirEnumToInt(block, inst),
.int_to_enum => try sema.zirIntToEnum(block, inst),
@@ -3021,7 +3022,10 @@ fn zirArrayBasePtr(
const elem_ty = sema.typeOf(base_ptr).childType();
switch (elem_ty.zigTypeTag()) {
.Array, .Vector => return base_ptr,
.Struct => if (elem_ty.isTuple()) return base_ptr,
.Struct => if (elem_ty.isTuple()) {
// TODO validate element count
return base_ptr;
},
else => {},
}
return sema.failWithArrayInitNotSupported(block, src, sema.typeOf(start_ptr).childType());
@@ -3062,7 +3066,10 @@ fn validateArrayInitTy(
switch (ty.zigTypeTag()) {
.Array, .Vector => return,
.Struct => if (ty.isTuple()) return,
.Struct => if (ty.isTuple()) {
// TODO validate element count
return;
},
else => {},
}
return sema.failWithArrayInitNotSupported(block, src, ty);
@@ -5805,12 +5812,17 @@ fn zirOptionalType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro
return sema.addType(opt_type);
}
fn zirElemType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const src = inst_data.src();
const array_type = try sema.resolveType(block, src, inst_data.operand);
const elem_type = array_type.elemType();
return sema.addType(elem_type);
fn zirElemTypeIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const bin = sema.code.instructions.items(.data)[inst].bin;
const indexable_ty = try sema.resolveType(block, .unneeded, bin.lhs);
assert(indexable_ty.isIndexable()); // validated by a previous instruction
if (indexable_ty.zigTypeTag() == .Struct) {
const elem_type = indexable_ty.tupleFields().types[@enumToInt(bin.rhs)];
return sema.addType(elem_type);
} else {
const elem_type = indexable_ty.elemType2();
return sema.addType(elem_type);
}
}
fn zirVectorType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
@@ -13277,6 +13289,8 @@ fn zirStructInit(
try sema.requireRuntimeBlock(block, src);
try sema.queueFullTypeResolution(resolved_ty);
return block.addUnionInit(resolved_ty, field_index, init_inst);
} else if (resolved_ty.isAnonStruct()) {
return sema.fail(block, src, "TODO anon struct init validation", .{});
}
unreachable;
}
@@ -13447,15 +13461,18 @@ fn zirArrayInit(
const resolved_args = try gpa.alloc(Air.Inst.Ref, args.len - 1 + @boolToInt(sentinel_val != null));
defer gpa.free(resolved_args);
const elem_ty = array_ty.elemType2();
for (args[1..]) |arg, i| {
const resolved_arg = try sema.resolveInst(arg);
const arg_src = src; // TODO better source location
const elem_ty = if (array_ty.zigTypeTag() == .Struct)
array_ty.tupleFields().types[i]
else
array_ty.elemType2();
resolved_args[i] = try sema.coerce(block, elem_ty, resolved_arg, arg_src);
}
if (sentinel_val) |some| {
resolved_args[resolved_args.len - 1] = try sema.addConstant(elem_ty, some);
resolved_args[resolved_args.len - 1] = try sema.addConstant(array_ty.elemType2(), some);
}
const opt_runtime_src: ?LazySrcLoc = for (resolved_args) |arg| {
@@ -13487,10 +13504,27 @@ fn zirArrayInit(
});
const alloc = try block.addTy(.alloc, alloc_ty);
if (array_ty.isTuple()) {
const types = array_ty.tupleFields().types;
for (resolved_args) |arg, i| {
const elem_ptr_ty = try Type.ptr(sema.arena, sema.mod, .{
.mutable = true,
.@"addrspace" = target_util.defaultAddressSpace(target, .local),
.pointee_type = types[i],
});
const elem_ptr_ty_ref = try sema.addType(elem_ptr_ty);
const index = try sema.addIntUnsigned(Type.usize, i);
const elem_ptr = try block.addPtrElemPtrTypeRef(alloc, index, elem_ptr_ty_ref);
_ = try block.addBinOp(.store, elem_ptr, arg);
}
return alloc;
}
const elem_ptr_ty = try Type.ptr(sema.arena, sema.mod, .{
.mutable = true,
.@"addrspace" = target_util.defaultAddressSpace(target, .local),
.pointee_type = elem_ty,
.pointee_type = array_ty.elemType2(),
});
const elem_ptr_ty_ref = try sema.addType(elem_ptr_ty);
@@ -13632,6 +13666,10 @@ fn fieldType(
while (true) {
switch (cur_ty.zigTypeTag()) {
.Struct => {
if (cur_ty.isAnonStruct()) {
const field_index = try sema.anonStructFieldIndex(block, cur_ty, field_name, field_src);
return sema.addType(cur_ty.tupleFields().types[field_index]);
}
const struct_obj = cur_ty.castTag(.@"struct").?.data;
const field = struct_obj.fields.get(field_name) orelse
return sema.failWithBadStructFieldAccess(block, struct_obj, field_src, field_name);

src/Zir.zig

@@ -221,6 +221,9 @@ pub const Inst = struct {
/// Uses the `pl_node` union field with `Bin` payload.
/// lhs is length, rhs is element type.
vector_type,
/// Given an indexable type, returns the type of the element at given index.
/// Uses the `bin` union field. lhs is the indexable type, rhs is the index.
elem_type_index,
/// Given a pointer to an indexable object, returns the len property. This is
/// used by for loops. This instruction also emits a for-loop specific compile
/// error if the indexable object is not indexable.
@@ -1008,6 +1011,7 @@ pub const Inst = struct {
.array_type,
.array_type_sentinel,
.vector_type,
.elem_type_index,
.indexable_ptr_len,
.anyframe_type,
.as,
@@ -1300,6 +1304,7 @@ pub const Inst = struct {
.array_type,
.array_type_sentinel,
.vector_type,
.elem_type_index,
.indexable_ptr_len,
.anyframe_type,
.as,
@@ -1537,6 +1542,7 @@ pub const Inst = struct {
.array_type = .bin,
.array_type_sentinel = .pl_node,
.vector_type = .pl_node,
.elem_type_index = .bin,
.indexable_ptr_len = .un_node,
.anyframe_type = .un_node,
.as = .bin,

src/print_zir.zig

@@ -152,6 +152,8 @@ const Writer = struct {
.store_to_inferred_ptr,
=> try self.writeBin(stream, inst),
.elem_type_index => try self.writeElemTypeIndex(stream, inst),
.alloc,
.alloc_mut,
.alloc_comptime_mut,
@@ -538,6 +540,12 @@ const Writer = struct {
try stream.writeByte(')');
}
fn writeElemTypeIndex(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].bin;
try self.writeInstRef(stream, inst_data.lhs);
try stream.print(", {d})", .{inst_data.rhs});
}
fn writeUnNode(
self: *Writer,
stream: anytype,

test/behavior/array.zig

@@ -949,3 +949,31 @@ test "vector initialized with array init syntax has proper type" {
try std.testing.expectEqual(@Vector(4, i32){ -1, -2, -3, -4 }, actual);
}
}
test "weird array and tuple initializations" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
const E = enum { a, b };
const S = struct { e: E };
var a = false;
const b = S{ .e = .a };
_ = &[_]S{
if (a) .{ .e = .a } else .{ .e = .b },
};
if (true) return error.SkipZigTest;
const S2 = @TypeOf(.{ false, b });
_ = &S2{
true,
if (a) .{ .e = .a } else .{ .e = .b },
};
const S3 = @TypeOf(.{ .a = false, .b = b });
_ = &S3{
.a = true,
.b = if (a) .{ .e = .a } else .{ .e = .b },
};
}
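
As a closing illustration of why the new instruction takes an index rather than returning a single element type: a tuple's elements may each have a different type, so an element type is only well defined per index. A small self-contained sketch, not part of the commit; the test name is illustrative.

const std = @import("std");

test "tuple elements have per-index types" {
    const tuple = .{ false, @as(u32, 1) };
    // A tuple is an anonymous struct with indexed fields; each index can
    // carry a different type, which is why elem_type_index reports the
    // type at a given index instead of one element type for the whole value.
    try std.testing.expect(@TypeOf(tuple[0]) == bool);
    try std.testing.expect(@TypeOf(tuple[1]) == u32);
}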