Merge branch 'Vexu-stage2'

closes #6042
This commit is contained in:
Andrew Kelley 2020-08-13 10:05:20 -07:00
commit 6e0fb06010
8 changed files with 420 additions and 44 deletions

View File

@ -2219,11 +2219,6 @@ pub fn wantSafety(self: *Module, scope: *Scope) bool {
};
}
/// Lowers an `unreachable` into a runtime `unreach` terminator.
/// Requires that `scope` can hold runtime instructions.
pub fn analyzeUnreach(self: *Module, scope: *Scope, src: usize) InnerError!*Inst {
    const runtime_block = try self.requireRuntimeBlock(scope, src);
    return self.addNoOp(runtime_block, src, Type.initTag(.noreturn), .unreach);
}
pub fn analyzeIsNull(
self: *Module,
scope: *Scope,
@ -2476,6 +2471,24 @@ pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst
}
assert(inst.ty.zigTypeTag() != .Undefined);
// null to ?T
if (dest_type.zigTypeTag() == .Optional and inst.ty.zigTypeTag() == .Null) {
return self.constInst(scope, inst.src, .{ .ty = dest_type, .val = Value.initTag(.null_value) });
}
// T to ?T
if (dest_type.zigTypeTag() == .Optional) {
const child_type = dest_type.elemType();
if (inst.value()) |val| {
if (child_type.eql(inst.ty)) {
return self.constInst(scope, inst.src, .{ .ty = dest_type, .val = val });
}
return self.fail(scope, inst.src, "TODO optional wrap {} to {}", .{ val, dest_type });
} else if (child_type.eql(inst.ty)) {
return self.fail(scope, inst.src, "TODO optional wrap {}", .{dest_type});
}
}
// *[N]T to []T
if (inst.ty.isSinglePointer() and dest_type.isSlice() and
(!inst.ty.isConstPtr() or dest_type.isConstPtr()))
@ -2884,3 +2897,70 @@ pub fn dumpInst(self: *Module, scope: *Scope, inst: *Inst) void {
});
}
}
/// Identifies which runtime safety check failed. Consumed by
/// `addSafetyCheck` and `safetyPanic` when emitting the failure path.
pub const PanicId = enum {
    /// An `unreachable` statement was reached at runtime.
    unreach,
    /// An optional that was null was unwrapped with `.?`.
    unwrap_null,
};
/// Wraps the boolean instruction `ok` in a runtime safety check:
/// appends to `parent_block` a void block containing a single `condbr`
/// whose "then" body branches out of the block (check passed) and whose
/// "else" body panics with `panic_id` (check failed).
/// All IR nodes are allocated from `parent_block`'s arena; the
/// temporary `fail_block` instruction list is the only gpa allocation
/// and is freed before returning.
pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic_id: PanicId) !void {
    // Outer void block; the success path branches out of it via brvoid.
    const block_inst = try parent_block.arena.create(Inst.Block);
    block_inst.* = .{
        .base = .{
            .tag = Inst.Block.base_tag,
            .ty = Type.initTag(.void),
            .src = ok.src,
        },
        .body = .{
            .instructions = try parent_block.arena.alloc(*Inst, 1), // Only need space for the condbr.
        },
    };
    // Success path: a single `brvoid` exiting `block_inst`.
    const ok_body: ir.Body = .{
        .instructions = try parent_block.arena.alloc(*Inst, 1), // Only need space for the brvoid.
    };
    const brvoid = try parent_block.arena.create(Inst.BrVoid);
    brvoid.* = .{
        .base = .{
            .tag = .brvoid,
            .ty = Type.initTag(.noreturn),
            .src = ok.src,
        },
        .block = block_inst,
    };
    ok_body.instructions[0] = &brvoid.base;
    // Failure path: analyze the panic into a temporary sub-block, then
    // dupe its instructions into an arena-owned body.
    var fail_block: Scope.Block = .{
        .parent = parent_block,
        .func = parent_block.func,
        .decl = parent_block.decl,
        .instructions = .{},
        .arena = parent_block.arena,
    };
    defer fail_block.instructions.deinit(mod.gpa);
    _ = try mod.safetyPanic(&fail_block, ok.src, panic_id);
    const fail_body: ir.Body = .{ .instructions = try parent_block.arena.dupe(*Inst, fail_block.instructions.items) };
    // Conditional branch on `ok` selecting between the two paths.
    const condbr = try parent_block.arena.create(Inst.CondBr);
    condbr.* = .{
        .base = .{
            .tag = .condbr,
            .ty = Type.initTag(.noreturn),
            .src = ok.src,
        },
        .condition = ok,
        .then_body = ok_body,
        .else_body = fail_body,
    };
    block_inst.body.instructions[0] = &condbr.base;
    try parent_block.instructions.append(mod.gpa, &block_inst.base);
}
/// Emits the failure path of a safety check into `block`: currently a
/// `breakpoint` followed by an `unreach` terminator. `panic_id` is
/// accepted but not yet used — presumably it will select the panic
/// message once a real panic call can be emitted (see TODO); confirm
/// when that lands.
pub fn safetyPanic(mod: *Module, block: *Scope.Block, src: usize, panic_id: PanicId) !*Inst {
    // TODO Once we have a panic function to call, call it here instead of breakpoint.
    _ = try mod.addNoOp(block, src, Type.initTag(.void), .breakpoint);
    return mod.addNoOp(block, src, Type.initTag(.noreturn), .unreach);
}

View File

@ -105,6 +105,8 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.UndefinedLiteral => return rlWrap(mod, scope, rl, try undefLiteral(mod, scope, node.castTag(.UndefinedLiteral).?)),
.BoolLiteral => return rlWrap(mod, scope, rl, try boolLiteral(mod, scope, node.castTag(.BoolLiteral).?)),
.NullLiteral => return rlWrap(mod, scope, rl, try nullLiteral(mod, scope, node.castTag(.NullLiteral).?)),
.OptionalType => return rlWrap(mod, scope, rl, try optionalType(mod, scope, node.castTag(.OptionalType).?)),
.UnwrapOptional => return unwrapOptional(mod, scope, rl, node.castTag(.UnwrapOptional).?),
else => return mod.failNode(scope, node, "TODO implement astgen.Expr for {}", .{@tagName(node.tag)}),
}
}
@ -293,6 +295,28 @@ fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerErr
return addZIRUnOp(mod, scope, src, .boolnot, operand);
}
/// Astgen for the optional type prefix operator `?T`: evaluates the
/// operand with a `type` result type and emits an `optional_type`
/// ZIR instruction.
fn optionalType(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
    const tree = scope.tree();
    const src = tree.token_locs[node.op_token].start;
    // The operand of `?` must itself be a type.
    const type_type = try addZIRInstConst(mod, scope, src, .{
        .ty = Type.initTag(.type),
        .val = Value.initTag(.type_type),
    });
    const child_type = try expr(mod, scope, .{ .ty = type_type }, node.rhs);
    return addZIRUnOp(mod, scope, src, .optional_type, child_type);
}
/// Astgen for the `.?` suffix operator: evaluates the lhs as an lvalue
/// and emits a checked `unwrap_optional_safe`, which yields a pointer
/// to the payload. For non-lvalue result locations the payload is then
/// dereferenced and wrapped for the result location.
fn unwrapOptional(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst {
    const tree = scope.tree();
    const src = tree.token_locs[node.rtoken].start;
    const lhs_ptr = try expr(mod, scope, .lvalue, node.lhs);
    const payload_ptr = try addZIRUnOp(mod, scope, src, .unwrap_optional_safe, lhs_ptr);
    if (rl == .lvalue) return payload_ptr;
    const loaded = try addZIRUnOp(mod, scope, src, .deref, payload_ptr);
    return rlWrap(mod, scope, rl, loaded);
}
/// Identifier token -> String (allocated in scope.arena())
pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 {
const tree = scope.tree();

View File

@ -668,6 +668,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.store => return self.genStore(inst.castTag(.store).?),
.sub => return self.genSub(inst.castTag(.sub).?),
.unreach => return MCValue{ .unreach = {} },
.unwrap_optional => return self.genUnwrapOptional(inst.castTag(.unwrap_optional).?),
}
}
@ -817,6 +818,15 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}
/// Codegen for the `unwrap_optional` IR instruction.
/// Not yet implemented for any architecture.
fn genUnwrapOptional(self: *Self, inst: *ir.Inst.UnOp) !MCValue {
    // No side effects, so if it's unreferenced, do nothing.
    if (inst.base.isUnused())
        return MCValue.dead;
    // Every architecture currently falls through to the TODO error.
    switch (arch) {
        else => return self.fail(inst.base.src, "TODO implement unwrap optional for {}", .{self.target.cpu.arch}),
    }
}
fn genLoad(self: *Self, inst: *ir.Inst.UnOp) !MCValue {
const elem_ty = inst.base.ty;
if (!elem_ty.hasCodeGenBits())

View File

@ -82,6 +82,7 @@ pub const Inst = struct {
not,
floatcast,
intcast,
unwrap_optional,
pub fn Type(tag: Tag) type {
return switch (tag) {
@ -102,6 +103,7 @@ pub const Inst = struct {
.floatcast,
.intcast,
.load,
.unwrap_optional,
=> UnOp,
.add,
@ -419,7 +421,6 @@ pub const Inst = struct {
return null;
}
};
};
pub const Body = struct {

View File

@ -70,6 +70,11 @@ pub const Type = extern union {
.single_mut_pointer => return .Pointer,
.single_const_pointer_to_comptime_int => return .Pointer,
.const_slice_u8 => return .Pointer,
.optional,
.optional_single_const_pointer,
.optional_single_mut_pointer,
=> return .Optional,
}
}
@ -179,9 +184,11 @@ pub const Type = extern union {
}
return true;
},
.Optional => {
return a.elemType().eql(b.elemType());
},
.Float,
.Struct,
.Optional,
.ErrorUnion,
.ErrorSet,
.Enum,
@ -241,9 +248,11 @@ pub const Type = extern union {
std.hash.autoHash(&hasher, self.fnParamType(i).hash());
}
},
.Optional => {
std.hash.autoHash(&hasher, self.elemType().hash());
},
.Float,
.Struct,
.Optional,
.ErrorUnion,
.ErrorSet,
.Enum,
@ -317,24 +326,8 @@ pub const Type = extern union {
};
return Type{ .ptr_otherwise = &new_payload.base };
},
.single_const_pointer => {
const payload = @fieldParentPtr(Payload.SingleConstPointer, "base", self.ptr_otherwise);
const new_payload = try allocator.create(Payload.SingleConstPointer);
new_payload.* = .{
.base = payload.base,
.pointee_type = try payload.pointee_type.copy(allocator),
};
return Type{ .ptr_otherwise = &new_payload.base };
},
.single_mut_pointer => {
const payload = @fieldParentPtr(Payload.SingleMutPointer, "base", self.ptr_otherwise);
const new_payload = try allocator.create(Payload.SingleMutPointer);
new_payload.* = .{
.base = payload.base,
.pointee_type = try payload.pointee_type.copy(allocator),
};
return Type{ .ptr_otherwise = &new_payload.base };
},
.single_const_pointer => return self.copyPayloadSingleField(allocator, Payload.SingleConstPointer, "pointee_type"),
.single_mut_pointer => return self.copyPayloadSingleField(allocator, Payload.SingleMutPointer, "pointee_type"),
.int_signed => return self.copyPayloadShallow(allocator, Payload.IntSigned),
.int_unsigned => return self.copyPayloadShallow(allocator, Payload.IntUnsigned),
.function => {
@ -352,6 +345,9 @@ pub const Type = extern union {
};
return Type{ .ptr_otherwise = &new_payload.base };
},
.optional => return self.copyPayloadSingleField(allocator, Payload.Optional, "child_type"),
.optional_single_mut_pointer => return self.copyPayloadSingleField(allocator, Payload.OptionalSingleMutPointer, "pointee_type"),
.optional_single_const_pointer => return self.copyPayloadSingleField(allocator, Payload.OptionalSingleConstPointer, "pointee_type"),
}
}
@ -362,6 +358,14 @@ pub const Type = extern union {
return Type{ .ptr_otherwise = &new_payload.base };
}
/// Deep-copies a payload of type `T` that has exactly one `Type`-valued
/// field named `field_name`: the `base` header is copied shallowly and
/// the named field is copied recursively via `Type.copy`.
fn copyPayloadSingleField(self: Type, allocator: *Allocator, comptime T: type, comptime field_name: []const u8) error{OutOfMemory}!Type {
    const old_payload = @fieldParentPtr(T, "base", self.ptr_otherwise);
    const duped = try allocator.create(T);
    duped.base = old_payload.base;
    @field(duped, field_name) = try @field(old_payload, field_name).copy(allocator);
    return Type{ .ptr_otherwise = &duped.base };
}
pub fn format(
self: Type,
comptime fmt: []const u8,
@ -456,6 +460,24 @@ pub const Type = extern union {
const payload = @fieldParentPtr(Payload.IntUnsigned, "base", ty.ptr_otherwise);
return out_stream.print("u{}", .{payload.bits});
},
.optional => {
const payload = @fieldParentPtr(Payload.Optional, "base", ty.ptr_otherwise);
try out_stream.writeByte('?');
ty = payload.child_type;
continue;
},
.optional_single_const_pointer => {
const payload = @fieldParentPtr(Payload.OptionalSingleConstPointer, "base", ty.ptr_otherwise);
try out_stream.writeAll("?*const ");
ty = payload.pointee_type;
continue;
},
.optional_single_mut_pointer => {
const payload = @fieldParentPtr(Payload.OptionalSingleMutPointer, "base", ty.ptr_otherwise);
try out_stream.writeAll("?*");
ty = payload.pointee_type;
continue;
},
}
unreachable;
}
@ -545,12 +567,16 @@ pub const Type = extern union {
.single_const_pointer_to_comptime_int,
.const_slice_u8,
.array_u8_sentinel_0,
.array, // TODO check for zero bits
.single_const_pointer,
.single_mut_pointer,
.int_signed, // TODO check for zero bits
.int_unsigned, // TODO check for zero bits
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> true,
// TODO lazy types
.array => self.elemType().hasCodeGenBits() and self.arrayLen() != 0,
.single_const_pointer => self.elemType().hasCodeGenBits(),
.single_mut_pointer => self.elemType().hasCodeGenBits(),
.int_signed => self.cast(Payload.IntSigned).?.bits == 0,
.int_unsigned => self.cast(Payload.IntUnsigned).?.bits == 0,
.c_void,
.void,
@ -597,6 +623,8 @@ pub const Type = extern union {
.const_slice_u8,
.single_const_pointer,
.single_mut_pointer,
.optional_single_const_pointer,
.optional_single_mut_pointer,
=> return @divExact(target.cpu.arch.ptrBitWidth(), 8),
.c_short => return @divExact(CType.short.sizeInBits(target), 8),
@ -629,6 +657,16 @@ pub const Type = extern union {
return std.math.ceilPowerOfTwoPromote(u16, (bits + 7) / 8);
},
.optional => {
const child_type = self.cast(Payload.Optional).?.child_type;
if (!child_type.hasCodeGenBits()) return 1;
if (child_type.zigTypeTag() == .Pointer and !child_type.isCPtr())
return @divExact(target.cpu.arch.ptrBitWidth(), 8);
return child_type.abiAlignment(target);
},
.c_void,
.void,
.type,
@ -679,6 +717,8 @@ pub const Type = extern union {
.const_slice_u8,
.single_const_pointer,
.single_mut_pointer,
.optional_single_const_pointer,
.optional_single_mut_pointer,
=> return @divExact(target.cpu.arch.ptrBitWidth(), 8),
.c_short => return @divExact(CType.short.sizeInBits(target), 8),
@ -708,6 +748,20 @@ pub const Type = extern union {
return std.math.ceilPowerOfTwoPromote(u16, (bits + 7) / 8);
},
.optional => {
const child_type = self.cast(Payload.Optional).?.child_type;
if (!child_type.hasCodeGenBits()) return 1;
if (child_type.zigTypeTag() == .Pointer and !child_type.isCPtr())
return @divExact(target.cpu.arch.ptrBitWidth(), 8);
// Optional types are represented as a struct with the child type as the first
// field and a boolean as the second. Since the child type's abi alignment is
// guaranteed to be >= that of bool's (1 byte) the added size is exactly equal
// to the child type's ABI alignment.
return child_type.abiAlignment(target) + child_type.abiSize(target);
},
};
}
@ -756,6 +810,9 @@ pub const Type = extern union {
.function,
.int_unsigned,
.int_signed,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> false,
.single_const_pointer,
@ -812,6 +869,9 @@ pub const Type = extern union {
.function,
.int_unsigned,
.int_signed,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> false,
.const_slice_u8 => true,
@ -863,6 +923,9 @@ pub const Type = extern union {
.int_unsigned,
.int_signed,
.single_mut_pointer,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> false,
.single_const_pointer,
@ -920,11 +983,14 @@ pub const Type = extern union {
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> false,
};
}
/// Asserts the type is a pointer or array type.
/// Asserts the type is a pointer, optional or array type.
pub fn elemType(self: Type) Type {
return switch (self.tag()) {
.u8,
@ -974,6 +1040,9 @@ pub const Type = extern union {
.single_mut_pointer => self.cast(Payload.SingleMutPointer).?.pointee_type,
.array_u8_sentinel_0, .const_slice_u8 => Type.initTag(.u8),
.single_const_pointer_to_comptime_int => Type.initTag(.comptime_int),
.optional => self.cast(Payload.Optional).?.child_type,
.optional_single_mut_pointer => self.cast(Payload.OptionalSingleMutPointer).?.pointee_type,
.optional_single_const_pointer => self.cast(Payload.OptionalSingleConstPointer).?.pointee_type,
};
}
@ -1024,6 +1093,9 @@ pub const Type = extern union {
.const_slice_u8,
.int_unsigned,
.int_signed,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> unreachable,
.array => self.cast(Payload.Array).?.len,
@ -1078,6 +1150,9 @@ pub const Type = extern union {
.const_slice_u8,
.int_unsigned,
.int_signed,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> unreachable,
.array => return null,
@ -1129,6 +1204,9 @@ pub const Type = extern union {
.u16,
.u32,
.u64,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> false,
.int_signed,
@ -1184,6 +1262,9 @@ pub const Type = extern union {
.i16,
.i32,
.i64,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> false,
.int_unsigned,
@ -1229,6 +1310,9 @@ pub const Type = extern union {
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> unreachable,
.int_unsigned => .{ .signed = false, .bits = self.cast(Payload.IntUnsigned).?.bits },
@ -1292,6 +1376,9 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> false,
.usize,
@ -1384,6 +1471,9 @@ pub const Type = extern union {
.c_ulonglong,
.int_unsigned,
.int_signed,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> unreachable,
};
}
@ -1442,6 +1532,9 @@ pub const Type = extern union {
.c_ulonglong,
.int_unsigned,
.int_signed,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> unreachable,
}
}
@ -1499,6 +1592,9 @@ pub const Type = extern union {
.c_ulonglong,
.int_unsigned,
.int_signed,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> unreachable,
}
}
@ -1556,6 +1652,9 @@ pub const Type = extern union {
.c_ulonglong,
.int_unsigned,
.int_signed,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> unreachable,
};
}
@ -1610,6 +1709,9 @@ pub const Type = extern union {
.c_ulonglong,
.int_unsigned,
.int_signed,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> unreachable,
};
}
@ -1664,6 +1766,9 @@ pub const Type = extern union {
.c_ulonglong,
.int_unsigned,
.int_signed,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> unreachable,
};
}
@ -1718,6 +1823,9 @@ pub const Type = extern union {
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> false,
};
}
@ -1762,6 +1870,9 @@ pub const Type = extern union {
.array_u8_sentinel_0,
.const_slice_u8,
.c_void,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> return null,
.void => return Value.initTag(.void_value),
@ -1851,6 +1962,9 @@ pub const Type = extern union {
.array,
.single_const_pointer,
.single_mut_pointer,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> return false,
};
}
@ -1911,6 +2025,9 @@ pub const Type = extern union {
int_signed,
int_unsigned,
function,
optional,
optional_single_mut_pointer,
optional_single_const_pointer,
pub const last_no_payload_tag = Tag.const_slice_u8;
pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1;
@ -1963,6 +2080,24 @@ pub const Type = extern union {
return_type: Type,
cc: std.builtin.CallingConvention,
};
/// Payload for `?T` where `T` is any type without a specialized
/// optional representation.
pub const Optional = struct {
    base: Payload = Payload{ .tag = .optional },
    child_type: Type,
};
/// Payload for `?*const T` (optional single-item const pointer).
/// Optional pointers are pointer-sized (see `abiSize`/`abiAlignment`).
pub const OptionalSingleConstPointer = struct {
    base: Payload = Payload{ .tag = .optional_single_const_pointer },
    pointee_type: Type,
};
/// Payload for `?*T` (optional single-item mutable pointer).
/// Optional pointers are pointer-sized (see `abiSize`/`abiAlignment`).
pub const OptionalSingleMutPointer = struct {
    base: Payload = Payload{ .tag = .optional_single_mut_pointer },
    pointee_type: Type,
};
};
};

View File

@ -212,6 +212,12 @@ pub const Inst = struct {
@"unreachable",
/// Bitwise XOR. `^`
xor,
/// Create an optional type '?T'
optional_type,
/// Unwraps an optional value 'lhs.?'
unwrap_optional_safe,
/// Same as previous, but without safety checks. Used for orelse, if and while
unwrap_optional_unsafe,
pub fn Type(tag: Tag) type {
return switch (tag) {
@ -240,6 +246,9 @@ pub const Inst = struct {
.typeof,
.single_const_ptr_type,
.single_mut_ptr_type,
.optional_type,
.unwrap_optional_safe,
.unwrap_optional_unsafe,
=> UnOp,
.add,
@ -372,6 +381,9 @@ pub const Inst = struct {
.subwrap,
.typeof,
.xor,
.optional_type,
.unwrap_optional_safe,
.unwrap_optional_unsafe,
=> false,
.@"break",
@ -1915,6 +1927,7 @@ const EmitZIR = struct {
.isnonnull => try self.emitUnOp(inst.src, new_body, inst.castTag(.isnonnull).?, .isnonnull),
.load => try self.emitUnOp(inst.src, new_body, inst.castTag(.load).?, .deref),
.ref => try self.emitUnOp(inst.src, new_body, inst.castTag(.ref).?, .ref),
.unwrap_optional => try self.emitUnOp(inst.src, new_body, inst.castTag(.unwrap_optional).?, .unwrap_optional_unsafe),
.add => try self.emitBinOp(inst.src, new_body, inst.castTag(.add).?, .add),
.sub => try self.emitBinOp(inst.src, new_body, inst.castTag(.sub).?, .sub),
@ -2242,6 +2255,20 @@ const EmitZIR = struct {
std.debug.panic("TODO implement emitType for {}", .{ty});
}
},
.Optional => {
const inst = try self.arena.allocator.create(Inst.UnOp);
inst.* = .{
.base = .{
.src = src,
.tag = .optional_type,
},
.positionals = .{
.operand = (try self.emitType(src, ty.elemType())).inst,
},
.kw_args = .{},
};
return self.emitUnnamedDecl(&inst.base);
},
else => std.debug.panic("TODO implement emitType for {}", .{ty}),
},
}

View File

@ -68,8 +68,8 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
.deref => return analyzeInstDeref(mod, scope, old_inst.castTag(.deref).?),
.as => return analyzeInstAs(mod, scope, old_inst.castTag(.as).?),
.@"asm" => return analyzeInstAsm(mod, scope, old_inst.castTag(.@"asm").?),
.@"unreachable" => return analyzeInstUnreachable(mod, scope, old_inst.castTag(.@"unreachable").?),
.unreach_nocheck => return analyzeInstUnreachNoChk(mod, scope, old_inst.castTag(.unreach_nocheck).?),
.@"unreachable" => return analyzeInstUnreachable(mod, scope, old_inst.castTag(.@"unreachable").?, true),
.unreach_nocheck => return analyzeInstUnreachable(mod, scope, old_inst.castTag(.unreach_nocheck).?, false),
.@"return" => return analyzeInstRet(mod, scope, old_inst.castTag(.@"return").?),
.returnvoid => return analyzeInstRetVoid(mod, scope, old_inst.castTag(.returnvoid).?),
.@"fn" => return analyzeInstFn(mod, scope, old_inst.castTag(.@"fn").?),
@ -106,6 +106,9 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
.isnonnull => return analyzeInstIsNonNull(mod, scope, old_inst.castTag(.isnonnull).?, false),
.boolnot => return analyzeInstBoolNot(mod, scope, old_inst.castTag(.boolnot).?),
.typeof => return analyzeInstTypeOf(mod, scope, old_inst.castTag(.typeof).?),
.optional_type => return analyzeInstOptionalType(mod, scope, old_inst.castTag(.optional_type).?),
.unwrap_optional_safe => return analyzeInstUnwrapOptional(mod, scope, old_inst.castTag(.unwrap_optional_safe).?, true),
.unwrap_optional_unsafe => return analyzeInstUnwrapOptional(mod, scope, old_inst.castTag(.unwrap_optional_unsafe).?, false),
}
}
@ -305,8 +308,19 @@ fn analyzeInstRetPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerErr
/// Analyzes a ZIR `ref` instruction: produces a `*const T` pointing at
/// the operand. A comptime-known operand becomes a constant `RefVal`;
/// otherwise a runtime `ref` instruction is emitted.
/// Fix: the rendered block declared `const b` twice (a stale line left
/// above the comptime path); the runtime block is now only required on
/// the runtime path, after the comptime case has returned.
fn analyzeInstRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
    const operand = try resolveInst(mod, scope, inst.positionals.operand);
    const ptr_type = try mod.singleConstPtrType(scope, inst.base.src, operand.ty);
    if (operand.value()) |val| {
        // Comptime path: wrap the known value in a RefVal payload.
        const ref_payload = try scope.arena().create(Value.Payload.RefVal);
        ref_payload.* = .{ .val = val };
        return mod.constInst(scope, inst.base.src, .{
            .ty = ptr_type,
            .val = Value.initPayload(&ref_payload.base),
        });
    }
    // Runtime path.
    const b = try mod.requireRuntimeBlock(scope, inst.base.src);
    return mod.addUnOp(b, inst.base.src, ptr_type, .ref, operand);
}
@ -620,6 +634,66 @@ fn analyzeInstIntType(mod: *Module, scope: *Scope, inttype: *zir.Inst.IntType) I
return mod.fail(scope, inttype.base.src, "TODO implement inttype", .{});
}
/// Analyzes the `optional_type` ZIR instruction: builds `?T` from the
/// resolved child type. Single-item pointer children get the
/// specialized optional-pointer payloads; everything else gets the
/// generic `Optional` payload.
fn analyzeInstOptionalType(mod: *Module, scope: *Scope, optional: *zir.Inst.UnOp) InnerError!*Inst {
    const child_type = try resolveType(mod, scope, optional.positionals.operand);
    const payload_base: *Type.Payload = switch (child_type.tag()) {
        .single_const_pointer => blk: {
            const pl = try scope.arena().create(Type.Payload.OptionalSingleConstPointer);
            pl.* = .{ .pointee_type = child_type.elemType() };
            break :blk &pl.base;
        },
        .single_mut_pointer => blk: {
            const pl = try scope.arena().create(Type.Payload.OptionalSingleMutPointer);
            pl.* = .{ .pointee_type = child_type.elemType() };
            break :blk &pl.base;
        },
        else => blk: {
            const pl = try scope.arena().create(Type.Payload.Optional);
            pl.* = .{ .child_type = child_type };
            break :blk &pl.base;
        },
    };
    return mod.constType(scope, optional.base.src, Type.initPayload(payload_base));
}
/// Analyzes `unwrap_optional_safe` / `unwrap_optional_unsafe`.
/// The operand must be a *pointer to* an optional; the result is a
/// pointer to the payload with matching constness. With `safety_check`
/// true and safety enabled, a runtime null check precedes the unwrap.
fn analyzeInstUnwrapOptional(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst {
    const operand = try resolveInst(mod, scope, unwrap.positionals.operand);
    assert(operand.ty.zigTypeTag() == .Pointer);
    if (operand.ty.elemType().zigTypeTag() != .Optional) {
        return mod.fail(scope, unwrap.base.src, "expected optional type, found {}", .{operand.ty.elemType()});
    }
    // Pointer-to-payload type, preserving the operand pointer's constness.
    const child_type = operand.ty.elemType().elemType();
    const child_pointer = if (operand.ty.isConstPtr())
        try mod.singleConstPtrType(scope, unwrap.base.src, child_type)
    else
        try mod.singleMutPtrType(scope, unwrap.base.src, child_type);

    // Comptime-known operand: unwrapping null is a compile error.
    if (operand.value()) |val| {
        if (val.isNull()) {
            return mod.fail(scope, unwrap.base.src, "unable to unwrap null", .{});
        }
        // NOTE(review): the pointer value is reused unchanged as the
        // pointer-to-payload value — TODO confirm this holds for all
        // optional representations.
        return mod.constInst(scope, unwrap.base.src, .{
            .ty = child_pointer,
            .val = val,
        });
    }

    const b = try mod.requireRuntimeBlock(scope, unwrap.base.src);
    if (safety_check and mod.wantSafety(scope)) {
        const is_non_null = try mod.addUnOp(b, unwrap.base.src, Type.initTag(.bool), .isnonnull, operand);
        try mod.addSafetyCheck(b, is_non_null, .unwrap_null);
    }
    return mod.addUnOp(b, unwrap.base.src, child_pointer, .unwrap_optional, operand);
}
fn analyzeInstFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*Inst {
const return_type = try resolveType(mod, scope, fntype.positionals.return_type);
@ -1094,18 +1168,19 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
return mod.addCondBr(parent_block, inst.base.src, cond, then_body, else_body);
}
/// Entry point for the `unreach_nocheck` ZIR instruction: an
/// `unreachable` with the safety check disabled.
fn analyzeInstUnreachNoChk(mod: *Module, scope: *Scope, unreach: *zir.Inst.NoOp) InnerError!*Inst {
    return analyzeInstUnreachable(mod, scope, unreach, false);
}

/// Analyzes `unreachable`: when `safety_check` is requested and safety
/// is enabled for this scope, emits a safety panic for `.unreach`;
/// otherwise emits a plain `unreach` terminator.
/// Fix: the rendered block interleaved the old and new versions of this
/// function (duplicate signature, two unclosed `if` headers, and a
/// trailing stale `return`), leaving unbalanced braces; reconstructed
/// the coherent post-change form, keeping the no-check entry point as a
/// thin delegating wrapper.
fn analyzeInstUnreachable(
    mod: *Module,
    scope: *Scope,
    unreach: *zir.Inst.NoOp,
    safety_check: bool,
) InnerError!*Inst {
    const b = try mod.requireRuntimeBlock(scope, unreach.base.src);
    // TODO Add compile error for @optimizeFor occurring too late in a scope.
    if (safety_check and mod.wantSafety(scope)) {
        return mod.safetyPanic(b, unreach.base.src, .unreach);
    } else {
        return mod.addNoOp(b, unreach.base.src, Type.initTag(.noreturn), .unreach);
    }
}
fn analyzeInstRet(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {

View File

@ -441,5 +441,29 @@ pub fn addCases(ctx: *TestContext) !void {
,
"",
);
// Optionals
case.addCompareOutput(
\\export fn _start() noreturn {
\\ const a: u32 = 2;
\\ const b: ?u32 = a;
\\ const c = b.?;
\\ if (c != 2) unreachable;
\\
\\ exit();
\\}
\\
\\fn exit() noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (0)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
"",
);
}
}