mirror of https://github.com/ziglang/zig.git (synced 2026-02-13 12:59:04 +00:00)
commit d8fb377e2a
@@ -2207,8 +2207,11 @@ pub fn analyzeDeclRef(self: *Module, scope: *Scope, src: usize, decl: *Decl) Inn
};

const decl_tv = try decl.typedValue();
const ty_payload = try scope.arena().create(Type.Payload.SingleConstPointer);
ty_payload.* = .{ .pointee_type = decl_tv.ty };
const ty_payload = try scope.arena().create(Type.Payload.Pointer);
ty_payload.* = .{
.base = .{ .tag = .single_const_pointer },
.pointee_type = decl_tv.ty,
};
const val_payload = try scope.arena().create(Value.Payload.DeclRef);
val_payload.* = .{ .decl = decl };

@@ -2432,6 +2435,15 @@ pub fn cmpNumeric(
return self.addBinOp(b, src, Type.initTag(.bool), Inst.Tag.fromCmpOp(op), casted_lhs, casted_rhs);
}

fn wrapOptional(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst {
if (inst.value()) |val| {
return self.constInst(scope, inst.src, .{ .ty = dest_type, .val = val });
}

const b = try self.requireRuntimeBlock(scope, inst.src);
return self.addUnOp(b, inst.src, dest_type, .wrap_optional, inst);
}

fn makeIntType(self: *Module, scope: *Scope, signed: bool, bits: u16) !Type {
if (signed) {
const int_payload = try scope.arena().create(Type.Payload.IntSigned);
@@ -2509,14 +2521,12 @@ pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst

// T to ?T
if (dest_type.zigTypeTag() == .Optional) {
const child_type = dest_type.elemType();
if (inst.value()) |val| {
if (child_type.eql(inst.ty)) {
return self.constInst(scope, inst.src, .{ .ty = dest_type, .val = val });
}
return self.fail(scope, inst.src, "TODO optional wrap {} to {}", .{ val, dest_type });
} else if (child_type.eql(inst.ty)) {
return self.fail(scope, inst.src, "TODO optional wrap {}", .{dest_type});
var buf: Type.Payload.Pointer = undefined;
const child_type = dest_type.optionalChild(&buf);
if (child_type.eql(inst.ty)) {
return self.wrapOptional(scope, dest_type, inst);
} else if (try self.coerceNum(scope, child_type, inst)) |some| {
return self.wrapOptional(scope, dest_type, some);
}
}

@@ -2534,39 +2544,8 @@ pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst
}

// comptime known number to other number
if (inst.value()) |val| {
const src_zig_tag = inst.ty.zigTypeTag();
const dst_zig_tag = dest_type.zigTypeTag();

if (dst_zig_tag == .ComptimeInt or dst_zig_tag == .Int) {
if (src_zig_tag == .Float or src_zig_tag == .ComptimeFloat) {
if (val.floatHasFraction()) {
return self.fail(scope, inst.src, "fractional component prevents float value {} from being casted to type '{}'", .{ val, inst.ty });
}
return self.fail(scope, inst.src, "TODO float to int", .{});
} else if (src_zig_tag == .Int or src_zig_tag == .ComptimeInt) {
if (!val.intFitsInType(dest_type, self.target())) {
return self.fail(scope, inst.src, "type {} cannot represent integer value {}", .{ inst.ty, val });
}
return self.constInst(scope, inst.src, .{ .ty = dest_type, .val = val });
}
} else if (dst_zig_tag == .ComptimeFloat or dst_zig_tag == .Float) {
if (src_zig_tag == .Float or src_zig_tag == .ComptimeFloat) {
const res = val.floatCast(scope.arena(), dest_type, self.target()) catch |err| switch (err) {
error.Overflow => return self.fail(
scope,
inst.src,
"cast of value {} to type '{}' loses information",
.{ val, dest_type },
),
error.OutOfMemory => return error.OutOfMemory,
};
return self.constInst(scope, inst.src, .{ .ty = dest_type, .val = res });
} else if (src_zig_tag == .Int or src_zig_tag == .ComptimeInt) {
return self.fail(scope, inst.src, "TODO int to float", .{});
}
}
}
if (try self.coerceNum(scope, dest_type, inst)) |some|
return some;

// integer widening
if (inst.ty.zigTypeTag() == .Int and dest_type.zigTypeTag() == .Int) {
@@ -2598,6 +2577,42 @@ pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst
return self.fail(scope, inst.src, "expected {}, found {}", .{ dest_type, inst.ty });
}

pub fn coerceNum(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !?*Inst {
const val = inst.value() orelse return null;
const src_zig_tag = inst.ty.zigTypeTag();
const dst_zig_tag = dest_type.zigTypeTag();

if (dst_zig_tag == .ComptimeInt or dst_zig_tag == .Int) {
if (src_zig_tag == .Float or src_zig_tag == .ComptimeFloat) {
if (val.floatHasFraction()) {
return self.fail(scope, inst.src, "fractional component prevents float value {} from being casted to type '{}'", .{ val, inst.ty });
}
return self.fail(scope, inst.src, "TODO float to int", .{});
} else if (src_zig_tag == .Int or src_zig_tag == .ComptimeInt) {
if (!val.intFitsInType(dest_type, self.target())) {
return self.fail(scope, inst.src, "type {} cannot represent integer value {}", .{ inst.ty, val });
}
return self.constInst(scope, inst.src, .{ .ty = dest_type, .val = val });
}
} else if (dst_zig_tag == .ComptimeFloat or dst_zig_tag == .Float) {
if (src_zig_tag == .Float or src_zig_tag == .ComptimeFloat) {
const res = val.floatCast(scope.arena(), dest_type, self.target()) catch |err| switch (err) {
error.Overflow => return self.fail(
scope,
inst.src,
"cast of value {} to type '{}' loses information",
.{ val, dest_type },
),
error.OutOfMemory => return error.OutOfMemory,
};
return self.constInst(scope, inst.src, .{ .ty = dest_type, .val = res });
} else if (src_zig_tag == .Int or src_zig_tag == .ComptimeInt) {
return self.fail(scope, inst.src, "TODO int to float", .{});
}
}
return null;
}

pub fn storePtr(self: *Module, scope: *Scope, src: usize, ptr: *Inst, uncasted_value: *Inst) !*Inst {
if (ptr.ty.isConstPtr())
return self.fail(scope, src, "cannot assign to constant", .{});
@@ -2885,15 +2900,12 @@ pub fn floatSub(self: *Module, scope: *Scope, float_type: Type, src: usize, lhs:
return Value.initPayload(val_payload);
}

pub fn singleMutPtrType(self: *Module, scope: *Scope, src: usize, elem_ty: Type) error{OutOfMemory}!Type {
const type_payload = try scope.arena().create(Type.Payload.SingleMutPointer);
type_payload.* = .{ .pointee_type = elem_ty };
return Type.initPayload(&type_payload.base);
}

pub fn singleConstPtrType(self: *Module, scope: *Scope, src: usize, elem_ty: Type) error{OutOfMemory}!Type {
const type_payload = try scope.arena().create(Type.Payload.SingleConstPointer);
type_payload.* = .{ .pointee_type = elem_ty };
pub fn singlePtrType(self: *Module, scope: *Scope, src: usize, mutable: bool, elem_ty: Type) error{OutOfMemory}!Type {
const type_payload = try scope.arena().create(Type.Payload.Pointer);
type_payload.* = .{
.base = .{ .tag = if (mutable) .single_mut_pointer else .single_const_pointer },
.pointee_type = elem_ty,
};
return Type.initPayload(&type_payload.base);
}
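Illustrative note (not part of the diff): the new wrapOptional/coerceNum path means a value of type T now coerces to ?T instead of hitting the old TODO error. A minimal user-level sketch of the two paths, with made-up values:

    const std = @import("std");

    fn wrap(x: u32) ?u32 {
        // `x` is only known at runtime, so returning it as `?u32`
        // exercises the runtime `wrap_optional` instruction.
        return x;
    }

    test "T coerces to ?T" {
        // Comptime-known value: analysis wraps it directly via constInst.
        const comptime_known: ?u32 = 123;
        try std.testing.expect(comptime_known.? == 123);
        try std.testing.expect(wrap(5).? == 5);
    }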

@@ -113,6 +113,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.Period => return rlWrap(mod, scope, rl, try field(mod, scope, node.castTag(.Period).?)),
.Deref => return rlWrap(mod, scope, rl, try deref(mod, scope, node.castTag(.Deref).?)),
.BoolNot => return rlWrap(mod, scope, rl, try boolNot(mod, scope, node.castTag(.BoolNot).?)),
.AddressOf => return rlWrap(mod, scope, rl, try addressOf(mod, scope, node.castTag(.AddressOf).?)),
.FloatLiteral => return rlWrap(mod, scope, rl, try floatLiteral(mod, scope, node.castTag(.FloatLiteral).?)),
.UndefinedLiteral => return rlWrap(mod, scope, rl, try undefLiteral(mod, scope, node.castTag(.UndefinedLiteral).?)),
.BoolLiteral => return rlWrap(mod, scope, rl, try boolLiteral(mod, scope, node.castTag(.BoolLiteral).?)),
@@ -122,6 +123,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.Block => return rlWrapVoid(mod, scope, rl, node, try blockExpr(mod, scope, node.castTag(.Block).?)),
.LabeledBlock => return labeledBlockExpr(mod, scope, rl, node.castTag(.LabeledBlock).?),
.Break => return rlWrap(mod, scope, rl, try breakExpr(mod, scope, node.castTag(.Break).?)),
.PtrType => return rlWrap(mod, scope, rl, try ptrType(mod, scope, node.castTag(.PtrType).?)),

.Defer => return mod.failNode(scope, node, "TODO implement astgen.expr for .Defer", .{}),
.Catch => return mod.failNode(scope, node, "TODO implement astgen.expr for .Catch", .{}),
@@ -131,7 +133,6 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.MergeErrorSets => return mod.failNode(scope, node, "TODO implement astgen.expr for .MergeErrorSets", .{}),
.Range => return mod.failNode(scope, node, "TODO implement astgen.expr for .Range", .{}),
.OrElse => return mod.failNode(scope, node, "TODO implement astgen.expr for .OrElse", .{}),
.AddressOf => return mod.failNode(scope, node, "TODO implement astgen.expr for .AddressOf", .{}),
.Await => return mod.failNode(scope, node, "TODO implement astgen.expr for .Await", .{}),
.BitNot => return mod.failNode(scope, node, "TODO implement astgen.expr for .BitNot", .{}),
.Negation => return mod.failNode(scope, node, "TODO implement astgen.expr for .Negation", .{}),
@@ -140,7 +141,6 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.Try => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}),
.ArrayType => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayType", .{}),
.ArrayTypeSentinel => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayTypeSentinel", .{}),
.PtrType => return mod.failNode(scope, node, "TODO implement astgen.expr for .PtrType", .{}),
.SliceType => return mod.failNode(scope, node, "TODO implement astgen.expr for .SliceType", .{}),
.Slice => return mod.failNode(scope, node, "TODO implement astgen.expr for .Slice", .{}),
.ArrayAccess => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayAccess", .{}),
@@ -425,6 +425,10 @@ fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerErr
return addZIRUnOp(mod, scope, src, .boolnot, operand);
}

fn addressOf(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
return expr(mod, scope, .lvalue, node.rhs);
}

fn optionalType(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.op_token].start;
@@ -436,6 +440,50 @@ fn optionalType(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) Inn
return addZIRUnOp(mod, scope, src, .optional_type, operand);
}

fn ptrType(mod: *Module, scope: *Scope, node: *ast.Node.PtrType) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.op_token].start;
const meta_type = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.type_type),
});

const simple = node.ptr_info.allowzero_token == null and
node.ptr_info.align_info == null and
node.ptr_info.volatile_token == null and
node.ptr_info.sentinel == null;

if (simple) {
const child_type = try expr(mod, scope, .{ .ty = meta_type }, node.rhs);
return addZIRUnOp(mod, scope, src, if (node.ptr_info.const_token == null)
.single_mut_ptr_type
else
.single_const_ptr_type, child_type);
}

var kw_args: std.meta.fieldInfo(zir.Inst.PtrType, "kw_args").field_type = .{};
kw_args.@"allowzero" = node.ptr_info.allowzero_token != null;
if (node.ptr_info.align_info) |some| {
kw_args.@"align" = try expr(mod, scope, .none, some.node);
if (some.bit_range) |bit_range| {
kw_args.align_bit_start = try expr(mod, scope, .none, bit_range.start);
kw_args.align_bit_end = try expr(mod, scope, .none, bit_range.end);
}
}
kw_args.@"const" = node.ptr_info.const_token != null;
kw_args.@"volatile" = node.ptr_info.volatile_token != null;
if (node.ptr_info.sentinel) |some| {
kw_args.sentinel = try expr(mod, scope, .none, some);
}

const child_type = try expr(mod, scope, .{ .ty = meta_type }, node.rhs);
if (kw_args.sentinel) |some| {
kw_args.sentinel = try addZIRBinOp(mod, scope, some.src, .as, child_type, some);
}

return addZIRInst(mod, scope, src, zir.Inst.PtrType, .{ .child_type = child_type }, kw_args);
}

fn unwrapOptional(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.rtoken].start;
@@ -520,13 +568,77 @@ fn simpleBinOp(
return rlWrap(mod, scope, rl, result);
}

fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) InnerError!*zir.Inst {
if (if_node.payload) |payload| {
return mod.failNode(scope, payload, "TODO implement astgen.IfExpr for optionals", .{});
const CondKind = union(enum) {
bool,
optional: ?*zir.Inst,
err_union: ?*zir.Inst,

fn cond(self: *CondKind, mod: *Module, block_scope: *Scope.GenZIR, src: usize, cond_node: *ast.Node) !*zir.Inst {
switch (self.*) {
.bool => {
const bool_type = try addZIRInstConst(mod, &block_scope.base, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.bool_type),
});
return try expr(mod, &block_scope.base, .{ .ty = bool_type }, cond_node);
},
.optional => {
const cond_ptr = try expr(mod, &block_scope.base, .lvalue, cond_node);
self.* = .{ .optional = cond_ptr };
const result = try addZIRUnOp(mod, &block_scope.base, src, .deref, cond_ptr);
return try addZIRUnOp(mod, &block_scope.base, src, .isnonnull, result);
},
.err_union => {
const err_ptr = try expr(mod, &block_scope.base, .lvalue, cond_node);
self.* = .{ .err_union = err_ptr };
const result = try addZIRUnOp(mod, &block_scope.base, src, .deref, err_ptr);
return try addZIRUnOp(mod, &block_scope.base, src, .iserr, result);
},
}
}

fn thenSubScope(self: CondKind, mod: *Module, then_scope: *Scope.GenZIR, src: usize, payload_node: ?*ast.Node) !*Scope {
if (self == .bool) return &then_scope.base;

const payload = payload_node.?.castTag(.PointerPayload).?;
const is_ptr = payload.ptr_token != null;
const ident_node = payload.value_symbol.castTag(.Identifier).?;

// This intentionally does not support @"_" syntax.
const ident_name = then_scope.base.tree().tokenSlice(ident_node.token);
if (mem.eql(u8, ident_name, "_")) {
if (is_ptr)
return mod.failTok(&then_scope.base, payload.ptr_token.?, "pointer modifier invalid on discard", .{});
return &then_scope.base;
}

return mod.failNode(&then_scope.base, payload.value_symbol, "TODO implement payload symbols", .{});
}

fn elseSubScope(self: CondKind, mod: *Module, else_scope: *Scope.GenZIR, src: usize, payload_node: ?*ast.Node) !*Scope {
if (self != .err_union) return &else_scope.base;

const payload_ptr = try addZIRUnOp(mod, &else_scope.base, src, .unwrap_err_unsafe, self.err_union.?);

const payload = payload_node.?.castTag(.Payload).?;
const ident_node = payload.error_symbol.castTag(.Identifier).?;

// This intentionally does not support @"_" syntax.
const ident_name = else_scope.base.tree().tokenSlice(ident_node.token);
if (mem.eql(u8, ident_name, "_")) {
return &else_scope.base;
}

return mod.failNode(&else_scope.base, payload.error_symbol, "TODO implement payload symbols", .{});
}
};
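Illustrative note (not part of the diff): CondKind is what lets ifExpr/whileExpr below accept optional and error-union payloads. A user-level sketch of the forms now lowered; per thenSubScope/elseSubScope above, any capture name other than `_` is still rejected with "TODO implement payload symbols":

    fn classify(opt: ?u32, eu: anyerror!u32) u32 {
        // Optional payload: the condition lowers to deref + isnonnull.
        if (opt) |_| {
            return 1;
        }
        // Error-union else payload: the condition lowers to deref + iserr,
        // and the else branch unwraps the error via unwrap_err_unsafe.
        if (eu) |_| {
            return 2;
        } else |_| {
            return 3;
        }
    }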

fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) InnerError!*zir.Inst {
var cond_kind: CondKind = .bool;
if (if_node.payload) |_| cond_kind = .{ .optional = null };
if (if_node.@"else") |else_node| {
if (else_node.payload) |payload| {
return mod.failNode(scope, payload, "TODO implement astgen.IfExpr for error unions", .{});
cond_kind = .{ .err_union = null };
}
}
var block_scope: Scope.GenZIR = .{
@@ -539,11 +651,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn

const tree = scope.tree();
const if_src = tree.token_locs[if_node.if_token].start;
const bool_type = try addZIRInstConst(mod, scope, if_src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.bool_type),
});
const cond = try expr(mod, &block_scope.base, .{ .ty = bool_type }, if_node.condition);
const cond = try cond_kind.cond(mod, &block_scope, if_src, if_node.condition);

const condbr = try addZIRInstSpecial(mod, &block_scope.base, if_src, zir.Inst.CondBr, .{
.condition = cond,
@@ -554,6 +662,8 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
const block = try addZIRInstBlock(mod, scope, if_src, .{
.instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
});

const then_src = tree.token_locs[if_node.body.lastToken()].start;
var then_scope: Scope.GenZIR = .{
.parent = scope,
.decl = block_scope.decl,
@@ -562,6 +672,9 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
};
defer then_scope.instructions.deinit(mod.gpa);

// declare payload to the then_scope
const then_sub_scope = try cond_kind.thenSubScope(mod, &then_scope, then_src, if_node.payload);

// Most result location types can be forwarded directly; however
// if we need to write to a pointer which has an inferred type,
// proper type inference requires peer type resolution on the if's
@@ -571,10 +684,9 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
.inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block },
};

const then_result = try expr(mod, &then_scope.base, branch_rl, if_node.body);
const then_result = try expr(mod, then_sub_scope, branch_rl, if_node.body);
if (!then_result.tag.isNoReturn()) {
const then_src = tree.token_locs[if_node.body.lastToken()].start;
_ = try addZIRInst(mod, &then_scope.base, then_src, zir.Inst.Break, .{
_ = try addZIRInst(mod, then_sub_scope, then_src, zir.Inst.Break, .{
.block = block,
.operand = then_result,
}, .{});
@@ -592,10 +704,13 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
defer else_scope.instructions.deinit(mod.gpa);

if (if_node.@"else") |else_node| {
const else_result = try expr(mod, &else_scope.base, branch_rl, else_node.body);
const else_src = tree.token_locs[else_node.body.lastToken()].start;
// declare payload to the then_scope
const else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload);

const else_result = try expr(mod, else_sub_scope, branch_rl, else_node.body);
if (!else_result.tag.isNoReturn()) {
const else_src = tree.token_locs[else_node.body.lastToken()].start;
_ = try addZIRInst(mod, &else_scope.base, else_src, zir.Inst.Break, .{
_ = try addZIRInst(mod, else_sub_scope, else_src, zir.Inst.Break, .{
.block = block,
.operand = else_result,
}, .{});
@@ -616,12 +731,11 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
}

fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.While) InnerError!*zir.Inst {
if (while_node.payload) |payload| {
return mod.failNode(scope, payload, "TODO implement astgen.whileExpr for optionals", .{});
}
var cond_kind: CondKind = .bool;
if (while_node.payload) |_| cond_kind = .{ .optional = null };
if (while_node.@"else") |else_node| {
if (else_node.payload) |payload| {
return mod.failNode(scope, payload, "TODO implement astgen.whileExpr for error unions", .{});
cond_kind = .{ .err_union = null };
}
}

@@ -651,15 +765,11 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W

const tree = scope.tree();
const while_src = tree.token_locs[while_node.while_token].start;
const bool_type = try addZIRInstConst(mod, scope, while_src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.bool_type),
});
const void_type = try addZIRInstConst(mod, scope, while_src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.void_type),
});
const cond = try expr(mod, &continue_scope.base, .{ .ty = bool_type }, while_node.condition);
const cond = try cond_kind.cond(mod, &continue_scope, while_src, while_node.condition);

const condbr = try addZIRInstSpecial(mod, &continue_scope.base, while_src, zir.Inst.CondBr, .{
.condition = cond,
@@ -682,6 +792,8 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
const while_block = try addZIRInstBlock(mod, scope, while_src, .{
.instructions = try expr_scope.arena.dupe(*zir.Inst, expr_scope.instructions.items),
});

const then_src = tree.token_locs[while_node.body.lastToken()].start;
var then_scope: Scope.GenZIR = .{
.parent = &continue_scope.base,
.decl = continue_scope.decl,
@@ -690,6 +802,9 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
};
defer then_scope.instructions.deinit(mod.gpa);

// declare payload to the then_scope
const then_sub_scope = try cond_kind.thenSubScope(mod, &then_scope, then_src, while_node.payload);

// Most result location types can be forwarded directly; however
// if we need to write to a pointer which has an inferred type,
// proper type inference requires peer type resolution on the while's
@@ -699,10 +814,9 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
.inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = while_block },
};

const then_result = try expr(mod, &then_scope.base, branch_rl, while_node.body);
const then_result = try expr(mod, then_sub_scope, branch_rl, while_node.body);
if (!then_result.tag.isNoReturn()) {
const then_src = tree.token_locs[while_node.body.lastToken()].start;
_ = try addZIRInst(mod, &then_scope.base, then_src, zir.Inst.Break, .{
_ = try addZIRInst(mod, then_sub_scope, then_src, zir.Inst.Break, .{
.block = cond_block,
.operand = then_result,
}, .{});
@@ -720,10 +834,13 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
defer else_scope.instructions.deinit(mod.gpa);

if (while_node.@"else") |else_node| {
const else_result = try expr(mod, &else_scope.base, branch_rl, else_node.body);
const else_src = tree.token_locs[else_node.body.lastToken()].start;
// declare payload to the then_scope
const else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload);

const else_result = try expr(mod, else_sub_scope, branch_rl, else_node.body);
if (!else_result.tag.isNoReturn()) {
const else_src = tree.token_locs[else_node.body.lastToken()].start;
_ = try addZIRInst(mod, &else_scope.base, else_src, zir.Inst.Break, .{
_ = try addZIRInst(mod, else_sub_scope, else_src, zir.Inst.Break, .{
.block = while_block,
.operand = else_result,
}, .{});
@@ -796,7 +913,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
const int_type_payload = try scope.arena().create(Value.Payload.IntType);
int_type_payload.* = .{ .signed = is_signed, .bits = bit_count };
const result = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.comptime_int),
.ty = Type.initTag(.type),
.val = Value.initPayload(&int_type_payload.base),
});
return rlWrap(mod, scope, rl, result);
@@ -671,6 +671,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.intcast => return self.genIntCast(inst.castTag(.intcast).?),
.isnonnull => return self.genIsNonNull(inst.castTag(.isnonnull).?),
.isnull => return self.genIsNull(inst.castTag(.isnull).?),
.iserr => return self.genIsErr(inst.castTag(.iserr).?),
.load => return self.genLoad(inst.castTag(.load).?),
.loop => return self.genLoop(inst.castTag(.loop).?),
.not => return self.genNot(inst.castTag(.not).?),
@@ -682,6 +683,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.sub => return self.genSub(inst.castTag(.sub).?),
.unreach => return MCValue{ .unreach = {} },
.unwrap_optional => return self.genUnwrapOptional(inst.castTag(.unwrap_optional).?),
.wrap_optional => return self.genWrapOptional(inst.castTag(.wrap_optional).?),
}
}

@@ -840,6 +842,22 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}

fn genWrapOptional(self: *Self, inst: *ir.Inst.UnOp) !MCValue {
const optional_ty = inst.base.ty;

// No side effects, so if it's unreferenced, do nothing.
if (inst.base.isUnused())
return MCValue.dead;

// Optional type is just a boolean true
if (optional_ty.abiSize(self.target.*) == 1)
return MCValue{ .immediate = 1 };

switch (arch) {
else => return self.fail(inst.base.src, "TODO implement wrap optional for {}", .{self.target.cpu.arch}),
}
}

fn genLoad(self: *Self, inst: *ir.Inst.UnOp) !MCValue {
const elem_ty = inst.base.ty;
if (!elem_ty.hasCodeGenBits())
@@ -1374,6 +1392,12 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}

fn genIsErr(self: *Self, inst: *ir.Inst.UnOp) !MCValue {
switch (arch) {
else => return self.fail(inst.base.src, "TODO implement iserr for {}", .{self.target.cpu.arch}),
}
}

fn genLoop(self: *Self, inst: *ir.Inst.Loop) !MCValue {
// A loop is a setup to be able to jump back to the beginning.
const start_index = self.code.items.len;
@@ -2028,9 +2052,9 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
return mcv;
}

fn genTypedValue(self: *Self, src: usize, typed_value: TypedValue) !MCValue {
fn genTypedValue(self: *Self, src: usize, typed_value: TypedValue) InnerError!MCValue {
if (typed_value.val.isUndef())
return MCValue.undef;
return MCValue{ .undef = {} };
const ptr_bits = self.target.cpu.arch.ptrBitWidth();
const ptr_bytes: u64 = @divExact(ptr_bits, 8);
switch (typed_value.ty.zigTypeTag()) {
@@ -2055,6 +2079,21 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
},
.ComptimeInt => unreachable, // semantic analysis prevents this
.ComptimeFloat => unreachable, // semantic analysis prevents this
.Optional => {
if (typed_value.ty.isPtrLikeOptional()) {
if (typed_value.val.isNull())
return MCValue{ .immediate = 0 };

var buf: Type.Payload.Pointer = undefined;
return self.genTypedValue(src, .{
.ty = typed_value.ty.optionalChild(&buf),
.val = typed_value.val,
});
} else if (typed_value.ty.abiSize(self.target.*) == 1) {
return MCValue{ .immediate = @boolToInt(typed_value.val.isNull()) };
}
return self.fail(src, "TODO non pointer optionals", .{});
},
else => return self.fail(src, "TODO implement const of type '{}'", .{typed_value.ty}),
}
}
@@ -2160,7 +2199,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
};
}

fn fail(self: *Self, src: usize, comptime format: []const u8, args: anytype) error{ CodegenFail, OutOfMemory } {
fn fail(self: *Self, src: usize, comptime format: []const u8, args: anytype) InnerError {
@setCold(true);
assert(self.err_msg == null);
self.err_msg = try ErrorMsg.create(self.bin_file.base.allocator, src, format, args);
@@ -68,6 +68,7 @@ pub const Inst = struct {
dbg_stmt,
isnonnull,
isnull,
iserr,
/// Read a value from a pointer.
load,
loop,
@@ -83,6 +84,7 @@ pub const Inst = struct {
floatcast,
intcast,
unwrap_optional,
wrap_optional,

pub fn Type(tag: Tag) type {
return switch (tag) {
@@ -99,11 +101,13 @@ pub const Inst = struct {
.not,
.isnonnull,
.isnull,
.iserr,
.ptrtoint,
.floatcast,
.intcast,
.load,
.unwrap_optional,
.wrap_optional,
=> UnOp,

.add,

@@ -107,6 +107,17 @@ pub const Type = extern union {
return @fieldParentPtr(T, "base", self.ptr_otherwise);
}

pub fn castPointer(self: Type) ?*Payload.Pointer {
return switch (self.tag()) {
.single_const_pointer,
.single_mut_pointer,
.optional_single_const_pointer,
.optional_single_mut_pointer,
=> @fieldParentPtr(Payload.Pointer, "base", self.ptr_otherwise),
else => null,
};
}

pub fn eql(a: Type, b: Type) bool {
// As a shortcut, if the small tags / addresses match, we're done.
if (a.tag_if_small_enough == b.tag_if_small_enough)
@@ -126,8 +137,8 @@ pub const Type = extern union {
.Null => return true,
.Pointer => {
// Hot path for common case:
if (a.cast(Payload.SingleConstPointer)) |a_payload| {
if (b.cast(Payload.SingleConstPointer)) |b_payload| {
if (a.castPointer()) |a_payload| {
if (b.castPointer()) |b_payload| {
return eql(a_payload.pointee_type, b_payload.pointee_type);
}
}
@@ -185,7 +196,9 @@ pub const Type = extern union {
return true;
},
.Optional => {
return a.elemType().eql(b.elemType());
var buf_a: Payload.Pointer = undefined;
var buf_b: Payload.Pointer = undefined;
return a.optionalChild(&buf_a).eql(b.optionalChild(&buf_b));
},
.Float,
.Struct,
@@ -249,7 +262,8 @@ pub const Type = extern union {
}
},
.Optional => {
std.hash.autoHash(&hasher, self.elemType().hash());
var buf: Payload.Pointer = undefined;
std.hash.autoHash(&hasher, self.optionalChild(&buf).hash());
},
.Float,
.Struct,
@@ -326,8 +340,6 @@ pub const Type = extern union {
};
return Type{ .ptr_otherwise = &new_payload.base };
},
.single_const_pointer => return self.copyPayloadSingleField(allocator, Payload.SingleConstPointer, "pointee_type"),
.single_mut_pointer => return self.copyPayloadSingleField(allocator, Payload.SingleMutPointer, "pointee_type"),
.int_signed => return self.copyPayloadShallow(allocator, Payload.IntSigned),
.int_unsigned => return self.copyPayloadShallow(allocator, Payload.IntUnsigned),
.function => {
@@ -346,8 +358,11 @@ pub const Type = extern union {
return Type{ .ptr_otherwise = &new_payload.base };
},
.optional => return self.copyPayloadSingleField(allocator, Payload.Optional, "child_type"),
.optional_single_mut_pointer => return self.copyPayloadSingleField(allocator, Payload.OptionalSingleMutPointer, "pointee_type"),
.optional_single_const_pointer => return self.copyPayloadSingleField(allocator, Payload.OptionalSingleConstPointer, "pointee_type"),
.single_const_pointer,
.single_mut_pointer,
.optional_single_mut_pointer,
.optional_single_const_pointer,
=> return self.copyPayloadSingleField(allocator, Payload.Pointer, "pointee_type"),
}
}

@@ -441,13 +456,13 @@ pub const Type = extern union {
continue;
},
.single_const_pointer => {
const payload = @fieldParentPtr(Payload.SingleConstPointer, "base", ty.ptr_otherwise);
const payload = @fieldParentPtr(Payload.Pointer, "base", ty.ptr_otherwise);
try out_stream.writeAll("*const ");
ty = payload.pointee_type;
continue;
},
.single_mut_pointer => {
const payload = @fieldParentPtr(Payload.SingleMutPointer, "base", ty.ptr_otherwise);
const payload = @fieldParentPtr(Payload.Pointer, "base", ty.ptr_otherwise);
try out_stream.writeAll("*");
ty = payload.pointee_type;
continue;
@@ -467,13 +482,13 @@ pub const Type = extern union {
continue;
},
.optional_single_const_pointer => {
const payload = @fieldParentPtr(Payload.OptionalSingleConstPointer, "base", ty.ptr_otherwise);
const payload = @fieldParentPtr(Payload.Pointer, "base", ty.ptr_otherwise);
try out_stream.writeAll("?*const ");
ty = payload.pointee_type;
continue;
},
.optional_single_mut_pointer => {
const payload = @fieldParentPtr(Payload.OptionalSingleMutPointer, "base", ty.ptr_otherwise);
const payload = @fieldParentPtr(Payload.Pointer, "base", ty.ptr_otherwise);
try out_stream.writeAll("?*");
ty = payload.pointee_type;
continue;
@@ -658,7 +673,8 @@ pub const Type = extern union {
},

.optional => {
const child_type = self.cast(Payload.Optional).?.child_type;
var buf: Payload.Pointer = undefined;
const child_type = self.optionalChild(&buf);
if (!child_type.hasCodeGenBits()) return 1;

if (child_type.zigTypeTag() == .Pointer and !child_type.isCPtr())
@@ -750,7 +766,8 @@ pub const Type = extern union {
},

.optional => {
const child_type = self.cast(Payload.Optional).?.child_type;
var buf: Payload.Pointer = undefined;
const child_type = self.optionalChild(&buf);
if (!child_type.hasCodeGenBits()) return 1;

if (child_type.zigTypeTag() == .Pointer and !child_type.isCPtr())
@@ -990,7 +1007,23 @@ pub const Type = extern union {
};
}

/// Asserts the type is a pointer, optional or array type.
/// Asserts that the type is an optional
pub fn isPtrLikeOptional(self: Type) bool {
switch (self.tag()) {
.optional_single_const_pointer, .optional_single_mut_pointer => return true,
.optional => {
var buf: Payload.Pointer = undefined;
const child_type = self.optionalChild(&buf);
// optionals of zero sized pointers behave like bools
if (!child_type.hasCodeGenBits()) return false;

return child_type.zigTypeTag() == .Pointer and !child_type.isCPtr();
},
else => unreachable,
}
}
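Illustrative note (not part of the diff): isPtrLikeOptional is what lets the compiler treat ?*T as a bare address, with 0 encoding null, while optionals of zero-bit payloads are handled as a single boolean byte. A small sketch of the user-visible consequence:

    const std = @import("std");

    test "pointer-like optionals carry no extra tag" {
        // A normal single-item pointer can never be 0, so 0 encodes null
        // and the optional stays pointer-sized.
        try std.testing.expect(@sizeOf(?*u32) == @sizeOf(*u32));
    }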

/// Asserts the type is a pointer or array type.
pub fn elemType(self: Type) Type {
return switch (self.tag()) {
.u8,
@@ -1033,16 +1066,60 @@ pub const Type = extern union {
.function,
.int_unsigned,
.int_signed,
.optional,
.optional_single_const_pointer,
.optional_single_mut_pointer,
=> unreachable,

.array => self.cast(Payload.Array).?.elem_type,
.single_const_pointer => self.cast(Payload.SingleConstPointer).?.pointee_type,
.single_mut_pointer => self.cast(Payload.SingleMutPointer).?.pointee_type,
.single_const_pointer => self.castPointer().?.pointee_type,
.single_mut_pointer => self.castPointer().?.pointee_type,
.array_u8_sentinel_0, .const_slice_u8 => Type.initTag(.u8),
.single_const_pointer_to_comptime_int => Type.initTag(.comptime_int),
};
}

/// Asserts that the type is an optional.
pub fn optionalChild(self: Type, buf: *Payload.Pointer) Type {
return switch (self.tag()) {
.optional => self.cast(Payload.Optional).?.child_type,
.optional_single_mut_pointer => self.cast(Payload.OptionalSingleMutPointer).?.pointee_type,
.optional_single_const_pointer => self.cast(Payload.OptionalSingleConstPointer).?.pointee_type,
.optional_single_mut_pointer => {
buf.* = .{
.base = .{ .tag = .single_mut_pointer },
.pointee_type = self.castPointer().?.pointee_type,
};
return Type.initPayload(&buf.base);
},
.optional_single_const_pointer => {
buf.* = .{
.base = .{ .tag = .single_const_pointer },
.pointee_type = self.castPointer().?.pointee_type,
};
return Type.initPayload(&buf.base);
},
else => unreachable,
};
}

/// Asserts that the type is an optional.
/// Same as `optionalChild` but allocates the buffer if needed.
pub fn optionalChildAlloc(self: Type, allocator: *Allocator) !Type {
return switch (self.tag()) {
.optional => self.cast(Payload.Optional).?.child_type,
.optional_single_mut_pointer, .optional_single_const_pointer => {
const payload = try allocator.create(Payload.Pointer);
payload.* = .{
.base = .{
.tag = if (self.tag() == .optional_single_const_pointer)
.single_const_pointer
else
.single_mut_pointer,
},
.pointee_type = self.castPointer().?.pointee_type,
};
return Type.initPayload(&payload.base);
},
else => unreachable,
};
}

@@ -1901,13 +1978,8 @@ pub const Type = extern union {
ty = array.elem_type;
continue;
},
.single_const_pointer => {
const ptr = ty.cast(Payload.SingleConstPointer).?;
ty = ptr.pointee_type;
continue;
},
.single_mut_pointer => {
const ptr = ty.cast(Payload.SingleMutPointer).?;
.single_const_pointer, .single_mut_pointer => {
const ptr = ty.castPointer().?;
ty = ptr.pointee_type;
continue;
},
@@ -2049,14 +2121,8 @@ pub const Type = extern union {
len: u64,
};

pub const SingleConstPointer = struct {
base: Payload = Payload{ .tag = .single_const_pointer },

pointee_type: Type,
};

pub const SingleMutPointer = struct {
base: Payload = Payload{ .tag = .single_mut_pointer },
pub const Pointer = struct {
base: Payload,

pointee_type: Type,
};
@@ -2086,18 +2152,6 @@ pub const Type = extern union {

child_type: Type,
};

pub const OptionalSingleConstPointer = struct {
base: Payload = Payload{ .tag = .optional_single_const_pointer },

pointee_type: Type,
};

pub const OptionalSingleMutPointer = struct {
base: Payload = Payload{ .tag = .optional_single_mut_pointer },

pointee_type: Type,
};
};
};

@@ -151,6 +151,8 @@ pub const Inst = struct {
isnonnull,
/// Return a boolean true if an optional is null. `x == null`
isnull,
/// Return a boolean true if value is an error
iserr,
/// A labeled block of code that loops forever. At the end of the body it is implied
/// to repeat; no explicit "repeat" instruction terminates loop bodies.
loop,
@@ -192,6 +194,8 @@ pub const Inst = struct {
single_const_ptr_type,
/// Create a mutable pointer type based on the element type. `*T`
single_mut_ptr_type,
/// Create a pointer type with attributes
ptr_type,
/// Write a value to a pointer. For loading, see `deref`.
store,
/// String Literal. Makes an anonymous Decl and then takes a pointer to it.
@@ -217,6 +221,10 @@ pub const Inst = struct {
unwrap_optional_safe,
/// Same as previous, but without safety checks. Used for orelse, if and while
unwrap_optional_unsafe,
/// Gets the payload of an error union
unwrap_err_safe,
/// Same as previous, but without safety checks. Used for orelse, if and while
unwrap_err_unsafe,

pub fn Type(tag: Tag) type {
return switch (tag) {
@@ -235,6 +243,7 @@ pub const Inst = struct {
.@"return",
.isnull,
.isnonnull,
.iserr,
.ptrtoint,
.alloc,
.ensure_result_used,
@@ -248,6 +257,8 @@ pub const Inst = struct {
.optional_type,
.unwrap_optional_safe,
.unwrap_optional_unsafe,
.unwrap_err_safe,
.unwrap_err_unsafe,
=> UnOp,

.add,
@@ -305,6 +316,7 @@ pub const Inst = struct {
.fntype => FnType,
.elemptr => ElemPtr,
.condbr => CondBr,
.ptr_type => PtrType,
};
}

@@ -360,6 +372,7 @@ pub const Inst = struct {
.inttype,
.isnonnull,
.isnull,
.iserr,
.mod_rem,
.mul,
.mulwrap,
@@ -382,6 +395,9 @@ pub const Inst = struct {
.optional_type,
.unwrap_optional_safe,
.unwrap_optional_unsafe,
.unwrap_err_safe,
.unwrap_err_unsafe,
.ptr_type,
=> false,

.@"break",
@@ -811,6 +827,24 @@ pub const Inst = struct {
},
kw_args: struct {},
};

pub const PtrType = struct {
pub const base_tag = Tag.ptr_type;
base: Inst,

positionals: struct {
child_type: *Inst,
},
kw_args: struct {
@"allowzero": bool = false,
@"align": ?*Inst = null,
align_bit_start: ?*Inst = null,
align_bit_end: ?*Inst = null,
@"const": bool = true,
@"volatile": bool = false,
sentinel: ?*Inst = null,
},
};
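Illustrative note (not part of the diff): these kw_args mirror the pointer attributes that astgen's ptrType() collects from source. The type names below are hypothetical, but the mapping follows the code above:

    const M = *u8; // simple form: lowers to single_mut_ptr_type
    const C = *const u8; // simple form: lowers to single_const_ptr_type
    const A = *align(4) const volatile u8; // attributes present: lowers to ptr_type with kw_args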
};

pub const ErrorMsg = struct {
@@ -1992,9 +2026,11 @@ const EmitZIR = struct {
.ptrtoint => try self.emitUnOp(inst.src, new_body, inst.castTag(.ptrtoint).?, .ptrtoint),
.isnull => try self.emitUnOp(inst.src, new_body, inst.castTag(.isnull).?, .isnull),
.isnonnull => try self.emitUnOp(inst.src, new_body, inst.castTag(.isnonnull).?, .isnonnull),
.iserr => try self.emitUnOp(inst.src, new_body, inst.castTag(.iserr).?, .iserr),
.load => try self.emitUnOp(inst.src, new_body, inst.castTag(.load).?, .deref),
.ref => try self.emitUnOp(inst.src, new_body, inst.castTag(.ref).?, .ref),
.unwrap_optional => try self.emitUnOp(inst.src, new_body, inst.castTag(.unwrap_optional).?, .unwrap_optional_unsafe),
.wrap_optional => try self.emitCast(inst.src, new_body, inst.castTag(.wrap_optional).?, .as),

.add => try self.emitBinOp(inst.src, new_body, inst.castTag(.add).?, .add),
.sub => try self.emitBinOp(inst.src, new_body, inst.castTag(.sub).?, .sub),
@@ -2338,6 +2374,7 @@ const EmitZIR = struct {
}
},
.Optional => {
var buf: Type.Payload.Pointer = undefined;
const inst = try self.arena.allocator.create(Inst.UnOp);
inst.* = .{
.base = .{
@@ -2345,7 +2382,7 @@ const EmitZIR = struct {
.tag = .optional_type,
},
.positionals = .{
.operand = (try self.emitType(src, ty.elemType())).inst,
.operand = (try self.emitType(src, ty.optionalChild(&buf))).inst,
},
.kw_args = .{},
};

@@ -53,6 +53,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
.ret_type => return analyzeInstRetType(mod, scope, old_inst.castTag(.ret_type).?),
.single_const_ptr_type => return analyzeInstSingleConstPtrType(mod, scope, old_inst.castTag(.single_const_ptr_type).?),
.single_mut_ptr_type => return analyzeInstSingleMutPtrType(mod, scope, old_inst.castTag(.single_mut_ptr_type).?),
.ptr_type => return analyzeInstPtrType(mod, scope, old_inst.castTag(.ptr_type).?),
.store => return analyzeInstStore(mod, scope, old_inst.castTag(.store).?),
.str => return analyzeInstStr(mod, scope, old_inst.castTag(.str).?),
.int => {
@@ -103,11 +104,14 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
.condbr => return analyzeInstCondBr(mod, scope, old_inst.castTag(.condbr).?),
.isnull => return analyzeInstIsNonNull(mod, scope, old_inst.castTag(.isnull).?, true),
.isnonnull => return analyzeInstIsNonNull(mod, scope, old_inst.castTag(.isnonnull).?, false),
.iserr => return analyzeInstIsErr(mod, scope, old_inst.castTag(.iserr).?, true),
.boolnot => return analyzeInstBoolNot(mod, scope, old_inst.castTag(.boolnot).?),
.typeof => return analyzeInstTypeOf(mod, scope, old_inst.castTag(.typeof).?),
.optional_type => return analyzeInstOptionalType(mod, scope, old_inst.castTag(.optional_type).?),
.unwrap_optional_safe => return analyzeInstUnwrapOptional(mod, scope, old_inst.castTag(.unwrap_optional_safe).?, true),
.unwrap_optional_unsafe => return analyzeInstUnwrapOptional(mod, scope, old_inst.castTag(.unwrap_optional_unsafe).?, false),
.unwrap_err_safe => return analyzeInstUnwrapErr(mod, scope, old_inst.castTag(.unwrap_err_safe).?, true),
.unwrap_err_unsafe => return analyzeInstUnwrapErr(mod, scope, old_inst.castTag(.unwrap_err_unsafe).?, false),
}
}

@@ -316,7 +320,7 @@ fn analyzeInstRetPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerErr

fn analyzeInstRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const operand = try resolveInst(mod, scope, inst.positionals.operand);
const ptr_type = try mod.singleConstPtrType(scope, inst.base.src, operand.ty);
const ptr_type = try mod.singlePtrType(scope, inst.base.src, false, operand.ty);

if (operand.value()) |val| {
const ref_payload = try scope.arena().create(Value.Payload.RefVal);
@@ -357,7 +361,7 @@ fn analyzeInstEnsureResultNonError(mod: *Module, scope: *Scope, inst: *zir.Inst.

fn analyzeInstAlloc(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const var_type = try resolveType(mod, scope, inst.positionals.operand);
const ptr_type = try mod.singleMutPtrType(scope, inst.base.src, var_type);
const ptr_type = try mod.singlePtrType(scope, inst.base.src, true, var_type);
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
return mod.addNoOp(b, inst.base.src, ptr_type, .alloc);
}
@@ -673,15 +677,17 @@ fn analyzeInstOptionalType(mod: *Module, scope: *Scope, optional: *zir.Inst.UnOp

return mod.constType(scope, optional.base.src, Type.initPayload(switch (child_type.tag()) {
.single_const_pointer => blk: {
const payload = try scope.arena().create(Type.Payload.OptionalSingleConstPointer);
const payload = try scope.arena().create(Type.Payload.Pointer);
payload.* = .{
.base = .{ .tag = .optional_single_const_pointer },
.pointee_type = child_type.elemType(),
};
break :blk &payload.base;
},
.single_mut_pointer => blk: {
const payload = try scope.arena().create(Type.Payload.OptionalSingleMutPointer);
const payload = try scope.arena().create(Type.Payload.Pointer);
payload.* = .{
.base = .{ .tag = .optional_single_mut_pointer },
.pointee_type = child_type.elemType(),
};
break :blk &payload.base;
@@ -704,11 +710,8 @@ fn analyzeInstUnwrapOptional(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp
return mod.fail(scope, unwrap.base.src, "expected optional type, found {}", .{operand.ty.elemType()});
}

const child_type = operand.ty.elemType().elemType();
const child_pointer = if (operand.ty.isConstPtr())
try mod.singleConstPtrType(scope, unwrap.base.src, child_type)
else
try mod.singleMutPtrType(scope, unwrap.base.src, child_type);
const child_type = try operand.ty.elemType().optionalChildAlloc(scope.arena());
const child_pointer = try mod.singlePtrType(scope, unwrap.base.src, operand.ty.isConstPtr(), child_type);

if (operand.value()) |val| {
if (val.isNull()) {
@@ -728,6 +731,10 @@ fn analyzeInstUnwrapOptional(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp
return mod.addUnOp(b, unwrap.base.src, child_pointer, .unwrap_optional, operand);
}

fn analyzeInstUnwrapErr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst {
return mod.fail(scope, unwrap.base.src, "TODO implement analyzeInstUnwrapErr", .{});
}

fn analyzeInstFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*Inst {
const return_type = try resolveType(mod, scope, fntype.positionals.return_type);

@@ -912,8 +919,11 @@ fn analyzeInstElemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.ElemPtr) Inne
// required a larger index.
const elem_ptr = try array_ptr_val.elemPtr(scope.arena(), @intCast(usize, index_u64));

const type_payload = try scope.arena().create(Type.Payload.SingleConstPointer);
type_payload.* = .{ .pointee_type = array_ptr.ty.elemType().elemType() };
const type_payload = try scope.arena().create(Type.Payload.Pointer);
type_payload.* = .{
.base = .{ .tag = .single_const_pointer },
.pointee_type = array_ptr.ty.elemType().elemType(),
};

return mod.constInst(scope, inst.base.src, .{
.ty = Type.initPayload(&type_payload.base),
@@ -1165,6 +1175,10 @@ fn analyzeInstIsNonNull(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, inver
return mod.analyzeIsNull(scope, inst.base.src, operand, invert_logic);
}

fn analyzeInstIsErr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
return mod.fail(scope, inst.base.src, "TODO implement analyzeInstIsErr", .{});
}

fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerError!*Inst {
const uncasted_cond = try resolveInst(mod, scope, inst.positionals.condition);
const cond = try mod.coerce(scope, Type.initTag(.bool), uncasted_cond);
@@ -1278,12 +1292,16 @@ fn analyzeDeclVal(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclVal) InnerErr

fn analyzeInstSingleConstPtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const elem_type = try resolveType(mod, scope, inst.positionals.operand);
const ty = try mod.singleConstPtrType(scope, inst.base.src, elem_type);
const ty = try mod.singlePtrType(scope, inst.base.src, false, elem_type);
return mod.constType(scope, inst.base.src, ty);
}

fn analyzeInstSingleMutPtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const elem_type = try resolveType(mod, scope, inst.positionals.operand);
const ty = try mod.singleMutPtrType(scope, inst.base.src, elem_type);
const ty = try mod.singlePtrType(scope, inst.base.src, true, elem_type);
return mod.constType(scope, inst.base.src, ty);
}

fn analyzeInstPtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.PtrType) InnerError!*Inst {
return mod.fail(scope, inst.base.src, "TODO implement ptr_type", .{});
}