// Mirror of https://github.com/ziglang/zig.git (synced 2026-02-03 05:03:38 +00:00).
// This change also deletes C string literals from the language, and makes the
// std lib and compiler changes necessary to get the behavior tests and std lib
// tests passing again. (File: 2589 lines, 104 KiB, Zig.)
const std = @import("std");
|
|
const builtin = @import("builtin");
|
|
const Compilation = @import("compilation.zig").Compilation;
|
|
const Scope = @import("scope.zig").Scope;
|
|
const ast = std.zig.ast;
|
|
const Allocator = std.mem.Allocator;
|
|
const Value = @import("value.zig").Value;
|
|
const Type = Value.Type;
|
|
const assert = std.debug.assert;
|
|
const Token = std.zig.Token;
|
|
const Span = @import("errmsg.zig").Span;
|
|
const llvm = @import("llvm.zig");
|
|
const codegen = @import("codegen.zig");
|
|
const ObjectFile = codegen.ObjectFile;
|
|
const Decl = @import("decl.zig").Decl;
|
|
const mem = std.mem;
|
|
|
|
/// Addressing mode requested for an expression result: produce the value
/// itself (None) or a pointer to it (Ptr).
pub const LVal = enum {
    None,
    Ptr,
};
|
|
|
|
/// Analysis state of an instruction's value: not yet known, known only by
/// type, or fully known as a compile-time value.
pub const IrVal = union(enum) {
    Unknown,
    KnownType: *Type,
    KnownValue: *Value,

    /// Initial value category assigned to each instruction kind before analysis
    /// (see the per-struct `ir_val_init` declarations below).
    const Init = enum {
        Unknown,
        NoReturn,
        Void,
    };

    /// Prints a human-readable representation to stderr. Debugging aid only.
    pub fn dump(self: IrVal) void {
        switch (self) {
            IrVal.Unknown => std.debug.warn("Unknown"),
            IrVal.KnownType => |typ| {
                std.debug.warn("KnownType(");
                typ.dump();
                std.debug.warn(")");
            },
            IrVal.KnownValue => |value| {
                std.debug.warn("KnownValue(");
                value.dump();
                std.debug.warn(")");
            },
        }
    }
};
|
|
|
|
/// Base of all IR instructions. Concrete instruction structs embed this as
/// their `base` field and are recovered with @fieldParentPtr.
pub const Inst = struct {
    id: Id,
    scope: *Scope,
    debug_id: usize,
    val: IrVal,
    ref_count: usize,
    span: Span,
    owner_bb: *BasicBlock,

    /// true if this instruction was generated by zig and not from user code
    is_generated: bool,

    /// the instruction that is derived from this one in analysis
    child: ?*Inst,

    /// the instruction that this one derives from in analysis
    parent: ?*Inst,

    /// populated during codegen
    llvm_value: ?*llvm.Value,
|
|
|
|
    /// Downcasts `base` to the concrete instruction struct `T`, or returns
    /// null when `base` is a different instruction kind.
    pub fn cast(base: *Inst, comptime T: type) ?*T {
        if (base.id == comptime typeToId(T)) {
            return @fieldParentPtr(T, "base", base);
        }
        return null;
    }
|
|
|
|
    /// Maps a concrete instruction struct type to its `Id` tag at comptime.
    /// NOTE(review): only structs listed in `Id` resolve here; nested structs
    /// not in `Id` (e.g. CondBr, TestErr) would hit `unreachable` — confirm
    /// they are never passed in.
    pub fn typeToId(comptime T: type) Id {
        comptime var i = 0;
        inline while (i < @memberCount(Id)) : (i += 1) {
            if (T == @field(Inst, @memberName(Id, i))) {
                return @field(Id, @memberName(Id, i));
            }
        }
        unreachable;
    }
|
|
|
|
    /// Prints `#debug_id = Tag(...)` to stderr by dispatching to the
    /// concrete instruction struct's dump(). Debugging aid only.
    pub fn dump(base: *const Inst) void {
        comptime var i = 0;
        inline while (i < @memberCount(Id)) : (i += 1) {
            if (base.id == @field(Id, @memberName(Id, i))) {
                const T = @field(Inst, @memberName(Id, i));
                std.debug.warn("#{} = {}(", base.debug_id, @tagName(base.id));
                @fieldParentPtr(T, "base", base).dump();
                std.debug.warn(")");
                return;
            }
        }
        unreachable;
    }
|
|
|
|
    /// Dispatches to the concrete instruction struct's hasSideEffects().
    /// Instructions reporting side effects must not be treated as dead.
    pub fn hasSideEffects(base: *const Inst) bool {
        comptime var i = 0;
        inline while (i < @memberCount(Id)) : (i += 1) {
            if (base.id == @field(Id, @memberName(Id, i))) {
                const T = @field(Inst, @memberName(Id, i));
                return @fieldParentPtr(T, "base", base).hasSideEffects();
            }
        }
        unreachable;
    }
|
|
|
|
    /// Semantic-analysis dispatch: recovers the concrete instruction from
    /// `base` and runs its analyze() pass. Kinds whose analyze() is async
    /// are awaited; the others return directly.
    pub async fn analyze(base: *Inst, ira: *Analyze) Analyze.Error!*Inst {
        switch (base.id) {
            Id.Return => return @fieldParentPtr(Return, "base", base).analyze(ira),
            Id.Const => return @fieldParentPtr(Const, "base", base).analyze(ira),
            Id.Call => return @fieldParentPtr(Call, "base", base).analyze(ira),
            Id.DeclRef => return await (async @fieldParentPtr(DeclRef, "base", base).analyze(ira) catch unreachable),
            Id.Ref => return await (async @fieldParentPtr(Ref, "base", base).analyze(ira) catch unreachable),
            Id.DeclVar => return @fieldParentPtr(DeclVar, "base", base).analyze(ira),
            Id.CheckVoidStmt => return @fieldParentPtr(CheckVoidStmt, "base", base).analyze(ira),
            Id.Phi => return @fieldParentPtr(Phi, "base", base).analyze(ira),
            Id.Br => return @fieldParentPtr(Br, "base", base).analyze(ira),
            Id.AddImplicitReturnType => return @fieldParentPtr(AddImplicitReturnType, "base", base).analyze(ira),
            Id.PtrType => return await (async @fieldParentPtr(PtrType, "base", base).analyze(ira) catch unreachable),
            Id.VarPtr => return await (async @fieldParentPtr(VarPtr, "base", base).analyze(ira) catch unreachable),
            Id.LoadPtr => return await (async @fieldParentPtr(LoadPtr, "base", base).analyze(ira) catch unreachable),
        }
    }
|
|
|
|
    /// Codegen dispatch: emits LLVM IR for this instruction. Returns null for
    /// instructions with no LLVM value. DeclRef/PtrType are unreachable here
    /// because analysis replaces them; the @panic arms are not yet implemented.
    pub fn render(base: *Inst, ofile: *ObjectFile, fn_val: *Value.Fn) (error{OutOfMemory}!?*llvm.Value) {
        switch (base.id) {
            Id.Return => return @fieldParentPtr(Return, "base", base).render(ofile, fn_val),
            Id.Const => return @fieldParentPtr(Const, "base", base).render(ofile, fn_val),
            Id.Call => return @fieldParentPtr(Call, "base", base).render(ofile, fn_val),
            Id.VarPtr => return @fieldParentPtr(VarPtr, "base", base).render(ofile, fn_val),
            Id.LoadPtr => return @fieldParentPtr(LoadPtr, "base", base).render(ofile, fn_val),
            Id.DeclRef => unreachable,
            Id.PtrType => unreachable,
            Id.Ref => @panic("TODO"),
            Id.DeclVar => @panic("TODO"),
            Id.CheckVoidStmt => @panic("TODO"),
            Id.Phi => @panic("TODO"),
            Id.Br => @panic("TODO"),
            Id.AddImplicitReturnType => @panic("TODO"),
        }
    }
|
|
|
|
    /// Increments this instruction's reference count. A runtime reference
    /// from a different basic block also refs the owning block so it is
    /// kept alive.
    fn ref(base: *Inst, builder: *Builder) void {
        base.ref_count += 1;
        if (base.owner_bb != builder.current_basic_block and !base.isCompTime()) {
            base.owner_bb.ref(builder);
        }
    }
|
|
|
|
    /// Copies the comptime-known value. When the parent instruction is no
    /// longer referenced, its reference is stolen (deref-and-copy) instead
    /// of plain copied. Asserts `val` is KnownValue and `parent` is set.
    fn copyVal(base: *Inst, comp: *Compilation) !*Value {
        if (base.parent.?.ref_count == 0) {
            return base.val.KnownValue.derefAndCopy(comp);
        }
        return base.val.KnownValue.copy(comp);
    }
|
|
|
|
fn getAsParam(param: *Inst) !*Inst {
|
|
param.ref_count -= 1;
|
|
const child = param.child orelse return error.SemanticAnalysisFailed;
|
|
switch (child.val) {
|
|
IrVal.Unknown => return error.SemanticAnalysisFailed,
|
|
else => return child,
|
|
}
|
|
}
|
|
|
|
fn getConstVal(self: *Inst, ira: *Analyze) !*Value {
|
|
if (self.isCompTime()) {
|
|
return self.val.KnownValue;
|
|
} else {
|
|
try ira.addCompileError(self.span, "unable to evaluate constant expression");
|
|
return error.SemanticAnalysisFailed;
|
|
}
|
|
}
|
|
|
|
    /// Resolves this instruction as a comptime-known `type` value:
    /// implicitly casts to the metatype, then unwraps the Type payload.
    fn getAsConstType(param: *Inst, ira: *Analyze) !*Type {
        const meta_type = Type.MetaType.get(ira.irb.comp);
        // Balance the ref taken by get(); the cast below keeps its own reference.
        meta_type.base.base.deref(ira.irb.comp);

        const inst = try param.getAsParam();
        const casted = try ira.implicitCast(inst, &meta_type.base);
        const val = try casted.getConstVal(ira);
        return val.cast(Value.Type).?;
    }
|
|
|
|
    /// Resolves this instruction as a comptime alignment value.
    /// Not yet implemented; the commented-out C++ below is the stage1
    /// behavior being ported (>= 1 and power-of-two validation).
    fn getAsConstAlign(param: *Inst, ira: *Analyze) !u32 {
        return error.Unimplemented;
        //const align_type = Type.Int.get_align(ira.irb.comp);
        //align_type.base.base.deref(ira.irb.comp);

        //const inst = try param.getAsParam();
        //const casted = try ira.implicitCast(inst, align_type);
        //const val = try casted.getConstVal(ira);

        //uint32_t align_bytes = bigint_as_unsigned(&const_val->data.x_bigint);
        //if (align_bytes == 0) {
        //    ir_add_error(ira, value, buf_sprintf("alignment must be >= 1"));
        //    return false;
        //}

        //if (!is_power_of_2(align_bytes)) {
        //    ir_add_error(ira, value, buf_sprintf("alignment value %" PRIu32 " is not a power of 2", align_bytes));
        //    return false;
        //}
    }
|
|
|
|
    /// asserts that the type is known
    /// (KnownValue instructions report their value's type).
    fn getKnownType(self: *Inst) *Type {
        switch (self.val) {
            IrVal.KnownType => |typ| return typ,
            IrVal.KnownValue => |value| return value.typ,
            IrVal.Unknown => unreachable,
        }
    }
|
|
|
|
    /// Marks this instruction as compiler-generated (not from user code).
    pub fn setGenerated(base: *Inst) void {
        base.is_generated = true;
    }
|
|
|
|
pub fn isNoReturn(base: *const Inst) bool {
|
|
switch (base.val) {
|
|
IrVal.Unknown => return false,
|
|
IrVal.KnownValue => |x| return x.typ.id == Type.Id.NoReturn,
|
|
IrVal.KnownType => |typ| return typ.id == Type.Id.NoReturn,
|
|
}
|
|
}
|
|
|
|
    /// Reports whether this instruction's value is fully known at compile time.
    pub fn isCompTime(base: *const Inst) bool {
        return base.val == IrVal.KnownValue;
    }
|
|
|
|
    /// Links an analyzed instruction (`self`) to the pre-analysis
    /// instruction it was derived from. Each side may be linked only once.
    pub fn linkToParent(self: *Inst, parent: *Inst) void {
        assert(self.parent == null);
        assert(parent.child == null);
        self.parent = parent;
        parent.child = self;
    }
|
|
|
|
    /// Tag for every instruction kind dispatched by analyze()/render().
    /// NOTE(review): CondBr, TestErr, TestCompTime and SaveErrRetAddr structs
    /// exist below but are not listed here — confirm whether they are meant
    /// to be buildable yet (typeToId would hit unreachable for them).
    pub const Id = enum {
        Return,
        Const,
        Ref,
        DeclVar,
        CheckVoidStmt,
        Phi,
        Br,
        AddImplicitReturnType,
        Call,
        DeclRef,
        PtrType,
        VarPtr,
        LoadPtr,
    };
|
|
|
|
    /// Function call instruction: `fn_ref(args...)`.
    pub const Call = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            fn_ref: *Inst,
            args: []*Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(self: *const Call) void {
            std.debug.warn("#{}(", self.params.fn_ref.debug_id);
            for (self.params.args) |arg| {
                std.debug.warn("#{},", arg.debug_id);
            }
            std.debug.warn(")");
        }

        /// Calls may do anything; always treated as having side effects.
        pub fn hasSideEffects(self: *const Call) bool {
            return true;
        }

        /// Checks the callee is a function and the argument count matches,
        /// then rebuilds the call with analyzed operands. Result type is the
        /// callee's declared return type.
        pub fn analyze(self: *const Call, ira: *Analyze) !*Inst {
            const fn_ref = try self.params.fn_ref.getAsParam();
            const fn_ref_type = fn_ref.getKnownType();
            const fn_type = fn_ref_type.cast(Type.Fn) orelse {
                try ira.addCompileError(fn_ref.span, "type '{}' not a function", fn_ref_type.name);
                return error.SemanticAnalysisFailed;
            };

            const fn_type_param_count = fn_type.paramCount();

            if (fn_type_param_count != self.params.args.len) {
                try ira.addCompileError(
                    self.base.span,
                    "expected {} arguments, found {}",
                    fn_type_param_count,
                    self.params.args.len,
                );
                return error.SemanticAnalysisFailed;
            }

            // NOTE(review): arguments are not implicitly cast to the parameter
            // types here — TODO confirm whether that is handled elsewhere.
            const args = try ira.irb.arena().alloc(*Inst, self.params.args.len);
            for (self.params.args) |arg, i| {
                args[i] = try arg.getAsParam();
            }
            const new_inst = try ira.irb.build(Call, self.base.scope, self.base.span, Params{
                .fn_ref = fn_ref,
                .args = args,
            });
            new_inst.val = IrVal{ .KnownType = fn_type.key.data.Normal.return_type };
            return new_inst;
        }

        /// Emits an LLVM call with the C calling convention and no forced
        /// inlining.
        pub fn render(self: *Call, ofile: *ObjectFile, fn_val: *Value.Fn) !?*llvm.Value {
            const fn_ref = self.params.fn_ref.llvm_value.?;

            const args = try ofile.arena.alloc(*llvm.Value, self.params.args.len);
            for (self.params.args) |arg, i| {
                args[i] = arg.llvm_value.?;
            }

            const llvm_cc = llvm.CCallConv;
            const fn_inline = llvm.FnInline.Auto;

            return llvm.BuildCall(
                ofile.builder,
                fn_ref,
                args.ptr,
                @intCast(c_uint, args.len),
                llvm_cc,
                fn_inline,
                "",
            ) orelse error.OutOfMemory;
        }
    };
|
|
|
|
    /// Compile-time constant instruction; its value lives in `base.val`.
    pub const Const = struct {
        base: Inst,
        params: Params,

        const Params = struct {};

        // Use Builder.buildConst* methods, or, after building a Const instruction,
        // manually set the ir_val field.
        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(self: *const Const) void {
            self.base.val.KnownValue.dump();
        }

        pub fn hasSideEffects(self: *const Const) bool {
            return false;
        }

        /// Re-emits the constant, taking a new reference on its value.
        pub fn analyze(self: *const Const, ira: *Analyze) !*Inst {
            const new_inst = try ira.irb.build(Const, self.base.scope, self.base.span, Params{});
            new_inst.val = IrVal{ .KnownValue = self.base.val.KnownValue.getRef() };
            return new_inst;
        }

        pub fn render(self: *Const, ofile: *ObjectFile, fn_val: *Value.Fn) !?*llvm.Value {
            return self.base.val.KnownValue.getLlvmConst(ofile);
        }
    };
|
|
|
|
    /// Function return instruction.
    pub const Return = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            return_value: *Inst,
        };

        const ir_val_init = IrVal.Init.NoReturn;

        pub fn dump(self: *const Return) void {
            std.debug.warn("#{}", self.params.return_value.debug_id);
        }

        pub fn hasSideEffects(self: *const Return) bool {
            return true;
        }

        /// Implicitly casts the operand to the function's declared return type
        /// and rebuilds the return.
        pub fn analyze(self: *const Return, ira: *Analyze) !*Inst {
            const value = try self.params.return_value.getAsParam();
            const casted_value = try ira.implicitCast(value, ira.explicit_return_type);

            // TODO detect returning local variable address

            return ira.irb.build(Return, self.base.scope, self.base.span, Params{ .return_value = casted_value });
        }

        /// Emits `ret`. Types returned by pointer (sret-style) are not yet
        /// handled.
        pub fn render(self: *Return, ofile: *ObjectFile, fn_val: *Value.Fn) !?*llvm.Value {
            const value = self.params.return_value.llvm_value;
            const return_type = self.params.return_value.getKnownType();

            if (return_type.handleIsPtr()) {
                @panic("TODO");
            } else {
                _ = llvm.BuildRet(ofile.builder, value) orelse return error.OutOfMemory;
            }
            return null;
        }
    };
|
|
|
|
    /// Address-of instruction: produces a single-item pointer to the target.
    pub const Ref = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            target: *Inst,
            mut: Type.Pointer.Mut,
            volatility: Type.Pointer.Vol,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const Ref) void {}

        pub fn hasSideEffects(inst: *const Ref) bool {
            return false;
        }

        /// Comptime targets become comptime pointer constants; otherwise a
        /// runtime Ref is rebuilt with type `*target_type`.
        pub async fn analyze(self: *const Ref, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();

            if (ira.getCompTimeValOrNullUndefOk(target)) |val| {
                return ira.getCompTimeRef(
                    val,
                    Value.Ptr.Mut.CompTimeConst,
                    self.params.mut,
                    self.params.volatility,
                );
            }

            const new_inst = try ira.irb.build(Ref, self.base.scope, self.base.span, Params{
                .target = target,
                .mut = self.params.mut,
                .volatility = self.params.volatility,
            });
            const elem_type = target.getKnownType();
            const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
                .child_type = elem_type,
                .mut = self.params.mut,
                .vol = self.params.volatility,
                .size = Type.Pointer.Size.One,
                .alignment = Type.Pointer.Align.Abi,
            }) catch unreachable);
            // TODO: potentially set the hint that this is a stack pointer. But it might not be - this
            // could be a ref of a global, for example
            new_inst.val = IrVal{ .KnownType = &ptr_type.base };
            // TODO potentially add an alloca entry here
            return new_inst;
        }
    };
|
|
|
|
    /// Reference to a top-level declaration by name.
    pub const DeclRef = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            decl: *Decl,
            lval: LVal,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const DeclRef) void {}

        pub fn hasSideEffects(inst: *const DeclRef) bool {
            return false;
        }

        /// Forces resolution of the referenced decl, then lowers fn decls to
        /// constant fn values. Vars and Ptr lvals are not yet implemented.
        pub async fn analyze(self: *const DeclRef, ira: *Analyze) !*Inst {
            // Resolution errors other than OOM were already reported as
            // compile errors by resolveDecl.
            (await (async ira.irb.comp.resolveDecl(self.params.decl) catch unreachable)) catch |err| switch (err) {
                error.OutOfMemory => return error.OutOfMemory,
                else => return error.SemanticAnalysisFailed,
            };
            switch (self.params.decl.id) {
                Decl.Id.CompTime => unreachable,
                Decl.Id.Var => return error.Unimplemented,
                Decl.Id.Fn => {
                    const fn_decl = @fieldParentPtr(Decl.Fn, "base", self.params.decl);
                    const decl_val = switch (fn_decl.value) {
                        Decl.Fn.Val.Unresolved => unreachable,
                        Decl.Fn.Val.Fn => |fn_val| &fn_val.base,
                        Decl.Fn.Val.FnProto => |fn_proto| &fn_proto.base,
                    };
                    switch (self.params.lval) {
                        LVal.None => {
                            return ira.irb.buildConstValue(self.base.scope, self.base.span, decl_val);
                        },
                        LVal.Ptr => return error.Unimplemented,
                    }
                },
            }
        }
    };
|
|
|
|
    /// Produces a pointer to a local variable (currently only fn parameters).
    pub const VarPtr = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            var_scope: *Scope.Var,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const VarPtr) void {
            std.debug.warn("{}", inst.params.var_scope.name);
        }

        pub fn hasSideEffects(inst: *const VarPtr) bool {
            return false;
        }

        /// Rebuilds the instruction with type `*const param_type` for
        /// parameters; const locals are not yet handled.
        pub async fn analyze(self: *const VarPtr, ira: *Analyze) !*Inst {
            switch (self.params.var_scope.data) {
                Scope.Var.Data.Const => @panic("TODO"),
                Scope.Var.Data.Param => |param| {
                    const new_inst = try ira.irb.build(
                        Inst.VarPtr,
                        self.base.scope,
                        self.base.span,
                        Inst.VarPtr.Params{ .var_scope = self.params.var_scope },
                    );
                    const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
                        .child_type = param.typ,
                        .mut = Type.Pointer.Mut.Const,
                        .vol = Type.Pointer.Vol.Non,
                        .size = Type.Pointer.Size.One,
                        .alignment = Type.Pointer.Align.Abi,
                    }) catch unreachable);
                    new_inst.val = IrVal{ .KnownType = &ptr_type.base };
                    return new_inst;
                },
            }
        }

        pub fn render(self: *VarPtr, ofile: *ObjectFile, fn_val: *Value.Fn) *llvm.Value {
            switch (self.params.var_scope.data) {
                Scope.Var.Data.Const => unreachable, // turned into Inst.Const in analyze pass
                Scope.Var.Data.Param => |param| return param.llvm_value,
            }
        }
    };
|
|
|
|
    /// Pointer dereference instruction: loads the pointee value.
    pub const LoadPtr = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const LoadPtr) void {}

        pub fn hasSideEffects(inst: *const LoadPtr) bool {
            return false;
        }

        /// Verifies the operand is a pointer and rebuilds the load with the
        /// pointee type. The commented C++ is the stage1 comptime-load path
        /// still to be ported.
        pub async fn analyze(self: *const LoadPtr, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            const target_type = target.getKnownType();
            if (target_type.id != Type.Id.Pointer) {
                try ira.addCompileError(self.base.span, "dereference of non pointer type '{}'", target_type.name);
                return error.SemanticAnalysisFailed;
            }
            const ptr_type = @fieldParentPtr(Type.Pointer, "base", target_type);
            // if (instr_is_comptime(ptr)) {
            //     if (ptr->value.data.x_ptr.mut == ConstPtrMutComptimeConst ||
            //         ptr->value.data.x_ptr.mut == ConstPtrMutComptimeVar)
            //     {
            //         ConstExprValue *pointee = const_ptr_pointee(ira->codegen, &ptr->value);
            //         if (pointee->special != ConstValSpecialRuntime) {
            //             IrInstruction *result = ir_create_const(&ira->new_irb, source_instruction->scope,
            //                 source_instruction->source_node, child_type);
            //             copy_const_val(&result->value, pointee, ptr->value.data.x_ptr.mut == ConstPtrMutComptimeConst);
            //             result->value.type = child_type;
            //             return result;
            //         }
            //     }
            // }
            const new_inst = try ira.irb.build(
                Inst.LoadPtr,
                self.base.scope,
                self.base.span,
                Inst.LoadPtr.Params{ .target = target },
            );
            new_inst.val = IrVal{ .KnownType = ptr_type.key.child_type };
            return new_inst;
        }

        /// Emits the load; zero-bit types produce no LLVM value. The
        /// commented C++ is the stage1 unaligned-bit-field path, not yet
        /// ported.
        pub fn render(self: *LoadPtr, ofile: *ObjectFile, fn_val: *Value.Fn) !?*llvm.Value {
            const child_type = self.base.getKnownType();
            if (!child_type.hasBits()) {
                return null;
            }
            const ptr = self.params.target.llvm_value.?;
            const ptr_type = self.params.target.getKnownType().cast(Type.Pointer).?;

            return try codegen.getHandleValue(ofile, ptr, ptr_type);

            //uint32_t unaligned_bit_count = ptr_type->data.pointer.unaligned_bit_count;
            //if (unaligned_bit_count == 0)
            //    return get_handle_value(g, ptr, child_type, ptr_type);

            //bool big_endian = g->is_big_endian;

            //assert(!handle_is_ptr(child_type));
            //LLVMValueRef containing_int = gen_load(g, ptr, ptr_type, "");

            //uint32_t bit_offset = ptr_type->data.pointer.bit_offset;
            //uint32_t host_bit_count = LLVMGetIntTypeWidth(LLVMTypeOf(containing_int));
            //uint32_t shift_amt = big_endian ? host_bit_count - bit_offset - unaligned_bit_count : bit_offset;

            //LLVMValueRef shift_amt_val = LLVMConstInt(LLVMTypeOf(containing_int), shift_amt, false);
            //LLVMValueRef shifted_value = LLVMBuildLShr(g->builder, containing_int, shift_amt_val, "");

            //return LLVMBuildTrunc(g->builder, shifted_value, child_type->type_ref, "");
        }
    };
|
|
|
|
    /// Pointer type expression (e.g. `*align(a) const T`): evaluates to a
    /// comptime type value.
    pub const PtrType = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            child_type: *Inst,
            mut: Type.Pointer.Mut,
            vol: Type.Pointer.Vol,
            size: Type.Pointer.Size,
            alignment: ?*Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const PtrType) void {}

        pub fn hasSideEffects(inst: *const PtrType) bool {
            return false;
        }

        /// Resolves the child type and optional alignment at comptime and
        /// interns the pointer type. The commented C++ shows validation
        /// (no pointer-to-noreturn, no unknown-length-to-opaque) still to port.
        pub async fn analyze(self: *const PtrType, ira: *Analyze) !*Inst {
            const child_type = try self.params.child_type.getAsConstType(ira);
            // if (child_type->id == TypeTableEntryIdUnreachable) {
            //     ir_add_error(ira, &instruction->base, buf_sprintf("pointer to noreturn not allowed"));
            //     return ira->codegen->builtin_types.entry_invalid;
            // } else if (child_type->id == TypeTableEntryIdOpaque && instruction->ptr_len == PtrLenUnknown) {
            //     ir_add_error(ira, &instruction->base, buf_sprintf("unknown-length pointer to opaque"));
            //     return ira->codegen->builtin_types.entry_invalid;
            // }
            const alignment = if (self.params.alignment) |align_inst| blk: {
                const amt = try align_inst.getAsConstAlign(ira);
                break :blk Type.Pointer.Align{ .Override = amt };
            } else blk: {
                break :blk Type.Pointer.Align{ .Abi = {} };
            };
            const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
                .child_type = child_type,
                .mut = self.params.mut,
                .vol = self.params.vol,
                .size = self.params.size,
                .alignment = alignment,
            }) catch unreachable);
            // Balance the ref from get(); buildConstValue refs the value itself.
            ptr_type.base.base.deref(ira.irb.comp);

            return ira.irb.buildConstValue(self.base.scope, self.base.span, &ptr_type.base.base);
        }
    };
|
|
|
|
    /// Local variable declaration instruction. Analysis not yet implemented.
    pub const DeclVar = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            variable: *Variable,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const DeclVar) void {}

        pub fn hasSideEffects(inst: *const DeclVar) bool {
            return true;
        }

        pub fn analyze(self: *const DeclVar, ira: *Analyze) !*Inst {
            return error.Unimplemented; // TODO
        }
    };
|
|
|
|
    /// Verifies a statement expression has void type; non-void expression
    /// statements are a compile error ("expression value is ignored").
    pub const CheckVoidStmt = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(self: *const CheckVoidStmt) void {
            std.debug.warn("#{}", self.params.target.debug_id);
        }

        pub fn hasSideEffects(inst: *const CheckVoidStmt) bool {
            return true;
        }

        /// On success the check itself analyzes to a generated void constant.
        pub fn analyze(self: *const CheckVoidStmt, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            if (target.getKnownType().id != Type.Id.Void) {
                try ira.addCompileError(self.base.span, "expression value is ignored");
                return error.SemanticAnalysisFailed;
            }
            return ira.irb.buildConstVoid(self.base.scope, self.base.span, true);
        }
    };
|
|
|
|
    /// SSA phi node: merges values from multiple predecessor blocks.
    /// Analysis not yet implemented.
    pub const Phi = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            incoming_blocks: []*BasicBlock,
            incoming_values: []*Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const Phi) void {}

        pub fn hasSideEffects(inst: *const Phi) bool {
            return false;
        }

        pub fn analyze(self: *const Phi, ira: *Analyze) !*Inst {
            return error.Unimplemented; // TODO
        }
    };
|
|
|
|
    /// Unconditional branch to a basic block. Analysis not yet implemented.
    pub const Br = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            dest_block: *BasicBlock,
            is_comptime: *Inst,
        };

        const ir_val_init = IrVal.Init.NoReturn;

        pub fn dump(inst: *const Br) void {}

        pub fn hasSideEffects(inst: *const Br) bool {
            return true;
        }

        pub fn analyze(self: *const Br, ira: *Analyze) !*Inst {
            return error.Unimplemented; // TODO
        }
    };
|
|
|
|
    /// Conditional branch. Analysis not yet implemented.
    /// NOTE(review): not listed in `Id` above, so this kind is currently
    /// unreachable through typeToId/analyze/render dispatch — confirm intent.
    pub const CondBr = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            condition: *Inst,
            then_block: *BasicBlock,
            else_block: *BasicBlock,
            is_comptime: *Inst,
        };

        const ir_val_init = IrVal.Init.NoReturn;

        pub fn dump(inst: *const CondBr) void {}

        pub fn hasSideEffects(inst: *const CondBr) bool {
            return true;
        }

        pub fn analyze(self: *const CondBr, ira: *Analyze) !*Inst {
            return error.Unimplemented; // TODO
        }
    };
|
|
|
|
    /// Records a value for inferred-return-type collection; the returned
    /// values are gathered in ira.src_implicit_return_type_list.
    pub const AddImplicitReturnType = struct {
        base: Inst,
        params: Params,

        pub const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const AddImplicitReturnType) void {
            std.debug.warn("#{}", inst.params.target.debug_id);
        }

        pub fn hasSideEffects(inst: *const AddImplicitReturnType) bool {
            return true;
        }

        /// Appends the target to the implicit-return-type list and analyzes
        /// to a generated void constant.
        pub fn analyze(self: *const AddImplicitReturnType, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            try ira.src_implicit_return_type_list.append(target);
            return ira.irb.buildConstVoid(self.base.scope, self.base.span, true);
        }
    };
|
|
|
|
    /// Tests whether a value is an error (the `if (x) |v| else |e|` check).
    /// NOTE(review): not listed in `Id`, so currently unreachable via dispatch.
    pub const TestErr = struct {
        base: Inst,
        params: Params,

        pub const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const TestErr) void {
            std.debug.warn("#{}", inst.params.target.debug_id);
        }

        pub fn hasSideEffects(inst: *const TestErr) bool {
            return false;
        }

        /// Error sets are always errors (true); non-error types never are
        /// (false); error unions need the stage1 logic below, not yet ported.
        pub fn analyze(self: *const TestErr, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            const target_type = target.getKnownType();
            switch (target_type.id) {
                Type.Id.ErrorUnion => {
                    return error.Unimplemented;
                    // if (instr_is_comptime(value)) {
                    //     ConstExprValue *err_union_val = ir_resolve_const(ira, value, UndefBad);
                    //     if (!err_union_val)
                    //         return ira->codegen->builtin_types.entry_invalid;

                    //     if (err_union_val->special != ConstValSpecialRuntime) {
                    //         ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
                    //         out_val->data.x_bool = (err_union_val->data.x_err_union.err != nullptr);
                    //         return ira->codegen->builtin_types.entry_bool;
                    //     }
                    // }

                    // TypeTableEntry *err_set_type = type_entry->data.error_union.err_set_type;
                    // if (!resolve_inferred_error_set(ira->codegen, err_set_type, instruction->base.source_node)) {
                    //     return ira->codegen->builtin_types.entry_invalid;
                    // }
                    // if (!type_is_global_error_set(err_set_type) &&
                    //     err_set_type->data.error_set.err_count == 0)
                    // {
                    //     assert(err_set_type->data.error_set.infer_fn == nullptr);
                    //     ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
                    //     out_val->data.x_bool = false;
                    //     return ira->codegen->builtin_types.entry_bool;
                    // }

                    // ir_build_test_err_from(&ira->new_irb, &instruction->base, value);
                    // return ira->codegen->builtin_types.entry_bool;
                },
                Type.Id.ErrorSet => {
                    return ira.irb.buildConstBool(self.base.scope, self.base.span, true);
                },
                else => {
                    return ira.irb.buildConstBool(self.base.scope, self.base.span, false);
                },
            }
        }
    };
|
|
|
|
    /// Evaluates whether the target's value is comptime-known, as a constant
    /// bool. NOTE(review): not listed in `Id`, so unreachable via dispatch.
    pub const TestCompTime = struct {
        base: Inst,
        params: Params,

        pub const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const TestCompTime) void {
            std.debug.warn("#{}", inst.params.target.debug_id);
        }

        pub fn hasSideEffects(inst: *const TestCompTime) bool {
            return false;
        }

        pub fn analyze(self: *const TestCompTime, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            return ira.irb.buildConstBool(self.base.scope, self.base.span, target.isCompTime());
        }
    };
|
|
|
|
    /// Saves the error return trace address. Analysis just re-emits the
    /// instruction. NOTE(review): not listed in `Id`, so unreachable via
    /// dispatch.
    pub const SaveErrRetAddr = struct {
        base: Inst,
        params: Params,

        const Params = struct {};

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const SaveErrRetAddr) void {}

        pub fn hasSideEffects(inst: *const SaveErrRetAddr) bool {
            return true;
        }

        pub fn analyze(self: *const SaveErrRetAddr, ira: *Analyze) !*Inst {
            return ira.irb.build(Inst.SaveErrRetAddr, self.base.scope, self.base.span, Params{});
        }
    };
|
|
};
|
|
|
|
/// A declared local variable; currently only carries the scope introduced
/// by its declaration.
pub const Variable = struct {
    child_scope: *Scope,
};
|
|
|
|
/// A straight-line sequence of IR instructions ending in a terminator.
pub const BasicBlock = struct {
    ref_count: usize,
    name_hint: [*]const u8, // must be a C string literal
    debug_id: usize,
    scope: *Scope,
    instruction_list: std.ArrayList(*Inst),
    ref_instruction: ?*Inst,

    /// for codegen
    llvm_block: *llvm.BasicBlock,
    llvm_exit_block: *llvm.BasicBlock,

    /// the basic block that is derived from this one in analysis
    child: ?*BasicBlock,

    /// the basic block that this one derives from in analysis
    parent: ?*BasicBlock,

    /// Increments the reference count. `builder` is currently unused; kept
    /// for signature symmetry with Inst.ref.
    pub fn ref(self: *BasicBlock, builder: *Builder) void {
        self.ref_count += 1;
    }

    /// Links an analyzed block to the pre-analysis block it derives from.
    /// Each side may be linked only once.
    pub fn linkToParent(self: *BasicBlock, parent: *BasicBlock) void {
        assert(self.parent == null);
        assert(parent.child == null);
        self.parent = parent;
        parent.child = self;
    }
};
|
|
|
|
/// Stuff that survives longer than Builder
pub const Code = struct {
    basic_block_list: std.ArrayList(*BasicBlock),
    arena: std.heap.ArenaAllocator,
    return_type: ?*Type,
    tree_scope: *Scope.AstTree,

    /// allocator is comp.gpa()
    pub fn destroy(self: *Code, allocator: *Allocator) void {
        self.arena.deinit();
        allocator.destroy(self);
    }

    /// Prints every basic block and its instructions to stderr.
    /// Debugging aid only.
    pub fn dump(self: *Code) void {
        // Fix: removed `var bb_i: usize = 0;` — it was declared but never
        // read or incremented (unused local).
        for (self.basic_block_list.toSliceConst()) |bb| {
            std.debug.warn("{s}_{}:\n", bb.name_hint, bb.debug_id);
            for (bb.instruction_list.toSliceConst()) |instr| {
                std.debug.warn(" ");
                instr.dump();
                std.debug.warn("\n");
            }
        }
    }

    /// returns a ref-incremented value, or adds a compile error
    /// Scans the entry block for a comptime-known Return; any side-effecting
    /// instruction encountered first means the expression is not constant.
    pub fn getCompTimeResult(self: *Code, comp: *Compilation) !*Value {
        const bb = self.basic_block_list.at(0);
        for (bb.instruction_list.toSliceConst()) |inst| {
            if (inst.cast(Inst.Return)) |ret_inst| {
                const ret_value = ret_inst.params.return_value;
                if (ret_value.isCompTime()) {
                    return ret_value.val.KnownValue.getRef();
                }
                try comp.addCompileError(
                    self.tree_scope,
                    ret_value.span,
                    "unable to evaluate constant expression",
                );
                return error.SemanticAnalysisFailed;
            } else if (inst.hasSideEffects()) {
                try comp.addCompileError(
                    self.tree_scope,
                    inst.span,
                    "unable to evaluate constant expression",
                );
                return error.SemanticAnalysisFailed;
            }
        }
        unreachable;
    }
};
|
|
|
|
/// Builds IR instructions into a Code, tracking the current insertion point.
pub const Builder = struct {
    comp: *Compilation,
    code: *Code,
    current_basic_block: *BasicBlock,
    debug_id: usize — next value handed out for debug numbering
    next_debug_id: usize,
    is_comptime: bool,
    is_async: bool,
    begin_scope: ?*Scope,

    pub const Error = Analyze.Error;
|
|
|
|
    /// Allocates a fresh Code on comp.gpa() and returns a Builder over it.
    /// Caller eventually calls abort() or finish()+Code.destroy().
    pub fn init(comp: *Compilation, tree_scope: *Scope.AstTree, begin_scope: ?*Scope) !Builder {
        const code = try comp.gpa().create(Code);
        code.* = Code{
            .basic_block_list = undefined,
            .arena = std.heap.ArenaAllocator.init(comp.gpa()),
            .return_type = null,
            .tree_scope = tree_scope,
        };
        code.basic_block_list = std.ArrayList(*BasicBlock).init(&code.arena.allocator);
        // NOTE(review): this errdefer sits after the last fallible operation,
        // so it can never fire — presumably left over from an earlier version.
        errdefer code.destroy(comp.gpa());

        return Builder{
            .comp = comp,
            .current_basic_block = undefined,
            .code = code,
            .next_debug_id = 0,
            .is_comptime = false,
            .is_async = false,
            .begin_scope = begin_scope,
        };
    }
|
|
|
|
    /// Discards the Builder and frees the Code it was building.
    pub fn abort(self: *Builder) void {
        self.code.destroy(self.comp.gpa());
    }
|
|
|
|
    /// Call code.destroy() when done
    /// (ownership of the Code transfers to the caller).
    pub fn finish(self: *Builder) *Code {
        return self.code;
    }
|
|
|
|
    /// No need to clean up resources thanks to the arena allocator.
    /// Creates an unattached basic block with a fresh debug id; it is not
    /// appended to the Code until setCursorAtEndAndAppendBlock.
    pub fn createBasicBlock(self: *Builder, scope: *Scope, name_hint: [*]const u8) !*BasicBlock {
        const basic_block = try self.arena().create(BasicBlock);
        basic_block.* = BasicBlock{
            .ref_count = 0,
            .name_hint = name_hint,
            .debug_id = self.next_debug_id,
            .scope = scope,
            .instruction_list = std.ArrayList(*Inst).init(self.arena()),
            .child = null,
            .parent = null,
            .ref_instruction = null,
            .llvm_block = undefined,
            .llvm_exit_block = undefined,
        };
        self.next_debug_id += 1;
        return basic_block;
    }
|
|
|
|
/// Append `basic_block` to the Code's block list and make it the block that
/// subsequently-built instructions are appended to.
pub fn setCursorAtEndAndAppendBlock(self: *Builder, basic_block: *BasicBlock) !void {
    try self.code.basic_block_list.append(basic_block);
    self.current_basic_block = basic_block;
}
|
|
|
|
/// Make `basic_block` the block that subsequently-built instructions are
/// appended to, without adding it to the block list.
pub fn setCursorAtEnd(self: *Builder, basic_block: *BasicBlock) void {
    self.current_basic_block = basic_block;
}
|
|
|
|
/// Lower one AST node into IR instructions, dispatching on the node id.
/// `lval` says whether the caller wants a value or a pointer to the value;
/// most arms wrap their result with lvalWrap to honor it.
/// Most node kinds are still unimplemented in this self-hosted port and
/// return error.Unimplemented.
pub async fn genNode(irb: *Builder, node: *ast.Node, scope: *Scope, lval: LVal) Error!*Inst {
    switch (node.id) {
        // Declaration-level nodes are handled by the declaration analysis
        // path and never reach expression lowering.
        ast.Node.Id.Root => unreachable,
        ast.Node.Id.Use => unreachable,
        ast.Node.Id.TestDecl => unreachable,
        ast.Node.Id.VarDecl => return error.Unimplemented,
        ast.Node.Id.Defer => return error.Unimplemented,
        ast.Node.Id.InfixOp => return error.Unimplemented,
        ast.Node.Id.PrefixOp => {
            const prefix_op = @fieldParentPtr(ast.Node.PrefixOp, "base", node);
            switch (prefix_op.op) {
                ast.Node.PrefixOp.Op.AddressOf => return error.Unimplemented,
                ast.Node.PrefixOp.Op.ArrayType => |n| return error.Unimplemented,
                ast.Node.PrefixOp.Op.Await => return error.Unimplemented,
                ast.Node.PrefixOp.Op.BitNot => return error.Unimplemented,
                ast.Node.PrefixOp.Op.BoolNot => return error.Unimplemented,
                ast.Node.PrefixOp.Op.Cancel => return error.Unimplemented,
                ast.Node.PrefixOp.Op.OptionalType => return error.Unimplemented,
                ast.Node.PrefixOp.Op.Negation => return error.Unimplemented,
                ast.Node.PrefixOp.Op.NegationWrap => return error.Unimplemented,
                ast.Node.PrefixOp.Op.Resume => return error.Unimplemented,
                ast.Node.PrefixOp.Op.PtrType => |ptr_info| {
                    const inst = try await (async irb.genPtrType(prefix_op, ptr_info, scope) catch unreachable);
                    return irb.lvalWrap(scope, inst, lval);
                },
                ast.Node.PrefixOp.Op.SliceType => |ptr_info| return error.Unimplemented,
                ast.Node.PrefixOp.Op.Try => return error.Unimplemented,
            }
        },
        ast.Node.Id.SuffixOp => {
            const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", node);
            switch (suffix_op.op) {
                @TagType(ast.Node.SuffixOp.Op).Call => |*call| {
                    const inst = try await (async irb.genCall(suffix_op, call, scope) catch unreachable);
                    return irb.lvalWrap(scope, inst, lval);
                },
                @TagType(ast.Node.SuffixOp.Op).ArrayAccess => |n| return error.Unimplemented,
                @TagType(ast.Node.SuffixOp.Op).Slice => |slice| return error.Unimplemented,
                @TagType(ast.Node.SuffixOp.Op).ArrayInitializer => |init_list| return error.Unimplemented,
                @TagType(ast.Node.SuffixOp.Op).StructInitializer => |init_list| return error.Unimplemented,
                @TagType(ast.Node.SuffixOp.Op).Deref => return error.Unimplemented,
                @TagType(ast.Node.SuffixOp.Op).UnwrapOptional => return error.Unimplemented,
            }
        },
        ast.Node.Id.Switch => return error.Unimplemented,
        ast.Node.Id.While => return error.Unimplemented,
        ast.Node.Id.For => return error.Unimplemented,
        ast.Node.Id.If => return error.Unimplemented,
        ast.Node.Id.ControlFlowExpression => {
            const control_flow_expr = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", node);
            // genControlFlowExpr handles lval itself; no lvalWrap here.
            return await (async irb.genControlFlowExpr(control_flow_expr, scope, lval) catch unreachable);
        },
        ast.Node.Id.Suspend => return error.Unimplemented,
        ast.Node.Id.VarType => return error.Unimplemented,
        ast.Node.Id.ErrorType => return error.Unimplemented,
        ast.Node.Id.FnProto => return error.Unimplemented,
        ast.Node.Id.PromiseType => return error.Unimplemented,
        ast.Node.Id.IntegerLiteral => {
            const int_lit = @fieldParentPtr(ast.Node.IntegerLiteral, "base", node);
            return irb.lvalWrap(scope, try irb.genIntLit(int_lit, scope), lval);
        },
        ast.Node.Id.FloatLiteral => return error.Unimplemented,
        ast.Node.Id.StringLiteral => {
            const str_lit = @fieldParentPtr(ast.Node.StringLiteral, "base", node);
            const inst = try await (async irb.genStrLit(str_lit, scope) catch unreachable);
            return irb.lvalWrap(scope, inst, lval);
        },
        ast.Node.Id.MultilineStringLiteral => return error.Unimplemented,
        ast.Node.Id.CharLiteral => return error.Unimplemented,
        ast.Node.Id.BoolLiteral => return error.Unimplemented,
        ast.Node.Id.NullLiteral => return error.Unimplemented,
        ast.Node.Id.UndefinedLiteral => return error.Unimplemented,
        ast.Node.Id.Unreachable => return error.Unimplemented,
        ast.Node.Id.Identifier => {
            const identifier = @fieldParentPtr(ast.Node.Identifier, "base", node);
            // genIdentifier handles lval itself; no lvalWrap here.
            return await (async irb.genIdentifier(identifier, scope, lval) catch unreachable);
        },
        ast.Node.Id.GroupedExpression => {
            // Parenthesized expression: lower the inner expression directly.
            const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", node);
            return await (async irb.genNode(grouped_expr.expr, scope, lval) catch unreachable);
        },
        ast.Node.Id.BuiltinCall => return error.Unimplemented,
        ast.Node.Id.ErrorSetDecl => return error.Unimplemented,
        ast.Node.Id.ContainerDecl => return error.Unimplemented,
        ast.Node.Id.Asm => return error.Unimplemented,
        ast.Node.Id.Comptime => return error.Unimplemented,
        ast.Node.Id.Block => {
            const block = @fieldParentPtr(ast.Node.Block, "base", node);
            const inst = try await (async irb.genBlock(block, scope) catch unreachable);
            return irb.lvalWrap(scope, inst, lval);
        },
        ast.Node.Id.DocComment => return error.Unimplemented,
        ast.Node.Id.SwitchCase => return error.Unimplemented,
        ast.Node.Id.SwitchElse => return error.Unimplemented,
        ast.Node.Id.Else => return error.Unimplemented,
        ast.Node.Id.Payload => return error.Unimplemented,
        ast.Node.Id.PointerPayload => return error.Unimplemented,
        ast.Node.Id.PointerIndexPayload => return error.Unimplemented,
        ast.Node.Id.ContainerField => return error.Unimplemented,
        ast.Node.Id.ErrorTag => return error.Unimplemented,
        ast.Node.Id.AsmInput => return error.Unimplemented,
        ast.Node.Id.AsmOutput => return error.Unimplemented,
        ast.Node.Id.ParamDecl => return error.Unimplemented,
        ast.Node.Id.FieldInitializer => return error.Unimplemented,
        ast.Node.Id.EnumLiteral => return error.Unimplemented,
    }
}
|
|
|
|
/// Lower a function call expression: lower the callee, then each argument
/// left to right, then build a Call instruction.
/// NOTE(review): async calls are not yet ported from the C++ implementation;
/// see the commented-out reference code below.
async fn genCall(irb: *Builder, suffix_op: *ast.Node.SuffixOp, call: *ast.Node.SuffixOp.Op.Call, scope: *Scope) !*Inst {
    const fn_ref = try await (async irb.genNode(suffix_op.lhs, scope, LVal.None) catch unreachable);

    // Arguments live in the builder's arena; no cleanup needed.
    const args = try irb.arena().alloc(*Inst, call.params.len);
    var it = call.params.iterator(0);
    var i: usize = 0;
    while (it.next()) |arg_node_ptr| : (i += 1) {
        args[i] = try await (async irb.genNode(arg_node_ptr.*, scope, LVal.None) catch unreachable);
    }

    //bool is_async = node->data.fn_call_expr.is_async;
    //IrInstruction *async_allocator = nullptr;
    //if (is_async) {
    //    if (node->data.fn_call_expr.async_allocator) {
    //        async_allocator = ir_gen_node(irb, node->data.fn_call_expr.async_allocator, scope);
    //        if (async_allocator == irb->codegen->invalid_instruction)
    //            return async_allocator;
    //    }
    //}

    return irb.build(Inst.Call, scope, Span.token(suffix_op.rtoken), Inst.Call.Params{
        .fn_ref = fn_ref,
        .args = args,
    });
    //IrInstruction *fn_call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, is_async, async_allocator, nullptr);
    //return ir_lval_wrap(irb, scope, fn_call, lval);
}
|
|
|
|
/// Lower a pointer type expression (e.g. `[*]T`) to a PtrType instruction.
/// NOTE(review): mutability, volatility, size, and alignment are currently
/// hard-coded (Mut/Non-volatile/Many/null); the full logic from the C++
/// implementation (kept below as reference) has not been ported yet.
async fn genPtrType(
    irb: *Builder,
    prefix_op: *ast.Node.PrefixOp,
    ptr_info: ast.Node.PrefixOp.PtrInfo,
    scope: *Scope,
) !*Inst {
    // TODO port more logic

    //assert(node->type == NodeTypePointerType);
    //PtrLen ptr_len = (node->data.pointer_type.star_token->id == TokenIdStar ||
    //        node->data.pointer_type.star_token->id == TokenIdStarStar) ? PtrLenSingle : PtrLenUnknown;
    //bool is_const = node->data.pointer_type.is_const;
    //bool is_volatile = node->data.pointer_type.is_volatile;
    //AstNode *expr_node = node->data.pointer_type.op_expr;
    //AstNode *align_expr = node->data.pointer_type.align_expr;

    //IrInstruction *align_value;
    //if (align_expr != nullptr) {
    //    align_value = ir_gen_node(irb, align_expr, scope);
    //    if (align_value == irb->codegen->invalid_instruction)
    //        return align_value;
    //} else {
    //    align_value = nullptr;
    //}
    const child_type = try await (async irb.genNode(prefix_op.rhs, scope, LVal.None) catch unreachable);

    //uint32_t bit_offset_start = 0;
    //if (node->data.pointer_type.bit_offset_start != nullptr) {
    //    if (!bigint_fits_in_bits(node->data.pointer_type.bit_offset_start, 32, false)) {
    //        Buf *val_buf = buf_alloc();
    //        bigint_append_buf(val_buf, node->data.pointer_type.bit_offset_start, 10);
    //        exec_add_error_node(irb->codegen, irb->exec, node,
    //                buf_sprintf("value %s too large for u32 bit offset", buf_ptr(val_buf)));
    //        return irb->codegen->invalid_instruction;
    //    }
    //    bit_offset_start = bigint_as_unsigned(node->data.pointer_type.bit_offset_start);
    //}

    //uint32_t bit_offset_end = 0;
    //if (node->data.pointer_type.bit_offset_end != nullptr) {
    //    if (!bigint_fits_in_bits(node->data.pointer_type.bit_offset_end, 32, false)) {
    //        Buf *val_buf = buf_alloc();
    //        bigint_append_buf(val_buf, node->data.pointer_type.bit_offset_end, 10);
    //        exec_add_error_node(irb->codegen, irb->exec, node,
    //                buf_sprintf("value %s too large for u32 bit offset", buf_ptr(val_buf)));
    //        return irb->codegen->invalid_instruction;
    //    }
    //    bit_offset_end = bigint_as_unsigned(node->data.pointer_type.bit_offset_end);
    //}

    //if ((bit_offset_start != 0 || bit_offset_end != 0) && bit_offset_start >= bit_offset_end) {
    //    exec_add_error_node(irb->codegen, irb->exec, node,
    //            buf_sprintf("bit offset start must be less than bit offset end"));
    //    return irb->codegen->invalid_instruction;
    //}

    return irb.build(Inst.PtrType, scope, Span.node(&prefix_op.base), Inst.PtrType.Params{
        .child_type = child_type,
        .mut = Type.Pointer.Mut.Mut,
        .vol = Type.Pointer.Vol.Non,
        .size = Type.Pointer.Size.Many,
        .alignment = null,
    });
}
|
|
|
|
/// Report whether code generated at `target_scope` is evaluated at compile
/// time: either the whole builder is comptime, or a CompTime scope encloses
/// the target scope before any FnDef scope does.
fn isCompTime(irb: *Builder, target_scope: *Scope) bool {
    if (irb.is_comptime) return true;

    var scope = target_scope;
    while (true) : (scope = scope.parent.?) {
        switch (scope.id) {
            Scope.Id.CompTime => return true,
            Scope.Id.FnDef => return false,
            // These scope kinds never appear beneath a function body.
            Scope.Id.Decls => unreachable,
            Scope.Id.Root => unreachable,
            Scope.Id.AstTree => unreachable,
            // Transparent scopes: keep walking up.
            Scope.Id.Block,
            Scope.Id.Defer,
            Scope.Id.DeferExpr,
            Scope.Id.Var,
            => {},
        }
    }
}
|
|
|
|
/// Lower an integer literal to a comptime_int Const instruction.
/// Recognizes 0b/0o/0x radix prefixes; everything else is parsed as base 10.
pub fn genIntLit(irb: *Builder, int_lit: *ast.Node.IntegerLiteral, scope: *Scope) !*Inst {
    const int_token = irb.code.tree_scope.tree.tokenSlice(int_lit.token);

    var base: u8 = undefined;
    var rest: []const u8 = undefined;
    // len >= 3 guarantees at least one digit after the two-char prefix.
    if (int_token.len >= 3 and int_token[0] == '0') {
        base = switch (int_token[1]) {
            'b' => u8(2),
            'o' => u8(8),
            'x' => u8(16),
            // The tokenizer presumably rejects any other '0'-prefixed form
            // of length >= 3 — TODO confirm against the tokenizer.
            else => unreachable,
        };
        rest = int_token[2..];
    } else {
        base = 10;
        rest = int_token;
    }

    const comptime_int_type = Type.ComptimeInt.get(irb.comp);
    // The value below holds its own reference to the type; drop ours.
    defer comptime_int_type.base.base.deref(irb.comp);

    const int_val = Value.Int.createFromString(
        irb.comp,
        &comptime_int_type.base,
        base,
        rest,
    ) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        // The tokenizer only produces digits valid for the chosen base.
        error.InvalidBase => unreachable,
        error.InvalidCharForDigit => unreachable,
        error.DigitTooLargeForBase => unreachable,
    };
    errdefer int_val.base.deref(irb.comp);

    const inst = try irb.build(Inst.Const, scope, Span.token(int_lit.token), Inst.Const.Params{});
    // The instruction takes over the reference created above.
    inst.val = IrVal{ .KnownValue = &int_val.base };
    return inst;
}
|
|
|
|
/// Lower a string literal to a Const instruction.
/// A plain literal becomes an array value; a `c"..."` literal gets a null
/// terminator appended and becomes a pointer to the array's first element.
pub async fn genStrLit(irb: *Builder, str_lit: *ast.Node.StringLiteral, scope: *Scope) !*Inst {
    const str_token = irb.code.tree_scope.tree.tokenSlice(str_lit.token);
    const src_span = Span.token(str_lit.token);

    var bad_index: usize = undefined;
    var buf = std.zig.parseStringLiteral(irb.comp.gpa(), str_token, &bad_index) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        error.InvalidCharacter => {
            // bad_index points at the offending byte within the token.
            try irb.comp.addCompileError(
                irb.code.tree_scope,
                src_span,
                "invalid character in string literal: '{c}'",
                str_token[bad_index],
            );
            return error.SemanticAnalysisFailed;
        },
    };
    // Ownership of `buf` transfers to the array value below; only free it
    // on the error paths before that transfer happens.
    var buf_cleaned = false;
    errdefer if (!buf_cleaned) irb.comp.gpa().free(buf);

    if (str_token[0] == 'c') {
        // first we add a null
        buf = try irb.comp.gpa().realloc(buf, buf.len + 1);
        buf[buf.len - 1] = 0;

        // next make an array value
        const array_val = try await (async Value.Array.createOwnedBuffer(irb.comp, buf) catch unreachable);
        buf_cleaned = true;
        defer array_val.base.deref(irb.comp);

        // then make a pointer value pointing at the first element
        const ptr_val = try await (async Value.Ptr.createArrayElemPtr(
            irb.comp,
            array_val,
            Type.Pointer.Mut.Const,
            Type.Pointer.Size.Many,
            0,
        ) catch unreachable);
        defer ptr_val.base.deref(irb.comp);

        // buildConstValue takes its own reference to the value.
        return irb.buildConstValue(scope, src_span, &ptr_val.base);
    } else {
        const array_val = try await (async Value.Array.createOwnedBuffer(irb.comp, buf) catch unreachable);
        buf_cleaned = true;
        defer array_val.base.deref(irb.comp);

        return irb.buildConstValue(scope, src_span, &array_val.base);
    }
}
|
|
|
|
/// Lower a `{ ... }` block: creates a Block scope, lowers each statement,
/// tracks defer/var scopes introduced along the way, and produces the
/// block's result value. Labeled blocks get an end basic block and a Phi
/// over the values `break`-ed to them.
pub async fn genBlock(irb: *Builder, block: *ast.Node.Block, parent_scope: *Scope) !*Inst {
    const block_scope = try Scope.Block.create(irb.comp, parent_scope);

    const outer_block_scope = &block_scope.base;
    // Advances as defer/var statements introduce nested scopes.
    var child_scope = outer_block_scope;

    // The first block of a function body registers itself on the fn.
    if (parent_scope.findFnDef()) |fndef_scope| {
        if (fndef_scope.fn_val.?.block_scope == null) {
            fndef_scope.fn_val.?.block_scope = block_scope;
        }
    }

    if (block.statements.len == 0) {
        // {}
        return irb.buildConstVoid(child_scope, Span.token(block.lbrace), false);
    }

    if (block.label) |label| {
        // A labeled block can be broken out of with a value; collect the
        // incoming edges for the Phi at the end block.
        block_scope.incoming_values = std.ArrayList(*Inst).init(irb.arena());
        block_scope.incoming_blocks = std.ArrayList(*BasicBlock).init(irb.arena());
        block_scope.end_block = try irb.createBasicBlock(parent_scope, "BlockEnd");
        block_scope.is_comptime = try irb.buildConstBool(
            parent_scope,
            Span.token(block.lbrace),
            irb.isCompTime(parent_scope),
        );
    }

    var is_continuation_unreachable = false;
    var noreturn_return_value: ?*Inst = null;

    var stmt_it = block.statements.iterator(0);
    while (stmt_it.next()) |statement_node_ptr| {
        const statement_node = statement_node_ptr.*;

        if (statement_node.cast(ast.Node.Defer)) |defer_node| {
            // defer starts a new scope
            const defer_token = irb.code.tree_scope.tree.tokens.at(defer_node.defer_token);
            const kind = switch (defer_token.id) {
                Token.Id.Keyword_defer => Scope.Defer.Kind.ScopeExit,
                Token.Id.Keyword_errdefer => Scope.Defer.Kind.ErrorExit,
                else => unreachable,
            };
            const defer_expr_scope = try Scope.DeferExpr.create(irb.comp, parent_scope, defer_node.expr);
            const defer_child_scope = try Scope.Defer.create(irb.comp, parent_scope, kind, defer_expr_scope);
            child_scope = &defer_child_scope.base;
            // The defer expression itself is lowered later, at scope exit.
            continue;
        }
        const statement_value = try await (async irb.genNode(statement_node, child_scope, LVal.None) catch unreachable);

        is_continuation_unreachable = statement_value.isNoReturn();
        if (is_continuation_unreachable) {
            // keep the last noreturn statement value around in case we need to return it
            noreturn_return_value = statement_value;
        }

        if (statement_value.cast(Inst.DeclVar)) |decl_var| {
            // variable declarations start a new scope
            child_scope = decl_var.params.variable.child_scope;
        } else if (!is_continuation_unreachable) {
            // this statement's value must be void
            _ = try irb.build(
                Inst.CheckVoidStmt,
                child_scope,
                Span{
                    .first = statement_node.firstToken(),
                    .last = statement_node.lastToken(),
                },
                Inst.CheckVoidStmt.Params{ .target = statement_value },
            );
        }
    }

    if (is_continuation_unreachable) {
        // The block ended with a noreturn statement.
        assert(noreturn_return_value != null);
        if (block.label == null or block_scope.incoming_blocks.len == 0) {
            return noreturn_return_value.?;
        }

        // Labeled block with break edges: still need the Phi at the end block.
        try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
        return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params{
            .incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
            .incoming_values = block_scope.incoming_values.toOwnedSlice(),
        });
    }

    if (block.label) |label| {
        // Falling off the end of a labeled block yields void.
        try block_scope.incoming_blocks.append(irb.current_basic_block);
        try block_scope.incoming_values.append(
            try irb.buildConstVoid(parent_scope, Span.token(block.rbrace), true),
        );
        _ = try await (async irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);

        _ = try irb.buildGen(Inst.Br, parent_scope, Span.token(block.rbrace), Inst.Br.Params{
            .dest_block = block_scope.end_block,
            .is_comptime = block_scope.is_comptime,
        });

        try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);

        return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params{
            .incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
            .incoming_values = block_scope.incoming_values.toOwnedSlice(),
        });
    }

    // Unlabeled block that falls through: run defers and yield void.
    _ = try await (async irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
    return irb.buildConstVoid(child_scope, Span.token(block.rbrace), true);
}
|
|
|
|
/// Lower break/continue/return. Only `return` is implemented: it validates
/// the context (inside a function, not inside a defer expression), lowers
/// the operand (or void), runs the applicable defers — branching on whether
/// the returned value is an error when errdefers or error-return tracing
/// are involved — and emits the return.
pub async fn genControlFlowExpr(
    irb: *Builder,
    control_flow_expr: *ast.Node.ControlFlowExpression,
    scope: *Scope,
    lval: LVal,
) !*Inst {
    switch (control_flow_expr.kind) {
        ast.Node.ControlFlowExpression.Kind.Break => |arg| return error.Unimplemented,
        ast.Node.ControlFlowExpression.Kind.Continue => |arg| return error.Unimplemented,
        ast.Node.ControlFlowExpression.Kind.Return => {
            const src_span = Span.token(control_flow_expr.ltoken);
            if (scope.findFnDef() == null) {
                try irb.comp.addCompileError(
                    irb.code.tree_scope,
                    src_span,
                    "return expression outside function definition",
                );
                return error.SemanticAnalysisFailed;
            }

            if (scope.findDeferExpr()) |scope_defer_expr| {
                // Report at most one error per defer expression.
                if (!scope_defer_expr.reported_err) {
                    try irb.comp.addCompileError(
                        irb.code.tree_scope,
                        src_span,
                        "cannot return from defer expression",
                    );
                    scope_defer_expr.reported_err = true;
                }
                return error.SemanticAnalysisFailed;
            }

            const outer_scope = irb.begin_scope.?;
            // `return;` returns void.
            const return_value = if (control_flow_expr.rhs) |rhs| blk: {
                break :blk try await (async irb.genNode(rhs, scope, LVal.None) catch unreachable);
            } else blk: {
                break :blk try irb.buildConstVoid(scope, src_span, true);
            };

            const defer_counts = irb.countDefers(scope, outer_scope);
            const have_err_defers = defer_counts.error_exit != 0;
            if (have_err_defers or irb.comp.have_err_ret_tracing) {
                // Branch on whether the return value is an error so that
                // errdefers (and error-return tracing) only run on the
                // error path.
                const err_block = try irb.createBasicBlock(scope, "ErrRetErr");
                const ok_block = try irb.createBasicBlock(scope, "ErrRetOk");
                if (!have_err_defers) {
                    // Only tracing is needed; the plain defers can run
                    // before the branch since both paths need them.
                    _ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
                }

                const is_err = try irb.build(
                    Inst.TestErr,
                    scope,
                    src_span,
                    Inst.TestErr.Params{ .target = return_value },
                );

                const err_is_comptime = try irb.buildTestCompTime(scope, src_span, is_err);

                _ = try irb.buildGen(Inst.CondBr, scope, src_span, Inst.CondBr.Params{
                    .condition = is_err,
                    .then_block = err_block,
                    .else_block = ok_block,
                    .is_comptime = err_is_comptime,
                });

                const ret_stmt_block = try irb.createBasicBlock(scope, "RetStmt");

                // Error path: run errdefers (which includes plain defers),
                // record the error return trace, then join.
                try irb.setCursorAtEndAndAppendBlock(err_block);
                if (have_err_defers) {
                    _ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ErrorExit) catch unreachable);
                }
                if (irb.comp.have_err_ret_tracing and !irb.isCompTime(scope)) {
                    _ = try irb.build(Inst.SaveErrRetAddr, scope, src_span, Inst.SaveErrRetAddr.Params{});
                }
                _ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params{
                    .dest_block = ret_stmt_block,
                    .is_comptime = err_is_comptime,
                });

                // Success path: run only the plain defers, then join.
                try irb.setCursorAtEndAndAppendBlock(ok_block);
                if (have_err_defers) {
                    _ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
                }
                _ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params{
                    .dest_block = ret_stmt_block,
                    .is_comptime = err_is_comptime,
                });

                try irb.setCursorAtEndAndAppendBlock(ret_stmt_block);
                return irb.genAsyncReturn(scope, src_span, return_value, false);
            } else {
                // No errdefers and no tracing: run defers and return.
                _ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
                return irb.genAsyncReturn(scope, src_span, return_value, false);
            }
        },
    }
}
|
|
|
|
/// Lower an identifier reference. Resolution order: primitive type names
/// (e.g. `u8`), then declarations and local variables found by walking the
/// scope chain. Unresolved names produce a compile error.
pub async fn genIdentifier(irb: *Builder, identifier: *ast.Node.Identifier, scope: *Scope, lval: LVal) !*Inst {
    const src_span = Span.token(identifier.token);
    const name = irb.code.tree_scope.tree.tokenSlice(identifier.token);

    //if (buf_eql_str(variable_name, "_") && lval == LValPtr) {
    //    IrInstructionConst *const_instruction = ir_build_instruction<IrInstructionConst>(irb, scope, node);
    //    const_instruction->base.value.type = get_pointer_to_type(irb->codegen,
    //            irb->codegen->builtin_types.entry_void, false);
    //    const_instruction->base.value.special = ConstValSpecialStatic;
    //    const_instruction->base.value.data.x_ptr.special = ConstPtrSpecialDiscard;
    //    return &const_instruction->base;
    //}

    // getPrimitiveType also recognizes iN/uN names; it can fail on e.g.
    // an out-of-range bit width.
    if (await (async irb.comp.getPrimitiveType(name) catch unreachable)) |result| {
        if (result) |primitive_type| {
            defer primitive_type.base.deref(irb.comp);
            switch (lval) {
                // if (lval == LValPtr) {
                //     return ir_build_ref(irb, scope, node, value, false, false);
                LVal.Ptr => return error.Unimplemented,
                LVal.None => return irb.buildConstValue(scope, src_span, &primitive_type.base),
            }
        }
    } else |err| switch (err) {
        error.Overflow => {
            try irb.comp.addCompileError(irb.code.tree_scope, src_span, "integer too large");
            return error.SemanticAnalysisFailed;
        },
        error.OutOfMemory => return error.OutOfMemory,
    }

    switch (await (async irb.findIdent(scope, name) catch unreachable)) {
        Ident.Decl => |decl| {
            return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params{
                .decl = decl,
                .lval = lval,
            });
        },
        Ident.VarScope => |var_scope| {
            const var_ptr = try irb.build(Inst.VarPtr, scope, src_span, Inst.VarPtr.Params{ .var_scope = var_scope });
            switch (lval) {
                LVal.Ptr => return var_ptr,
                LVal.None => {
                    // Caller wants the value, not the address: load it.
                    return irb.build(Inst.LoadPtr, scope, src_span, Inst.LoadPtr.Params{ .target = var_ptr });
                },
            }
        },
        Ident.NotFound => {},
    }

    //if (node->owner->any_imports_failed) {
    //    // skip the error message since we had a failing import in this file
    //    // if an import breaks we don't need redundant undeclared identifier errors
    //    return irb->codegen->invalid_instruction;
    //}

    // TODO put a variable of same name with invalid type in global scope
    // so that future references to this same name will find a variable with an invalid type

    try irb.comp.addCompileError(irb.code.tree_scope, src_span, "unknown identifier '{}'", name);
    return error.SemanticAnalysisFailed;
}
|
|
|
|
/// Result of countDefers: how many defer scopes of each kind lie between
/// an inner scope and an outer scope.
const DeferCounts = struct {
    // number of `defer` scopes (these run on every scope exit)
    scope_exit: usize,
    // number of `errdefer` scopes (these run only on error exit)
    error_exit: usize,
};
|
|
|
|
/// Count the defer and errdefer scopes between `inner_scope` and
/// `outer_scope` (stopping early at a FnDef scope or the scope chain root).
fn countDefers(irb: *Builder, inner_scope: *Scope, outer_scope: *Scope) DeferCounts {
    var counts = DeferCounts{ .scope_exit = 0, .error_exit = 0 };

    var scope = inner_scope;
    while (scope != outer_scope) {
        switch (scope.id) {
            Scope.Id.Defer => {
                const defer_scope = @fieldParentPtr(Scope.Defer, "base", scope);
                switch (defer_scope.kind) {
                    Scope.Defer.Kind.ScopeExit => counts.scope_exit += 1,
                    Scope.Defer.Kind.ErrorExit => counts.error_exit += 1,
                }
                scope = scope.parent orelse break;
            },
            // Never count past the enclosing function.
            Scope.Id.FnDef => break,

            // Transparent scopes: keep walking up.
            Scope.Id.CompTime,
            Scope.Id.Block,
            Scope.Id.Decls,
            Scope.Id.Root,
            Scope.Id.Var,
            => scope = scope.parent orelse break,

            Scope.Id.DeferExpr => unreachable,
            Scope.Id.AstTree => unreachable,
        }
    }
    return counts;
}
|
|
|
|
/// Emit the deferred expressions that must run when control leaves
/// `inner_scope` up to `outer_scope`. `gen_kind` selects which errdefers
/// apply: plain defers always run; errdefers run only for ErrorExit.
/// Returns true if one of the defer expressions is noreturn.
async fn genDefersForBlock(
    irb: *Builder,
    inner_scope: *Scope,
    outer_scope: *Scope,
    gen_kind: Scope.Defer.Kind,
) !bool {
    var scope = inner_scope;
    var is_noreturn = false;
    while (true) {
        switch (scope.id) {
            Scope.Id.Defer => {
                const defer_scope = @fieldParentPtr(Scope.Defer, "base", scope);
                const generate = switch (defer_scope.kind) {
                    // Plain `defer` runs on every kind of exit.
                    Scope.Defer.Kind.ScopeExit => true,
                    // `errdefer` runs only when exiting via an error.
                    Scope.Defer.Kind.ErrorExit => gen_kind == Scope.Defer.Kind.ErrorExit,
                };
                if (generate) {
                    const defer_expr_scope = defer_scope.defer_expr_scope;
                    const instruction = try await (async irb.genNode(
                        defer_expr_scope.expr_node,
                        &defer_expr_scope.base,
                        LVal.None,
                    ) catch unreachable);
                    if (instruction.isNoReturn()) {
                        is_noreturn = true;
                    } else {
                        // A defer expression must evaluate to void.
                        _ = try irb.build(
                            Inst.CheckVoidStmt,
                            &defer_expr_scope.base,
                            Span.token(defer_expr_scope.expr_node.lastToken()),
                            Inst.CheckVoidStmt.Params{ .target = instruction },
                        );
                    }
                }
            },
            // Function/container boundaries end the walk.
            Scope.Id.FnDef,
            Scope.Id.Decls,
            Scope.Id.Root,
            => return is_noreturn,

            Scope.Id.CompTime,
            Scope.Id.Block,
            Scope.Id.Var,
            => scope = scope.parent orelse return is_noreturn,

            Scope.Id.DeferExpr => unreachable,
            Scope.Id.AstTree => unreachable,
        }
    }
}
|
|
|
|
/// Adapt an rvalue instruction to the requested LVal: pass it through for
/// LVal.None, or wrap it in a const Ref instruction when the caller asked
/// for a pointer.
pub fn lvalWrap(irb: *Builder, scope: *Scope, instruction: *Inst, lval: LVal) !*Inst {
    if (lval == LVal.None) return instruction;

    // LVal.Ptr: we needed a pointer to a value, but we got a value, so
    // build an instruction that makes a const pointer to it.
    return irb.build(Inst.Ref, scope, instruction.span, Inst.Ref.Params{
        .target = instruction,
        .mut = Type.Pointer.Mut.Const,
        .volatility = Type.Pointer.Vol.Non,
    });
}
|
|
|
|
/// Allocator for everything that lives as long as the Code being built.
fn arena(self: *Builder) *Allocator {
    return &self.code.arena.allocator;
}
|
|
|
|
/// Core instruction constructor: allocates an instruction of type `I` in
/// the arena, initializes its base fields, refs every instruction/basic
/// block found in `params` (via comptime field reflection), and appends it
/// to the current basic block. `is_generated` marks compiler-synthesized
/// instructions as opposed to ones mapped from user code.
fn buildExtra(
    self: *Builder,
    comptime I: type,
    scope: *Scope,
    span: Span,
    params: I.Params,
    is_generated: bool,
) !*Inst {
    const inst = try self.arena().create(I);
    inst.* = I{
        .base = Inst{
            .id = Inst.typeToId(I),
            .is_generated = is_generated,
            .scope = scope,
            .debug_id = self.next_debug_id,
            // Each instruction type declares how its value starts out.
            .val = switch (I.ir_val_init) {
                IrVal.Init.Unknown => IrVal.Unknown,
                IrVal.Init.NoReturn => IrVal{ .KnownValue = &Value.NoReturn.get(self.comp).base },
                IrVal.Init.Void => IrVal{ .KnownValue = &Value.Void.get(self.comp).base },
            },
            .ref_count = 0,
            .span = span,
            .child = null,
            .parent = null,
            // Filled in during codegen.
            .llvm_value = undefined,
            .owner_bb = self.current_basic_block,
        },
        .params = params,
    };

    // Look at the params and ref() other instructions
    comptime var i = 0;
    inline while (i < @memberCount(I.Params)) : (i += 1) {
        const FieldType = comptime @typeOf(@field(I.Params(undefined), @memberName(I.Params, i)));
        switch (FieldType) {
            *Inst => @field(inst.params, @memberName(I.Params, i)).ref(self),
            *BasicBlock => @field(inst.params, @memberName(I.Params, i)).ref(self),
            ?*Inst => if (@field(inst.params, @memberName(I.Params, i))) |other| other.ref(self),
            []*Inst => {
                // TODO https://github.com/ziglang/zig/issues/1269
                for (@field(inst.params, @memberName(I.Params, i))) |other|
                    other.ref(self);
            },
            []*BasicBlock => {
                // TODO https://github.com/ziglang/zig/issues/1269
                for (@field(inst.params, @memberName(I.Params, i))) |other|
                    other.ref(self);
            },
            // Plain-data param fields: nothing to ref.
            Type.Pointer.Mut,
            Type.Pointer.Vol,
            Type.Pointer.Size,
            LVal,
            *Decl,
            *Scope.Var,
            => {},
            // it's ok to add more types here, just make sure that
            // any instructions and basic blocks are ref'd appropriately
            else => @compileError("unrecognized type in Params: " ++ @typeName(FieldType)),
        }
    }

    self.next_debug_id += 1;
    try self.current_basic_block.instruction_list.append(&inst.base);
    return &inst.base;
}
|
|
|
|
/// Build an instruction that corresponds to user-written code
/// (is_generated = false). See buildExtra.
fn build(
    self: *Builder,
    comptime I: type,
    scope: *Scope,
    span: Span,
    params: I.Params,
) !*Inst {
    return self.buildExtra(I, scope, span, params, false);
}
|
|
|
|
/// Build a compiler-synthesized instruction (is_generated = true).
/// See buildExtra.
fn buildGen(
    self: *Builder,
    comptime I: type,
    scope: *Scope,
    span: Span,
    params: I.Params,
) !*Inst {
    return self.buildExtra(I, scope, span, params, true);
}
|
|
|
|
/// Build a Const instruction whose known value is the bool `x`.
fn buildConstBool(self: *Builder, scope: *Scope, span: Span, x: bool) !*Inst {
    const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params{});
    inst.val = IrVal{ .KnownValue = &Value.Bool.get(self.comp, x).base };
    return inst;
}
|
|
|
|
/// Build a Const instruction whose known value is void. `is_generated`
/// marks it as compiler-synthesized rather than from user code.
fn buildConstVoid(self: *Builder, scope: *Scope, span: Span, is_generated: bool) !*Inst {
    const inst = try self.buildExtra(Inst.Const, scope, span, Inst.Const.Params{}, is_generated);
    inst.val = IrVal{ .KnownValue = &Value.Void.get(self.comp).base };
    return inst;
}
|
|
|
|
/// Build a Const instruction holding `v`; takes a new reference to `v`,
/// so the caller keeps (and remains responsible for) its own.
fn buildConstValue(self: *Builder, scope: *Scope, span: Span, v: *Value) !*Inst {
    const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params{});
    inst.val = IrVal{ .KnownValue = v.getRef() };
    return inst;
}
|
|
|
|
/// If the code is explicitly set to be comptime, then builds a const bool,
/// otherwise builds a TestCompTime instruction.
fn buildTestCompTime(self: *Builder, scope: *Scope, span: Span, target: *Inst) !*Inst {
    if (!self.isCompTime(scope)) {
        // Not statically known: defer the decision to analysis.
        return self.build(
            Inst.TestCompTime,
            scope,
            span,
            Inst.TestCompTime.Params{ .target = target },
        );
    }
    return self.buildConstBool(scope, span, true);
}
|
|
|
|
/// Emit a function return: records `result` for implicit return type
/// inference, then builds the Return instruction. The async return path is
/// not yet ported and reports error.Unimplemented.
fn genAsyncReturn(irb: *Builder, scope: *Scope, span: Span, result: *Inst, is_gen: bool) !*Inst {
    // Analysis peels these off to infer the function's return type.
    _ = try irb.buildGen(
        Inst.AddImplicitReturnType,
        scope,
        span,
        Inst.AddImplicitReturnType.Params{ .target = result },
    );

    if (!irb.is_async) {
        return irb.buildExtra(
            Inst.Return,
            scope,
            span,
            Inst.Return.Params{ .return_value = result },
            is_gen,
        );
    }
    return error.Unimplemented;
}
|
|
|
|
/// Result of findIdent: what an identifier resolved to, if anything.
const Ident = union(enum) {
    NotFound,
    // A container-level declaration.
    Decl: *Decl,
    // A local variable's scope.
    VarScope: *Scope.Var,
};
|
|
|
|
/// Walk the scope chain looking for `name`: local variable scopes are
/// checked by name, declaration scopes by (read-locked) table lookup.
/// Inner scopes shadow outer ones. Returns NotFound at the Root scope.
async fn findIdent(irb: *Builder, scope: *Scope, name: []const u8) Ident {
    var s = scope;
    while (true) {
        switch (s.id) {
            Scope.Id.Root => return Ident.NotFound,
            Scope.Id.Decls => {
                const decls = @fieldParentPtr(Scope.Decls, "base", s);
                // The decl table is shared across threads; take a read lock.
                const locked_table = await (async decls.table.acquireRead() catch unreachable);
                defer locked_table.release();
                if (locked_table.value.get(name)) |entry| {
                    return Ident{ .Decl = entry.value };
                }
            },
            Scope.Id.Var => {
                const var_scope = @fieldParentPtr(Scope.Var, "base", s);
                if (mem.eql(u8, var_scope.name, name)) {
                    return Ident{ .VarScope = var_scope };
                }
            },
            else => {},
        }
        // Root terminates the walk above, so parent is always non-null here.
        s = s.parent.?;
    }
}
|
|
};
|
|
|
|
/// State for the semantic analysis pass ("ira"): consumes unanalyzed IR and
/// produces analyzed IR through the embedded Builder.
const Analyze = struct {
    // Builder that emits the new (analyzed) code.
    irb: Builder,
    // Index into the old (unanalyzed) code's basic block list; advanced by
    // finishBasicBlock.
    old_bb_index: usize,
    // Predecessor block recorded by startBasicBlock; null when none was given.
    const_predecessor_bb: ?*BasicBlock,
    // The old basic block currently being analyzed; set by startBasicBlock.
    parent_basic_block: *BasicBlock,
    // Index of the next instruction to analyze within parent_basic_block.
    instruction_index: usize,
    // Instructions whose types feed implicit return type resolution
    // (see resolvePeerTypes in analyze()).
    src_implicit_return_type_list: std.ArrayList(*Inst),
    // Return type imposed by the caller, if any (passed through analyze()).
    explicit_return_type: ?*Type,

    pub const Error = error{
        /// This is only for when we have already reported a compile error. It is the poison value.
        SemanticAnalysisFailed,

        /// This is a placeholder - it is useful to use instead of panicking but once the compiler is
        /// done this error code will be removed.
        Unimplemented,

        OutOfMemory,
    };
|
|
|
|
pub fn init(comp: *Compilation, tree_scope: *Scope.AstTree, explicit_return_type: ?*Type) !Analyze {
|
|
var irb = try Builder.init(comp, tree_scope, null);
|
|
errdefer irb.abort();
|
|
|
|
return Analyze{
|
|
.irb = irb,
|
|
.old_bb_index = 0,
|
|
.const_predecessor_bb = null,
|
|
.parent_basic_block = undefined, // initialized with startBasicBlock
|
|
.instruction_index = undefined, // initialized with startBasicBlock
|
|
.src_implicit_return_type_list = std.ArrayList(*Inst).init(irb.arena()),
|
|
.explicit_return_type = explicit_return_type,
|
|
};
|
|
}
|
|
|
|
    /// Cancels analysis, releasing the Builder and the Code it was producing.
    pub fn abort(self: *Analyze) void {
        self.irb.abort();
    }
|
|
|
|
    /// Returns the new (analyzed) counterpart of `old_bb`, creating and
    /// linking one on first use. An existing child is reused unless it was
    /// created for this same `ref_old_instruction` — in that case a fresh
    /// block is made (NOTE(review): presumably to force re-analysis when the
    /// same instruction re-enters the block; confirm against stage1 ir.cpp).
    pub fn getNewBasicBlock(self: *Analyze, old_bb: *BasicBlock, ref_old_instruction: ?*Inst) !*BasicBlock {
        if (old_bb.child) |child| {
            if (ref_old_instruction == null or child.ref_instruction != ref_old_instruction)
                return child;
        }

        const new_bb = try self.irb.createBasicBlock(old_bb.scope, old_bb.name_hint);
        new_bb.linkToParent(old_bb);
        new_bb.ref_instruction = ref_old_instruction;
        return new_bb;
    }
|
|
|
|
    /// Positions analysis at the first instruction of `old_bb` and records
    /// `const_predecessor_bb` (the predecessor to use during analysis, or
    /// null when there is none).
    pub fn startBasicBlock(self: *Analyze, old_bb: *BasicBlock, const_predecessor_bb: ?*BasicBlock) void {
        self.instruction_index = 0;
        self.parent_basic_block = old_bb;
        self.const_predecessor_bb = const_predecessor_bb;
    }
|
|
|
|
    /// Finalizes the current basic block (appending it to the new code) and
    /// advances to the next old basic block awaiting analysis, if any.
    /// Called after analyzing a noreturn instruction.
    pub fn finishBasicBlock(ira: *Analyze, old_code: *Code) !void {
        try ira.irb.code.basic_block_list.append(ira.irb.current_basic_block);
        ira.instruction_index += 1;

        // Everything after the noreturn instruction must be compiler-generated;
        // any user-written instruction there is unreachable code.
        while (ira.instruction_index < ira.parent_basic_block.instruction_list.len) {
            const next_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);

            if (!next_instruction.is_generated) {
                try ira.addCompileError(next_instruction.span, "unreachable code");
                break;
            }
            ira.instruction_index += 1;
        }

        ira.old_bb_index += 1;

        // Find the next old block that has a child created but not yet filled
        // in (child exists, child's instruction list still empty). Scan from
        // the current index to the end, then wrap around once from index 0.
        var need_repeat = true;
        while (true) {
            while (ira.old_bb_index < old_code.basic_block_list.len) {
                const old_bb = old_code.basic_block_list.at(ira.old_bb_index);
                const new_bb = old_bb.child orelse {
                    // Never referenced during analysis; skip it.
                    ira.old_bb_index += 1;
                    continue;
                };
                if (new_bb.instruction_list.len != 0) {
                    // Already analyzed; skip it.
                    ira.old_bb_index += 1;
                    continue;
                }
                ira.irb.current_basic_block = new_bb;

                ira.startBasicBlock(old_bb, null);
                return;
            }
            if (!need_repeat)
                return;
            need_repeat = false;
            ira.old_bb_index = 0;
            continue;
        }
    }
|
|
|
|
    /// Reports a compile error at `span`, formatted from `fmt`/`args` and
    /// attributed to the tree scope of the code under analysis.
    fn addCompileError(self: *Analyze, span: Span, comptime fmt: []const u8, args: ...) !void {
        return self.irb.comp.addCompileError(self.irb.code.tree_scope, span, fmt, args);
    }
|
|
|
|
    /// Resolves the common type of `peers`, optionally guided by
    /// `expected_type`. Stub: currently ignores both arguments and always
    /// answers `void`.
    fn resolvePeerTypes(self: *Analyze, expected_type: ?*Type, peers: []const *Inst) Analyze.Error!*Type {
        // TODO actual implementation
        return &Type.Void.get(self.irb.comp).base;
    }
|
|
|
|
fn implicitCast(self: *Analyze, target: *Inst, optional_dest_type: ?*Type) Analyze.Error!*Inst {
|
|
const dest_type = optional_dest_type orelse return target;
|
|
const from_type = target.getKnownType();
|
|
if (from_type == dest_type or from_type.id == Type.Id.NoReturn) return target;
|
|
return self.analyzeCast(target, target, dest_type);
|
|
}
|
|
|
|
fn analyzeCast(ira: *Analyze, source_instr: *Inst, target: *Inst, dest_type: *Type) !*Inst {
|
|
const from_type = target.getKnownType();
|
|
|
|
//if (type_is_invalid(wanted_type) || type_is_invalid(actual_type)) {
|
|
// return ira->codegen->invalid_instruction;
|
|
//}
|
|
|
|
//// perfect match or non-const to const
|
|
//ConstCastOnly const_cast_result = types_match_const_cast_only(ira, wanted_type, actual_type,
|
|
// source_node, false);
|
|
//if (const_cast_result.id == ConstCastResultIdOk) {
|
|
// return ir_resolve_cast(ira, source_instr, value, wanted_type, CastOpNoop, false);
|
|
//}
|
|
|
|
//// widening conversion
|
|
//if (wanted_type->id == TypeTableEntryIdInt &&
|
|
// actual_type->id == TypeTableEntryIdInt &&
|
|
// wanted_type->data.integral.is_signed == actual_type->data.integral.is_signed &&
|
|
// wanted_type->data.integral.bit_count >= actual_type->data.integral.bit_count)
|
|
//{
|
|
// return ir_analyze_widen_or_shorten(ira, source_instr, value, wanted_type);
|
|
//}
|
|
|
|
//// small enough unsigned ints can get casted to large enough signed ints
|
|
//if (wanted_type->id == TypeTableEntryIdInt && wanted_type->data.integral.is_signed &&
|
|
// actual_type->id == TypeTableEntryIdInt && !actual_type->data.integral.is_signed &&
|
|
// wanted_type->data.integral.bit_count > actual_type->data.integral.bit_count)
|
|
//{
|
|
// return ir_analyze_widen_or_shorten(ira, source_instr, value, wanted_type);
|
|
//}
|
|
|
|
//// float widening conversion
|
|
//if (wanted_type->id == TypeTableEntryIdFloat &&
|
|
// actual_type->id == TypeTableEntryIdFloat &&
|
|
// wanted_type->data.floating.bit_count >= actual_type->data.floating.bit_count)
|
|
//{
|
|
// return ir_analyze_widen_or_shorten(ira, source_instr, value, wanted_type);
|
|
//}
|
|
|
|
//// cast from [N]T to []const T
|
|
//if (is_slice(wanted_type) && actual_type->id == TypeTableEntryIdArray) {
|
|
// TypeTableEntry *ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
|
|
// assert(ptr_type->id == TypeTableEntryIdPointer);
|
|
// if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
|
|
// types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
|
|
// source_node, false).id == ConstCastResultIdOk)
|
|
// {
|
|
// return ir_analyze_array_to_slice(ira, source_instr, value, wanted_type);
|
|
// }
|
|
//}
|
|
|
|
//// cast from *const [N]T to []const T
|
|
//if (is_slice(wanted_type) &&
|
|
// actual_type->id == TypeTableEntryIdPointer &&
|
|
// actual_type->data.pointer.is_const &&
|
|
// actual_type->data.pointer.child_type->id == TypeTableEntryIdArray)
|
|
//{
|
|
// TypeTableEntry *ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
|
|
// assert(ptr_type->id == TypeTableEntryIdPointer);
|
|
|
|
// TypeTableEntry *array_type = actual_type->data.pointer.child_type;
|
|
|
|
// if ((ptr_type->data.pointer.is_const || array_type->data.array.len == 0) &&
|
|
// types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, array_type->data.array.child_type,
|
|
// source_node, false).id == ConstCastResultIdOk)
|
|
// {
|
|
// return ir_analyze_array_to_slice(ira, source_instr, value, wanted_type);
|
|
// }
|
|
//}
|
|
|
|
//// cast from [N]T to *const []const T
|
|
//if (wanted_type->id == TypeTableEntryIdPointer &&
|
|
// wanted_type->data.pointer.is_const &&
|
|
// is_slice(wanted_type->data.pointer.child_type) &&
|
|
// actual_type->id == TypeTableEntryIdArray)
|
|
//{
|
|
// TypeTableEntry *ptr_type =
|
|
// wanted_type->data.pointer.child_type->data.structure.fields[slice_ptr_index].type_entry;
|
|
// assert(ptr_type->id == TypeTableEntryIdPointer);
|
|
// if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
|
|
// types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
|
|
// source_node, false).id == ConstCastResultIdOk)
|
|
// {
|
|
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.pointer.child_type, value);
|
|
// if (type_is_invalid(cast1->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
|
|
// if (type_is_invalid(cast2->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// return cast2;
|
|
// }
|
|
//}
|
|
|
|
//// cast from [N]T to ?[]const T
|
|
//if (wanted_type->id == TypeTableEntryIdOptional &&
|
|
// is_slice(wanted_type->data.maybe.child_type) &&
|
|
// actual_type->id == TypeTableEntryIdArray)
|
|
//{
|
|
// TypeTableEntry *ptr_type =
|
|
// wanted_type->data.maybe.child_type->data.structure.fields[slice_ptr_index].type_entry;
|
|
// assert(ptr_type->id == TypeTableEntryIdPointer);
|
|
// if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
|
|
// types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
|
|
// source_node, false).id == ConstCastResultIdOk)
|
|
// {
|
|
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.maybe.child_type, value);
|
|
// if (type_is_invalid(cast1->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
|
|
// if (type_is_invalid(cast2->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// return cast2;
|
|
// }
|
|
//}
|
|
|
|
//// *[N]T to [*]T
|
|
//if (wanted_type->id == TypeTableEntryIdPointer &&
|
|
// wanted_type->data.pointer.ptr_len == PtrLenUnknown &&
|
|
// actual_type->id == TypeTableEntryIdPointer &&
|
|
// actual_type->data.pointer.ptr_len == PtrLenSingle &&
|
|
// actual_type->data.pointer.child_type->id == TypeTableEntryIdArray &&
|
|
// actual_type->data.pointer.alignment >= wanted_type->data.pointer.alignment &&
|
|
// types_match_const_cast_only(ira, wanted_type->data.pointer.child_type,
|
|
// actual_type->data.pointer.child_type->data.array.child_type, source_node,
|
|
// !wanted_type->data.pointer.is_const).id == ConstCastResultIdOk)
|
|
//{
|
|
// return ir_resolve_ptr_of_array_to_unknown_len_ptr(ira, source_instr, value, wanted_type);
|
|
//}
|
|
|
|
//// *[N]T to []T
|
|
//if (is_slice(wanted_type) &&
|
|
// actual_type->id == TypeTableEntryIdPointer &&
|
|
// actual_type->data.pointer.ptr_len == PtrLenSingle &&
|
|
// actual_type->data.pointer.child_type->id == TypeTableEntryIdArray)
|
|
//{
|
|
// TypeTableEntry *slice_ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
|
|
// assert(slice_ptr_type->id == TypeTableEntryIdPointer);
|
|
// if (types_match_const_cast_only(ira, slice_ptr_type->data.pointer.child_type,
|
|
// actual_type->data.pointer.child_type->data.array.child_type, source_node,
|
|
// !slice_ptr_type->data.pointer.is_const).id == ConstCastResultIdOk)
|
|
// {
|
|
// return ir_resolve_ptr_of_array_to_slice(ira, source_instr, value, wanted_type);
|
|
// }
|
|
//}
|
|
|
|
//// cast from T to ?T
|
|
//// note that the *T to ?*T case is handled via the "ConstCastOnly" mechanism
|
|
//if (wanted_type->id == TypeTableEntryIdOptional) {
|
|
// TypeTableEntry *wanted_child_type = wanted_type->data.maybe.child_type;
|
|
// if (types_match_const_cast_only(ira, wanted_child_type, actual_type, source_node,
|
|
// false).id == ConstCastResultIdOk)
|
|
// {
|
|
// return ir_analyze_maybe_wrap(ira, source_instr, value, wanted_type);
|
|
// } else if (actual_type->id == TypeTableEntryIdComptimeInt ||
|
|
// actual_type->id == TypeTableEntryIdComptimeFloat)
|
|
// {
|
|
// if (ir_num_lit_fits_in_other_type(ira, value, wanted_child_type, true)) {
|
|
// return ir_analyze_maybe_wrap(ira, source_instr, value, wanted_type);
|
|
// } else {
|
|
// return ira->codegen->invalid_instruction;
|
|
// }
|
|
// } else if (wanted_child_type->id == TypeTableEntryIdPointer &&
|
|
// wanted_child_type->data.pointer.is_const &&
|
|
// (actual_type->id == TypeTableEntryIdPointer || is_container(actual_type)))
|
|
// {
|
|
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_child_type, value);
|
|
// if (type_is_invalid(cast1->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
|
|
// if (type_is_invalid(cast2->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// return cast2;
|
|
// }
|
|
//}
|
|
|
|
//// cast from null literal to maybe type
|
|
//if (wanted_type->id == TypeTableEntryIdOptional &&
|
|
// actual_type->id == TypeTableEntryIdNull)
|
|
//{
|
|
// return ir_analyze_null_to_maybe(ira, source_instr, value, wanted_type);
|
|
//}
|
|
|
|
//// cast from child type of error type to error type
|
|
//if (wanted_type->id == TypeTableEntryIdErrorUnion) {
|
|
// if (types_match_const_cast_only(ira, wanted_type->data.error_union.payload_type, actual_type,
|
|
// source_node, false).id == ConstCastResultIdOk)
|
|
// {
|
|
// return ir_analyze_err_wrap_payload(ira, source_instr, value, wanted_type);
|
|
// } else if (actual_type->id == TypeTableEntryIdComptimeInt ||
|
|
// actual_type->id == TypeTableEntryIdComptimeFloat)
|
|
// {
|
|
// if (ir_num_lit_fits_in_other_type(ira, value, wanted_type->data.error_union.payload_type, true)) {
|
|
// return ir_analyze_err_wrap_payload(ira, source_instr, value, wanted_type);
|
|
// } else {
|
|
// return ira->codegen->invalid_instruction;
|
|
// }
|
|
// }
|
|
//}
|
|
|
|
//// cast from [N]T to E![]const T
|
|
//if (wanted_type->id == TypeTableEntryIdErrorUnion &&
|
|
// is_slice(wanted_type->data.error_union.payload_type) &&
|
|
// actual_type->id == TypeTableEntryIdArray)
|
|
//{
|
|
// TypeTableEntry *ptr_type =
|
|
// wanted_type->data.error_union.payload_type->data.structure.fields[slice_ptr_index].type_entry;
|
|
// assert(ptr_type->id == TypeTableEntryIdPointer);
|
|
// if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
|
|
// types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
|
|
// source_node, false).id == ConstCastResultIdOk)
|
|
// {
|
|
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error_union.payload_type, value);
|
|
// if (type_is_invalid(cast1->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
|
|
// if (type_is_invalid(cast2->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// return cast2;
|
|
// }
|
|
//}
|
|
|
|
//// cast from error set to error union type
|
|
//if (wanted_type->id == TypeTableEntryIdErrorUnion &&
|
|
// actual_type->id == TypeTableEntryIdErrorSet)
|
|
//{
|
|
// return ir_analyze_err_wrap_code(ira, source_instr, value, wanted_type);
|
|
//}
|
|
|
|
//// cast from T to E!?T
|
|
//if (wanted_type->id == TypeTableEntryIdErrorUnion &&
|
|
// wanted_type->data.error_union.payload_type->id == TypeTableEntryIdOptional &&
|
|
// actual_type->id != TypeTableEntryIdOptional)
|
|
//{
|
|
// TypeTableEntry *wanted_child_type = wanted_type->data.error_union.payload_type->data.maybe.child_type;
|
|
// if (types_match_const_cast_only(ira, wanted_child_type, actual_type, source_node, false).id == ConstCastResultIdOk ||
|
|
// actual_type->id == TypeTableEntryIdNull ||
|
|
// actual_type->id == TypeTableEntryIdComptimeInt ||
|
|
// actual_type->id == TypeTableEntryIdComptimeFloat)
|
|
// {
|
|
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error_union.payload_type, value);
|
|
// if (type_is_invalid(cast1->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
|
|
// if (type_is_invalid(cast2->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// return cast2;
|
|
// }
|
|
//}
|
|
|
|
// cast from comptime-known integer to another integer where the value fits
|
|
if (target.isCompTime() and (from_type.id == Type.Id.Int or from_type.id == Type.Id.ComptimeInt)) cast: {
|
|
const target_val = target.val.KnownValue;
|
|
const from_int = &target_val.cast(Value.Int).?.big_int;
|
|
const fits = fits: {
|
|
if (dest_type.cast(Type.ComptimeInt)) |ctint| {
|
|
break :fits true;
|
|
}
|
|
if (dest_type.cast(Type.Int)) |int| {
|
|
break :fits from_int.fitsInTwosComp(int.key.is_signed, int.key.bit_count);
|
|
}
|
|
break :cast;
|
|
};
|
|
if (!fits) {
|
|
try ira.addCompileError(
|
|
source_instr.span,
|
|
"integer value '{}' cannot be stored in type '{}'",
|
|
from_int,
|
|
dest_type.name,
|
|
);
|
|
return error.SemanticAnalysisFailed;
|
|
}
|
|
|
|
const new_val = try target.copyVal(ira.irb.comp);
|
|
new_val.setType(dest_type, ira.irb.comp);
|
|
return ira.irb.buildConstValue(source_instr.scope, source_instr.span, new_val);
|
|
}
|
|
|
|
// cast from number literal to another type
|
|
// cast from number literal to *const integer
|
|
//if (actual_type->id == TypeTableEntryIdComptimeFloat ||
|
|
// actual_type->id == TypeTableEntryIdComptimeInt)
|
|
//{
|
|
// ensure_complete_type(ira->codegen, wanted_type);
|
|
// if (type_is_invalid(wanted_type))
|
|
// return ira->codegen->invalid_instruction;
|
|
// if (wanted_type->id == TypeTableEntryIdEnum) {
|
|
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.enumeration.tag_int_type, value);
|
|
// if (type_is_invalid(cast1->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
|
|
// if (type_is_invalid(cast2->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// return cast2;
|
|
// } else if (wanted_type->id == TypeTableEntryIdPointer &&
|
|
// wanted_type->data.pointer.is_const)
|
|
// {
|
|
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.pointer.child_type, value);
|
|
// if (type_is_invalid(cast1->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
|
|
// if (type_is_invalid(cast2->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// return cast2;
|
|
// } else if (ir_num_lit_fits_in_other_type(ira, value, wanted_type, true)) {
|
|
// CastOp op;
|
|
// if ((actual_type->id == TypeTableEntryIdComptimeFloat &&
|
|
// wanted_type->id == TypeTableEntryIdFloat) ||
|
|
// (actual_type->id == TypeTableEntryIdComptimeInt &&
|
|
// wanted_type->id == TypeTableEntryIdInt))
|
|
// {
|
|
// op = CastOpNumLitToConcrete;
|
|
// } else if (wanted_type->id == TypeTableEntryIdInt) {
|
|
// op = CastOpFloatToInt;
|
|
// } else if (wanted_type->id == TypeTableEntryIdFloat) {
|
|
// op = CastOpIntToFloat;
|
|
// } else {
|
|
// zig_unreachable();
|
|
// }
|
|
// return ir_resolve_cast(ira, source_instr, value, wanted_type, op, false);
|
|
// } else {
|
|
// return ira->codegen->invalid_instruction;
|
|
// }
|
|
//}
|
|
|
|
//// cast from typed number to integer or float literal.
|
|
//// works when the number is known at compile time
|
|
//if (instr_is_comptime(value) &&
|
|
// ((actual_type->id == TypeTableEntryIdInt && wanted_type->id == TypeTableEntryIdComptimeInt) ||
|
|
// (actual_type->id == TypeTableEntryIdFloat && wanted_type->id == TypeTableEntryIdComptimeFloat)))
|
|
//{
|
|
// return ir_analyze_number_to_literal(ira, source_instr, value, wanted_type);
|
|
//}
|
|
|
|
//// cast from union to the enum type of the union
|
|
//if (actual_type->id == TypeTableEntryIdUnion && wanted_type->id == TypeTableEntryIdEnum) {
|
|
// type_ensure_zero_bits_known(ira->codegen, actual_type);
|
|
// if (type_is_invalid(actual_type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// if (actual_type->data.unionation.tag_type == wanted_type) {
|
|
// return ir_analyze_union_to_tag(ira, source_instr, value, wanted_type);
|
|
// }
|
|
//}
|
|
|
|
//// enum to union which has the enum as the tag type
|
|
//if (wanted_type->id == TypeTableEntryIdUnion && actual_type->id == TypeTableEntryIdEnum &&
|
|
// (wanted_type->data.unionation.decl_node->data.container_decl.auto_enum ||
|
|
// wanted_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr))
|
|
//{
|
|
// type_ensure_zero_bits_known(ira->codegen, wanted_type);
|
|
// if (wanted_type->data.unionation.tag_type == actual_type) {
|
|
// return ir_analyze_enum_to_union(ira, source_instr, value, wanted_type);
|
|
// }
|
|
//}
|
|
|
|
//// enum to &const union which has the enum as the tag type
|
|
//if (actual_type->id == TypeTableEntryIdEnum && wanted_type->id == TypeTableEntryIdPointer) {
|
|
// TypeTableEntry *union_type = wanted_type->data.pointer.child_type;
|
|
// if (union_type->data.unionation.decl_node->data.container_decl.auto_enum ||
|
|
// union_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr)
|
|
// {
|
|
// type_ensure_zero_bits_known(ira->codegen, union_type);
|
|
// if (union_type->data.unionation.tag_type == actual_type) {
|
|
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, union_type, value);
|
|
// if (type_is_invalid(cast1->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
|
|
// if (type_is_invalid(cast2->value.type))
|
|
// return ira->codegen->invalid_instruction;
|
|
|
|
// return cast2;
|
|
// }
|
|
// }
|
|
//}
|
|
|
|
//// cast from *T to *[1]T
|
|
//if (wanted_type->id == TypeTableEntryIdPointer && wanted_type->data.pointer.ptr_len == PtrLenSingle &&
|
|
// actual_type->id == TypeTableEntryIdPointer && actual_type->data.pointer.ptr_len == PtrLenSingle)
|
|
//{
|
|
// TypeTableEntry *array_type = wanted_type->data.pointer.child_type;
|
|
// if (array_type->id == TypeTableEntryIdArray && array_type->data.array.len == 1 &&
|
|
// types_match_const_cast_only(ira, array_type->data.array.child_type,
|
|
// actual_type->data.pointer.child_type, source_node,
|
|
// !wanted_type->data.pointer.is_const).id == ConstCastResultIdOk)
|
|
// {
|
|
// if (wanted_type->data.pointer.alignment > actual_type->data.pointer.alignment) {
|
|
// ErrorMsg *msg = ir_add_error(ira, source_instr, buf_sprintf("cast increases pointer alignment"));
|
|
// add_error_note(ira->codegen, msg, value->source_node,
|
|
// buf_sprintf("'%s' has alignment %" PRIu32, buf_ptr(&actual_type->name),
|
|
// actual_type->data.pointer.alignment));
|
|
// add_error_note(ira->codegen, msg, source_instr->source_node,
|
|
// buf_sprintf("'%s' has alignment %" PRIu32, buf_ptr(&wanted_type->name),
|
|
// wanted_type->data.pointer.alignment));
|
|
// return ira->codegen->invalid_instruction;
|
|
// }
|
|
// return ir_analyze_ptr_to_array(ira, source_instr, value, wanted_type);
|
|
// }
|
|
//}
|
|
|
|
//// cast from T to *T where T is zero bits
|
|
//if (wanted_type->id == TypeTableEntryIdPointer && wanted_type->data.pointer.ptr_len == PtrLenSingle &&
|
|
// types_match_const_cast_only(ira, wanted_type->data.pointer.child_type,
|
|
// actual_type, source_node, !wanted_type->data.pointer.is_const).id == ConstCastResultIdOk)
|
|
//{
|
|
// type_ensure_zero_bits_known(ira->codegen, actual_type);
|
|
// if (type_is_invalid(actual_type)) {
|
|
// return ira->codegen->invalid_instruction;
|
|
// }
|
|
// if (!type_has_bits(actual_type)) {
|
|
// return ir_get_ref(ira, source_instr, value, false, false);
|
|
// }
|
|
//}
|
|
|
|
//// cast from undefined to anything
|
|
//if (actual_type->id == TypeTableEntryIdUndefined) {
|
|
// return ir_analyze_undefined_to_anything(ira, source_instr, value, wanted_type);
|
|
//}
|
|
|
|
//// cast from something to const pointer of it
|
|
//if (!type_requires_comptime(actual_type)) {
|
|
// TypeTableEntry *const_ptr_actual = get_pointer_to_type(ira->codegen, actual_type, true);
|
|
// if (types_match_const_cast_only(ira, wanted_type, const_ptr_actual, source_node, false).id == ConstCastResultIdOk) {
|
|
// return ir_analyze_cast_ref(ira, source_instr, value, wanted_type);
|
|
// }
|
|
//}
|
|
|
|
try ira.addCompileError(
|
|
source_instr.span,
|
|
"expected type '{}', found '{}'",
|
|
dest_type.name,
|
|
from_type.name,
|
|
);
|
|
//ErrorMsg *parent_msg = ir_add_error_node(ira, source_instr->source_node,
|
|
// buf_sprintf("expected type '%s', found '%s'",
|
|
// buf_ptr(&wanted_type->name),
|
|
// buf_ptr(&actual_type->name)));
|
|
//report_recursive_error(ira, source_instr->source_node, &const_cast_result, parent_msg);
|
|
return error.SemanticAnalysisFailed;
|
|
}
|
|
|
|
    /// Returns the comptime-known value of `target`, or null when it is not
    /// comptime-known. Unimplemented.
    /// NOTE(review): the name suggests undef values are acceptable in the
    /// result — confirm when implementing.
    fn getCompTimeValOrNullUndefOk(self: *Analyze, target: *Inst) ?*Value {
        @panic("TODO");
    }
|
|
|
|
    /// Builds a comptime reference (pointer) to `value` with the given
    /// mutability/volatility attributes. Stub: all parameters are currently
    /// unused and error.Unimplemented is always returned.
    fn getCompTimeRef(
        self: *Analyze,
        value: *Value,
        ptr_mut: Value.Ptr.Mut,
        mut: Type.Pointer.Mut,
        volatility: Type.Pointer.Vol,
    ) Analyze.Error!*Inst {
        return error.Unimplemented;
    }
|
|
};
|
|
|
|
/// Generates unanalyzed IR for `body_node` into a fresh Code, starting from
/// an "Entry" basic block. If the body does not end in a noreturn
/// instruction, its result value is implicitly returned.
/// On error the partially-built code is aborted.
pub async fn gen(
    comp: *Compilation,
    body_node: *ast.Node,
    tree_scope: *Scope.AstTree,
    scope: *Scope,
) !*Code {
    var irb = try Builder.init(comp, tree_scope, scope);
    errdefer irb.abort();

    const entry_block = try irb.createBasicBlock(scope, "Entry");
    entry_block.ref(&irb); // Entry block gets a reference because we enter it to begin.
    try irb.setCursorAtEndAndAppendBlock(entry_block);

    const result = try await (async irb.genNode(body_node, scope, LVal.None) catch unreachable);
    if (!result.isNoReturn()) {
        // Fell off the end of the body: return the final expression's value.
        // no need for save_err_ret_addr because this cannot return error
        _ = try irb.genAsyncReturn(scope, Span.token(body_node.lastToken()), result, true);
    }

    return irb.finish();
}
|
|
|
|
/// Runs semantic analysis over `old_code`, producing a new analyzed Code.
/// `expected_type`, when non-null, guides return type resolution.
/// On error the analysis pass (and its new code) is aborted.
pub async fn analyze(comp: *Compilation, old_code: *Code, expected_type: ?*Type) !*Code {
    const old_entry_bb = old_code.basic_block_list.at(0);

    var ira = try Analyze.init(comp, old_code.tree_scope, expected_type);
    errdefer ira.abort();

    const new_entry_bb = try ira.getNewBasicBlock(old_entry_bb, null);
    new_entry_bb.ref(&ira.irb);

    ira.irb.current_basic_block = new_entry_bb;

    ira.startBasicBlock(old_entry_bb, null);

    // Walk every instruction of every reachable old basic block;
    // finishBasicBlock advances old_bb_index and repositions the cursor.
    while (ira.old_bb_index < old_code.basic_block_list.len) {
        const old_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);

        // Unreferenced, side-effect-free instructions are dead: drop them.
        if (old_instruction.ref_count == 0 and !old_instruction.hasSideEffects()) {
            ira.instruction_index += 1;
            continue;
        }

        const return_inst = try await (async old_instruction.analyze(&ira) catch unreachable);
        assert(return_inst.val != IrVal.Unknown); // at least the type should be known at this point
        return_inst.linkToParent(old_instruction);
        // Note: if we ever modify the above to handle error.CompileError by continuing analysis,
        // then here we want to check if ira.isCompTime() and return early if true

        if (return_inst.isNoReturn()) {
            // Block ends here; move on to the next unanalyzed block.
            try ira.finishBasicBlock(old_code);
            continue;
        }

        ira.instruction_index += 1;
    }

    // No implicit returns collected: the code never returns.
    if (ira.src_implicit_return_type_list.len == 0) {
        ira.irb.code.return_type = &Type.NoReturn.get(comp).base;
        return ira.irb.finish();
    }

    ira.irb.code.return_type = try ira.resolvePeerTypes(expected_type, ira.src_implicit_return_type_list.toSliceConst());
    return ira.irb.finish();
}
|