Merge pull request #9486 from ziglang/comptime-pointers

stage2: more principled approach to comptime pointers and garbage collection of unused anon decls
Andrew Kelley 2021-07-30 01:40:32 -04:00 committed by GitHub
commit e5e6ceda6a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 444 additions and 381 deletions
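In user-facing terms, the change affects places where the address of a comptime-known value is taken. A minimal sketch of the kind of code involved (illustrative only; the function name `entry` is hypothetical and this is not one of the test cases in this commit):

export fn entry() usize {
    const arr = [_]u8{ 1, 2, 3 };
    // Previously `&arr.len` was lowered through the `ref_val` Value tag; with this
    // change it becomes a `decl_ref` pointing at an anonymous Decl holding the
    // comptime_int 3. Machine code backends mark such Decls alive when they emit a
    // reference to them, and anonymous Decls that are never marked alive are
    // deleted instead of being sent to the linker.
    const len_ptr = &arr.len;
    return len_ptr.*;
}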

View File

@ -15,7 +15,6 @@ instructions: std.MultiArrayList(Inst).Slice,
/// The first few indexes are reserved. See `ExtraIndex` for the values.
extra: []const u32,
values: []const Value,
variables: []const *Module.Var,
pub const ExtraIndex = enum(u32) {
/// Payload index of the main `Block` in the `extra` array.
@ -193,20 +192,10 @@ pub const Inst = struct {
/// Result type is always `u1`.
/// Uses the `un_op` field.
bool_to_int,
/// Stores a value onto the stack and returns a pointer to it.
/// TODO audit where this AIR instruction is emitted, maybe it should instead be emitting
/// alloca instruction and storing to the alloca.
/// Uses the `ty_op` field.
ref,
/// Return a value from a function.
/// Result type is always noreturn; no instructions in a block follow this one.
/// Uses the `un_op` field.
ret,
/// Returns a pointer to a global variable.
/// Uses the `ty_pl` field. Index is into the `variables` array.
/// TODO this can be modeled simply as a constant with a decl ref and then
/// the variables array can be removed from Air.
varptr,
/// Write a value to a pointer. LHS is pointer, RHS is value.
/// Result type is always void.
/// Uses the `bin_op` field.
@ -454,7 +443,6 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
.assembly,
.block,
.constant,
.varptr,
.struct_field_ptr,
.struct_field_val,
=> return air.getRefType(datas[inst].ty_pl.ty),
@ -462,7 +450,6 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
.not,
.bitcast,
.load,
.ref,
.floatcast,
.intcast,
.optional_payload,
@ -550,7 +537,6 @@ pub fn deinit(air: *Air, gpa: *std.mem.Allocator) void {
air.instructions.deinit(gpa);
gpa.free(air.extra);
gpa.free(air.values);
gpa.free(air.variables);
air.* = undefined;
}

View File

@ -2061,11 +2061,19 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
.complete, .codegen_failure_retryable => {
if (build_options.omit_stage2)
@panic("sadly stage2 is omitted from this build to save memory on the CI server");
const module = self.bin_file.options.module.?;
assert(decl.has_tv);
assert(decl.ty.hasCodeGenBits());
try module.linkerUpdateDecl(decl);
if (decl.alive) {
try module.linkerUpdateDecl(decl);
continue;
}
// Instead of sending this decl to the linker, we delete it here, because we
// found out that it was in fact never referenced.
module.deleteUnusedDecl(decl);
},
},
.codegen_func => |func| switch (func.owner_decl.analysis) {

View File

@ -256,14 +256,12 @@ fn analyzeInst(
.const_ty,
.breakpoint,
.dbg_stmt,
.varptr,
.unreach,
=> return trackOperands(a, new_set, inst, main_tomb, .{ .none, .none, .none }),
.not,
.bitcast,
.load,
.ref,
.floatcast,
.intcast,
.optional_payload,

View File

@ -255,6 +255,15 @@ pub const Decl = struct {
has_align: bool,
/// Whether the ZIR code provides a linksection instruction.
has_linksection: bool,
/// Flag used by garbage collection to mark and sweep.
/// Decls which correspond to an AST node always have this field set to `true`.
/// Anonymous Decls are initialized with this field set to `false` and then it
/// is the responsibility of machine code backends to mark it `true` whenever
/// a `decl_ref` Value is encountered that points to this Decl.
/// When the `codegen_decl` job is encountered in the main work queue, if the
/// Decl is marked alive, then it sends the Decl to the linker. Otherwise it
/// deletes the Decl on the spot.
alive: bool,
/// Represents the position of the code in the output file.
/// This is populated regardless of semantic analysis and code generation.
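The backend side of this contract appears repeatedly in the codegen changes below: every path that lowers a `decl_ref` Value now marks the referenced Decl alive before emitting its address. Roughly (a sketch mirroring `generateSymbol` and the C/LLVM/wasm backends in this diff):

if (typed_value.val.castTag(.decl_ref)) |payload| {
    const decl = payload.data;
    decl.alive = true; // referenced from emitted code, so keep this Decl
    // ... emit the address of `decl` ...
}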
@ -1324,6 +1333,42 @@ pub const Scope = struct {
block.instructions.appendAssumeCapacity(result_index);
return result_index;
}
pub fn startAnonDecl(block: *Block) !WipAnonDecl {
return WipAnonDecl{
.block = block,
.new_decl_arena = std.heap.ArenaAllocator.init(block.sema.gpa),
.finished = false,
};
}
pub const WipAnonDecl = struct {
block: *Scope.Block,
new_decl_arena: std.heap.ArenaAllocator,
finished: bool,
pub fn arena(wad: *WipAnonDecl) *Allocator {
return &wad.new_decl_arena.allocator;
}
pub fn deinit(wad: *WipAnonDecl) void {
if (!wad.finished) {
wad.new_decl_arena.deinit();
}
wad.* = undefined;
}
pub fn finish(wad: *WipAnonDecl, ty: Type, val: Value) !*Decl {
const new_decl = try wad.block.sema.mod.createAnonymousDecl(&wad.block.base, .{
.ty = ty,
.val = val,
});
errdefer wad.block.sema.mod.deleteAnonDecl(&wad.block.base, new_decl);
try new_decl.finalizeNewArena(&wad.new_decl_arena);
wad.finished = true;
return new_decl;
}
};
};
};
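The intended usage pattern for `startAnonDecl` can be seen later in this diff, for example in `fieldPtr` and `analyzeRef`. Roughly (a sketch; `some_ty` and `some_val` are placeholders):

var anon_decl = try block.startAnonDecl();
defer anon_decl.deinit(); // frees the arena unless finish() succeeded
const new_decl = try anon_decl.finish(some_ty, try some_val.copy(anon_decl.arena()));
return sema.analyzeDeclRef(new_decl);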
@ -1700,6 +1745,7 @@ pub const SrcLoc = struct {
.node_offset_fn_type_cc => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = src_loc.declRelativeToNodeIndex(node_off);
var params: [1]ast.Node.Index = undefined;
@ -1708,6 +1754,13 @@ pub const SrcLoc = struct {
.fn_proto_multi => tree.fnProtoMulti(node),
.fn_proto_one => tree.fnProtoOne(&params, node),
.fn_proto => tree.fnProto(node),
.fn_decl => switch (node_tags[node_datas[node].lhs]) {
.fn_proto_simple => tree.fnProtoSimple(&params, node_datas[node].lhs),
.fn_proto_multi => tree.fnProtoMulti(node_datas[node].lhs),
.fn_proto_one => tree.fnProtoOne(&params, node_datas[node].lhs),
.fn_proto => tree.fnProto(node_datas[node].lhs),
else => unreachable,
},
else => unreachable,
};
const main_tokens = tree.nodes.items(.main_token);
@ -2825,6 +2878,7 @@ pub fn semaFile(mod: *Module, file: *Scope.File) SemaError!void {
new_decl.val = struct_val;
new_decl.has_tv = true;
new_decl.owns_tv = true;
new_decl.alive = true; // This Decl corresponds to a File and is therefore always alive.
new_decl.analysis = .in_progress;
new_decl.generation = mod.generation;
@ -2935,7 +2989,7 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
const break_index = try sema.analyzeBody(&block_scope, body);
const result_ref = zir_datas[break_index].@"break".operand;
const src: LazySrcLoc = .{ .node_offset = 0 };
const decl_tv = try sema.resolveInstConst(&block_scope, src, result_ref);
const decl_tv = try sema.resolveInstValue(&block_scope, src, result_ref);
const align_val = blk: {
const align_ref = decl.zirAlignRef();
if (align_ref == .none) break :blk Value.initTag(.null_value);
@ -2946,6 +3000,7 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
if (linksection_ref == .none) break :blk Value.initTag(.null_value);
break :blk (try sema.resolveInstConst(&block_scope, src, linksection_ref)).val;
};
try sema.resolveTypeLayout(&block_scope, src, decl_tv.ty);
// We need the memory for the Type to go into the arena for the Decl
var decl_arena = std.heap.ArenaAllocator.init(gpa);
@ -2983,8 +3038,8 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
const is_inline = decl_tv.ty.fnCallingConvention() == .Inline;
if (!is_inline and decl_tv.ty.hasCodeGenBits()) {
// We don't fully codegen the decl until later, but we do need to reserve a global
// offset table index for it. This allows us to codegen decls out of dependency order,
// increasing how many computations can be done in parallel.
// offset table index for it. This allows us to codegen decls out of dependency
// order, increasing how many computations can be done in parallel.
try mod.comp.bin_file.allocateDeclIndexes(decl);
try mod.comp.work_queue.writeItem(.{ .codegen_func = func });
if (type_changed and mod.emit_h != null) {
@ -3343,6 +3398,7 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) SemaError!voi
new_decl.has_align = has_align;
new_decl.has_linksection = has_linksection;
new_decl.zir_decl_index = @intCast(u32, decl_sub_index);
new_decl.alive = true; // This Decl corresponds to an AST node and is therefore always alive.
return;
}
gpa.free(decl_name);
@ -3482,6 +3538,43 @@ pub fn clearDecl(
decl.analysis = .unreferenced;
}
pub fn deleteUnusedDecl(mod: *Module, decl: *Decl) void {
log.debug("deleteUnusedDecl {*} ({s})", .{ decl, decl.name });
// TODO: remove `allocateDeclIndexes` and change the API so that the linker backends
// are required to notice the first time `updateDecl` happens and keep track
// of the Decl themselves. However, they can rely on getting a `freeDecl` call if any
// `updateDecl` or `updateFunc` calls happen. This will allow us to avoid any call
// into the linker backend here, since the linker backend will never have been told
// about the Decl in the first place.
// Until then, we did call `allocateDeclIndexes` on this anonymous Decl and so we
// must call `freeDecl` in the linker backend now.
if (decl.has_tv) {
if (decl.ty.hasCodeGenBits()) {
mod.comp.bin_file.freeDecl(decl);
}
}
const dependants = decl.dependants.keys();
assert(dependants[0].namespace.anon_decls.swapRemove(decl));
for (dependants) |dep| {
dep.removeDependency(decl);
}
for (decl.dependencies.keys()) |dep| {
dep.removeDependant(decl);
}
decl.destroy(mod);
}
pub fn deleteAnonDecl(mod: *Module, scope: *Scope, decl: *Decl) void {
log.debug("deleteAnonDecl {*} ({s})", .{ decl, decl.name });
const scope_decl = scope.ownerDecl().?;
assert(scope_decl.namespace.anon_decls.swapRemove(decl));
decl.destroy(mod);
}
/// Delete all the Export objects that are caused by this Decl. Re-analysis of
/// this Decl will cause them to be re-created (or not).
fn deleteDeclExports(mod: *Module, decl: *Decl) void {
@ -3603,7 +3696,6 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn) SemaError!Air {
.instructions = sema.air_instructions.toOwnedSlice(),
.extra = sema.air_extra.toOwnedSlice(gpa),
.values = sema.air_values.toOwnedSlice(gpa),
.variables = sema.air_variables.toOwnedSlice(gpa),
};
}
@ -3670,6 +3762,7 @@ fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: ast.Node
.is_exported = false,
.has_linksection = false,
.has_align = false,
.alive = false,
};
return new_decl;
}
@ -3759,12 +3852,6 @@ pub fn analyzeExport(
errdefer de_gop.value_ptr.* = mod.gpa.shrink(de_gop.value_ptr.*, de_gop.value_ptr.len - 1);
}
pub fn deleteAnonDecl(mod: *Module, scope: *Scope, decl: *Decl) void {
const scope_decl = scope.ownerDecl().?;
assert(scope_decl.namespace.anon_decls.swapRemove(decl));
decl.destroy(mod);
}
/// Takes ownership of `name` even if it returns an error.
pub fn createAnonymousDeclNamed(
mod: *Module,

View File

@ -14,7 +14,6 @@ code: Zir,
air_instructions: std.MultiArrayList(Air.Inst) = .{},
air_extra: std.ArrayListUnmanaged(u32) = .{},
air_values: std.ArrayListUnmanaged(Value) = .{},
air_variables: std.ArrayListUnmanaged(*Module.Var) = .{},
/// Maps ZIR to AIR.
inst_map: InstMap = .{},
/// When analyzing an inline function call, owner_decl is the Decl of the caller
@ -76,7 +75,6 @@ pub fn deinit(sema: *Sema) void {
sema.air_instructions.deinit(gpa);
sema.air_extra.deinit(gpa);
sema.air_values.deinit(gpa);
sema.air_variables.deinit(gpa);
sema.inst_map.deinit(gpa);
sema.decl_val_table.deinit(gpa);
sema.* = undefined;
@ -639,16 +637,40 @@ fn analyzeAsType(
return val.toType(sema.arena);
}
/// May return Value Tags: `variable`, `undef`.
/// See `resolveConstValue` for an alternative.
fn resolveValue(
sema: *Sema,
block: *Scope.Block,
src: LazySrcLoc,
air_ref: Air.Inst.Ref,
) CompileError!Value {
if (try sema.resolveMaybeUndefValAllowVariables(block, src, air_ref)) |val| {
return val;
}
return sema.failWithNeededComptime(block, src);
}
/// Will not return Value Tags `variable` or `undef`; those produce compile errors instead.
/// See `resolveValue` for an alternative.
fn resolveConstValue(
sema: *Sema,
block: *Scope.Block,
src: LazySrcLoc,
air_ref: Air.Inst.Ref,
) CompileError!Value {
return (try sema.resolveDefinedValue(block, src, air_ref)) orelse
return sema.failWithNeededComptime(block, src);
if (try sema.resolveMaybeUndefValAllowVariables(block, src, air_ref)) |val| {
switch (val.tag()) {
.undef => return sema.failWithUseOfUndef(block, src),
.variable => return sema.failWithNeededComptime(block, src),
else => return val,
}
}
return sema.failWithNeededComptime(block, src);
}
/// Value Tag `variable` causes this function to return `null`.
/// Value Tag `undef` causes this function to return a compile error.
fn resolveDefinedValue(
sema: *Sema,
block: *Scope.Block,
@ -664,11 +686,27 @@ fn resolveDefinedValue(
return null;
}
/// Value Tag `variable` causes this function to return `null`.
/// Value Tag `undef` causes this function to return the Value.
fn resolveMaybeUndefVal(
sema: *Sema,
block: *Scope.Block,
src: LazySrcLoc,
inst: Air.Inst.Ref,
) CompileError!?Value {
const val = (try sema.resolveMaybeUndefValAllowVariables(block, src, inst)) orelse return null;
if (val.tag() == .variable) {
return null;
}
return val;
}
/// Returns all Value tags including `variable` and `undef`.
fn resolveMaybeUndefValAllowVariables(
sema: *Sema,
block: *Scope.Block,
src: LazySrcLoc,
inst: Air.Inst.Ref,
) CompileError!?Value {
// The first section of indexes corresponds to a set number of constant values.
var i: usize = @enumToInt(inst);
@ -734,6 +772,8 @@ fn resolveInt(
return val.toUnsignedInt();
}
/// Returns a compile error if the value has tag `variable`. See `resolveInstValue` for
/// a function that does not.
pub fn resolveInstConst(
sema: *Sema,
block: *Scope.Block,
@ -748,6 +788,22 @@ pub fn resolveInstConst(
};
}
/// Value Tag may be `undef` or `variable`.
/// See `resolveInstConst` for an alternative.
pub fn resolveInstValue(
sema: *Sema,
block: *Scope.Block,
src: LazySrcLoc,
zir_ref: Zir.Inst.Ref,
) CompileError!TypedValue {
const air_ref = sema.resolveInst(zir_ref);
const val = try sema.resolveValue(block, src, air_ref);
return TypedValue{
.ty = sema.typeOf(air_ref),
.val = val,
};
}
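The practical difference shows up for declarations whose value has the `variable` tag, such as `extern var`: `semaDecl` above now uses `resolveInstValue` so that such declarations analyze successfully, while code that actually needs the runtime value at comptime still fails, as in the updated test case at the end of this diff:

comptime {
    const x = foo + foo; // error: unable to resolve comptime value
    _ = x;
}
extern var foo: i32;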
fn zirBitcastResultPtr(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
@ -1707,7 +1763,7 @@ fn zirStr(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileError!A
});
errdefer sema.mod.deleteAnonDecl(&block.base, new_decl);
try new_decl.finalizeNewArena(&new_decl_arena);
return sema.analyzeDeclRef(block, .unneeded, new_decl);
return sema.analyzeDeclRef(new_decl);
}
fn zirInt(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
@ -2090,10 +2146,7 @@ fn zirExport(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileErro
const linkage_index = struct_obj.fields.getIndex("linkage").?;
const section_index = struct_obj.fields.getIndex("section").?;
const export_name = try fields[name_index].toAllocatedBytes(sema.arena);
const linkage = fields[linkage_index].toEnum(
struct_obj.fields.values()[linkage_index].ty,
std.builtin.GlobalLinkage,
);
const linkage = fields[linkage_index].toEnum(std.builtin.GlobalLinkage);
if (linkage != .Strong) {
return sema.mod.fail(&block.base, src, "TODO: implement exporting with non-strong linkage", .{});
@ -2194,7 +2247,7 @@ fn zirDeclRef(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileErr
const src = inst_data.src();
const decl_name = inst_data.get(sema.code);
const decl = try sema.lookupIdentifier(block, src, decl_name);
return sema.analyzeDeclRef(block, src, decl);
return sema.analyzeDeclRef(decl);
}
fn zirDeclVal(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
@ -2864,12 +2917,13 @@ fn zirOptionalPayloadPtr(
const child_pointer = try Module.simplePtrType(sema.arena, child_type, !optional_ptr_ty.isConstPtr(), .One);
if (try sema.resolveDefinedValue(block, src, optional_ptr)) |pointer_val| {
const val = try pointer_val.pointerDeref(sema.arena);
if (val.isNull()) {
return sema.mod.fail(&block.base, src, "unable to unwrap null", .{});
if (try pointer_val.pointerDeref(sema.arena)) |val| {
if (val.isNull()) {
return sema.mod.fail(&block.base, src, "unable to unwrap null", .{});
}
// The same Value represents the pointer to the optional and the payload.
return sema.addConstant(child_pointer, pointer_val);
}
// The same Value represents the pointer to the optional and the payload.
return sema.addConstant(child_pointer, pointer_val);
}
try sema.requireRuntimeBlock(block, src);
@ -2974,19 +3028,15 @@ fn zirErrUnionPayloadPtr(
const operand_pointer_ty = try Module.simplePtrType(sema.arena, payload_ty, !operand_ty.isConstPtr(), .One);
if (try sema.resolveDefinedValue(block, src, operand)) |pointer_val| {
const val = try pointer_val.pointerDeref(sema.arena);
if (val.getError()) |name| {
return sema.mod.fail(&block.base, src, "caught unexpected error '{s}'", .{name});
if (try pointer_val.pointerDeref(sema.arena)) |val| {
if (val.getError()) |name| {
return sema.mod.fail(&block.base, src, "caught unexpected error '{s}'", .{name});
}
return sema.addConstant(
operand_pointer_ty,
try Value.Tag.eu_payload_ptr.create(sema.arena, pointer_val),
);
}
const data = val.castTag(.error_union).?.data;
// The same Value represents the pointer to the error union and the payload.
return sema.addConstant(
operand_pointer_ty,
try Value.Tag.ref_val.create(
sema.arena,
data,
),
);
}
try sema.requireRuntimeBlock(block, src);
@ -3038,10 +3088,11 @@ fn zirErrUnionCodePtr(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) Co
const result_ty = operand_ty.elemType().errorUnionSet();
if (try sema.resolveDefinedValue(block, src, operand)) |pointer_val| {
const val = try pointer_val.pointerDeref(sema.arena);
assert(val.getError() != null);
const data = val.castTag(.error_union).?.data;
return sema.addConstant(result_ty, data);
if (try pointer_val.pointerDeref(sema.arena)) |val| {
assert(val.getError() != null);
const data = val.castTag(.error_union).?.data;
return sema.addConstant(result_ty, data);
}
}
try sema.requireRuntimeBlock(block, src);
@ -4872,10 +4923,13 @@ fn analyzeArithmetic(
log.debug("{s}({}, {}) result: {}", .{ @tagName(zir_tag), lhs_val, rhs_val, value });
return sema.addConstant(scalar_type, value);
} else {
try sema.requireRuntimeBlock(block, rhs_src);
}
} else {
try sema.requireRuntimeBlock(block, lhs_src);
}
try sema.requireRuntimeBlock(block, src);
const air_tag: Air.Inst.Tag = switch (zir_tag) {
.add => .add,
.addwrap => .addwrap,
@ -6296,7 +6350,7 @@ fn zirFuncExtended(
const cc_ref = @intToEnum(Zir.Inst.Ref, sema.code.extra[extra_index]);
extra_index += 1;
const cc_tv = try sema.resolveInstConst(block, cc_src, cc_ref);
break :blk cc_tv.val.toEnum(cc_tv.ty, std.builtin.CallingConvention);
break :blk cc_tv.val.toEnum(std.builtin.CallingConvention);
} else .Unspecified;
const align_val: Value = if (small.has_align) blk: {
@ -6554,7 +6608,7 @@ fn safetyPanic(
});
errdefer sema.mod.deleteAnonDecl(&block.base, new_decl);
try new_decl.finalizeNewArena(&new_decl_arena);
break :msg_inst try sema.analyzeDeclRef(block, .unneeded, new_decl);
break :msg_inst try sema.analyzeDeclRef(new_decl);
};
const casted_msg_inst = try sema.coerce(block, Type.initTag(.const_slice_u8), msg_inst, src);
@ -6761,13 +6815,12 @@ fn fieldPtr(
switch (object_ty.zigTypeTag()) {
.Array => {
if (mem.eql(u8, field_name, "len")) {
return sema.addConstant(
Type.initTag(.single_const_pointer_to_comptime_int),
try Value.Tag.ref_val.create(
arena,
try Value.Tag.int_u64.create(arena, object_ty.arrayLen()),
),
);
var anon_decl = try block.startAnonDecl();
defer anon_decl.deinit();
return sema.analyzeDeclRef(try anon_decl.finish(
Type.initTag(.comptime_int),
try Value.Tag.int_u64.create(anon_decl.arena(), object_ty.arrayLen()),
));
} else {
return mod.fail(
&block.base,
@ -6780,18 +6833,25 @@ fn fieldPtr(
.Pointer => {
const ptr_child = object_ty.elemType();
if (ptr_child.isSlice()) {
// Here, for the ptr and len fields, we need to handle the situation where a
// temporary has its address taken, e.g. `&a[c..d].len`.
// This value may be known at compile-time or runtime. In the former
// case, it should create an anonymous Decl and return a decl_ref to it.
// In the latter case, it should add an `alloc` instruction, store
// the runtime value to it, and then return the `alloc`.
// In both cases the pointer should be const.
if (mem.eql(u8, field_name, "ptr")) {
return mod.fail(
&block.base,
field_name_src,
"cannot obtain reference to pointer field of slice '{}'",
"TODO: implement reference to 'ptr' field of slice '{}'",
.{object_ty},
);
} else if (mem.eql(u8, field_name, "len")) {
return mod.fail(
&block.base,
field_name_src,
"cannot obtain reference to length field of slice '{}'",
"TODO: implement reference to 'len' field of slice '{}'",
.{object_ty},
);
} else {
@ -6805,13 +6865,12 @@ fn fieldPtr(
} else switch (ptr_child.zigTypeTag()) {
.Array => {
if (mem.eql(u8, field_name, "len")) {
return sema.addConstant(
Type.initTag(.single_const_pointer_to_comptime_int),
try Value.Tag.ref_val.create(
arena,
try Value.Tag.int_u64.create(arena, ptr_child.arrayLen()),
),
);
var anon_decl = try block.startAnonDecl();
defer anon_decl.deinit();
return sema.analyzeDeclRef(try anon_decl.finish(
Type.initTag(.comptime_int),
try Value.Tag.int_u64.create(anon_decl.arena(), ptr_child.arrayLen()),
));
} else {
return mod.fail(
&block.base,
@ -6848,15 +6907,12 @@ fn fieldPtr(
});
} else (try mod.getErrorValue(field_name)).key;
return sema.addConstant(
try Module.simplePtrType(arena, child_type, false, .One),
try Value.Tag.ref_val.create(
arena,
try Value.Tag.@"error".create(arena, .{
.name = name,
}),
),
);
var anon_decl = try block.startAnonDecl();
defer anon_decl.deinit();
return sema.analyzeDeclRef(try anon_decl.finish(
child_type,
try Value.Tag.@"error".create(anon_decl.arena(), .{ .name = name }),
));
},
.Struct, .Opaque, .Union => {
if (child_type.getNamespace()) |namespace| {
@ -6901,11 +6957,12 @@ fn fieldPtr(
return mod.failWithOwnedErrorMsg(&block.base, msg);
};
const field_index_u32 = @intCast(u32, field_index);
const enum_val = try Value.Tag.enum_field_index.create(arena, field_index_u32);
return sema.addConstant(
try Module.simplePtrType(arena, child_type, false, .One),
try Value.Tag.ref_val.create(arena, enum_val),
);
var anon_decl = try block.startAnonDecl();
defer anon_decl.deinit();
return sema.analyzeDeclRef(try anon_decl.finish(
child_type,
try Value.Tag.enum_field_index.create(anon_decl.arena(), field_index_u32),
));
},
else => return mod.fail(&block.base, src, "type '{}' has no members", .{child_type}),
}
@ -6951,7 +7008,7 @@ fn namespaceLookupRef(
decl_name: []const u8,
) CompileError!?Air.Inst.Ref {
const decl = (try sema.namespaceLookup(block, src, namespace, decl_name)) orelse return null;
return try sema.analyzeDeclRef(block, src, decl);
return try sema.analyzeDeclRef(decl);
}
fn structFieldPtr(
@ -7207,13 +7264,15 @@ fn elemPtrArray(
fn coerce(
sema: *Sema,
block: *Scope.Block,
dest_type: Type,
dest_type_unresolved: Type,
inst: Air.Inst.Ref,
inst_src: LazySrcLoc,
) CompileError!Air.Inst.Ref {
if (dest_type.tag() == .var_args_param) {
if (dest_type_unresolved.tag() == .var_args_param) {
return sema.coerceVarArgParam(block, inst, inst_src);
}
const dest_type_src = inst_src; // TODO better source location
const dest_type = try sema.resolveTypeFields(block, dest_type_src, dest_type_unresolved);
const inst_ty = sema.typeOf(inst);
// If the types are the same, we can return the operand.
@ -7554,17 +7613,17 @@ fn analyzeDeclVal(
if (sema.decl_val_table.get(decl)) |result| {
return result;
}
const decl_ref = try sema.analyzeDeclRef(block, src, decl);
const decl_ref = try sema.analyzeDeclRef(decl);
const result = try sema.analyzeLoad(block, src, decl_ref, src);
if (Air.refToIndex(result)) |index| {
if (sema.air_instructions.items(.tag)[index] == .constant) {
sema.decl_val_table.put(sema.gpa, decl, result) catch {};
try sema.decl_val_table.put(sema.gpa, decl, result);
}
}
return result;
}
fn analyzeDeclRef(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, decl: *Decl) CompileError!Air.Inst.Ref {
fn analyzeDeclRef(sema: *Sema, decl: *Decl) CompileError!Air.Inst.Ref {
try sema.mod.declareDeclDependency(sema.owner_decl, decl);
sema.mod.ensureDeclAnalyzed(decl) catch |err| {
if (sema.func) |func| {
@ -7576,8 +7635,10 @@ fn analyzeDeclRef(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, decl: *Decl
};
const decl_tv = try decl.typedValue();
if (decl_tv.val.tag() == .variable) {
return sema.analyzeVarRef(block, src, decl_tv);
if (decl_tv.val.castTag(.variable)) |payload| {
const variable = payload.data;
const ty = try Module.simplePtrType(sema.arena, decl_tv.ty, variable.is_mutable, .One);
return sema.addConstant(ty, try Value.Tag.decl_ref.create(sema.arena, decl));
}
return sema.addConstant(
try Module.simplePtrType(sema.arena, decl_tv.ty, false, .One),
@ -7585,26 +7646,6 @@ fn analyzeDeclRef(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, decl: *Decl
);
}
fn analyzeVarRef(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, tv: TypedValue) CompileError!Air.Inst.Ref {
const variable = tv.val.castTag(.variable).?.data;
const ty = try Module.simplePtrType(sema.arena, tv.ty, variable.is_mutable, .One);
if (!variable.is_mutable and !variable.is_extern) {
return sema.addConstant(ty, try Value.Tag.ref_val.create(sema.arena, variable.init));
}
const gpa = sema.gpa;
try sema.requireRuntimeBlock(block, src);
try sema.air_variables.append(gpa, variable);
return block.addInst(.{
.tag = .varptr,
.data = .{ .ty_pl = .{
.ty = try sema.addType(ty),
.payload = @intCast(u32, sema.air_variables.items.len - 1),
} },
});
}
fn analyzeRef(
sema: *Sema,
block: *Scope.Block,
@ -7612,14 +7653,21 @@ fn analyzeRef(
operand: Air.Inst.Ref,
) CompileError!Air.Inst.Ref {
const operand_ty = sema.typeOf(operand);
const ptr_type = try Module.simplePtrType(sema.arena, operand_ty, false, .One);
if (try sema.resolveMaybeUndefVal(block, src, operand)) |val| {
return sema.addConstant(ptr_type, try Value.Tag.ref_val.create(sema.arena, val));
var anon_decl = try block.startAnonDecl();
defer anon_decl.deinit();
return sema.analyzeDeclRef(try anon_decl.finish(
operand_ty,
try val.copy(anon_decl.arena()),
));
}
try sema.requireRuntimeBlock(block, src);
return block.addTyOp(.ref, ptr_type, operand);
const ptr_type = try Module.simplePtrType(sema.arena, operand_ty, false, .One);
const alloc = try block.addTy(.alloc, ptr_type);
try sema.storePtr(block, src, alloc, operand);
return alloc;
}
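In user-level terms, `analyzeRef` now takes one of two paths depending on whether the operand is comptime-known. A sketch (illustrative only; `entry`, `deref`, and `runtime_arg` are hypothetical names):

fn deref(p: *const u32) u32 {
    return p.*;
}

export fn entry(runtime_arg: u32) u32 {
    const comptime_known: u32 = 1;
    // `&comptime_known` is comptime-known: it becomes a `decl_ref` to an
    // anonymous Decl holding the value 1.
    // `&runtime_arg` is runtime-known: it is lowered as an `alloc` followed by a
    // `store`, and the `alloc` itself is the result.
    return deref(&comptime_known) + deref(&runtime_arg);
}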
fn analyzeLoad(
@ -7634,11 +7682,10 @@ fn analyzeLoad(
.Pointer => ptr_ty.elemType(),
else => return sema.mod.fail(&block.base, ptr_src, "expected pointer, found '{}'", .{ptr_ty}),
};
if (try sema.resolveDefinedValue(block, ptr_src, ptr)) |ptr_val| blk: {
if (ptr_val.tag() == .int_u64)
break :blk; // do it at runtime
return sema.addConstant(elem_ty, try ptr_val.pointerDeref(sema.arena));
if (try sema.resolveDefinedValue(block, ptr_src, ptr)) |ptr_val| {
if (try ptr_val.pointerDeref(sema.arena)) |elem_val| {
return sema.addConstant(elem_ty, elem_val);
}
}
try sema.requireRuntimeBlock(block, src);
@ -8146,6 +8193,36 @@ fn resolvePeerTypes(
return sema.typeOf(chosen);
}
pub fn resolveTypeLayout(
sema: *Sema,
block: *Scope.Block,
src: LazySrcLoc,
ty: Type,
) CompileError!void {
switch (ty.zigTypeTag()) {
.Pointer => {
return sema.resolveTypeLayout(block, src, ty.elemType());
},
.Struct => {
const resolved_ty = try sema.resolveTypeFields(block, src, ty);
const struct_obj = resolved_ty.castTag(.@"struct").?.data;
switch (struct_obj.status) {
.none, .have_field_types => {},
.field_types_wip, .layout_wip => {
return sema.mod.fail(&block.base, src, "struct {} depends on itself", .{ty});
},
.have_layout => return,
}
struct_obj.status = .layout_wip;
for (struct_obj.fields.values()) |field| {
try sema.resolveTypeLayout(block, src, field.ty);
}
struct_obj.status = .have_layout;
},
else => {},
}
}
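`resolveTypeLayout` recurses through pointer element types and struct field types, using the `layout_wip` status to detect cycles. A struct that ends up containing itself by value has no finite layout and reports the self-dependency error shown above (illustrative; exact wording per the message in this function):

const S = struct {
    next: S, // error: struct 'S' depends on itself
};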
fn resolveTypeFields(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, ty: Type) CompileError!Type {
switch (ty.tag()) {
.@"struct" => {
@ -8153,9 +8230,7 @@ fn resolveTypeFields(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, ty: Type
switch (struct_obj.status) {
.none => {},
.field_types_wip => {
return sema.mod.fail(&block.base, src, "struct {} depends on itself", .{
ty,
});
return sema.mod.fail(&block.base, src, "struct {} depends on itself", .{ty});
},
.have_field_types, .have_layout, .layout_wip => return ty,
}
@ -8447,12 +8522,12 @@ fn getTmpAir(sema: Sema) Air {
.instructions = sema.air_instructions.slice(),
.extra = sema.air_extra.items,
.values = sema.air_values.items,
.variables = sema.air_variables.items,
};
}
pub fn addType(sema: *Sema, ty: Type) !Air.Inst.Ref {
switch (ty.tag()) {
.u1 => return .u1_type,
.u8 => return .u8_type,
.i8 => return .i8_type,
.u16 => return .u16_type,

View File

@ -184,6 +184,7 @@ pub fn generateSymbol(
if (typed_value.val.castTag(.decl_ref)) |payload| {
const decl = payload.data;
if (decl.analysis != .complete) return error.AnalysisFail;
decl.alive = true;
// TODO handle the dependency of this symbol on the decl's vaddr.
// If the decl changes vaddr, then this symbol needs to get regenerated.
const vaddr = bin_file.getDeclVAddr(decl);
@ -848,13 +849,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.loop => try self.airLoop(inst),
.not => try self.airNot(inst),
.ptrtoint => try self.airPtrToInt(inst),
.ref => try self.airRef(inst),
.ret => try self.airRet(inst),
.store => try self.airStore(inst),
.struct_field_ptr=> try self.airStructFieldPtr(inst),
.struct_field_val=> try self.airStructFieldVal(inst),
.switch_br => try self.airSwitch(inst),
.varptr => try self.airVarPtr(inst),
.slice_ptr => try self.airSlicePtr(inst),
.slice_len => try self.airSliceLen(inst),
@ -1340,13 +1339,6 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
}
fn airVarPtr(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else switch (arch) {
else => return self.fail("TODO implement varptr for {}", .{self.target.cpu.arch}),
};
return self.finishAir(inst, result, .{ .none, .none, .none });
}
fn airSlicePtr(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else switch (arch) {
@ -2833,38 +2825,6 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
return bt.finishAir(result);
}
fn airRef(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ty = self.air.typeOf(ty_op.operand);
const operand = try self.resolveInst(ty_op.operand);
switch (operand) {
.unreach => unreachable,
.dead => unreachable,
.none => break :result MCValue{ .none = {} },
.immediate,
.register,
.ptr_stack_offset,
.ptr_embedded_in_code,
.compare_flags_unsigned,
.compare_flags_signed,
=> {
const stack_offset = try self.allocMemPtr(inst);
try self.genSetStack(operand_ty, stack_offset, operand);
break :result MCValue{ .ptr_stack_offset = stack_offset };
},
.stack_offset => |offset| break :result MCValue{ .ptr_stack_offset = offset },
.embedded_in_code => |offset| break :result MCValue{ .ptr_embedded_in_code = offset },
.memory => |vaddr| break :result MCValue{ .immediate = vaddr },
.undef => return self.fail("TODO implement ref on an undefined value", .{}),
}
};
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
}
fn ret(self: *Self, mcv: MCValue) !void {
const ret_ty = self.fn_type.fnReturnType();
try self.setRegOrMem(ret_ty, self.ret_mcv, mcv);
@ -4721,13 +4681,13 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
},
else => {
if (typed_value.val.castTag(.decl_ref)) |payload| {
const decl = payload.data;
decl.alive = true;
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const decl = payload.data;
const got = &elf_file.program_headers.items[elf_file.phdr_got_index.?];
const got_addr = got.p_vaddr + decl.link.elf.offset_table_index * ptr_bytes;
return MCValue{ .memory = got_addr };
} else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
const decl = payload.data;
const got_addr = blk: {
const seg = macho_file.load_commands.items[macho_file.data_const_segment_cmd_index.?].Segment;
const got = seg.sections.items[macho_file.got_section_index.?];
@ -4739,11 +4699,9 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
};
return MCValue{ .memory = got_addr };
} else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
const decl = payload.data;
const got_addr = coff_file.offset_table_virtual_address + decl.link.coff.offset_table_index * ptr_bytes;
return MCValue{ .memory = got_addr };
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
const decl = payload.data;
const got_addr = p9.bases.data + decl.link.plan9.got_index.? * ptr_bytes;
return MCValue{ .memory = got_addr };
} else {

View File

@ -262,6 +262,7 @@ pub const DeclGen = struct {
.one => try writer.writeAll("1"),
.decl_ref => {
const decl = val.castTag(.decl_ref).?.data;
decl.alive = true;
// Determine if we must pointer cast.
assert(decl.has_tv);
@ -281,36 +282,7 @@ pub const DeclGen = struct {
const decl = val.castTag(.extern_fn).?.data;
try writer.print("{s}", .{decl.name});
},
else => switch (t.ptrSize()) {
.Slice => unreachable,
.Many => {
if (val.castTag(.ref_val)) |ref_val_payload| {
const sub_val = ref_val_payload.data;
if (sub_val.castTag(.bytes)) |bytes_payload| {
const bytes = bytes_payload.data;
try writer.writeByte('(');
try dg.renderType(writer, t);
// TODO: make our own C string escape instead of using std.zig.fmtEscapes
try writer.print(")\"{}\"", .{std.zig.fmtEscapes(bytes)});
} else {
unreachable;
}
} else {
unreachable;
}
},
.One => {
var arena = std.heap.ArenaAllocator.init(dg.module.gpa);
defer arena.deinit();
const elem_ty = t.elemType();
const elem_val = try val.pointerDeref(&arena.allocator);
try writer.writeAll("&");
try dg.renderValue(writer, elem_ty, elem_val);
},
.C => unreachable,
},
else => unreachable,
},
},
.Array => {
@ -436,6 +408,7 @@ pub const DeclGen = struct {
.one => try writer.writeAll("1"),
.decl_ref => {
const decl = val.castTag(.decl_ref).?.data;
decl.alive = true;
// Determine if we must pointer cast.
assert(decl.has_tv);
@ -448,11 +421,13 @@ pub const DeclGen = struct {
}
},
.function => {
const func = val.castTag(.function).?.data;
try writer.print("{s}", .{func.owner_decl.name});
const decl = val.castTag(.function).?.data.owner_decl;
decl.alive = true;
try writer.print("{s}", .{decl.name});
},
.extern_fn => {
const decl = val.castTag(.extern_fn).?.data;
decl.alive = true;
try writer.print("{s}", .{decl.name});
},
else => unreachable,
@ -934,10 +909,8 @@ fn genBody(o: *Object, body: []const Air.Inst.Index) error{ AnalysisFail, OutOfM
.br => try airBr(o, inst),
.switch_br => try airSwitchBr(o, inst),
.wrap_optional => try airWrapOptional(o, inst),
.ref => try airRef(o, inst),
.struct_field_ptr => try airStructFieldPtr(o, inst),
.struct_field_val => try airStructFieldVal(o, inst),
.varptr => try airVarPtr(o, inst),
.slice_ptr => try airSliceField(o, inst, ".ptr;\n"),
.slice_len => try airSliceField(o, inst, ".len;\n"),
@ -996,12 +969,6 @@ fn airSliceElemVal(o: *Object, inst: Air.Inst.Index, prefix: []const u8) !CValue
return local;
}
fn airVarPtr(o: *Object, inst: Air.Inst.Index) !CValue {
const ty_pl = o.air.instructions.items(.data)[inst].ty_pl;
const variable = o.air.variables[ty_pl.payload];
return CValue{ .decl_ref = variable.owner_decl };
}
fn airAlloc(o: *Object, inst: Air.Inst.Index) !CValue {
const writer = o.writer();
const inst_ty = o.air.typeOfIndex(inst);
@ -1653,22 +1620,6 @@ fn airOptionalPayload(o: *Object, inst: Air.Inst.Index) !CValue {
return local;
}
fn airRef(o: *Object, inst: Air.Inst.Index) !CValue {
if (o.liveness.isUnused(inst))
return CValue.none;
const ty_op = o.air.instructions.items(.data)[inst].ty_op;
const writer = o.writer();
const operand = try o.resolveInst(ty_op.operand);
const inst_ty = o.air.typeOfIndex(inst);
const local = try o.allocLocal(inst_ty, .Const);
try writer.writeAll(" = ");
try o.writeCValue(writer, operand);
try writer.writeAll(";\n");
return local;
}
fn airStructFieldPtr(o: *Object, inst: Air.Inst.Index) !CValue {
if (o.liveness.isUnused(inst))
return CValue.none;

View File

@ -673,17 +673,21 @@ pub const DeclGen = struct {
}
fn genTypedValue(self: *DeclGen, tv: TypedValue) error{ OutOfMemory, CodegenFail }!*const llvm.Value {
const llvm_type = try self.llvmType(tv.ty);
if (tv.val.isUndef())
if (tv.val.isUndef()) {
const llvm_type = try self.llvmType(tv.ty);
return llvm_type.getUndef();
}
switch (tv.ty.zigTypeTag()) {
.Bool => return if (tv.val.toBool()) llvm_type.constAllOnes() else llvm_type.constNull(),
.Bool => {
const llvm_type = try self.llvmType(tv.ty);
return if (tv.val.toBool()) llvm_type.constAllOnes() else llvm_type.constNull();
},
.Int => {
var bigint_space: Value.BigIntSpace = undefined;
const bigint = tv.val.toBigInt(&bigint_space);
const llvm_type = try self.llvmType(tv.ty);
if (bigint.eqZero()) return llvm_type.constNull();
if (bigint.limbs.len != 1) {
@ -698,30 +702,18 @@ pub const DeclGen = struct {
.Pointer => switch (tv.val.tag()) {
.decl_ref => {
const decl = tv.val.castTag(.decl_ref).?.data;
decl.alive = true;
const val = try self.resolveGlobalDecl(decl);
const usize_type = try self.llvmType(Type.initTag(.usize));
// TODO: second index should be the index into the memory!
var indices: [2]*const llvm.Value = .{
usize_type.constNull(),
usize_type.constNull(),
};
return val.constInBoundsGEP(&indices, indices.len);
},
.ref_val => {
//const elem_value = tv.val.castTag(.ref_val).?.data;
//const elem_type = tv.ty.castPointer().?.data;
//const alloca = fg.?.buildAlloca(try self.llvmType(elem_type));
//_ = fg.?.builder.buildStore(try self.genTypedValue(.{ .ty = elem_type, .val = elem_value }, fg), alloca);
//return alloca;
// TODO eliminate the ref_val Value Tag
return self.todo("implement const of pointer tag ref_val", .{});
const llvm_type = try self.llvmType(tv.ty);
return val.constBitCast(llvm_type);
},
.variable => {
const variable = tv.val.castTag(.variable).?.data;
return self.resolveGlobalDecl(variable.owner_decl);
const decl = tv.val.castTag(.variable).?.data.owner_decl;
decl.alive = true;
const val = try self.resolveGlobalDecl(decl);
const llvm_var_type = try self.llvmType(tv.ty);
const llvm_type = llvm_var_type.pointerType(0);
return val.constBitCast(llvm_type);
},
.slice => {
const slice = tv.val.castTag(.slice).?.data;
@ -800,6 +792,7 @@ pub const DeclGen = struct {
.decl_ref => tv.val.castTag(.decl_ref).?.data,
else => unreachable,
};
fn_decl.alive = true;
return self.resolveLlvmFunction(fn_decl);
},
.ErrorSet => {
@ -920,9 +913,7 @@ pub const FuncGen = struct {
return self.dg.genTypedValue(.{ .ty = self.air.typeOf(inst), .val = val });
}
const inst_index = Air.refToIndex(inst).?;
if (self.func_inst_table.get(inst_index)) |value| return value;
return self.todo("implement global llvm values (or the value is not in the func_inst_table table)", .{});
return self.func_inst_table.get(inst_index).?;
}
fn genBody(self: *FuncGen, body: []const Air.Inst.Index) error{ OutOfMemory, CodegenFail }!void {
@ -977,15 +968,14 @@ pub const FuncGen = struct {
.ret => try self.airRet(inst),
.store => try self.airStore(inst),
.assembly => try self.airAssembly(inst),
.varptr => try self.airVarPtr(inst),
.slice_ptr => try self.airSliceField(inst, 0),
.slice_len => try self.airSliceField(inst, 1),
.struct_field_ptr => try self.airStructFieldPtr(inst),
.struct_field_val => try self.airStructFieldVal(inst),
.slice_elem_val => try self.airSliceElemVal(inst, false),
.ptr_slice_elem_val => try self.airSliceElemVal(inst, true),
.slice_elem_val => try self.airSliceElemVal(inst),
.ptr_slice_elem_val => try self.airPtrSliceElemVal(inst),
.optional_payload => try self.airOptionalPayload(inst, false),
.optional_payload_ptr => try self.airOptionalPayload(inst, true),
@ -1001,7 +991,6 @@ pub const FuncGen = struct {
.constant => unreachable,
.const_ty => unreachable,
.ref => unreachable, // TODO eradicate this instruction
.unreach => self.airUnreach(inst),
.dbg_stmt => blk: {
// TODO: implement debug info
@ -1180,16 +1169,6 @@ pub const FuncGen = struct {
return null;
}
fn airVarPtr(self: *FuncGen, inst: Air.Inst.Index) !?*const llvm.Value {
if (self.liveness.isUnused(inst))
return null;
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const variable = self.air.variables[ty_pl.payload];
const decl_llvm_value = self.dg.resolveGlobalDecl(variable.owner_decl);
return decl_llvm_value;
}
fn airSliceField(self: *FuncGen, inst: Air.Inst.Index, index: c_uint) !?*const llvm.Value {
if (self.liveness.isUnused(inst))
return null;
@ -1199,11 +1178,20 @@ pub const FuncGen = struct {
return self.builder.buildExtractValue(operand, index, "");
}
fn airSliceElemVal(
self: *FuncGen,
inst: Air.Inst.Index,
operand_is_ptr: bool,
) !?*const llvm.Value {
fn airSliceElemVal(self: *FuncGen, inst: Air.Inst.Index) !?*const llvm.Value {
if (self.liveness.isUnused(inst))
return null;
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const base_ptr = self.builder.buildExtractValue(lhs, 0, "");
const indices: [1]*const llvm.Value = .{rhs};
const ptr = self.builder.buildInBoundsGEP(base_ptr, &indices, indices.len, "");
return self.builder.buildLoad(ptr, "");
}
fn airPtrSliceElemVal(self: *FuncGen, inst: Air.Inst.Index) !?*const llvm.Value {
if (self.liveness.isUnused(inst))
return null;
@ -1211,7 +1199,7 @@ pub const FuncGen = struct {
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const base_ptr = if (!operand_is_ptr) lhs else ptr: {
const base_ptr = ptr: {
const index_type = self.context.intType(32);
const indices: [2]*const llvm.Value = .{
index_type.constNull(),

View File

@ -112,6 +112,9 @@ pub const Value = opaque {
ConstantIndices: [*]const *const Value,
NumIndices: c_uint,
) *const Value;
pub const constBitCast = LLVMConstBitCast;
extern fn LLVMConstBitCast(ConstantVal: *const Value, ToType: *const Type) *const Value;
};
pub const Type = opaque {

View File

@ -754,22 +754,21 @@ pub const Context = struct {
}
/// Generates the wasm bytecode for the declaration belonging to `Context`
pub fn gen(self: *Context, typed_value: TypedValue) InnerError!Result {
switch (typed_value.ty.zigTypeTag()) {
pub fn gen(self: *Context, ty: Type, val: Value) InnerError!Result {
switch (ty.zigTypeTag()) {
.Fn => {
try self.genFunctype();
if (typed_value.val.castTag(.extern_fn)) |_| return Result.appended; // don't need code body for extern functions
if (val.tag() == .extern_fn) {
return Result.appended; // don't need code body for extern functions
}
return self.fail("TODO implement wasm codegen for function pointers", .{});
},
.Array => {
if (typed_value.val.castTag(.bytes)) |payload| {
if (typed_value.ty.sentinel()) |sentinel| {
if (val.castTag(.bytes)) |payload| {
if (ty.sentinel()) |sentinel| {
try self.code.appendSlice(payload.data);
switch (try self.gen(.{
.ty = typed_value.ty.elemType(),
.val = sentinel,
})) {
switch (try self.gen(ty.elemType(), sentinel)) {
.appended => return Result.appended,
.externally_managed => |data| {
try self.code.appendSlice(data);
@ -781,13 +780,17 @@ pub const Context = struct {
} else return self.fail("TODO implement gen for more kinds of arrays", .{});
},
.Int => {
const info = typed_value.ty.intInfo(self.target);
const info = ty.intInfo(self.target);
if (info.bits == 8 and info.signedness == .unsigned) {
const int_byte = typed_value.val.toUnsignedInt();
const int_byte = val.toUnsignedInt();
try self.code.append(@intCast(u8, int_byte));
return Result.appended;
}
return self.fail("TODO: Implement codegen for int type: '{}'", .{typed_value.ty});
return self.fail("TODO: Implement codegen for int type: '{}'", .{ty});
},
.Enum => {
try self.emitConstant(val, ty);
return Result.appended;
},
else => |tag| return self.fail("TODO: Implement zig type codegen for type: '{s}'", .{tag}),
}
@ -969,7 +972,7 @@ pub const Context = struct {
return WValue{ .code_offset = offset };
}
fn emitConstant(self: *Context, value: Value, ty: Type) InnerError!void {
fn emitConstant(self: *Context, val: Value, ty: Type) InnerError!void {
const writer = self.code.writer();
switch (ty.zigTypeTag()) {
.Int => {
@ -982,10 +985,10 @@ pub const Context = struct {
const int_info = ty.intInfo(self.target);
// write constant
switch (int_info.signedness) {
.signed => try leb.writeILEB128(writer, value.toSignedInt()),
.signed => try leb.writeILEB128(writer, val.toSignedInt()),
.unsigned => switch (int_info.bits) {
0...32 => try leb.writeILEB128(writer, @bitCast(i32, @intCast(u32, value.toUnsignedInt()))),
33...64 => try leb.writeILEB128(writer, @bitCast(i64, value.toUnsignedInt())),
0...32 => try leb.writeILEB128(writer, @bitCast(i32, @intCast(u32, val.toUnsignedInt()))),
33...64 => try leb.writeILEB128(writer, @bitCast(i64, val.toUnsignedInt())),
else => |bits| return self.fail("Wasm TODO: emitConstant for integer with {d} bits", .{bits}),
},
}
@ -994,7 +997,7 @@ pub const Context = struct {
// write opcode
try writer.writeByte(wasm.opcode(.i32_const));
// write constant
try leb.writeILEB128(writer, value.toSignedInt());
try leb.writeILEB128(writer, val.toSignedInt());
},
.Float => {
// write opcode
@ -1005,14 +1008,15 @@ pub const Context = struct {
try writer.writeByte(wasm.opcode(opcode));
// write constant
switch (ty.floatBits(self.target)) {
0...32 => try writer.writeIntLittle(u32, @bitCast(u32, value.toFloat(f32))),
64 => try writer.writeIntLittle(u64, @bitCast(u64, value.toFloat(f64))),
0...32 => try writer.writeIntLittle(u32, @bitCast(u32, val.toFloat(f32))),
64 => try writer.writeIntLittle(u64, @bitCast(u64, val.toFloat(f64))),
else => |bits| return self.fail("Wasm TODO: emitConstant for float with {d} bits", .{bits}),
}
},
.Pointer => {
if (value.castTag(.decl_ref)) |payload| {
if (val.castTag(.decl_ref)) |payload| {
const decl = payload.data;
decl.alive = true;
// offset into the offset table within the 'data' section
const ptr_width = self.target.cpu.arch.ptrBitWidth() / 8;
@ -1024,11 +1028,11 @@ pub const Context = struct {
try writer.writeByte(wasm.opcode(.i32_load));
try leb.writeULEB128(writer, @as(u32, 0));
try leb.writeULEB128(writer, @as(u32, 0));
} else return self.fail("Wasm TODO: emitConstant for other const pointer tag {s}", .{value.tag()});
} else return self.fail("Wasm TODO: emitConstant for other const pointer tag {s}", .{val.tag()});
},
.Void => {},
.Enum => {
if (value.castTag(.enum_field_index)) |field_index| {
if (val.castTag(.enum_field_index)) |field_index| {
switch (ty.tag()) {
.enum_simple => {
try writer.writeByte(wasm.opcode(.i32_const));
@ -1049,20 +1053,20 @@ pub const Context = struct {
} else {
var int_tag_buffer: Type.Payload.Bits = undefined;
const int_tag_ty = ty.intTagType(&int_tag_buffer);
try self.emitConstant(value, int_tag_ty);
try self.emitConstant(val, int_tag_ty);
}
},
.ErrorSet => {
const error_index = self.global_error_set.get(value.getError().?).?;
const error_index = self.global_error_set.get(val.getError().?).?;
try writer.writeByte(wasm.opcode(.i32_const));
try leb.writeULEB128(writer, error_index);
},
.ErrorUnion => {
const data = value.castTag(.error_union).?.data;
const data = val.castTag(.error_union).?.data;
const error_type = ty.errorUnionSet();
const payload_type = ty.errorUnionPayload();
if (value.getError()) |_| {
// write the error value
if (val.getError()) |_| {
// write the error val
try self.emitConstant(data, error_type);
// no payload, so write a '0' const
@ -1085,7 +1089,7 @@ pub const Context = struct {
}
/// Returns a `Value` as a signed 32 bit value.
/// It's illegale to provide a value with a type that cannot be represented
/// It's illegal to provide a value with a type that cannot be represented
/// as an integer value.
fn valueAsI32(self: Context, val: Value, ty: Type) i32 {
switch (ty.zigTypeTag()) {

View File

@ -224,7 +224,9 @@ pub fn flushModule(self: *Plan9, comp: *Compilation) !void {
const mod = self.base.options.module orelse return error.LinkingWithoutZigSourceUnimplemented;
assert(self.got_len == self.fn_decl_table.count() + self.data_decl_table.count());
// TODO I changed this assert from == to >= but this code all needs to be audited; see
// the comment in `freeDecl`.
assert(self.got_len >= self.fn_decl_table.count() + self.data_decl_table.count());
const got_size = self.got_len * if (!self.sixtyfour_bit) @as(u32, 4) else 8;
var got_table = try self.base.allocator.alloc(u8, got_size);
defer self.base.allocator.free(got_table);
@ -358,11 +360,18 @@ fn addDeclExports(
}
pub fn freeDecl(self: *Plan9, decl: *Module.Decl) void {
// TODO this is not the correct check for being function body,
// it could just be a function pointer.
// TODO audit the lifetimes of decls table entries. It's possible to get
// allocateDeclIndexes and then freeDecl without any updateDecl in between.
// However that is planned to change, see the TODO comment in Module.zig
// in the deleteUnusedDecl function.
const is_fn = (decl.ty.zigTypeTag() == .Fn);
if (is_fn)
assert(self.fn_decl_table.swapRemove(decl))
else
assert(self.data_decl_table.swapRemove(decl));
if (is_fn) {
_ = self.fn_decl_table.swapRemove(decl);
} else {
_ = self.data_decl_table.swapRemove(decl);
}
}
pub fn updateDeclExports(

View File

@ -275,7 +275,7 @@ pub fn updateDecl(self: *Wasm, module: *Module, decl: *Module.Decl) !void {
defer context.deinit();
// generate the 'code' section for the function declaration
const result = context.gen(.{ .ty = decl.ty, .val = decl.val }) catch |err| switch (err) {
const result = context.gen(decl.ty, decl.val) catch |err| switch (err) {
error.CodegenFail => {
decl.analysis = .codegen_failure;
try module.failed_decls.put(module.gpa, decl, context.err_msg);

View File

@ -15,12 +15,11 @@ pub fn dump(gpa: *Allocator, air: Air, zir: Zir, liveness: Liveness) void {
(@sizeOf(Air.Inst.Tag) + 8);
const extra_bytes = air.extra.len * @sizeOf(u32);
const values_bytes = air.values.len * @sizeOf(Value);
const variables_bytes = air.variables.len * @sizeOf(*Module.Var);
const tomb_bytes = liveness.tomb_bits.len * @sizeOf(usize);
const liveness_extra_bytes = liveness.extra.len * @sizeOf(u32);
const liveness_special_bytes = liveness.special.count() * 8;
const total_bytes = @sizeOf(Air) + instruction_bytes + extra_bytes +
values_bytes * variables_bytes + @sizeOf(Liveness) + liveness_extra_bytes +
values_bytes + @sizeOf(Liveness) + liveness_extra_bytes +
liveness_special_bytes + tomb_bytes;
// zig fmt: off
@ -29,7 +28,6 @@ pub fn dump(gpa: *Allocator, air: Air, zir: Zir, liveness: Liveness) void {
\\# AIR Instructions: {d} ({})
\\# AIR Extra Data: {d} ({})
\\# AIR Values Bytes: {d} ({})
\\# AIR Variables Bytes: {d} ({})
\\# Liveness tomb_bits: {}
\\# Liveness Extra Data: {d} ({})
\\# Liveness special table: {d} ({})
@ -39,7 +37,6 @@ pub fn dump(gpa: *Allocator, air: Air, zir: Zir, liveness: Liveness) void {
air.instructions.len, fmtIntSizeBin(instruction_bytes),
air.extra.len, fmtIntSizeBin(extra_bytes),
air.values.len, fmtIntSizeBin(values_bytes),
air.variables.len, fmtIntSizeBin(variables_bytes),
fmtIntSizeBin(tomb_bytes),
liveness.extra.len, fmtIntSizeBin(liveness_extra_bytes),
liveness.special.count(), fmtIntSizeBin(liveness_special_bytes),
@ -152,7 +149,6 @@ const Writer = struct {
.not,
.bitcast,
.load,
.ref,
.floatcast,
.intcast,
.optional_payload,
@ -174,7 +170,6 @@ const Writer = struct {
.struct_field_ptr => try w.writeStructField(s, inst),
.struct_field_val => try w.writeStructField(s, inst),
.varptr => try w.writeVarPtr(s, inst),
.constant => try w.writeConstant(s, inst),
.assembly => try w.writeAssembly(s, inst),
.dbg_stmt => try w.writeDbgStmt(s, inst),
@ -243,12 +238,6 @@ const Writer = struct {
try s.print(", {d}", .{extra.data.field_index});
}
fn writeVarPtr(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
_ = w;
_ = inst;
try s.writeAll("TODO");
}
fn writeConstant(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
const ty_pl = w.air.instructions.items(.data)[inst].ty_pl;
const val = w.air.values[ty_pl.payload];

View File

@ -100,11 +100,10 @@ pub const Value = extern union {
function,
extern_fn,
variable,
/// Represents a pointer to another immutable value.
ref_val,
/// Represents a comptime variable's storage.
comptime_alloc,
/// Represents a pointer to a decl, not the value of the decl.
/// When a machine code backend sees this, it must set the Decl's `alive` field to true.
decl_ref,
elem_ptr,
field_ptr,
@ -126,6 +125,8 @@ pub const Value = extern union {
enum_field_index,
@"error",
error_union,
/// A pointer to the payload of an error union, based on a pointer to an error union.
eu_payload_ptr,
/// An instance of a struct.
@"struct",
/// An instance of a union.
@ -214,9 +215,9 @@ pub const Value = extern union {
.decl_ref,
=> Payload.Decl,
.ref_val,
.repeated,
.error_union,
.eu_payload_ptr,
=> Payload.SubValue,
.bytes,
@ -407,15 +408,6 @@ pub const Value = extern union {
.function => return self.copyPayloadShallow(allocator, Payload.Function),
.extern_fn => return self.copyPayloadShallow(allocator, Payload.Decl),
.variable => return self.copyPayloadShallow(allocator, Payload.Variable),
.ref_val => {
const payload = self.castTag(.ref_val).?;
const new_payload = try allocator.create(Payload.SubValue);
new_payload.* = .{
.base = payload.base,
.data = try payload.data.copy(allocator),
};
return Value{ .ptr_otherwise = &new_payload.base };
},
.comptime_alloc => return self.copyPayloadShallow(allocator, Payload.ComptimeAlloc),
.decl_ref => return self.copyPayloadShallow(allocator, Payload.Decl),
.elem_ptr => {
@ -443,8 +435,8 @@ pub const Value = extern union {
return Value{ .ptr_otherwise = &new_payload.base };
},
.bytes => return self.copyPayloadShallow(allocator, Payload.Bytes),
.repeated => {
const payload = self.castTag(.repeated).?;
.repeated, .error_union, .eu_payload_ptr => {
const payload = self.cast(Payload.SubValue).?;
const new_payload = try allocator.create(Payload.SubValue);
new_payload.* = .{
.base = payload.base,
@ -489,15 +481,6 @@ pub const Value = extern union {
},
.enum_field_index => return self.copyPayloadShallow(allocator, Payload.U32),
.@"error" => return self.copyPayloadShallow(allocator, Payload.Error),
.error_union => {
const payload = self.castTag(.error_union).?;
const new_payload = try allocator.create(Payload.SubValue);
new_payload.* = .{
.base = payload.base,
.data = try payload.data.copy(allocator),
};
return Value{ .ptr_otherwise = &new_payload.base };
},
.@"struct" => @panic("TODO can't copy struct value without knowing the type"),
.@"union" => @panic("TODO can't copy union value without knowing the type"),
@ -609,11 +592,6 @@ pub const Value = extern union {
.function => return out_stream.print("(function '{s}')", .{val.castTag(.function).?.data.owner_decl.name}),
.extern_fn => return out_stream.writeAll("(extern function)"),
.variable => return out_stream.writeAll("(variable)"),
.ref_val => {
const ref_val = val.castTag(.ref_val).?.data;
try out_stream.writeAll("&const ");
val = ref_val;
},
.comptime_alloc => {
const ref_val = val.castTag(.comptime_alloc).?.data.val;
try out_stream.writeAll("&");
@ -648,6 +626,10 @@ pub const Value = extern union {
// TODO to print this it should be error{ Set, Items }!T(val), but we need the type for that
.error_union => return out_stream.print("error_union_val({})", .{val.castTag(.error_union).?.data}),
.inferred_alloc => return out_stream.writeAll("(inferred allocation value)"),
.eu_payload_ptr => {
try out_stream.writeAll("(eu_payload_ptr)");
val = val.castTag(.eu_payload_ptr).?.data;
},
};
}
@ -758,7 +740,6 @@ pub const Value = extern union {
.function,
.extern_fn,
.variable,
.ref_val,
.comptime_alloc,
.decl_ref,
.elem_ptr,
@ -780,18 +761,21 @@ pub const Value = extern union {
.@"union",
.inferred_alloc,
.abi_align_default,
.eu_payload_ptr,
=> unreachable,
};
}
/// Asserts the type is an enum type.
pub fn toEnum(val: Value, enum_ty: Type, comptime E: type) E {
_ = enum_ty;
// TODO this needs to resolve other kinds of Value tags rather than
// assuming the tag will be .enum_field_index.
const field_index = val.castTag(.enum_field_index).?.data;
// TODO should `@intToEnum` do this `@intCast` for you?
return @intToEnum(E, @intCast(@typeInfo(E).Enum.tag_type, field_index));
pub fn toEnum(val: Value, comptime E: type) E {
switch (val.tag()) {
.enum_field_index => {
const field_index = val.castTag(.enum_field_index).?.data;
// TODO should `@intToEnum` do this `@intCast` for you?
return @intToEnum(E, @intCast(@typeInfo(E).Enum.tag_type, field_index));
},
else => unreachable,
}
}
/// Asserts the value is an integer.
@ -1255,6 +1239,9 @@ pub const Value = extern union {
.slice => {
@panic("TODO Value.hash for slice");
},
.eu_payload_ptr => {
@panic("TODO Value.hash for eu_payload_ptr");
},
.int_u64 => {
const payload = self.castTag(.int_u64).?;
std.hash.autoHash(&hasher, payload.data);
@ -1263,10 +1250,6 @@ pub const Value = extern union {
const payload = self.castTag(.int_i64).?;
std.hash.autoHash(&hasher, payload.data);
},
.ref_val => {
const payload = self.castTag(.ref_val).?;
std.hash.autoHash(&hasher, payload.data.hash());
},
.comptime_alloc => {
const payload = self.castTag(.comptime_alloc).?;
std.hash.autoHash(&hasher, payload.data.val.hash());
@ -1364,24 +1347,48 @@ pub const Value = extern union {
/// Asserts the value is a pointer and dereferences it.
/// Returns null if the dereference cannot be performed at compile time and must
/// instead happen at runtime.
/// Returns error.AnalysisFail if the pointer points to a Decl that failed semantic analysis.
pub fn pointerDeref(self: Value, allocator: *Allocator) error{ AnalysisFail, OutOfMemory }!Value {
return switch (self.tag()) {
pub fn pointerDeref(
self: Value,
allocator: *Allocator,
) error{ AnalysisFail, OutOfMemory }!?Value {
const sub_val: Value = switch (self.tag()) {
.comptime_alloc => self.castTag(.comptime_alloc).?.data.val,
.ref_val => self.castTag(.ref_val).?.data,
.decl_ref => self.castTag(.decl_ref).?.data.value(),
.elem_ptr => {
.decl_ref => try self.castTag(.decl_ref).?.data.value(),
.elem_ptr => blk: {
const elem_ptr = self.castTag(.elem_ptr).?.data;
const array_val = try elem_ptr.array_ptr.pointerDeref(allocator);
return array_val.elemValue(allocator, elem_ptr.index);
const array_val = (try elem_ptr.array_ptr.pointerDeref(allocator)) orelse return null;
break :blk try array_val.elemValue(allocator, elem_ptr.index);
},
.field_ptr => {
.field_ptr => blk: {
const field_ptr = self.castTag(.field_ptr).?.data;
const container_val = try field_ptr.container_ptr.pointerDeref(allocator);
return container_val.fieldValue(allocator, field_ptr.field_index);
const container_val = (try field_ptr.container_ptr.pointerDeref(allocator)) orelse return null;
break :blk try container_val.fieldValue(allocator, field_ptr.field_index);
},
.eu_payload_ptr => blk: {
const err_union_ptr = self.castTag(.eu_payload_ptr).?.data;
const err_union_val = (try err_union_ptr.pointerDeref(allocator)) orelse return null;
break :blk err_union_val.castTag(.error_union).?.data;
},
.zero,
.one,
.int_u64,
.int_i64,
.int_big_positive,
.int_big_negative,
.variable,
.extern_fn,
.function,
=> return null,
else => unreachable,
};
if (sub_val.tag() == .variable) {
// This would be loading a runtime value at compile time, so return null to
// indicate that this pointer dereference must be done at runtime.
return null;
}
return sub_val;
}
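A `null` result tells callers such as `analyzeLoad` above to fall back to a runtime load rather than fabricating a comptime value. In user-level terms (illustrative only; the names are hypothetical):

const answer: u32 = 42;
var counter: u32 = 0;

export fn entry() u32 {
    const a = (&answer).*; // decl_ref: dereferenced at compile time, yields 42
    const c = (&counter).*; // pointee is a `variable`, so the load happens at runtime
    return a + c;
}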
pub fn sliceLen(val: Value) u64 {
@ -1390,7 +1397,6 @@ pub const Value = extern union {
.bytes => val.castTag(.bytes).?.data.len,
.array => val.castTag(.array).?.data.len,
.slice => val.castTag(.slice).?.data.len.toUnsignedInt(),
.ref_val => sliceLen(val.castTag(.ref_val).?.data),
.decl_ref => {
const decl = val.castTag(.decl_ref).?.data;
if (decl.ty.zigTypeTag() == .Array) {
@ -1576,7 +1582,6 @@ pub const Value = extern union {
.int_i64,
.int_big_positive,
.int_big_negative,
.ref_val,
.comptime_alloc,
.decl_ref,
.elem_ptr,
@ -1599,6 +1604,7 @@ pub const Value = extern union {
.@"union",
.null_value,
.abi_align_default,
.eu_payload_ptr,
=> false,
.undef => unreachable,

View File

@ -1182,10 +1182,11 @@ pub fn addCases(ctx: *TestContext) !void {
var case = ctx.obj("extern variable has no type", linux_x64);
case.addError(
\\comptime {
\\ _ = foo;
\\ const x = foo + foo;
\\ _ = x;
\\}
\\extern var foo: i32;
, &[_][]const u8{":2:9: error: unable to resolve comptime value"});
, &[_][]const u8{":2:15: error: unable to resolve comptime value"});
case.addError(
\\export fn entry() void {
\\ _ = foo;

View File

@ -49,7 +49,7 @@ pub fn addCases(ctx: *TestContext) !void {
\\export fn foo() callconv(y) c_int {
\\ return 0;
\\}
\\var y: i32 = 1234;
\\var y: @import("std").builtin.CallingConvention = .C;
, &.{
":2:22: error: unable to resolve comptime value",
":5:26: error: unable to resolve comptime value",