Merge pull request #14781 from ziglang/codegen-cleanup

codegen: move common logic for generating typed values from each native backend into codegen.zig
Authored by Jakub Konka on 2023-03-04 03:38:12 +01:00; committed by GitHub
commit 18e6d1e819
14 changed files with 414 additions and 902 deletions
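
Every backend changes in the same way: its per-arch `genTypedValue` shrinks to a thin wrapper that calls the new shared `codegen.genTypedValue` and maps the returned `GenResult` back into the backend's own `MCValue`, recording failures and surfacing them as `error.CodegenFail`. The sketch below condenses that pattern from the hunks that follow; `Self`, `MCValue`, `InnerError`, and the `self.*` fields stand for the backend-local declarations shown in the diff (some backends narrow the mapping further, e.g. 32-bit targets truncate immediates), so this is illustrative rather than a standalone program.

```zig
// Condensed sketch of the shared call pattern introduced by this commit.
// `Self`, `MCValue`, `InnerError`, `bin_file`, `src_loc`, `mod_fn`, and
// `err_msg` are the per-backend declarations shown in the hunks below.
const codegen = @import("../../codegen.zig");

fn genTypedValue(self: *Self, arg_tv: TypedValue) InnerError!MCValue {
    // Delegate the target-independent lowering to codegen.zig and only
    // translate the generic result into this backend's MCValue union.
    const mcv: MCValue = switch (try codegen.genTypedValue(
        self.bin_file,
        self.src_loc,
        arg_tv,
        self.mod_fn.owner_decl,
    )) {
        .mcv => |mcv| switch (mcv) {
            .none => .none,
            .undef => .undef,
            .linker_load => |ll| .{ .linker_load = ll },
            .immediate => |imm| .{ .immediate = imm },
            .memory => |addr| .{ .memory = addr },
        },
        .fail => |msg| {
            // The shared code reports failures as an ErrorMsg; the backend
            // records it and returns the new CodeGenError.CodegenFail.
            self.err_msg = msg;
            return error.CodegenFail;
        },
    };
    return mcv;
}
```

The shared implementation itself, together with the new `GenResult` type and the relocated `LinkerLoad` helper, lands in `codegen.zig` further down in this diff.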


@@ -23,7 +23,7 @@ const leb128 = std.leb;
const log = std.log.scoped(.codegen);
const build_options = @import("build_options");
const GenerateSymbolError = codegen.GenerateSymbolError;
const CodeGenError = codegen.CodeGenError;
const Result = codegen.Result;
const DebugInfoOutput = codegen.DebugInfoOutput;
@@ -41,11 +41,7 @@ const c_abi_int_param_regs = abi.c_abi_int_param_regs;
const c_abi_int_return_regs = abi.c_abi_int_return_regs;
const gp = abi.RegisterClass.gp;
const InnerError = error{
OutOfMemory,
CodegenFail,
OutOfRegisters,
};
const InnerError = CodeGenError || error{OutOfRegisters};
gpa: Allocator,
air: Air,
@@ -337,7 +333,7 @@ pub fn generate(
liveness: Liveness,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
) GenerateSymbolError!Result {
) CodeGenError!Result {
if (build_options.skip_non_native and builtin.cpu.arch != bin_file.options.target.cpu.arch) {
@panic("Attempted to compile for architecture that was disabled by build configuration");
}
@@ -6137,201 +6133,26 @@ fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) MCValue {
}
}
fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) InnerError!MCValue {
const ptr_bits = self.target.cpu.arch.ptrBitWidth();
const ptr_bytes: u64 = @divExact(ptr_bits, 8);
// TODO this feels clunky. Perhaps we should check for it in `genTypedValue`?
if (tv.ty.zigTypeTag() == .Pointer) blk: {
if (tv.ty.castPtrToFn()) |_| break :blk;
if (!tv.ty.elemType2().hasRuntimeBits()) {
return MCValue.none;
}
}
const mod = self.bin_file.options.module.?;
const decl = mod.declPtr(decl_index);
mod.markDeclAlive(decl);
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const atom_index = try elf_file.getOrCreateAtomForDecl(decl_index);
const atom = elf_file.getAtom(atom_index);
return MCValue{ .memory = atom.getOffsetTableAddress(elf_file) };
} else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
const atom = try macho_file.getOrCreateAtomForDecl(decl_index);
const sym_index = macho_file.getAtom(atom).getSymbolIndex().?;
return MCValue{ .linker_load = .{
.type = .got,
.sym_index = sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
const atom_index = try coff_file.getOrCreateAtomForDecl(decl_index);
const sym_index = coff_file.getAtom(atom_index).getSymbolIndex().?;
return MCValue{ .linker_load = .{
.type = .got,
.sym_index = sym_index,
} };
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
const decl_block_index = try p9.seeDecl(decl_index);
const decl_block = p9.getDeclBlock(decl_block_index);
const got_addr = p9.bases.data + decl_block.got_index.? * ptr_bytes;
return MCValue{ .memory = got_addr };
} else {
return self.fail("TODO codegen non-ELF const Decl pointer", .{});
}
}
fn lowerUnnamedConst(self: *Self, tv: TypedValue) InnerError!MCValue {
log.debug("lowerUnnamedConst: ty = {}, val = {}", .{ tv.ty.fmtDebug(), tv.val.fmtDebug() });
const local_sym_index = self.bin_file.lowerUnnamedConst(tv, self.mod_fn.owner_decl) catch |err| {
return self.fail("lowering unnamed constant failed: {s}", .{@errorName(err)});
};
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
return MCValue{ .memory = elf_file.getSymbol(local_sym_index).st_value };
} else if (self.bin_file.cast(link.File.MachO)) |_| {
return MCValue{ .linker_load = .{
.type = .direct,
.sym_index = local_sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return MCValue{ .linker_load = .{
.type = .direct,
.sym_index = local_sym_index,
} };
} else if (self.bin_file.cast(link.File.Plan9)) |_| {
return self.fail("TODO lower unnamed const in Plan9", .{});
} else {
return self.fail("TODO lower unnamed const", .{});
}
}
fn genTypedValue(self: *Self, arg_tv: TypedValue) InnerError!MCValue {
var typed_value = arg_tv;
if (typed_value.val.castTag(.runtime_value)) |rt| {
typed_value.val = rt.data;
}
log.debug("genTypedValue: ty = {}, val = {}", .{ typed_value.ty.fmtDebug(), typed_value.val.fmtDebug() });
if (typed_value.val.isUndef())
return MCValue{ .undef = {} };
if (typed_value.val.castTag(.decl_ref)) |payload| {
return self.lowerDeclRef(typed_value, payload.data);
}
if (typed_value.val.castTag(.decl_ref_mut)) |payload| {
return self.lowerDeclRef(typed_value, payload.data.decl_index);
}
const target = self.target.*;
switch (typed_value.ty.zigTypeTag()) {
.Pointer => switch (typed_value.ty.ptrSize()) {
.Slice => {},
else => {
switch (typed_value.val.tag()) {
.int_u64 => {
return MCValue{ .immediate = typed_value.val.toUnsignedInt(target) };
},
else => {},
}
},
const mcv: MCValue = switch (try codegen.genTypedValue(
self.bin_file,
self.src_loc,
arg_tv,
self.mod_fn.owner_decl,
)) {
.mcv => |mcv| switch (mcv) {
.none => .none,
.undef => .undef,
.linker_load => |ll| .{ .linker_load = ll },
.immediate => |imm| .{ .immediate = imm },
.memory => |addr| .{ .memory = addr },
},
.Int => {
const info = typed_value.ty.intInfo(self.target.*);
if (info.bits <= 64) {
const unsigned = switch (info.signedness) {
.signed => blk: {
const signed = typed_value.val.toSignedInt(target);
break :blk @bitCast(u64, signed);
},
.unsigned => typed_value.val.toUnsignedInt(target),
};
return MCValue{ .immediate = unsigned };
}
.fail => |msg| {
self.err_msg = msg;
return error.CodegenFail;
},
.Bool => {
return MCValue{ .immediate = @boolToInt(typed_value.val.toBool()) };
},
.Optional => {
if (typed_value.ty.isPtrLikeOptional()) {
if (typed_value.val.isNull())
return MCValue{ .immediate = 0 };
var buf: Type.Payload.ElemType = undefined;
return self.genTypedValue(.{
.ty = typed_value.ty.optionalChild(&buf),
.val = typed_value.val,
});
} else if (typed_value.ty.abiSize(self.target.*) == 1) {
return MCValue{ .immediate = @boolToInt(typed_value.val.isNull()) };
}
},
.Enum => {
if (typed_value.val.castTag(.enum_field_index)) |field_index| {
switch (typed_value.ty.tag()) {
.enum_simple => {
return MCValue{ .immediate = field_index.data };
},
.enum_full, .enum_nonexhaustive => {
const enum_full = typed_value.ty.cast(Type.Payload.EnumFull).?.data;
if (enum_full.values.count() != 0) {
const tag_val = enum_full.values.keys()[field_index.data];
return self.genTypedValue(.{ .ty = enum_full.tag_ty, .val = tag_val });
} else {
return MCValue{ .immediate = field_index.data };
}
},
else => unreachable,
}
} else {
var int_tag_buffer: Type.Payload.Bits = undefined;
const int_tag_ty = typed_value.ty.intTagType(&int_tag_buffer);
return self.genTypedValue(.{ .ty = int_tag_ty, .val = typed_value.val });
}
},
.ErrorSet => {
switch (typed_value.val.tag()) {
.@"error" => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
},
else => {
// In this case we are rendering an error union which has a 0 bits payload.
return MCValue{ .immediate = 0 };
},
}
},
.ErrorUnion => {
const error_type = typed_value.ty.errorUnionSet();
const payload_type = typed_value.ty.errorUnionPayload();
const is_pl = typed_value.val.errorUnionIsPayload();
if (!payload_type.hasRuntimeBitsIgnoreComptime()) {
// We use the error type directly as the type.
const err_val = if (!is_pl) typed_value.val else Value.initTag(.zero);
return self.genTypedValue(.{ .ty = error_type, .val = err_val });
}
return self.lowerUnnamedConst(typed_value);
},
.ComptimeInt => unreachable, // semantic analysis prevents this
.ComptimeFloat => unreachable, // semantic analysis prevents this
.Type => unreachable,
.EnumLiteral => unreachable,
.Void => unreachable,
.NoReturn => unreachable,
.Undefined => unreachable,
.Null => unreachable,
.Opaque => unreachable,
else => {},
}
return self.lowerUnnamedConst(typed_value);
};
return mcv;
}
const CallMCValues = struct {


@@ -24,7 +24,7 @@ const log = std.log.scoped(.codegen);
const build_options = @import("build_options");
const Result = codegen.Result;
const GenerateSymbolError = codegen.GenerateSymbolError;
const CodeGenError = codegen.CodeGenError;
const DebugInfoOutput = codegen.DebugInfoOutput;
const bits = @import("bits.zig");
@@ -42,11 +42,7 @@ const c_abi_int_param_regs = abi.c_abi_int_param_regs;
const c_abi_int_return_regs = abi.c_abi_int_return_regs;
const gp = abi.RegisterClass.gp;
const InnerError = error{
OutOfMemory,
CodegenFail,
OutOfRegisters,
};
const InnerError = CodeGenError || error{OutOfRegisters};
gpa: Allocator,
air: Air,
@@ -343,7 +339,7 @@ pub fn generate(
liveness: Liveness,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
) GenerateSymbolError!Result {
) CodeGenError!Result {
if (build_options.skip_non_native and builtin.cpu.arch != bin_file.options.target.cpu.arch) {
@panic("Attempted to compile for architecture that was disabled by build configuration");
}
@@ -6087,178 +6083,26 @@ fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) MCValue {
}
}
fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) InnerError!MCValue {
const ptr_bits = self.target.cpu.arch.ptrBitWidth();
const ptr_bytes: u64 = @divExact(ptr_bits, 8);
const mod = self.bin_file.options.module.?;
const decl = mod.declPtr(decl_index);
mod.markDeclAlive(decl);
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const atom_index = try elf_file.getOrCreateAtomForDecl(decl_index);
const atom = elf_file.getAtom(atom_index);
return MCValue{ .memory = atom.getOffsetTableAddress(elf_file) };
} else if (self.bin_file.cast(link.File.MachO)) |_| {
unreachable; // unsupported architecture for MachO
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return self.fail("TODO codegen COFF const Decl pointer", .{});
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
const decl_block_index = try p9.seeDecl(decl_index);
const decl_block = p9.getDeclBlock(decl_block_index);
const got_addr = p9.bases.data + decl_block.got_index.? * ptr_bytes;
return MCValue{ .memory = got_addr };
} else {
return self.fail("TODO codegen non-ELF const Decl pointer", .{});
}
_ = tv;
}
fn lowerUnnamedConst(self: *Self, tv: TypedValue) InnerError!MCValue {
const local_sym_index = self.bin_file.lowerUnnamedConst(tv, self.mod_fn.owner_decl) catch |err| {
return self.fail("lowering unnamed constant failed: {s}", .{@errorName(err)});
};
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
return MCValue{ .memory = elf_file.getSymbol(local_sym_index).st_value };
} else if (self.bin_file.cast(link.File.MachO)) |_| {
unreachable;
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return self.fail("TODO lower unnamed const in COFF", .{});
} else if (self.bin_file.cast(link.File.Plan9)) |_| {
return self.fail("TODO lower unnamed const in Plan9", .{});
} else {
return self.fail("TODO lower unnamed const", .{});
}
}
fn genTypedValue(self: *Self, arg_tv: TypedValue) InnerError!MCValue {
var typed_value = arg_tv;
if (typed_value.val.castTag(.runtime_value)) |rt| {
typed_value.val = rt.data;
}
log.debug("genTypedValue: ty = {}, val = {}", .{ typed_value.ty.fmtDebug(), typed_value.val.fmtDebug() });
if (typed_value.val.isUndef())
return MCValue{ .undef = {} };
const ptr_bits = self.target.cpu.arch.ptrBitWidth();
if (typed_value.val.castTag(.decl_ref)) |payload| {
return self.lowerDeclRef(typed_value, payload.data);
}
if (typed_value.val.castTag(.decl_ref_mut)) |payload| {
return self.lowerDeclRef(typed_value, payload.data.decl_index);
}
const target = self.target.*;
switch (typed_value.ty.zigTypeTag()) {
.Pointer => switch (typed_value.ty.ptrSize()) {
.Slice => {},
else => {
switch (typed_value.val.tag()) {
.int_u64 => {
return MCValue{ .immediate = @intCast(u32, typed_value.val.toUnsignedInt(target)) };
},
else => {},
}
},
const mcv: MCValue = switch (try codegen.genTypedValue(
self.bin_file,
self.src_loc,
arg_tv,
self.mod_fn.owner_decl,
)) {
.mcv => |mcv| switch (mcv) {
.none => .none,
.undef => .undef,
.linker_load => unreachable, // TODO
.immediate => |imm| .{ .immediate = @truncate(u32, imm) },
.memory => |addr| .{ .memory = addr },
},
.Int => {
const info = typed_value.ty.intInfo(self.target.*);
if (info.bits <= ptr_bits) {
const unsigned = switch (info.signedness) {
.signed => blk: {
const signed = @intCast(i32, typed_value.val.toSignedInt(target));
break :blk @bitCast(u32, signed);
},
.unsigned => @intCast(u32, typed_value.val.toUnsignedInt(target)),
};
return MCValue{ .immediate = unsigned };
} else {
return self.lowerUnnamedConst(typed_value);
}
.fail => |msg| {
self.err_msg = msg;
return error.CodegenFail;
},
.Bool => {
return MCValue{ .immediate = @boolToInt(typed_value.val.toBool()) };
},
.Optional => {
if (typed_value.ty.isPtrLikeOptional()) {
if (typed_value.val.isNull())
return MCValue{ .immediate = 0 };
var buf: Type.Payload.ElemType = undefined;
return self.genTypedValue(.{
.ty = typed_value.ty.optionalChild(&buf),
.val = typed_value.val,
});
} else if (typed_value.ty.abiSize(self.target.*) == 1) {
return MCValue{ .immediate = @boolToInt(typed_value.val.isNull()) };
}
},
.Enum => {
if (typed_value.val.castTag(.enum_field_index)) |field_index| {
switch (typed_value.ty.tag()) {
.enum_simple => {
return MCValue{ .immediate = field_index.data };
},
.enum_full, .enum_nonexhaustive => {
const enum_full = typed_value.ty.cast(Type.Payload.EnumFull).?.data;
if (enum_full.values.count() != 0) {
const tag_val = enum_full.values.keys()[field_index.data];
return self.genTypedValue(.{ .ty = enum_full.tag_ty, .val = tag_val });
} else {
return MCValue{ .immediate = field_index.data };
}
},
else => unreachable,
}
} else {
var int_tag_buffer: Type.Payload.Bits = undefined;
const int_tag_ty = typed_value.ty.intTagType(&int_tag_buffer);
return self.genTypedValue(.{ .ty = int_tag_ty, .val = typed_value.val });
}
},
.ErrorSet => {
switch (typed_value.val.tag()) {
.@"error" => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
},
else => {
// In this case we are rendering an error union which has a 0 bits payload.
return MCValue{ .immediate = 0 };
},
}
},
.ErrorUnion => {
const error_type = typed_value.ty.errorUnionSet();
const payload_type = typed_value.ty.errorUnionPayload();
const is_pl = typed_value.val.errorUnionIsPayload();
if (!payload_type.hasRuntimeBitsIgnoreComptime()) {
// We use the error type directly as the type.
const err_val = if (!is_pl) typed_value.val else Value.initTag(.zero);
return self.genTypedValue(.{ .ty = error_type, .val = err_val });
}
},
.ComptimeInt => unreachable, // semantic analysis prevents this
.ComptimeFloat => unreachable, // semantic analysis prevents this
.Type => unreachable,
.EnumLiteral => unreachable,
.Void => unreachable,
.NoReturn => unreachable,
.Undefined => unreachable,
.Null => unreachable,
.Opaque => unreachable,
else => {},
}
return self.lowerUnnamedConst(typed_value);
};
return mcv;
}
const CallMCValues = struct {


@@ -21,10 +21,11 @@ const DW = std.dwarf;
const leb128 = std.leb;
const log = std.log.scoped(.codegen);
const build_options = @import("build_options");
const codegen = @import("../../codegen.zig");
const Result = @import("../../codegen.zig").Result;
const GenerateSymbolError = @import("../../codegen.zig").GenerateSymbolError;
const DebugInfoOutput = @import("../../codegen.zig").DebugInfoOutput;
const CodeGenError = codegen.CodeGenError;
const Result = codegen.Result;
const DebugInfoOutput = codegen.DebugInfoOutput;
const bits = @import("bits.zig");
const abi = @import("abi.zig");
@@ -35,11 +36,7 @@ const Instruction = abi.Instruction;
const callee_preserved_regs = abi.callee_preserved_regs;
const gp = abi.RegisterClass.gp;
const InnerError = error{
OutOfMemory,
CodegenFail,
OutOfRegisters,
};
const InnerError = CodeGenError || error{OutOfRegisters};
gpa: Allocator,
air: Air,
@@ -225,7 +222,7 @@ pub fn generate(
liveness: Liveness,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
) GenerateSymbolError!Result {
) CodeGenError!Result {
if (build_options.skip_non_native and builtin.cpu.arch != bin_file.options.target.cpu.arch) {
@panic("Attempted to compile for architecture that was disabled by build configuration");
}
@@ -2552,145 +2549,26 @@ fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) MCValue {
}
}
fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) InnerError!MCValue {
const ptr_bits = self.target.cpu.arch.ptrBitWidth();
const ptr_bytes: u64 = @divExact(ptr_bits, 8);
const mod = self.bin_file.options.module.?;
const decl = mod.declPtr(decl_index);
mod.markDeclAlive(decl);
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const atom_index = try elf_file.getOrCreateAtomForDecl(decl_index);
const atom = elf_file.getAtom(atom_index);
return MCValue{ .memory = atom.getOffsetTableAddress(elf_file) };
} else if (self.bin_file.cast(link.File.MachO)) |_| {
unreachable;
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return self.fail("TODO codegen COFF const Decl pointer", .{});
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
const decl_block_index = try p9.seeDecl(decl_index);
const decl_block = p9.getDeclBlock(decl_block_index);
const got_addr = p9.bases.data + decl_block.got_index.? * ptr_bytes;
return MCValue{ .memory = got_addr };
} else {
return self.fail("TODO codegen non-ELF const Decl pointer", .{});
}
_ = tv;
}
fn genTypedValue(self: *Self, typed_value: TypedValue) InnerError!MCValue {
if (typed_value.val.isUndef())
return MCValue{ .undef = {} };
if (typed_value.val.castTag(.decl_ref)) |payload| {
return self.lowerDeclRef(typed_value, payload.data);
}
if (typed_value.val.castTag(.decl_ref_mut)) |payload| {
return self.lowerDeclRef(typed_value, payload.data.decl_index);
}
const target = self.target.*;
const ptr_bits = self.target.cpu.arch.ptrBitWidth();
switch (typed_value.ty.zigTypeTag()) {
.Pointer => switch (typed_value.ty.ptrSize()) {
.Slice => {
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
const ptr_type = typed_value.ty.slicePtrFieldType(&buf);
const ptr_mcv = try self.genTypedValue(.{ .ty = ptr_type, .val = typed_value.val });
const mod = self.bin_file.options.module.?;
const slice_len = typed_value.val.sliceLen(mod);
// Codegen can't handle some kinds of indirection. If the wrong union field is accessed here it may mean
// the Sema code needs to use anonymous Decls or alloca instructions to store data.
const ptr_imm = ptr_mcv.memory;
_ = slice_len;
_ = ptr_imm;
// We need more general support for const data being stored in memory to make this work.
return self.fail("TODO codegen for const slices", .{});
},
else => {
if (typed_value.val.tag() == .int_u64) {
return MCValue{ .immediate = typed_value.val.toUnsignedInt(target) };
}
return self.fail("TODO codegen more kinds of const pointers", .{});
},
const mcv: MCValue = switch (try codegen.genTypedValue(
self.bin_file,
self.src_loc,
typed_value,
self.mod_fn.owner_decl,
)) {
.mcv => |mcv| switch (mcv) {
.none => .none,
.undef => .undef,
.linker_load => unreachable, // TODO
.immediate => |imm| .{ .immediate = imm },
.memory => |addr| .{ .memory = addr },
},
.Int => {
const info = typed_value.ty.intInfo(self.target.*);
if (info.bits > ptr_bits or info.signedness == .signed) {
return self.fail("TODO const int bigger than ptr and signed int", .{});
}
return MCValue{ .immediate = typed_value.val.toUnsignedInt(target) };
.fail => |msg| {
self.err_msg = msg;
return error.CodegenFail;
},
.Bool => {
return MCValue{ .immediate = @boolToInt(typed_value.val.toBool()) };
},
.ComptimeInt => unreachable, // semantic analysis prevents this
.ComptimeFloat => unreachable, // semantic analysis prevents this
.Optional => {
if (typed_value.ty.isPtrLikeOptional()) {
if (typed_value.val.isNull())
return MCValue{ .immediate = 0 };
var buf: Type.Payload.ElemType = undefined;
return self.genTypedValue(.{
.ty = typed_value.ty.optionalChild(&buf),
.val = typed_value.val,
});
} else if (typed_value.ty.abiSize(self.target.*) == 1) {
return MCValue{ .immediate = @boolToInt(typed_value.val.isNull()) };
}
return self.fail("TODO non pointer optionals", .{});
},
.Enum => {
if (typed_value.val.castTag(.enum_field_index)) |field_index| {
switch (typed_value.ty.tag()) {
.enum_simple => {
return MCValue{ .immediate = field_index.data };
},
.enum_full, .enum_nonexhaustive => {
const enum_full = typed_value.ty.cast(Type.Payload.EnumFull).?.data;
if (enum_full.values.count() != 0) {
const tag_val = enum_full.values.keys()[field_index.data];
return self.genTypedValue(.{ .ty = enum_full.tag_ty, .val = tag_val });
} else {
return MCValue{ .immediate = field_index.data };
}
},
else => unreachable,
}
} else {
var int_tag_buffer: Type.Payload.Bits = undefined;
const int_tag_ty = typed_value.ty.intTagType(&int_tag_buffer);
return self.genTypedValue(.{ .ty = int_tag_ty, .val = typed_value.val });
}
},
.ErrorSet => {
switch (typed_value.val.tag()) {
.@"error" => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
},
else => {
// In this case we are rendering an error union which has a 0 bits payload.
return MCValue{ .immediate = 0 };
},
}
},
.ErrorUnion => {
const error_type = typed_value.ty.errorUnionSet();
const payload_type = typed_value.ty.errorUnionPayload();
const sub_val = typed_value.val.castTag(.eu_payload).?.data;
if (!payload_type.hasRuntimeBits()) {
// We use the error type directly as the type.
return self.genTypedValue(.{ .ty = error_type, .val = sub_val });
}
return self.fail("TODO implement error union const of type '{}'", .{typed_value.ty.fmtDebug()});
},
else => return self.fail("TODO implement const of type '{}'", .{typed_value.ty.fmtDebug()}),
}
};
return mcv;
}
const CallMCValues = struct {


@@ -19,7 +19,7 @@ const Mir = @import("Mir.zig");
const Emit = @import("Emit.zig");
const Liveness = @import("../../Liveness.zig");
const Type = @import("../../type.zig").Type;
const GenerateSymbolError = @import("../../codegen.zig").GenerateSymbolError;
const CodeGenError = codegen.CodeGenError;
const Result = @import("../../codegen.zig").Result;
const DebugInfoOutput = @import("../../codegen.zig").DebugInfoOutput;
@@ -38,11 +38,7 @@ const gp = abi.RegisterClass.gp;
const Self = @This();
const InnerError = error{
OutOfMemory,
CodegenFail,
OutOfRegisters,
};
const InnerError = CodeGenError || error{OutOfRegisters};
const RegisterView = enum(u1) {
caller,
@@ -265,7 +261,7 @@ pub fn generate(
liveness: Liveness,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
) GenerateSymbolError!Result {
) CodeGenError!Result {
if (build_options.skip_non_native and builtin.cpu.arch != bin_file.options.target.cpu.arch) {
@panic("Attempted to compile for architecture that was disabled by build configuration");
}
@@ -3898,133 +3894,25 @@ fn genStore(self: *Self, value_reg: Register, addr_reg: Register, comptime off_t
}
fn genTypedValue(self: *Self, typed_value: TypedValue) InnerError!MCValue {
var tv = typed_value;
log.debug("genTypedValue: ty = {}, val = {}", .{ tv.ty.fmtDebug(), tv.val.fmtDebug() });
if (tv.val.castTag(.runtime_value)) |rt| {
tv.val = rt.data;
}
if (tv.val.isUndef())
return MCValue{ .undef = {} };
if (tv.val.castTag(.decl_ref)) |payload| {
return self.lowerDeclRef(tv, payload.data);
}
if (tv.val.castTag(.decl_ref_mut)) |payload| {
return self.lowerDeclRef(tv, payload.data.decl_index);
}
const target = self.target.*;
switch (tv.ty.zigTypeTag()) {
.Pointer => switch (tv.ty.ptrSize()) {
.Slice => {},
else => {
switch (tv.val.tag()) {
.int_u64 => {
return MCValue{ .immediate = tv.val.toUnsignedInt(target) };
},
else => {},
}
},
const mcv: MCValue = switch (try codegen.genTypedValue(
self.bin_file,
self.src_loc,
typed_value,
self.mod_fn.owner_decl,
)) {
.mcv => |mcv| switch (mcv) {
.none => .none,
.undef => .undef,
.linker_load => unreachable, // TODO
.immediate => |imm| .{ .immediate = imm },
.memory => |addr| .{ .memory = addr },
},
.Bool => {
return MCValue{ .immediate = @boolToInt(tv.val.toBool()) };
.fail => |msg| {
self.err_msg = msg;
return error.CodegenFail;
},
.Int => {
const info = tv.ty.intInfo(self.target.*);
if (info.bits <= 64) {
const unsigned = switch (info.signedness) {
.signed => blk: {
const signed = tv.val.toSignedInt(target);
break :blk @bitCast(u64, signed);
},
.unsigned => tv.val.toUnsignedInt(target),
};
return MCValue{ .immediate = unsigned };
} else {
return self.fail("TODO implement int genTypedValue of > 64 bits", .{});
}
},
.Optional => {
if (tv.ty.isPtrLikeOptional()) {
if (tv.val.isNull())
return MCValue{ .immediate = 0 };
var buf: Type.Payload.ElemType = undefined;
return self.genTypedValue(.{
.ty = tv.ty.optionalChild(&buf),
.val = tv.val,
});
} else if (tv.ty.abiSize(self.target.*) == 1) {
return MCValue{ .immediate = @boolToInt(tv.val.isNull()) };
}
},
.Enum => {
if (tv.val.castTag(.enum_field_index)) |field_index| {
switch (tv.ty.tag()) {
.enum_simple => {
return MCValue{ .immediate = field_index.data };
},
.enum_full, .enum_nonexhaustive => {
const enum_full = tv.ty.cast(Type.Payload.EnumFull).?.data;
if (enum_full.values.count() != 0) {
const tag_val = enum_full.values.keys()[field_index.data];
return self.genTypedValue(.{ .ty = enum_full.tag_ty, .val = tag_val });
} else {
return MCValue{ .immediate = field_index.data };
}
},
else => unreachable,
}
} else {
var int_tag_buffer: Type.Payload.Bits = undefined;
const int_tag_ty = tv.ty.intTagType(&int_tag_buffer);
return self.genTypedValue(.{ .ty = int_tag_ty, .val = tv.val });
}
},
.ErrorSet => {
const err_name = tv.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
},
.ErrorUnion => {
const error_type = tv.ty.errorUnionSet();
const payload_type = tv.ty.errorUnionPayload();
if (tv.val.castTag(.eu_payload)) |pl| {
if (!payload_type.hasRuntimeBits()) {
// We use the error type directly as the type.
return MCValue{ .immediate = 0 };
}
_ = pl;
return self.fail("TODO implement error union const of type '{}' (non-error)", .{tv.ty.fmtDebug()});
} else {
if (!payload_type.hasRuntimeBits()) {
// We use the error type directly as the type.
return self.genTypedValue(.{ .ty = error_type, .val = tv.val });
}
return self.fail("TODO implement error union const of type '{}' (error)", .{tv.ty.fmtDebug()});
}
},
.ComptimeInt => unreachable, // semantic analysis prevents this
.ComptimeFloat => unreachable, // semantic analysis prevents this
.Type => unreachable,
.EnumLiteral => unreachable,
.Void => unreachable,
.NoReturn => unreachable,
.Undefined => unreachable,
.Null => unreachable,
.Opaque => unreachable,
else => {},
}
return self.fail("TODO implement const of type '{}'", .{tv.ty.fmtDebug()});
};
return mcv;
}
fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) MCValue {
@@ -4200,28 +4088,6 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
}
}
fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) InnerError!MCValue {
// TODO this feels clunky. Perhaps we should check for it in `genTypedValue`?
if (tv.ty.zigTypeTag() == .Pointer) blk: {
if (tv.ty.castPtrToFn()) |_| break :blk;
if (!tv.ty.elemType2().hasRuntimeBits()) {
return MCValue.none;
}
}
const mod = self.bin_file.options.module.?;
const decl = mod.declPtr(decl_index);
mod.markDeclAlive(decl);
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const atom_index = try elf_file.getOrCreateAtomForDecl(decl_index);
const atom = elf_file.getAtom(atom_index);
return MCValue{ .memory = atom.getOffsetTableAddress(elf_file) };
} else {
return self.fail("TODO codegen non-ELF const Decl pointer", .{});
}
}
fn minMax(
self: *Self,
tag: Air.Inst.Tag,


@@ -733,8 +733,6 @@ const InnerError = error{
OutOfMemory,
/// An error occurred when trying to lower AIR to MIR.
CodegenFail,
/// Can occur when dereferencing a pointer that points to a `Decl` of which the analysis has failed
AnalysisFail,
/// Compiler implementation could not handle a large integer.
Overflow,
};
@@ -1164,7 +1162,7 @@ pub fn generate(
liveness: Liveness,
code: *std.ArrayList(u8),
debug_output: codegen.DebugInfoOutput,
) codegen.GenerateSymbolError!codegen.Result {
) codegen.CodeGenError!codegen.Result {
_ = src_loc;
var code_gen: CodeGen = .{
.gpa = bin_file.allocator,


@@ -12,12 +12,12 @@ const trace = @import("../../tracy.zig").trace;
const Air = @import("../../Air.zig");
const Allocator = mem.Allocator;
const CodeGenError = codegen.CodeGenError;
const Compilation = @import("../../Compilation.zig");
const DebugInfoOutput = codegen.DebugInfoOutput;
const DW = std.dwarf;
const ErrorMsg = Module.ErrorMsg;
const Result = codegen.Result;
const GenerateSymbolError = codegen.GenerateSymbolError;
const Emit = @import("Emit.zig");
const Liveness = @import("../../Liveness.zig");
const Mir = @import("Mir.zig");
@@ -40,11 +40,7 @@ const Register = bits.Register;
const gp = abi.RegisterClass.gp;
const sse = abi.RegisterClass.sse;
const InnerError = error{
OutOfMemory,
CodegenFail,
OutOfRegisters,
};
const InnerError = CodeGenError || error{OutOfRegisters};
gpa: Allocator,
air: Air,
@@ -257,7 +253,7 @@ pub fn generate(
liveness: Liveness,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
) GenerateSymbolError!Result {
) CodeGenError!Result {
if (build_options.skip_non_native and builtin.cpu.arch != bin_file.options.target.cpu.arch) {
@panic("Attempted to compile for architecture that was disabled by build configuration");
}
@@ -6683,7 +6679,7 @@ fn airMulAdd(self: *Self, inst: Air.Inst.Index) !void {
return self.finishAir(inst, result, .{ extra.lhs, extra.rhs, pl_op.operand });
}
pub fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
// First section of indexes correspond to a set number of constant values.
const ref_int = @enumToInt(inst);
if (ref_int < Air.Inst.Ref.typed_value_map.len) {
@@ -6752,200 +6748,26 @@ fn limitImmediateType(self: *Self, operand: Air.Inst.Ref, comptime T: type) !MCV
return mcv;
}
fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) InnerError!MCValue {
log.debug("lowerDeclRef: ty = {}, val = {}", .{ tv.ty.fmtDebug(), tv.val.fmtDebug() });
const ptr_bits = self.target.cpu.arch.ptrBitWidth();
const ptr_bytes: u64 = @divExact(ptr_bits, 8);
// TODO this feels clunky. Perhaps we should check for it in `genTypedValue`?
if (tv.ty.zigTypeTag() == .Pointer) blk: {
if (tv.ty.castPtrToFn()) |_| break :blk;
if (!tv.ty.elemType2().hasRuntimeBits()) {
return MCValue.none;
}
}
const module = self.bin_file.options.module.?;
const decl = module.declPtr(decl_index);
module.markDeclAlive(decl);
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const atom_index = try elf_file.getOrCreateAtomForDecl(decl_index);
const atom = elf_file.getAtom(atom_index);
return MCValue{ .memory = atom.getOffsetTableAddress(elf_file) };
} else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
const atom_index = try macho_file.getOrCreateAtomForDecl(decl_index);
const sym_index = macho_file.getAtom(atom_index).getSymbolIndex().?;
return MCValue{ .linker_load = .{
.type = .got,
.sym_index = sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
const atom_index = try coff_file.getOrCreateAtomForDecl(decl_index);
const sym_index = coff_file.getAtom(atom_index).getSymbolIndex().?;
return MCValue{ .linker_load = .{
.type = .got,
.sym_index = sym_index,
} };
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
const decl_block_index = try p9.seeDecl(decl_index);
const decl_block = p9.getDeclBlock(decl_block_index);
const got_addr = p9.bases.data + decl_block.got_index.? * ptr_bytes;
return MCValue{ .memory = got_addr };
} else {
return self.fail("TODO codegen non-ELF const Decl pointer", .{});
}
}
fn lowerUnnamedConst(self: *Self, tv: TypedValue) InnerError!MCValue {
log.debug("lowerUnnamedConst: ty = {}, val = {}", .{ tv.ty.fmtDebug(), tv.val.fmtDebug() });
const local_sym_index = self.bin_file.lowerUnnamedConst(tv, self.mod_fn.owner_decl) catch |err| {
return self.fail("lowering unnamed constant failed: {s}", .{@errorName(err)});
};
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
return MCValue{ .memory = elf_file.getSymbol(local_sym_index).st_value };
} else if (self.bin_file.cast(link.File.MachO)) |_| {
return MCValue{ .linker_load = .{
.type = .direct,
.sym_index = local_sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return MCValue{ .linker_load = .{
.type = .direct,
.sym_index = local_sym_index,
} };
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
const ptr_bits = self.target.cpu.arch.ptrBitWidth();
const ptr_bytes: u64 = @divExact(ptr_bits, 8);
const got_index = local_sym_index; // the plan9 backend returns the got_index
const got_addr = p9.bases.data + got_index * ptr_bytes;
return MCValue{ .memory = got_addr };
} else {
return self.fail("TODO lower unnamed const", .{});
}
}
fn genTypedValue(self: *Self, arg_tv: TypedValue) InnerError!MCValue {
var typed_value = arg_tv;
if (typed_value.val.castTag(.runtime_value)) |rt| {
typed_value.val = rt.data;
}
log.debug("genTypedValue: ty = {}, val = {}", .{ typed_value.ty.fmtDebug(), typed_value.val.fmtDebug() });
if (typed_value.val.isUndef())
return MCValue{ .undef = {} };
const ptr_bits = self.target.cpu.arch.ptrBitWidth();
if (typed_value.val.castTag(.decl_ref)) |payload| {
return self.lowerDeclRef(typed_value, payload.data);
}
if (typed_value.val.castTag(.decl_ref_mut)) |payload| {
return self.lowerDeclRef(typed_value, payload.data.decl_index);
}
const target = self.target.*;
switch (typed_value.ty.zigTypeTag()) {
.Void => return MCValue{ .none = {} },
.Pointer => switch (typed_value.ty.ptrSize()) {
.Slice => {},
else => {
switch (typed_value.val.tag()) {
.int_u64 => {
return MCValue{ .immediate = typed_value.val.toUnsignedInt(target) };
},
else => {},
}
},
const mcv: MCValue = switch (try codegen.genTypedValue(
self.bin_file,
self.src_loc,
arg_tv,
self.mod_fn.owner_decl,
)) {
.mcv => |mcv| switch (mcv) {
.none => .none,
.undef => .undef,
.linker_load => |ll| .{ .linker_load = ll },
.immediate => |imm| .{ .immediate = imm },
.memory => |addr| .{ .memory = addr },
},
.Int => {
const info = typed_value.ty.intInfo(self.target.*);
if (info.bits <= ptr_bits and info.signedness == .signed) {
return MCValue{ .immediate = @bitCast(u64, typed_value.val.toSignedInt(target)) };
}
if (!(info.bits > ptr_bits or info.signedness == .signed)) {
return MCValue{ .immediate = typed_value.val.toUnsignedInt(target) };
}
.fail => |msg| {
self.err_msg = msg;
return error.CodegenFail;
},
.Bool => {
return MCValue{ .immediate = @boolToInt(typed_value.val.toBool()) };
},
.Optional => {
if (typed_value.ty.isPtrLikeOptional()) {
if (typed_value.val.isNull())
return MCValue{ .immediate = 0 };
var buf: Type.Payload.ElemType = undefined;
return self.genTypedValue(.{
.ty = typed_value.ty.optionalChild(&buf),
.val = typed_value.val,
});
} else if (typed_value.ty.abiSize(self.target.*) == 1) {
return MCValue{ .immediate = @boolToInt(!typed_value.val.isNull()) };
}
},
.Enum => {
if (typed_value.val.castTag(.enum_field_index)) |field_index| {
switch (typed_value.ty.tag()) {
.enum_simple => {
return MCValue{ .immediate = field_index.data };
},
.enum_full, .enum_nonexhaustive => {
const enum_full = typed_value.ty.cast(Type.Payload.EnumFull).?.data;
if (enum_full.values.count() != 0) {
const tag_val = enum_full.values.keys()[field_index.data];
return self.genTypedValue(.{ .ty = enum_full.tag_ty, .val = tag_val });
} else {
return MCValue{ .immediate = field_index.data };
}
},
else => unreachable,
}
} else {
var int_tag_buffer: Type.Payload.Bits = undefined;
const int_tag_ty = typed_value.ty.intTagType(&int_tag_buffer);
return self.genTypedValue(.{ .ty = int_tag_ty, .val = typed_value.val });
}
},
.ErrorSet => {
switch (typed_value.val.tag()) {
.@"error" => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
},
else => {
// In this case we are rendering an error union which has a 0 bits payload.
return MCValue{ .immediate = 0 };
},
}
},
.ErrorUnion => {
const error_type = typed_value.ty.errorUnionSet();
const payload_type = typed_value.ty.errorUnionPayload();
const is_pl = typed_value.val.errorUnionIsPayload();
if (!payload_type.hasRuntimeBitsIgnoreComptime()) {
// We use the error type directly as the type.
const err_val = if (!is_pl) typed_value.val else Value.initTag(.zero);
return self.genTypedValue(.{ .ty = error_type, .val = err_val });
}
},
.ComptimeInt => unreachable,
.ComptimeFloat => unreachable,
.Type => unreachable,
.EnumLiteral => unreachable,
.NoReturn => unreachable,
.Undefined => unreachable,
.Null => unreachable,
.Opaque => unreachable,
else => {},
}
return self.lowerUnnamedConst(typed_value);
};
return mcv;
}
const CallMCValues = struct {


@@ -29,11 +29,10 @@ pub const Result = union(enum) {
fail: *ErrorMsg,
};
pub const GenerateSymbolError = error{
pub const CodeGenError = error{
OutOfMemory,
Overflow,
/// A Decl that this symbol depends on had a semantic analysis failure.
AnalysisFail,
CodegenFail,
};
pub const DebugInfoOutput = union(enum) {
@@ -63,19 +62,6 @@ pub const DebugInfoOutput = union(enum) {
none,
};
/// Helper struct to denote that the value is in memory but requires a linker relocation fixup:
/// * got - the value is referenced indirectly via GOT entry index (the linker emits a got-type reloc)
/// * direct - the value is referenced directly via symbol index index (the linker emits a displacement reloc)
/// * import - the value is referenced indirectly via import entry index (the linker emits an import-type reloc)
pub const LinkerLoad = struct {
type: enum {
got,
direct,
import,
},
sym_index: u32,
};
pub fn generateFunction(
bin_file: *link.File,
src_loc: Module.SrcLoc,
@@ -84,7 +70,7 @@ pub fn generateFunction(
liveness: Liveness,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
) GenerateSymbolError!Result {
) CodeGenError!Result {
switch (bin_file.options.target.cpu.arch) {
.arm,
.armeb,
@@ -120,7 +106,7 @@ pub fn generateSymbol(
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
reloc_info: RelocInfo,
) GenerateSymbolError!Result {
) CodeGenError!Result {
const tracy = trace(@src());
defer tracy.end();
@@ -823,7 +809,7 @@ fn lowerDeclRef(
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
reloc_info: RelocInfo,
) GenerateSymbolError!Result {
) CodeGenError!Result {
const target = bin_file.options.target;
const module = bin_file.options.module.?;
if (typed_value.ty.isSlice()) {
@@ -880,6 +866,288 @@ fn lowerDeclRef(
return Result.ok;
}
/// Helper struct to denote that the value is in memory but requires a linker relocation fixup:
/// * got - the value is referenced indirectly via GOT entry index (the linker emits a got-type reloc)
/// * direct - the value is referenced directly via symbol index index (the linker emits a displacement reloc)
/// * import - the value is referenced indirectly via import entry index (the linker emits an import-type reloc)
pub const LinkerLoad = struct {
type: enum {
got,
direct,
import,
},
sym_index: u32,
};
pub const GenResult = union(enum) {
mcv: MCValue,
fail: *ErrorMsg,
const MCValue = union(enum) {
none,
undef,
/// The bit-width of the immediate may be smaller than `u64`. For example, on 32-bit targets
/// such as ARM, the immediate will never exceed 32-bits.
immediate: u64,
linker_load: LinkerLoad,
/// Direct by-address reference to memory location.
memory: u64,
};
fn mcv(val: MCValue) GenResult {
return .{ .mcv = val };
}
fn fail(
gpa: Allocator,
src_loc: Module.SrcLoc,
comptime format: []const u8,
args: anytype,
) Allocator.Error!GenResult {
const msg = try ErrorMsg.create(gpa, src_loc, format, args);
return .{ .fail = msg };
}
};
fn genDeclRef(
bin_file: *link.File,
src_loc: Module.SrcLoc,
tv: TypedValue,
decl_index: Module.Decl.Index,
) CodeGenError!GenResult {
log.debug("genDeclRef: ty = {}, val = {}", .{ tv.ty.fmtDebug(), tv.val.fmtDebug() });
const target = bin_file.options.target;
const ptr_bits = target.cpu.arch.ptrBitWidth();
const ptr_bytes: u64 = @divExact(ptr_bits, 8);
const module = bin_file.options.module.?;
const decl = module.declPtr(decl_index);
if (decl.ty.zigTypeTag() != .Fn and !decl.ty.hasRuntimeBitsIgnoreComptime()) {
const imm: u64 = switch (ptr_bytes) {
1 => 0xaa,
2 => 0xaaaa,
4 => 0xaaaaaaaa,
8 => 0xaaaaaaaaaaaaaaaa,
else => unreachable,
};
return GenResult.mcv(.{ .immediate = imm });
}
// TODO this feels clunky. Perhaps we should check for it in `genTypedValue`?
if (tv.ty.zigTypeTag() == .Pointer) blk: {
if (tv.ty.castPtrToFn()) |_| break :blk;
if (!tv.ty.elemType2().hasRuntimeBits()) {
return GenResult.mcv(.none);
}
}
module.markDeclAlive(decl);
if (bin_file.cast(link.File.Elf)) |elf_file| {
const atom_index = try elf_file.getOrCreateAtomForDecl(decl_index);
const atom = elf_file.getAtom(atom_index);
return GenResult.mcv(.{ .memory = atom.getOffsetTableAddress(elf_file) });
} else if (bin_file.cast(link.File.MachO)) |macho_file| {
const atom_index = try macho_file.getOrCreateAtomForDecl(decl_index);
const sym_index = macho_file.getAtom(atom_index).getSymbolIndex().?;
return GenResult.mcv(.{ .linker_load = .{
.type = .got,
.sym_index = sym_index,
} });
} else if (bin_file.cast(link.File.Coff)) |coff_file| {
const atom_index = try coff_file.getOrCreateAtomForDecl(decl_index);
const sym_index = coff_file.getAtom(atom_index).getSymbolIndex().?;
return GenResult.mcv(.{ .linker_load = .{
.type = .got,
.sym_index = sym_index,
} });
} else if (bin_file.cast(link.File.Plan9)) |p9| {
const decl_block_index = try p9.seeDecl(decl_index);
const decl_block = p9.getDeclBlock(decl_block_index);
const got_addr = p9.bases.data + decl_block.got_index.? * ptr_bytes;
return GenResult.mcv(.{ .memory = got_addr });
} else {
return GenResult.fail(bin_file.allocator, src_loc, "TODO genDeclRef for target {}", .{target});
}
}
fn genUnnamedConst(
bin_file: *link.File,
src_loc: Module.SrcLoc,
tv: TypedValue,
owner_decl_index: Module.Decl.Index,
) CodeGenError!GenResult {
log.debug("genUnnamedConst: ty = {}, val = {}", .{ tv.ty.fmtDebug(), tv.val.fmtDebug() });
const target = bin_file.options.target;
const local_sym_index = bin_file.lowerUnnamedConst(tv, owner_decl_index) catch |err| {
return GenResult.fail(bin_file.allocator, src_loc, "lowering unnamed constant failed: {s}", .{@errorName(err)});
};
if (bin_file.cast(link.File.Elf)) |elf_file| {
return GenResult.mcv(.{ .memory = elf_file.getSymbol(local_sym_index).st_value });
} else if (bin_file.cast(link.File.MachO)) |_| {
return GenResult.mcv(.{ .linker_load = .{
.type = .direct,
.sym_index = local_sym_index,
} });
} else if (bin_file.cast(link.File.Coff)) |_| {
return GenResult.mcv(.{ .linker_load = .{
.type = .direct,
.sym_index = local_sym_index,
} });
} else if (bin_file.cast(link.File.Plan9)) |p9| {
const ptr_bits = target.cpu.arch.ptrBitWidth();
const ptr_bytes: u64 = @divExact(ptr_bits, 8);
const got_index = local_sym_index; // the plan9 backend returns the got_index
const got_addr = p9.bases.data + got_index * ptr_bytes;
return GenResult.mcv(.{ .memory = got_addr });
} else {
return GenResult.fail(bin_file.allocator, src_loc, "TODO genUnnamedConst for target {}", .{target});
}
}
pub fn genTypedValue(
bin_file: *link.File,
src_loc: Module.SrcLoc,
arg_tv: TypedValue,
owner_decl_index: Module.Decl.Index,
) CodeGenError!GenResult {
var typed_value = arg_tv;
if (typed_value.val.castTag(.runtime_value)) |rt| {
typed_value.val = rt.data;
}
log.debug("genTypedValue: ty = {}, val = {}", .{ typed_value.ty.fmtDebug(), typed_value.val.fmtDebug() });
if (typed_value.val.isUndef())
return GenResult.mcv(.undef);
const target = bin_file.options.target;
const ptr_bits = target.cpu.arch.ptrBitWidth();
if (typed_value.val.castTag(.decl_ref)) |payload| {
return genDeclRef(bin_file, src_loc, typed_value, payload.data);
}
if (typed_value.val.castTag(.decl_ref_mut)) |payload| {
return genDeclRef(bin_file, src_loc, typed_value, payload.data.decl_index);
}
switch (typed_value.ty.zigTypeTag()) {
.Void => return GenResult.mcv(.none),
.Pointer => switch (typed_value.ty.ptrSize()) {
.Slice => {},
else => {
switch (typed_value.val.tag()) {
.int_u64 => {
return GenResult.mcv(.{ .immediate = typed_value.val.toUnsignedInt(target) });
},
else => {},
}
},
},
.Int => {
const info = typed_value.ty.intInfo(target);
if (info.bits <= ptr_bits) {
const unsigned = switch (info.signedness) {
.signed => @bitCast(u64, typed_value.val.toSignedInt(target)),
.unsigned => typed_value.val.toUnsignedInt(target),
};
return GenResult.mcv(.{ .immediate = unsigned });
}
},
.Bool => {
return GenResult.mcv(.{ .immediate = @boolToInt(typed_value.val.toBool()) });
},
.Optional => {
if (typed_value.ty.isPtrLikeOptional()) {
if (typed_value.val.isNull())
return GenResult.mcv(.{ .immediate = 0 });
var buf: Type.Payload.ElemType = undefined;
return genTypedValue(bin_file, src_loc, .{
.ty = typed_value.ty.optionalChild(&buf),
.val = typed_value.val,
}, owner_decl_index);
} else if (typed_value.ty.abiSize(target) == 1) {
return GenResult.mcv(.{ .immediate = @boolToInt(!typed_value.val.isNull()) });
}
},
.Enum => {
if (typed_value.val.castTag(.enum_field_index)) |field_index| {
switch (typed_value.ty.tag()) {
.enum_simple => {
return GenResult.mcv(.{ .immediate = field_index.data });
},
.enum_full, .enum_nonexhaustive => {
const enum_full = typed_value.ty.cast(Type.Payload.EnumFull).?.data;
if (enum_full.values.count() != 0) {
const tag_val = enum_full.values.keys()[field_index.data];
return genTypedValue(bin_file, src_loc, .{
.ty = enum_full.tag_ty,
.val = tag_val,
}, owner_decl_index);
} else {
return GenResult.mcv(.{ .immediate = field_index.data });
}
},
else => unreachable,
}
} else {
var int_tag_buffer: Type.Payload.Bits = undefined;
const int_tag_ty = typed_value.ty.intTagType(&int_tag_buffer);
return genTypedValue(bin_file, src_loc, .{
.ty = int_tag_ty,
.val = typed_value.val,
}, owner_decl_index);
}
},
.ErrorSet => {
switch (typed_value.val.tag()) {
.@"error" => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return GenResult.mcv(.{ .immediate = error_index });
},
else => {
// In this case we are rendering an error union which has a 0 bits payload.
return GenResult.mcv(.{ .immediate = 0 });
},
}
},
.ErrorUnion => {
const error_type = typed_value.ty.errorUnionSet();
const payload_type = typed_value.ty.errorUnionPayload();
const is_pl = typed_value.val.errorUnionIsPayload();
if (!payload_type.hasRuntimeBitsIgnoreComptime()) {
// We use the error type directly as the type.
const err_val = if (!is_pl) typed_value.val else Value.initTag(.zero);
return genTypedValue(bin_file, src_loc, .{
.ty = error_type,
.val = err_val,
}, owner_decl_index);
}
},
.ComptimeInt => unreachable,
.ComptimeFloat => unreachable,
.Type => unreachable,
.EnumLiteral => unreachable,
.NoReturn => unreachable,
.Undefined => unreachable,
.Null => unreachable,
.Opaque => unreachable,
else => {},
}
return genUnnamedConst(bin_file, src_loc, typed_value, owner_decl_index);
}
pub fn errUnionPayloadOffset(payload_ty: Type, target: std.Target) u64 {
const payload_align = payload_ty.abiAlignment(target);
const error_align = Type.anyerror.abiAlignment(target);


@@ -1060,7 +1060,7 @@ pub fn lowerUnnamedConst(self: *Coff, tv: TypedValue, decl_index: Module.Decl.In
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
log.err("{s}", .{em.msg});
return error.AnalysisFail;
return error.CodegenFail;
},
};


@@ -2097,9 +2097,16 @@ fn freeAtom(self: *Elf, atom_index: Atom.Index) void {
// Appending to free lists is allowed to fail because the free lists are heuristics based anyway.
const local_sym_index = atom.getSymbolIndex().?;
log.debug("adding %{d} to local symbols free list", .{local_sym_index});
self.local_symbol_free_list.append(gpa, local_sym_index) catch {};
self.local_symbols.items[local_sym_index].st_info = 0;
self.local_symbols.items[local_sym_index].st_shndx = 0;
self.local_symbols.items[local_sym_index] = .{
.st_name = 0,
.st_info = 0,
.st_other = 0,
.st_shndx = 0,
.st_value = 0,
.st_size = 0,
};
_ = self.atom_by_index_table.remove(local_sym_index);
self.getAtomPtr(atom_index).local_sym_index = 0;
@@ -2618,7 +2625,7 @@ pub fn lowerUnnamedConst(self: *Elf, typed_value: TypedValue, decl_index: Module
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
log.err("{s}", .{em.msg});
return error.AnalysisFail;
return error.CodegenFail;
},
};


@@ -2089,7 +2089,7 @@ pub fn lowerUnnamedConst(self: *MachO, typed_value: TypedValue, decl_index: Modu
decl.analysis = .codegen_failure;
try module.failed_decls.put(module.gpa, decl_index, em);
log.err("{s}", .{em.msg});
return error.AnalysisFail;
return error.CodegenFail;
},
};


@@ -377,7 +377,7 @@ pub fn lowerUnnamedConst(self: *Plan9, tv: TypedValue, decl_index: Module.Decl.I
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
log.err("{s}", .{em.msg});
return error.AnalysisFail;
return error.CodegenFail;
},
};
// duped_code is freed when the unnamed const is freed


@@ -1265,7 +1265,7 @@ pub fn lowerUnnamedConst(wasm: *Wasm, tv: TypedValue, decl_index: Module.Decl.In
.fail => |em| {
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
return error.AnalysisFail;
return error.CodegenFail;
},
};
};


@@ -19,6 +19,9 @@ pub const AllocateRegistersError = error{
/// Can happen when spilling an instruction in codegen runs out of
/// memory, so we propagate that error
OutOfMemory,
/// Can happen when spilling an instruction in codegen triggers integer
/// overflow, so we propagate that error
Overflow,
/// Can happen when spilling an instruction triggers a codegen
/// error, so we propagate that error
CodegenFail,


@@ -3,6 +3,7 @@ const builtin = @import("builtin");
test "@fieldParentPtr non-first field" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
try testParentFieldPtr(&foo.c);
comptime try testParentFieldPtr(&foo.c);
@@ -10,6 +11,7 @@ test "@fieldParentPtr non-first field" {
test "@fieldParentPtr first field" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
try testParentFieldPtrFirst(&foo.a);
comptime try testParentFieldPtrFirst(&foo.a);
@@ -47,6 +49,7 @@ fn testParentFieldPtrFirst(a: *const bool) !void {
test "@fieldParentPtr untagged union" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
@@ -73,6 +76,7 @@ fn testFieldParentPtrUnion(c: *const i32) !void {
test "@fieldParentPtr tagged union" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
@@ -99,6 +103,7 @@ fn testFieldParentPtrTaggedUnion(c: *const i32) !void {
test "@fieldParentPtr extern union" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO