mirror of https://github.com/ziglang/zig.git
Merge pull request #10991 from ziglang/macho-pointer-rebase
commit 0474943ddf
@@ -786,6 +786,11 @@ fn allocMem(self: *Self, inst: Air.Inst.Index, abi_size: u32, abi_align: u32) !u32 {
 /// Use a pointer instruction as the basis for allocating stack memory.
 fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
     const elem_ty = self.air.typeOfIndex(inst).elemType();
+
+    if (!elem_ty.hasRuntimeBits()) {
+        return self.allocMem(inst, @sizeOf(usize), @alignOf(usize));
+    }
+
     const abi_size = math.cast(u32, elem_ty.abiSize(self.target.*)) catch {
         return self.fail("type '{}' too big to fit into stack frame", .{elem_ty});
     };
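Why the fallback to a pointer-sized allocation: a zero-bit element type reports an ABI size of 0, yet taking the address of such a local must still yield a valid, properly aligned pointer. A minimal illustration (the test code is mine, not part of the patch):

    test "address of a zero-bit local" {
        var x: void = {};
        const p: *void = &x; // still needs a real stack address
        _ = p;
    }

Using @sizeOf(usize)/@alignOf(usize) rather than a hard-coded 8 also keeps the fallback correct on 32-bit targets.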
@@ -3545,6 +3550,15 @@ fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) MCValue {
 fn lowerDeclRef(self: *Self, tv: TypedValue, decl: *Module.Decl) InnerError!MCValue {
     const ptr_bits = self.target.cpu.arch.ptrBitWidth();
     const ptr_bytes: u64 = @divExact(ptr_bits, 8);
+
+    // TODO this feels clunky. Perhaps we should check for it in `genTypedValue`?
+    if (tv.ty.zigTypeTag() == .Pointer) blk: {
+        if (tv.ty.castPtrToFn()) |_| break :blk;
+        if (!tv.ty.elemType2().hasRuntimeBits()) {
+            return MCValue.none;
+        }
+    }
+
     decl.alive = true;
     if (self.bin_file.cast(link.File.Elf)) |elf_file| {
         const got = &elf_file.program_headers.items[elf_file.phdr_got_index.?];
@@ -3553,6 +3567,7 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl: *Module.Decl) InnerError!MCValue {
     } else if (self.bin_file.cast(link.File.MachO)) |_| {
         // Because MachO is PIE-always-on, we defer memory address resolution until
         // the linker has enough info to perform relocations.
        assert(decl.link.macho.local_sym_index != 0);
+        return MCValue{ .got_load = decl.link.macho.local_sym_index };
     } else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
         const got_addr = coff_file.offset_table_virtual_address + decl.link.coff.offset_table_index * ptr_bytes;
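The new .Pointer guard is the decl-reference counterpart of that stack-allocation fallback: a reference to a zero-bit declaration carries no runtime data, so it lowers to MCValue.none and no GOT entry or relocation is emitted. Function pointers are exempted via castPtrToFn because a fn type itself reports no runtime bits while a pointer to it is a real address. A sketch (illustrative, not from the patch):

    const zero_bit: void = {};

    test "reference to a zero-bit declaration" {
        const p = &zero_bit; // lowers to MCValue.none; nothing to relocate
        _ = p;
    }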
@@ -208,8 +208,8 @@ fn instructionSize(emit: *Emit, inst: Mir.Inst.Index) usize {
     }
 
     switch (tag) {
+        .load_memory_direct => return 3 * 4,
         .load_memory_got,
-        .load_memory_direct,
         .load_memory_ptr_got,
         .load_memory_ptr_direct,
         => return 2 * 4,
@@ -654,15 +654,31 @@ fn mirLoadMemoryPie(emit: *Emit, inst: Mir.Inst.Index) !void {
     const data = emit.mir.extraData(Mir.LoadMemoryPie, payload).data;
     const reg = @intToEnum(Register, data.register);
 
-    // PC-relative displacement to the entry in the GOT table.
+    // PC-relative displacement to the entry in memory.
     // adrp
     const offset = @intCast(u32, emit.code.items.len);
     try emit.writeInstruction(Instruction.adrp(reg, 0));
 
     switch (tag) {
-        .load_memory_got,
-        .load_memory_direct,
-        => {
+        .load_memory_got => {
             // ldr reg, reg, offset
             try emit.writeInstruction(Instruction.ldr(
                 reg,
                 reg,
                 Instruction.LoadStoreOffset.imm(0),
             ));
         },
+        .load_memory_direct => {
+            // We cannot load from the offset directly as it may not be aligned properly.
+            // For example, a load into a 64-bit register requires the target address
+            // to be 8-byte aligned, while the value might have non-8-byte natural alignment,
+            // meaning the linker might have put it at a non-8-byte aligned address. To circumvent
+            // this, we use `adrp, add` to form the address value which we then dereference with
+            // `ldr`.
+            // Note that this can potentially be optimised out by the codegen/linker if the
+            // target address is appropriately aligned.
+            // add reg, reg, offset
+            try emit.writeInstruction(Instruction.add(reg, reg, 0, false));
+            // ldr reg, reg, offset
+            try emit.writeInstruction(Instruction.ldr(
+                reg,
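This is also why instructionSize above now reports 3 * 4 bytes for .load_memory_direct: AArch64 instructions are 4 bytes each, and the direct load grows from two instructions to three. The ldr immediate offset is scaled by the access size (a 64-bit load needs an 8-byte-aligned offset), while add accepts any page offset, so forming the address first removes the alignment constraint. Schematically, with x0 as an arbitrary register (illustrative assembly, using the Mach-O relocation syntax the linker later resolves):

    // load_memory_got: 2 instructions, 8 bytes
    adrp x0, _target@GOTPAGE
    ldr  x0, [x0, _target@GOTPAGEOFF]

    // load_memory_direct: 3 instructions, 12 bytes
    adrp x0, _target@PAGE
    add  x0, x0, _target@PAGEOFF // form the address; no alignment constraint
    ldr  x0, [x0]                // then dereference

The GOT variant can stay at two instructions because GOT slots are always pointer-aligned, so the scaled ldr offset is safe there.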
@@ -852,7 +852,7 @@ fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
     const elem_ty = ptr_ty.elemType();
 
     if (!elem_ty.hasRuntimeBits()) {
-        return self.allocMem(inst, 8, 8);
+        return self.allocMem(inst, @sizeOf(usize), @alignOf(usize));
     }
 
     const abi_size = math.cast(u32, elem_ty.abiSize(self.target.*)) catch {
@@ -5333,6 +5333,14 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl: *Module.Decl) InnerError!MCValue {
     const ptr_bits = self.target.cpu.arch.ptrBitWidth();
     const ptr_bytes: u64 = @divExact(ptr_bits, 8);
 
+    // TODO this feels clunky. Perhaps we should check for it in `genTypedValue`?
+    if (tv.ty.zigTypeTag() == .Pointer) blk: {
+        if (tv.ty.castPtrToFn()) |_| break :blk;
+        if (!tv.ty.elemType2().hasRuntimeBits()) {
+            return MCValue.none;
+        }
+    }
+
     decl.alive = true;
     if (self.bin_file.cast(link.File.Elf)) |elf_file| {
         const got = &elf_file.program_headers.items[elf_file.phdr_got_index.?];
@@ -5341,6 +5349,7 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl: *Module.Decl) InnerError!MCValue {
     } else if (self.bin_file.cast(link.File.MachO)) |_| {
         // Because MachO is PIE-always-on, we defer memory address resolution until
         // the linker has enough info to perform relocations.
         assert(decl.link.macho.local_sym_index != 0);
+        return MCValue{ .got_load = decl.link.macho.local_sym_index };
     } else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
         const got_addr = coff_file.offset_table_virtual_address + decl.link.coff.offset_table_index * ptr_bytes;
@@ -857,6 +857,7 @@ fn mirLeaPie(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
         else => return emit.fail("TODO unused LEA PIE variants 0b10 and 0b11", .{}),
     };
     const atom = macho_file.atom_by_index_table.get(load_reloc.atom_index).?;
+    log.debug("adding reloc of type {} to local @{d}", .{ reloc_type, load_reloc.sym_index });
     try atom.relocs.append(emit.bin_file.allocator, .{
         .offset = @intCast(u32, end_offset - 4),
         .target = .{ .local = load_reloc.sym_index },
@@ -3797,10 +3797,11 @@ pub fn lowerUnnamedConst(self: *MachO, typed_value: TypedValue, decl: *Module.Decl
     atom.code.clearRetainingCapacity();
     try atom.code.appendSlice(self.base.allocator, code);
 
-    const match = try self.getMatchingSectionAtom(atom, typed_value.ty, typed_value.val);
+    const match = try self.getMatchingSectionAtom(atom, decl_name, typed_value.ty, typed_value.val);
     const addr = try self.allocateAtom(atom, code.len, required_alignment, match);
 
     log.debug("allocated atom for {s} at 0x{x}", .{ name, addr });
     log.debug("  (required alignment 0x{x})", .{required_alignment});
 
     errdefer self.freeAtom(atom, match, true);
@@ -3903,28 +3904,60 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
     try self.updateDeclExports(module, decl, decl_exports);
 }
 
-fn isElemTyPointer(ty: Type) bool {
+/// Checks if the value, or any of its embedded values stores a pointer, and thus requires
+/// a rebase opcode for the dynamic linker.
+fn needsPointerRebase(ty: Type, val: Value) bool {
+    if (ty.zigTypeTag() == .Fn) {
+        return false;
+    }
+    if (val.pointerDecl()) |_| {
+        return true;
+    }
+
     switch (ty.zigTypeTag()) {
-        .Fn => return false,
+        .Fn => unreachable,
         .Pointer => return true,
-        .Array => {
-            const elem_ty = ty.elemType();
-            return isElemTyPointer(elem_ty);
+        .Array, .Vector => {
+            if (ty.arrayLen() == 0) return false;
+            const elem_ty = ty.childType();
+            var elem_value_buf: Value.ElemValueBuffer = undefined;
+            const elem_val = val.elemValueBuffer(0, &elem_value_buf);
+            return needsPointerRebase(elem_ty, elem_val);
         },
-        .Struct, .Union => {
-            const len = ty.structFieldCount();
-            var i: usize = 0;
-            while (i < len) : (i += 1) {
-                const field_ty = ty.structFieldType(i);
-                if (isElemTyPointer(field_ty)) return true;
-            }
-            return false;
+        .Struct => {
+            const fields = ty.structFields().values();
+            if (fields.len == 0) return false;
+            if (val.castTag(.@"struct")) |payload| {
+                const field_values = payload.data;
+                for (field_values) |field_val, i| {
+                    if (needsPointerRebase(fields[i].ty, field_val)) return true;
+                } else return false;
+            } else return false;
+        },
+        .Optional => {
+            if (val.castTag(.opt_payload)) |payload| {
+                const sub_val = payload.data;
+                var buffer: Type.Payload.ElemType = undefined;
+                const sub_ty = ty.optionalChild(&buffer);
+                return needsPointerRebase(sub_ty, sub_val);
+            } else return false;
+        },
+        .Union => {
+            const union_obj = val.cast(Value.Payload.Union).?.data;
+            const active_field_ty = ty.unionFieldType(union_obj.tag);
+            return needsPointerRebase(active_field_ty, union_obj.val);
+        },
+        .ErrorUnion => {
+            if (val.castTag(.eu_payload)) |payload| {
+                const payload_ty = ty.errorUnionPayload();
+                return needsPointerRebase(payload_ty, payload.data);
+            } else return false;
        },
         else => return false,
     }
 }
 
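Background for this rewrite: in a PIE Mach-O image, dyld slides the binary to a random base address at load time, so every absolute address stored in the binary's data needs a rebase opcode telling dyld to adjust it. Where the old isElemTyPointer only inspected types, needsPointerRebase walks the typed value, so it can also see pointers hiding behind optional, union, and error-union payloads. A few hypothetical globals and what the check would return:

    const target: u32 = 42;

    const plain = [_]u32{ 1, 2, 3 };                  // no address stored: false
    const direct: *const u32 = &target;               // a pointer: true
    const nested = .{ .inner = .{ .ptr = &target } }; // found through struct fields: true
    const maybe: ?*const u32 = &target;               // found through the optional payload: true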
-fn getMatchingSectionAtom(self: *MachO, atom: *Atom, ty: Type, val: Value) !MatchingSection {
+fn getMatchingSectionAtom(self: *MachO, atom: *Atom, name: []const u8, ty: Type, val: Value) !MatchingSection {
     const code = atom.code.items;
     const alignment = ty.abiAlignment(self.base.options.target);
     const align_log_2 = math.log2(alignment);
@@ -3938,10 +3971,25 @@ fn getMatchingSectionAtom(self: *MachO, atom: *Atom, ty: Type, val: Value) !MatchingSection {
                     .seg = self.data_segment_cmd_index.?,
                     .sect = self.bss_section_index.?,
                 };
             } else {
                 break :blk MatchingSection{
                     .seg = self.data_segment_cmd_index.?,
                     .sect = self.data_section_index.?,
                 };
             }
         }
 
+        if (val.castTag(.variable)) |_| {
+            break :blk MatchingSection{
+                .seg = self.data_segment_cmd_index.?,
+                .sect = self.data_section_index.?,
+            };
+        }
+
+        if (needsPointerRebase(ty, val)) {
+            break :blk (try self.getMatchingSection(.{
-                .segname = makeStaticString("__DATA"),
-                .sectname = makeStaticString("__data"),
+                .segname = makeStaticString("__DATA_CONST"),
+                .sectname = makeStaticString("__const"),
                 .size = code.len,
                 .@"align" = align_log_2,
             })).?;
@@ -3954,8 +4002,8 @@ fn getMatchingSectionAtom(self: *MachO, atom: *Atom, ty: Type, val: Value) !MatchingSection {
                 .sect = self.text_section_index.?,
             };
         },
-        .Array => switch (val.tag()) {
-            .bytes => {
+        .Array => {
+            if (val.tag() == .bytes) {
                 switch (ty.tag()) {
                     .array_u8_sentinel_0,
                     .const_slice_u8_sentinel_0,
@@ -3969,79 +4017,23 @@ fn getMatchingSectionAtom(self: *MachO, atom: *Atom, ty: Type, val: Value) !MatchingSection {
                         .@"align" = align_log_2,
                     })).?;
                 },
-                else => {
-                    break :blk (try self.getMatchingSection(.{
-                        .segname = makeStaticString("__TEXT"),
-                        .sectname = makeStaticString("__const"),
-                        .size = code.len,
-                        .@"align" = align_log_2,
-                    })).?;
-                },
+                else => {},
+                }
             },
-            .array => {
-                if (isElemTyPointer(ty)) {
-                    break :blk (try self.getMatchingSection(.{
-                        .segname = makeStaticString("__DATA_CONST"),
-                        .sectname = makeStaticString("__const"),
-                        .size = code.len,
-                        .@"align" = align_log_2,
-                    })).?;
-                } else {
-                    break :blk (try self.getMatchingSection(.{
-                        .segname = makeStaticString("__TEXT"),
-                        .sectname = makeStaticString("__const"),
-                        .size = code.len,
-                        .@"align" = align_log_2,
-                    })).?;
-                }
-            },
-            else => {
-                break :blk (try self.getMatchingSection(.{
-                    .segname = makeStaticString("__TEXT"),
-                    .sectname = makeStaticString("__const"),
-                    .size = code.len,
-                    .@"align" = align_log_2,
-                })).?;
-            },
-        },
-        .Pointer => {
-            if (val.castTag(.variable)) |_| {
-                break :blk MatchingSection{
-                    .seg = self.data_segment_cmd_index.?,
-                    .sect = self.data_section_index.?,
-                };
-            } else {
-                break :blk (try self.getMatchingSection(.{
-                    .segname = makeStaticString("__DATA_CONST"),
-                    .sectname = makeStaticString("__const"),
-                    .size = code.len,
-                    .@"align" = align_log_2,
-                })).?;
-            }
-        },
-        else => {
-            if (val.castTag(.variable)) |_| {
-                break :blk MatchingSection{
-                    .seg = self.data_segment_cmd_index.?,
-                    .sect = self.data_section_index.?,
-                };
-            } else {
-                break :blk (try self.getMatchingSection(.{
-                    .segname = makeStaticString("__TEXT"),
-                    .sectname = makeStaticString("__const"),
-                    .size = code.len,
-                    .@"align" = align_log_2,
-                })).?;
-            }
-        },
+        else => {},
     }
+    break :blk (try self.getMatchingSection(.{
+        .segname = makeStaticString("__TEXT"),
+        .sectname = makeStaticString("__const"),
+        .size = code.len,
+        .@"align" = align_log_2,
+    })).?;
 };
 const local = self.locals.items[atom.local_sym_index];
 const seg = self.load_commands.items[match.seg].segment;
 const sect = seg.sections.items[match.sect];
 log.debug("  allocating atom '{s}' in '{s},{s}' ({d},{d})", .{
-    self.getString(local.n_strx),
+    name,
     sect.segName(),
     sect.sectName(),
     match.seg,
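With these changes the section choice reduces to: mutable variables go to __DATA,__data; constants that embed pointers go to __DATA_CONST,__const (left writable while dyld applies rebases, then made read-only); sentinel-terminated byte arrays go to __TEXT,__cstring; everything else falls through to __TEXT,__const. A sketch with hypothetical decls, mapped per the logic above:

    var counter: u32 = 0;                 // variable             -> __DATA,__data
    const greeting: [5:0]u8 = "hello".*;  // sentinel u8 bytes    -> __TEXT,__cstring
    const primes = [_]u16{ 2, 3, 5, 7 };  // no embedded pointers -> __TEXT,__const
    const answer: u32 = 42;
    const table = [_]*const u32{&answer}; // needs rebase         -> __DATA_CONST,__const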
@@ -4055,14 +4047,15 @@ fn placeDecl(self: *MachO, decl: *Module.Decl, code_len: usize) !*macho.nlist_64 {
     assert(decl.link.macho.local_sym_index != 0); // Caller forgot to call allocateDeclIndexes()
     const symbol = &self.locals.items[decl.link.macho.local_sym_index];
 
-    const decl_ptr = self.decls.getPtr(decl).?;
-    if (decl_ptr.* == null) {
-        decl_ptr.* = try self.getMatchingSectionAtom(&decl.link.macho, decl.ty, decl.val);
-    }
-    const match = decl_ptr.*.?;
     const sym_name = try decl.getFullyQualifiedName(self.base.allocator);
     defer self.base.allocator.free(sym_name);
 
+    const decl_ptr = self.decls.getPtr(decl).?;
+    if (decl_ptr.* == null) {
+        decl_ptr.* = try self.getMatchingSectionAtom(&decl.link.macho, sym_name, decl.ty, decl.val);
+    }
+    const match = decl_ptr.*.?;
+
     if (decl.link.macho.size != 0) {
         const capacity = decl.link.macho.capacity(self.*);
         const need_realloc = code_len > capacity or !mem.isAlignedGeneric(u64, symbol.n_value, required_alignment);
@@ -4071,6 +4064,7 @@ fn placeDecl(self: *MachO, decl: *Module.Decl, code_len: usize) !*macho.nlist_64 {
         const vaddr = try self.growAtom(&decl.link.macho, code_len, required_alignment, match);
 
         log.debug("growing {s} and moving from 0x{x} to 0x{x}", .{ sym_name, symbol.n_value, vaddr });
+        log.debug("  (required alignment 0x{x})", .{required_alignment});
 
         if (vaddr != symbol.n_value) {
             log.debug("  (writing new GOT entry)", .{});
@@ -4105,6 +4099,7 @@ fn placeDecl(self: *MachO, decl: *Module.Decl, code_len: usize) !*macho.nlist_64 {
     const addr = try self.allocateAtom(&decl.link.macho, code_len, required_alignment, match);
 
     log.debug("allocated atom for {s} at 0x{x}", .{ sym_name, addr });
+    log.debug("  (required alignment 0x{x})", .{required_alignment});
 
     errdefer self.freeAtom(&decl.link.macho, match, false);
@@ -4291,6 +4286,7 @@ pub fn deleteExport(self: *MachO, exp: Export) void {
 }
 
 fn freeUnnamedConsts(self: *MachO, decl: *Module.Decl) void {
+    log.debug("freeUnnamedConsts for decl {*}", .{decl});
     const unnamed_consts = self.unnamed_const_atoms.getPtr(decl) orelse return;
     for (unnamed_consts.items) |atom| {
         self.freeAtom(atom, .{
@@ -4300,6 +4296,7 @@ fn freeUnnamedConsts(self: *MachO, decl: *Module.Decl) void {
         self.locals_free_list.append(self.base.allocator, atom.local_sym_index) catch {};
         self.locals.items[atom.local_sym_index].n_type = 0;
         _ = self.atom_by_index_table.remove(atom.local_sym_index);
+        log.debug("  adding local symbol index {d} to free list", .{atom.local_sym_index});
         atom.local_sym_index = 0;
     }
     unnamed_consts.clearAndFree(self.base.allocator);
@@ -4324,10 +4321,15 @@ pub fn freeDecl(self: *MachO, decl: *Module.Decl) void {
             self.got_entries_free_list.append(self.base.allocator, @intCast(u32, got_index)) catch {};
             self.got_entries.items[got_index] = .{ .target = .{ .local = 0 }, .atom = undefined };
+            _ = self.got_entries_table.swapRemove(.{ .local = decl.link.macho.local_sym_index });
+            log.debug("  adding GOT index {d} to free list (target local@{d})", .{
+                got_index,
+                decl.link.macho.local_sym_index,
+            });
         }
 
         self.locals.items[decl.link.macho.local_sym_index].n_type = 0;
         _ = self.atom_by_index_table.remove(decl.link.macho.local_sym_index);
         log.debug("  adding local symbol index {d} to free list", .{decl.link.macho.local_sym_index});
         decl.link.macho.local_sym_index = 0;
     }
     if (self.d_sym) |*d_sym| {
@@ -691,11 +691,11 @@ pub fn resolveRelocs(self: *Atom, macho_file: *MachO) !void {
 
             if (is_via_got) {
                 const got_index = macho_file.got_entries_table.get(rel.target) orelse {
-                    const n_strx = switch (rel.target) {
-                        .local => |sym_index| macho_file.locals.items[sym_index].n_strx,
-                        .global => |n_strx| n_strx,
-                    };
-                    log.err("expected GOT entry for symbol '{s}'", .{macho_file.getString(n_strx)});
+                    log.err("expected GOT entry for symbol", .{});
+                    switch (rel.target) {
+                        .local => |sym_index| log.err("  local @{d}", .{sym_index}),
+                        .global => |n_strx| log.err("  global @'{s}'", .{macho_file.getString(n_strx)}),
+                    }
+                    log.err("  this is an internal linker error", .{});
                     return error.FailedToResolveRelocationTarget;
                 };
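Assuming the default std.log line format and this file's usual link log scope, a failed lookup for, say, local symbol index 4 would now print roughly:

    error(link): expected GOT entry for symbol
    error(link):   local @4
    error(link):   this is an internal linker error

Splitting the diagnostic this way also drops the old code's need to chase a string-table index through getString before the GOT entry is even known to exist.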
@@ -7,7 +7,6 @@ var foo: u8 align(4) = 100;
 
 test "global variable alignment" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .macos) return error.SkipZigTest;
 
     comptime try expect(@typeInfo(@TypeOf(&foo)).Pointer.alignment == 4);
     comptime try expect(@TypeOf(&foo) == *align(4) u8);
@@ -195,9 +195,6 @@ test "multiline string comments at multiple places" {
 }
 
 test "string concatenation" {
-    if (builtin.zig_backend == .stage2_aarch64 and builtin.os.tag == .macos) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .macos) return error.SkipZigTest;
-
     try expect(mem.eql(u8, "OK" ++ " IT " ++ "WORKED", "OK IT WORKED"));
 }
 
@@ -402,8 +399,6 @@ fn testTakeAddressOfParameter(f: f32) !void {
 
 test "pointer to void return type" {
     if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_aarch64 and builtin.os.tag == .macos) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .macos) return error.SkipZigTest;
 
     try testPointerToVoidReturnType();
 }
@@ -370,8 +370,6 @@ test "empty struct method call" {
     if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_aarch64 and builtin.os.tag == .macos) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .macos) return error.SkipZigTest; // TODO
 
     const es = EmptyStruct{};
     try expect(es.method() == 1234);
@@ -44,7 +44,6 @@ fn setInt(foo: *Foo, x: i32) void {
 
 test "comptime union field access" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
 
     comptime {
         var foo = Foo{ .int = 0 };
@@ -77,14 +76,12 @@ const ExternPtrOrInt = extern union {
 };
 test "extern union size" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
 
     comptime try expect(@sizeOf(ExternPtrOrInt) == 8);
 }
 
 test "0-sized extern union definition" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
 
     const U = extern union {
         a: void,
@@ -115,9 +112,7 @@ const err = @as(anyerror!Agg, Agg{
 const array = [_]Value{ v1, v2, v1, v2 };
 
 test "unions embedded in aggregate types" {
-    if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .macos) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
 
     switch (array[1]) {
         Value.Array => |arr| try expect(arr[4] == 3),
@@ -131,7 +126,6 @@ test "unions embedded in aggregate types" {
 
 test "access a member of tagged union with conflicting enum tag name" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
 
     const Bar = union(enum) {
         A: A,
@@ -176,7 +170,6 @@ const TaggedUnionWithPayload = union(enum) {
 
 test "union alignment" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
 
     comptime {
         try expect(@alignOf(AlignTestTaggedUnion) >= @alignOf([9]u8));
@@ -276,7 +269,6 @@ fn testCastUnionToTag() !void {
 
 test "union field access gives the enum values" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
 
     try expect(TheUnion.A == TheTag.A);
     try expect(TheUnion.B == TheTag.B);
@@ -352,7 +344,6 @@ const PackedPtrOrInt = packed union {
 };
 test "packed union size" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
 
     comptime try expect(@sizeOf(PackedPtrOrInt) == 8);
 }
@@ -362,7 +353,6 @@ const ZeroBits = union {
 };
 test "union with only 1 field which is void should be zero bits" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
 
     comptime try expect(@sizeOf(ZeroBits) == 0);
 }
@@ -422,7 +412,6 @@ test "union with only 1 field casted to its enum type" {
 
 test "union with one member defaults to u0 tag type" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
 
     const U0 = union(enum) {
         X: u32,