test: test with -fstrip and fix failures

Closes #17513
This commit is contained in:
Jacob Young 2023-11-30 14:27:18 -05:00 committed by Matthew Lugg
parent 39a966b0a4
commit bf5ab54510
7 changed files with 32 additions and 24 deletions

View File

@@ -2657,6 +2657,8 @@ pub fn dumpStackPointerAddr(prefix: []const u8) void {
 }
 
 test "manage resources correctly" {
+    if (builtin.strip_debug_info) return error.SkipZigTest;
+
     if (builtin.os.tag == .wasi) return error.SkipZigTest;
     if (builtin.os.tag == .windows) {

View File

@@ -27865,6 +27865,7 @@ fn coerceExtra(
             return sema.coerceInMemory(val, dest_ty);
         }
         try sema.requireRuntimeBlock(block, inst_src, null);
+        try sema.queueFullTypeResolution(dest_ty);
         const new_val = try block.addBitCast(dest_ty, inst);
         try sema.checkKnownAllocPtr(inst, new_val);
         return new_val;

View File

@@ -7083,15 +7083,14 @@ fn fieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, index: u32
     const ptr_container_ty_info = ptr_container_ty.ptrInfo(mod);
     const container_ty = ptr_container_ty.childType(mod);
 
-    const field_offset: i32 = blk: {
-        if (mod.typeToPackedStruct(container_ty)) |struct_type| {
-            break :blk if (ptr_field_ty.ptrInfo(mod).packed_offset.host_size == 0)
-                @divExact(mod.structPackedFieldBitOffset(struct_type, index) + ptr_container_ty_info.packed_offset.bit_offset, 8)
-            else
-                0;
-        } else
-            break :blk @intCast(container_ty.structFieldOffset(index, mod));
-    };
+    const field_offset: i32 = if (mod.typeToPackedStruct(container_ty)) |struct_obj|
+        if (ptr_field_ty.ptrInfo(mod).packed_offset.host_size == 0)
+            @divExact(mod.structPackedFieldBitOffset(struct_obj, index) +
+                ptr_container_ty_info.packed_offset.bit_offset, 8)
+        else
+            0
+    else
+        @intCast(container_ty.structFieldOffset(index, mod));
 
     const src_mcv = try self.resolveInst(operand);
     const dst_mcv = if (switch (src_mcv) {

View File

@@ -657,14 +657,15 @@ fn lowerParentPtr(
             Type.fromInterned(mod.intern_pool.typeOf(elem.base)).elemType2(mod).abiSize(mod)))),
         ),
         .field => |field| {
-            const base_type = mod.intern_pool.indexToKey(mod.intern_pool.typeOf(field.base)).ptr_type.child;
+            const base_ptr_ty = mod.intern_pool.typeOf(field.base);
+            const base_ty = mod.intern_pool.indexToKey(base_ptr_ty).ptr_type.child;
            return lowerParentPtr(
                 bin_file,
                 src_loc,
                 field.base,
                 code,
                 debug_output,
-                reloc_info.offset(switch (mod.intern_pool.indexToKey(base_type)) {
+                reloc_info.offset(switch (mod.intern_pool.indexToKey(base_ty)) {
                     .ptr_type => |ptr_type| switch (ptr_type.flags.size) {
                         .One, .Many, .C => unreachable,
                         .Slice => switch (field.index) {
@@ -676,19 +677,20 @@ fn lowerParentPtr(
                     .struct_type,
                     .anon_struct_type,
                     .union_type,
-                    => switch (Type.fromInterned(base_type).containerLayout(mod)) {
-                        .Auto, .Extern => @intCast(Type.fromInterned(base_type).structFieldOffset(
+                    => switch (Type.fromInterned(base_ty).containerLayout(mod)) {
+                        .Auto, .Extern => @intCast(Type.fromInterned(base_ty).structFieldOffset(
                             @intCast(field.index),
                             mod,
                         )),
-                        .Packed => if (mod.typeToStruct(Type.fromInterned(base_type))) |struct_type|
-                            math.divExact(u16, mod.structPackedFieldBitOffset(
-                                struct_type,
-                                @intCast(field.index),
-                            ), 8) catch |err| switch (err) {
-                                error.UnexpectedRemainder => 0,
-                                error.DivisionByZero => unreachable,
-                            }
+                        .Packed => if (mod.typeToStruct(Type.fromInterned(base_ty))) |struct_obj|
+                            if (Type.fromInterned(ptr.ty).ptrInfo(mod).packed_offset.host_size == 0)
+                                @divExact(Type.fromInterned(base_ptr_ty).ptrInfo(mod)
+                                    .packed_offset.bit_offset + mod.structPackedFieldBitOffset(
+                                    struct_obj,
+                                    @intCast(field.index),
+                                ), 8)
+                            else
+                                0
                         else
                             0,
                     },

View File

@@ -331,11 +331,11 @@ test "@bitCast packed struct of floats" {
 test "comptime @bitCast packed struct to int and back" {
     if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
 
     if (builtin.zig_backend == .stage2_llvm and native_endian == .big) {
         // https://github.com/ziglang/zig/issues/13782

View File

@@ -432,7 +432,6 @@ test "nested packed struct field pointers" {
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // ubsan unaligned pointer access
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
     if (native_endian != .little) return error.SkipZigTest; // Byte aligned packed struct field pointers have not been implemented yet
 
     const S2 = packed struct {
@@ -513,7 +512,6 @@ test "@intFromPtr on a packed struct field unaligned and nested" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
     if (native_endian != .little) return error.SkipZigTest; // Byte aligned packed struct field pointers have not been implemented yet
 
     const S1 = packed struct {

View File

@@ -29,6 +29,7 @@ const TestTarget = struct {
     use_llvm: ?bool = null,
     use_lld: ?bool = null,
     force_pic: ?bool = null,
+    strip: ?bool = null,
 };
 
 const test_targets = blk: {
@@ -115,6 +116,7 @@ const test_targets = blk: {
         },
         .use_llvm = false,
         .use_lld = false,
+        .strip = true,
     },
 
     // Doesn't support new liveness
     //.{
@@ -497,6 +499,7 @@ const CAbiTarget = struct {
     use_llvm: ?bool = null,
     use_lld: ?bool = null,
     force_pic: ?bool = null,
+    strip: ?bool = null,
     c_defines: []const []const u8 = &.{},
 };
@@ -528,6 +531,7 @@ const c_abi_targets = [_]CAbiTarget{
         },
         .use_llvm = false,
         .use_lld = false,
+        .strip = true,
         .c_defines = &.{"ZIG_BACKEND_STAGE2_X86_64"},
     },
     .{
@@ -1111,6 +1115,7 @@ pub fn addModuleTests(b: *std.Build, options: ModuleTestOptions) *Step {
             .zig_lib_dir = .{ .path = "lib" },
         });
         these_tests.force_pic = test_target.force_pic;
+        these_tests.strip = test_target.strip;
         const single_threaded_suffix = if (test_target.single_threaded == true) "-single" else "";
         const backend_suffix = if (test_target.use_llvm == true)
             "-llvm"
@@ -1253,6 +1258,7 @@ pub fn addCAbiTests(b: *std.Build, skip_non_native: bool, skip_release: bool) *S
             .use_lld = c_abi_target.use_lld,
         });
         test_step.force_pic = c_abi_target.force_pic;
+        test_step.strip = c_abi_target.strip;
         if (c_abi_target.target.abi != null and c_abi_target.target.abi.?.isMusl()) {
             // TODO NativeTargetInfo insists on dynamically linking musl
             // for some reason?