mirror of https://github.com/ziglang/zig.git
synced 2025-12-06 06:13:07 +00:00
riscv: std.fmt.format running
- implements `airSlice`, `airBitAnd`, `airBitOr`, `airShr`.
- got a basic design going for `airErrorName`, but for some reason it simply returns empty bytes. will investigate further.
- only generating `.got.zig` entries when not compiling an object or shared library
- reduced the total number of ops a mnemonic can have to 3, simplifying the logic
parent 55b28c7e44
commit 05de6c279b
@@ -1609,10 +1609,7 @@ fn allocRegOrMem(self: *Self, inst: Air.Inst.Index, reg_ok: bool) !MCValue {
    };

    if (reg_ok) {
        // Make sure the type can fit in a register before we try to allocate one.
        const ptr_bits = self.target.ptrBitWidth();
        const ptr_bytes: u64 = @divExact(ptr_bits, 8);
        if (abi_size <= ptr_bytes) {
        if (abi_size <= 8) {
            if (self.register_manager.tryAllocReg(inst, gp)) |reg| {
                return .{ .register = reg };
            }
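The new check compares `abi_size` against a hardcoded 8, the width of an rv64 general-purpose register, instead of recomputing it from `ptrBitWidth()`. A minimal sketch of the idea, with a hypothetical `fitsInGpr` helper that is not part of the backend:

```zig
const std = @import("std");

/// Hypothetical helper: a value can live in a single rv64 general-purpose
/// register only if its ABI size is at most 8 bytes.
fn fitsInGpr(abi_size: u64) bool {
    return abi_size <= 8;
}

test "register-size check" {
    try std.testing.expect(fitsInGpr(4)); // a u32 fits
    try std.testing.expect(!fitsInGpr(16)); // a u128 needs memory or a register pair
}
```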
@@ -1625,7 +1622,7 @@ fn allocRegOrMem(self: *Self, inst: Air.Inst.Index, reg_ok: bool) !MCValue {

/// Allocates a register from the general purpose set and returns the Register and the Lock.
///
/// Up to the user to unlock the register later.
/// Up to the caller to unlock the register later.
fn allocReg(self: *Self) !struct { Register, RegisterLock } {
    const reg = try self.register_manager.allocReg(null, gp);
    const lock = self.register_manager.lockRegAssumeUnused(reg);
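`allocReg` hands the caller both the register and its lock, and the updated doc comment makes unlocking the caller's job. A toy sketch of that contract — the `Manager`, `Register`, and `RegisterLock` types here are simplified stand-ins, not the backend's real `RegisterManager`:

```zig
const std = @import("std");

const Register = enum { t0, t1 };
const RegisterLock = struct { reg: Register };

const Manager = struct {
    locked: [2]bool = .{ false, false },

    // Returns the register together with its lock; the caller releases the
    // lock (typically with `defer`), mirroring the allocReg convention above.
    fn allocReg(self: *Manager) !struct { Register, RegisterLock } {
        for (&self.locked, 0..) |*l, i| {
            if (!l.*) {
                l.* = true;
                const reg: Register = @enumFromInt(i);
                return .{ reg, .{ .reg = reg } };
            }
        }
        return error.OutOfRegisters;
    }

    fn unlockReg(self: *Manager, lock: RegisterLock) void {
        self.locked[@intFromEnum(lock.reg)] = false;
    }
};

test "caller unlocks the register" {
    var m: Manager = .{};
    const reg, const lock = try m.allocReg();
    defer m.unlockReg(lock);
    try std.testing.expect(reg == .t0);
}
```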
@@ -1923,9 +1920,25 @@ fn airMinMax(
}

fn airSlice(self: *Self, inst: Air.Inst.Index) !void {
    const zcu = self.bin_file.comp.module.?;
    const ty_pl = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
    const result: MCValue = if (self.liveness.isUnused(inst)) .unreach else return self.fail("TODO implement slice for {}", .{self.target.cpu.arch});

    const slice_ty = self.typeOfIndex(inst);
    const frame_index = try self.allocFrameIndex(FrameAlloc.initSpill(slice_ty, zcu));

    const ptr_ty = self.typeOf(bin_op.lhs);
    try self.genSetMem(.{ .frame = frame_index }, 0, ptr_ty, .{ .air_ref = bin_op.lhs });

    const len_ty = self.typeOf(bin_op.rhs);
    try self.genSetMem(
        .{ .frame = frame_index },
        @intCast(ptr_ty.abiSize(zcu)),
        len_ty,
        .{ .air_ref = bin_op.rhs },
    );

    const result = MCValue{ .load_frame = .{ .index = frame_index } };
    return self.finishAir(inst, result, .{ bin_op.lhs, bin_op.rhs, .none });
}
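`airSlice` spills the slice as two words: the pointer at frame offset 0 and the length right behind it at `ptr_ty.abiSize(zcu)`. A small standalone test of that layout, assuming a 64-bit target (8-byte pointer followed by 8-byte length):

```zig
const std = @import("std");

test "slice spill layout on a 64-bit target" {
    var frame: [16]u8 align(8) = undefined;
    const data: []const u8 = "hello";

    // Pointer at offset 0, length at offset 8 — the same two stores the
    // genSetMem calls above perform into the spill frame.
    std.mem.writeInt(u64, frame[0..8], @intFromPtr(data.ptr), .little);
    std.mem.writeInt(u64, frame[8..16], data.len, .little);

    const raw_addr: usize = @intCast(std.mem.readInt(u64, frame[0..8], .little));
    const ptr: [*]const u8 = @ptrFromInt(raw_addr);
    const len: usize = @intCast(std.mem.readInt(u64, frame[8..16], .little));
    try std.testing.expectEqualStrings("hello", ptr[0..len]);
}
```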
@@ -2575,13 +2588,91 @@ fn airMod(self: *Self, inst: Air.Inst.Index) !void {

fn airBitAnd(self: *Self, inst: Air.Inst.Index) !void {
    const bin_op = self.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
    const result: MCValue = if (self.liveness.isUnused(inst)) .unreach else return self.fail("TODO implement bitwise and for {}", .{self.target.cpu.arch});
    const result: MCValue = if (self.liveness.isUnused(inst)) .unreach else result: {
        const lhs = try self.resolveInst(bin_op.lhs);
        const rhs = try self.resolveInst(bin_op.rhs);

        const lhs_ty = self.typeOf(bin_op.lhs);
        const rhs_ty = self.typeOf(bin_op.rhs);

        const lhs_reg, const lhs_lock = blk: {
            if (lhs == .register) break :blk .{ lhs.register, null };

            const lhs_reg, const lhs_lock = try self.allocReg();
            try self.genSetReg(lhs_ty, lhs_reg, lhs);
            break :blk .{ lhs_reg, lhs_lock };
        };
        defer if (lhs_lock) |lock| self.register_manager.unlockReg(lock);

        const rhs_reg, const rhs_lock = blk: {
            if (rhs == .register) break :blk .{ rhs.register, null };

            const rhs_reg, const rhs_lock = try self.allocReg();
            try self.genSetReg(rhs_ty, rhs_reg, rhs);
            break :blk .{ rhs_reg, rhs_lock };
        };
        defer if (rhs_lock) |lock| self.register_manager.unlockReg(lock);

        const dest_reg, const dest_lock = try self.allocReg();
        defer self.register_manager.unlockReg(dest_lock);

        _ = try self.addInst(.{
            .tag = .@"and",
            .ops = .rrr,
            .data = .{ .r_type = .{
                .rd = dest_reg,
                .rs1 = lhs_reg,
                .rs2 = rhs_reg,
            } },
        });

        break :result .{ .register = dest_reg };
    };
    return self.finishAir(inst, result, .{ bin_op.lhs, bin_op.rhs, .none });
}

fn airBitOr(self: *Self, inst: Air.Inst.Index) !void {
    const bin_op = self.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
    const result: MCValue = if (self.liveness.isUnused(inst)) .unreach else return self.fail("TODO implement bitwise or for {}", .{self.target.cpu.arch});
    const result: MCValue = if (self.liveness.isUnused(inst)) .unreach else result: {
        const lhs = try self.resolveInst(bin_op.lhs);
        const rhs = try self.resolveInst(bin_op.rhs);

        const lhs_ty = self.typeOf(bin_op.lhs);
        const rhs_ty = self.typeOf(bin_op.rhs);

        const lhs_reg, const lhs_lock = blk: {
            if (lhs == .register) break :blk .{ lhs.register, null };

            const lhs_reg, const lhs_lock = try self.allocReg();
            try self.genSetReg(lhs_ty, lhs_reg, lhs);
            break :blk .{ lhs_reg, lhs_lock };
        };
        defer if (lhs_lock) |lock| self.register_manager.unlockReg(lock);

        const rhs_reg, const rhs_lock = blk: {
            if (rhs == .register) break :blk .{ rhs.register, null };

            const rhs_reg, const rhs_lock = try self.allocReg();
            try self.genSetReg(rhs_ty, rhs_reg, rhs);
            break :blk .{ rhs_reg, rhs_lock };
        };
        defer if (rhs_lock) |lock| self.register_manager.unlockReg(lock);

        const dest_reg, const dest_lock = try self.allocReg();
        defer self.register_manager.unlockReg(dest_lock);

        _ = try self.addInst(.{
            .tag = .@"or",
            .ops = .rrr,
            .data = .{ .r_type = .{
                .rd = dest_reg,
                .rs1 = lhs_reg,
                .rs2 = rhs_reg,
            } },
        });

        break :result .{ .register = dest_reg };
    };
    return self.finishAir(inst, result, .{ bin_op.lhs, bin_op.rhs, .none });
}
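Both handlers end in a single R-type ALU instruction with `rd`, `rs1`, `rs2`. A sketch of the standard RV64I R-type word for `and` and `or` — this uses the architectural encoding directly and none of the backend's `Mir` types:

```zig
const std = @import("std");

// Standard RISC-V R-type layout: funct7 | rs2 | rs1 | funct3 | rd | opcode.
// `and` and `or` share opcode 0b0110011 and funct7 0, differing only in funct3.
fn encodeRType(funct7: u7, rs2: u5, rs1: u5, funct3: u3, rd: u5, opcode: u7) u32 {
    return (@as(u32, funct7) << 25) |
        (@as(u32, rs2) << 20) |
        (@as(u32, rs1) << 15) |
        (@as(u32, funct3) << 12) |
        (@as(u32, rd) << 7) |
        opcode;
}

test "and/or R-type encodings" {
    // and a0, a1, a2  (rd = x10, rs1 = x11, rs2 = x12)
    try std.testing.expectEqual(
        @as(u32, 0x00C5_F533),
        encodeRType(0, 12, 11, 0b111, 10, 0b0110011),
    );
    // or a0, a1, a2
    try std.testing.expectEqual(
        @as(u32, 0x00C5_E533),
        encodeRType(0, 12, 11, 0b110, 10, 0b0110011),
    );
}
```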
@@ -2612,7 +2703,14 @@ fn airShlSat(self: *Self, inst: Air.Inst.Index) !void {

fn airShr(self: *Self, inst: Air.Inst.Index) !void {
    const bin_op = self.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
    const result: MCValue = if (self.liveness.isUnused(inst)) .unreach else return self.fail("TODO implement shr for {}", .{self.target.cpu.arch});
    const result: MCValue = if (self.liveness.isUnused(inst)) .unreach else result: {
        const lhs = try self.resolveInst(bin_op.lhs);
        const rhs = try self.resolveInst(bin_op.rhs);
        const lhs_ty = self.typeOf(bin_op.lhs);
        const rhs_ty = self.typeOf(bin_op.rhs);

        break :result try self.binOp(.shr, lhs, lhs_ty, rhs, rhs_ty);
    };
    return self.finishAir(inst, result, .{ bin_op.lhs, bin_op.rhs, .none });
}
@@ -2671,6 +2769,10 @@ fn airUnwrapErrErr(self: *Self, inst: Air.Inst.Index) !void {
            }
            break :result result;
        },
        .load_frame => |frame_addr| break :result .{ .load_frame = .{
            .index = frame_addr.index,
            .off = frame_addr.off + @as(i32, @intCast(err_off)),
        } },
        else => return self.fail("TODO implement unwrap_err_err for {}", .{operand}),
    }
};
@@ -3317,6 +3419,7 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
    const zcu = self.bin_file.comp.module.?;
    const ty_op = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
    const elem_ty = self.typeOfIndex(inst);

    const result: MCValue = result: {
        if (!elem_ty.hasRuntimeBits(zcu))
            break :result .none;
@@ -3326,8 +3429,11 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
        if (self.liveness.isUnused(inst) and !is_volatile)
            break :result .unreach;

        const elem_size = elem_ty.abiSize(zcu);

        const dst_mcv: MCValue = blk: {
            if (self.reuseOperand(inst, ty_op.operand, 0, ptr)) {
            // Pointer is 8 bytes, and if the element is more than that, we cannot reuse it.
            if (elem_size <= 8 and self.reuseOperand(inst, ty_op.operand, 0, ptr)) {
                // The MCValue that holds the pointer can be re-used as the value.
                break :blk ptr;
            } else {
@@ -3794,6 +3900,7 @@ fn genCall(

    for (call_info.args, arg_tys, args, frame_indices) |dst_arg, arg_ty, src_arg, frame_index| {
        switch (dst_arg) {
            .none, .load_frame => {},
            .register_pair => try self.genCopy(arg_ty, dst_arg, src_arg),
            .register => |dst_reg| try self.genSetReg(
                arg_ty,
@@ -5573,6 +5680,7 @@ fn airErrorName(self: *Self, inst: Air.Inst.Index) !void {
    const addr_reg, const addr_lock = try self.allocReg();
    defer self.register_manager.unlockReg(addr_lock);

    // this is now the base address of the error name table
    const lazy_sym = link.File.LazySymbol.initDecl(.const_data, null, zcu);
    if (self.bin_file.cast(link.File.Elf)) |elf_file| {
        const sym_index = elf_file.zigObjectPtr().?.getOrCreateMetadataForLazySymbol(elf_file, lazy_sym) catch |err|
@@ -5589,10 +5697,77 @@ fn airErrorName(self: *Self, inst: Air.Inst.Index) !void {
    const end_reg, const end_lock = try self.allocReg();
    defer self.register_manager.unlockReg(end_lock);

    _ = start_reg;
    _ = end_reg;
    // const tmp_reg, const tmp_lock = try self.allocReg();
    // defer self.register_manager.unlockReg(tmp_lock);

    return self.fail("TODO: airErrorName", .{});
    // we move the base address forward by the following formula: base + (errno * 8)

    // shifting left by 4 is the same as multiplying by 8
    _ = try self.addInst(.{
        .tag = .slli,
        .ops = .rri,
        .data = .{ .i_type = .{
            .imm12 = Immediate.s(4),
            .rd = err_reg,
            .rs1 = err_reg,
        } },
    });

    _ = try self.addInst(.{
        .tag = .add,
        .ops = .rrr,
        .data = .{ .r_type = .{
            .rd = addr_reg,
            .rs1 = addr_reg,
            .rs2 = err_reg,
        } },
    });

    _ = try self.addInst(.{
        .tag = .pseudo,
        .ops = .pseudo_load_rm,
        .data = .{
            .rm = .{
                .r = start_reg,
                .m = .{
                    .base = .{ .reg = addr_reg },
                    .mod = .{ .off = 0 },
                },
            },
        },
    });

    _ = try self.addInst(.{
        .tag = .pseudo,
        .ops = .pseudo_load_rm,
        .data = .{
            .rm = .{
                .r = end_reg,
                .m = .{
                    .base = .{ .reg = addr_reg },
                    .mod = .{ .off = 8 },
                },
            },
        },
    });

    const dst_mcv = try self.allocRegOrMem(inst, false);
    const frame = dst_mcv.load_frame;
    try self.genSetMem(
        .{ .frame = frame.index },
        frame.off,
        Type.usize,
        .{ .register = start_reg },
    );

    try self.genSetMem(
        .{ .frame = frame.index },
        frame.off + 8,
        Type.usize,
        .{ .register = end_reg },
    );

    return self.finishAir(inst, dst_mcv, .{ un_op, .none, .none });
}
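The emitted sequence is `slli err, err, 4`, `add addr, addr, err`, then two 8-byte loads at offsets 0 and 8 — i.e. a lookup at `base + err * 16`, even though the comment above says `errno * 8`; that mismatch may be related to the empty result mentioned in the commit message. A hedged sketch of the intended lookup, assuming each table entry is a 16-byte `{ ptr, len }` pair — the real table layout is an assumption here, not taken from the diff:

```zig
const std = @import("std");

const Entry = extern struct { ptr: [*]const u8, len: usize };

fn errorName(table: [*]const Entry, err_value: u16) []const u8 {
    // table[err_value] steps in 16-byte entries, i.e. base + (err_value << 4).
    const entry = table[err_value];
    return entry.ptr[0..entry.len];
}

test "error name table lookup" {
    const names = [_][]const u8{ "OutOfMemory", "FileNotFound" };
    var table: [2]Entry = undefined;
    for (names, &table) |n, *e| e.* = .{ .ptr = n.ptr, .len = n.len };
    try std.testing.expectEqualStrings("FileNotFound", errorName(&table, 1));
}
```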
fn airSplat(self: *Self, inst: Air.Inst.Index) !void {
@@ -5881,7 +6056,11 @@ fn resolveCallingConventionValues(
    }

    for (param_types, result.args) |ty, *arg| {
        assert(ty.hasRuntimeBitsIgnoreComptime(zcu));
        if (!ty.hasRuntimeBitsIgnoreComptime(zcu)) {
            assert(cc == .Unspecified);
            arg.* = .none;
            continue;
        }

        var arg_mcv: [2]MCValue = undefined;
        var arg_mcv_i: usize = 0;
@@ -42,6 +42,12 @@ pub fn emitMir(emit: *Emit) Error!void {
            .enc = std.meta.activeTag(lowered_inst.encoding.data),
        }),
        .load_symbol_reloc => |symbol| {
            const is_obj_or_static_lib = switch (emit.lower.output_mode) {
                .Exe => false,
                .Obj => true,
                .Lib => emit.lower.link_mode == .static,
            };

            if (emit.lower.bin_file.cast(link.File.Elf)) |elf_file| {
                const atom_ptr = elf_file.symbol(symbol.atom_index).atom(elf_file).?;
                const sym_index = elf_file.zigObjectPtr().?.symbol(symbol.sym_index);
@@ -50,7 +56,7 @@ pub fn emitMir(emit: *Emit) Error!void {
                var hi_r_type: u32 = @intFromEnum(std.elf.R_RISCV.HI20);
                var lo_r_type: u32 = @intFromEnum(std.elf.R_RISCV.LO12_I);

                if (sym.flags.needs_zig_got) {
                if (sym.flags.needs_zig_got and !is_obj_or_static_lib) {
                    _ = try sym.getOrCreateZigGotEntry(sym_index, elf_file);

                    hi_r_type = Elf.R_ZIG_GOT_HI20;
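The new `is_obj_or_static_lib` flag gates `.got.zig` entry creation on the output mode, matching the commit message. The decision by itself, as a standalone sketch — the enums below are simplified stand-ins for `std.builtin.OutputMode` and link mode:

```zig
const std = @import("std");

// `.got.zig` indirection only makes sense when the linker will actually
// produce a Zig GOT, i.e. not for relocatable objects or static libraries.
const OutputMode = enum { Exe, Obj, Lib };
const LinkMode = enum { static, dynamic };

fn isObjOrStaticLib(output_mode: OutputMode, link_mode: LinkMode) bool {
    return switch (output_mode) {
        .Exe => false,
        .Obj => true,
        .Lib => link_mode == .static,
    };
}

test "zig-GOT eligibility" {
    try std.testing.expect(!isObjOrStaticLib(.Exe, .static));
    try std.testing.expect(isObjOrStaticLib(.Obj, .dynamic));
    try std.testing.expect(isObjOrStaticLib(.Lib, .static));
    try std.testing.expect(!isObjOrStaticLib(.Lib, .dynamic));
}
```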
@@ -16,6 +16,7 @@ pub const Mnemonic = enum {
    slli,
    srli,
    srai,
    sllw,

    addi,
    jalr,
@@ -77,6 +78,8 @@ pub const Mnemonic = enum {
        .slli => .{ .opcode = 0b0010011, .funct3 = 0b001, .funct7 = null },
        .srli => .{ .opcode = 0b0010011, .funct3 = 0b101, .funct7 = null },
        .srai => .{ .opcode = 0b0010011, .funct3 = 0b101, .funct7 = null, .offset = 1 << 10 },

        .sllw => .{ .opcode = 0b0111011, .funct3 = 0b001, .funct7 = 0b0000000 },

        .lui => .{ .opcode = 0b0110111, .funct3 = null, .funct7 = null },
        .auipc => .{ .opcode = 0b0010111, .funct3 = null, .funct7 = null },
@@ -152,6 +155,7 @@ pub const InstEnc = enum {

        .slt,
        .sltu,
        .sllw,
        .mul,
        .xor,
        .add,
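`.srai` reuses `.srli`'s opcode and funct3 and only flips bit 30 of the instruction, which the table expresses as `.offset = 1 << 10` on the 12-bit immediate field. A standalone sketch of that encoding detail:

```zig
const std = @import("std");

// srli and srai share opcode 0b0010011 / funct3 0b101; the arithmetic variant
// sets bit 30. The I-type immediate occupies instruction bits 31..20, so bit 30
// is bit 10 of the immediate field — hence the `1 << 10` offset above.
fn encodeIShift(shamt: u6, rs1: u5, funct3: u3, rd: u5, arithmetic: bool) u32 {
    var imm: u32 = shamt;
    if (arithmetic) imm |= 1 << 10;
    return (imm << 20) |
        (@as(u32, rs1) << 15) |
        (@as(u32, funct3) << 12) |
        (@as(u32, rd) << 7) |
        0b0010011;
}

test "srli/srai differ only in bit 30" {
    const srli = encodeIShift(3, 11, 0b101, 10, false); // srli a0, a1, 3
    const srai = encodeIShift(3, 11, 0b101, 10, true); // srai a0, a1, 3
    try std.testing.expectEqual(@as(u32, 1 << 30), srai ^ srli);
}
```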
@@ -71,7 +71,7 @@ pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index) Error!struct {

    switch (inst.ops) {
        .pseudo_load_rm => {
            const tag: Encoding.Mnemonic = switch (rm.m.mod.rm.size) {
            const tag: Encoding.Mnemonic = switch (rm.m.mod.size()) {
                .byte => .lb,
                .hword => .lh,
                .word => .lw,
@@ -85,7 +85,7 @@ pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index) Error!struct {
            });
        },
        .pseudo_store_rm => {
            const tag: Encoding.Mnemonic = switch (rm.m.mod.rm.size) {
            const tag: Encoding.Mnemonic = switch (rm.m.mod.size()) {
                .byte => .sb,
                .hword => .sh,
                .word => .sw,
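The lowering now asks the memory operand for its size via `mod.size()` and maps it to a mnemonic. A sketch of that mapping as a plain table — the hunk above only shows the `byte`/`hword`/`word` arms, so the `dword => .ld` arm below is an assumption:

```zig
const std = @import("std");

// Same size-to-mnemonic mapping written as a standalone function; dword is the
// register-sized case that Memory.Mod.size() reports for plain offset operands.
const Size = enum { byte, hword, word, dword };
const Mnemonic = enum { lb, lh, lw, ld, sb, sh, sw, sd };

fn loadFor(size: Size) Mnemonic {
    return switch (size) {
        .byte => .lb,
        .hword => .lh,
        .word => .lw,
        .dword => .ld, // assumed arm; truncated out of the hunk above
    };
}

test "load mnemonic by operand size" {
    try std.testing.expectEqual(Mnemonic.ld, loadFor(.dword));
    try std.testing.expectEqual(Mnemonic.lb, loadFor(.byte));
}
```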
@@ -21,6 +21,13 @@ pub const Memory = struct {
            disp: i32 = 0,
        },
        off: i32,

        pub fn size(mod: Mod) Size {
            return switch (mod) {
                .rm => |rm| rm.size,
                .off => Size.dword, // assumed to be a register size
            };
        }
    };

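A usage sketch of the new `size()` accessor on a simplified copy of the `Mod` union, showing the register-sized (`dword`) default for the plain offset form:

```zig
const std = @import("std");

// Simplified stand-in for Memory.Mod above: callers can switch on `mod.size()`
// regardless of which variant was built, because the plain offset form carries
// no explicit size and is treated as register-sized.
const Size = enum { byte, hword, word, dword };
const Mod = union(enum) {
    rm: struct { size: Size, disp: i32 = 0 },
    off: i32,

    pub fn size(mod: Mod) Size {
        return switch (mod) {
            .rm => |rm| rm.size,
            .off => .dword,
        };
    }
};

test "size() unifies both operand forms" {
    const a: Mod = .{ .rm = .{ .size = .word } };
    const b: Mod = .{ .off = 16 };
    try std.testing.expectEqual(Size.word, a.size());
    try std.testing.expectEqual(Size.dword, b.size());
}
```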
    pub const Size = enum(u4) {
@@ -11,12 +11,11 @@ pub const Instruction = struct {

    pub fn new(mnemonic: Encoding.Mnemonic, ops: []const Operand) !Instruction {
        const encoding = (try Encoding.findByMnemonic(mnemonic, ops)) orelse {
            std.log.err("no encoding found for: {s} {s} {s} {s} {s}", .{
            std.log.err("no encoding found for: {s} [{s} {s} {s}]", .{
                @tagName(mnemonic),
                @tagName(if (ops.len > 0) ops[0] else .none),
                @tagName(if (ops.len > 1) ops[1] else .none),
                @tagName(if (ops.len > 2) ops[2] else .none),
                @tagName(if (ops.len > 3) ops[3] else .none),
            });
            return error.InvalidInstruction;
        };
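The reworked error message prints the mnemonic and at most three operands, padding with `.none` when fewer are supplied — matching the new three-operand limit from the commit message. A standalone illustration of that padding pattern, using `std.fmt.bufPrint` and a stand-in `Operand` enum:

```zig
const std = @import("std");

const Operand = enum { none, reg, imm, mem };

// Formats a mnemonic plus up to three operands, substituting `.none` for any
// missing slot, the same trick the log call above uses.
fn describe(buf: []u8, mnemonic: []const u8, ops: []const Operand) ![]u8 {
    return std.fmt.bufPrint(buf, "{s} [{s} {s} {s}]", .{
        mnemonic,
        @tagName(if (ops.len > 0) ops[0] else .none),
        @tagName(if (ops.len > 1) ops[1] else .none),
        @tagName(if (ops.len > 2) ops[2] else .none),
    });
}

test "operand list is padded to three entries" {
    var buf: [64]u8 = undefined;
    const msg = try describe(&buf, "add", &.{ .reg, .reg });
    try std.testing.expectEqualStrings("add [reg reg none]", msg);
}
```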
@ -54,7 +54,6 @@ fn addUnaligned(a: *align(1) const u32, b: *align(1) const u32) u32 {
|
||||
}
|
||||
|
||||
test "@alignCast pointers" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest; // TODO
|
||||
var x: u32 align(4) = 1;
|
||||
expectsOnly1(&x);
|
||||
try expect(x == 2);
|
||||
@ -426,7 +425,6 @@ test "function callconv expression depends on generic parameter" {
|
||||
}
|
||||
|
||||
test "runtime-known array index has best alignment possible" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
|
||||
// take full advantage of over-alignment
|
||||
@ -562,7 +560,6 @@ test "align(@alignOf(T)) T does not force resolution of T" {
|
||||
}
|
||||
|
||||
test "align(N) on functions" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
|
||||
@ -1047,7 +1047,6 @@ test "union that needs padding bytes inside an array" {
|
||||
|
||||
test "runtime index of array of zero-bit values" {
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
var runtime: struct { array: [1]void, index: usize } = undefined;
|
||||
runtime = .{ .array = .{{}}, .index = 0 };
|
||||
|
||||
@ -513,7 +513,6 @@ var global_foo: *i32 = undefined;
|
||||
test "peer result location with typed parent, runtime condition, comptime prongs" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn doTheTest(arg: i32) i32 {
|
||||
@ -593,7 +592,6 @@ test "equality compare fn ptrs" {
|
||||
|
||||
test "self reference through fn ptr field" {
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
const A = struct {
|
||||
@ -838,7 +836,6 @@ test "labeled block implicitly ends in a break" {
|
||||
test "catch in block has correct result location" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn open() error{A}!@This() {
|
||||
@ -870,7 +867,6 @@ test "labeled block with runtime branch forwards its result location type to bre
|
||||
|
||||
test "try in labeled block doesn't cast to wrong type" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
a: u32,
|
||||
@ -1246,8 +1242,6 @@ test "pointer to tuple field can be dereferenced at comptime" {
|
||||
}
|
||||
|
||||
test "proper value is returned from labeled block" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn hash(v: *u32, key: anytype) void {
|
||||
const Key = @TypeOf(key);
|
||||
|
||||
@ -250,7 +250,6 @@ test "bitcast packed struct to integer and back" {
|
||||
test "implicit cast to error union by returning" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn entry() !void {
|
||||
@ -280,8 +279,6 @@ test "comptime bitcast used in expression has the correct type" {
|
||||
}
|
||||
|
||||
test "bitcast passed as tuple element" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn foo(args: anytype) !void {
|
||||
comptime assert(@TypeOf(args[0]) == f32);
|
||||
@ -292,8 +289,6 @@ test "bitcast passed as tuple element" {
|
||||
}
|
||||
|
||||
test "triple level result location with bitcast sandwich passed as tuple element" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn foo(args: anytype) !void {
|
||||
comptime assert(@TypeOf(args[0]) == f64);
|
||||
|
||||
@ -60,7 +60,6 @@ test "tuple parameters" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const add = struct {
|
||||
fn add(a: i32, b: i32) i32 {
|
||||
@ -94,7 +93,6 @@ test "result location of function call argument through runtime condition and st
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const E = enum { a, b };
|
||||
const S = struct {
|
||||
@ -411,7 +409,6 @@ test "recursive inline call with comptime known argument" {
|
||||
test "inline while with @call" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn inc(a: *u32) void {
|
||||
@ -427,8 +424,6 @@ test "inline while with @call" {
|
||||
}
|
||||
|
||||
test "method call as parameter type" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn foo(x: anytype, y: @TypeOf(x).Inner()) @TypeOf(y) {
|
||||
return y;
|
||||
@ -477,7 +472,6 @@ test "argument to generic function has correct result type" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn foo(_: anytype, e: enum { a, b }) bool {
|
||||
|
||||
@ -483,7 +483,6 @@ fn castToOptionalTypeError(z: i32) !void {
|
||||
test "implicitly cast from [0]T to anyerror![]T" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try testCastZeroArrayToErrSliceMut();
|
||||
try comptime testCastZeroArrayToErrSliceMut();
|
||||
@ -558,7 +557,6 @@ fn testCastConstArrayRefToConstSlice() !void {
|
||||
test "peer type resolution: error and [N]T" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try expect(mem.eql(u8, try testPeerErrorAndArray(0), "OK"));
|
||||
comptime assert(mem.eql(u8, try testPeerErrorAndArray(0), "OK"));
|
||||
@ -1157,7 +1155,6 @@ fn foobar(func: PFN_void) !void {
|
||||
|
||||
test "cast function with an opaque parameter" {
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
if (builtin.zig_backend == .stage2_c) {
|
||||
// https://github.com/ziglang/zig/issues/16845
|
||||
@ -1309,7 +1306,6 @@ fn incrementVoidPtrValue(value: ?*anyopaque) void {
|
||||
test "implicit cast *[0]T to E![]const u8" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
var x = @as(anyerror![]const u8, &[0]u8{});
|
||||
_ = &x;
|
||||
@ -1496,7 +1492,6 @@ test "cast compatible optional types" {
|
||||
test "coerce undefined single-item pointer of array to error union of slice" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const a = @as([*]u8, undefined)[0..0];
|
||||
var b: error{a}![]const u8 = a;
|
||||
@ -2206,7 +2201,6 @@ test "peer type resolution: tuples with comptime fields" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const a = .{ 1, 2 };
|
||||
const b = .{ @as(u32, 3), @as(i16, 4) };
|
||||
@ -2361,7 +2355,6 @@ test "cast builtins can wrap result in error union" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
const MyEnum = enum(u32) { _ };
|
||||
|
||||
@ -408,8 +408,6 @@ test "mutate entire slice at comptime" {
|
||||
}
|
||||
|
||||
test "dereference undefined pointer to zero-bit type" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const p0: *void = undefined;
|
||||
try testing.expectEqual({}, p0.*);
|
||||
|
||||
@ -515,7 +513,5 @@ fn fieldPtrTest() u32 {
|
||||
return a.value;
|
||||
}
|
||||
test "pointer in aggregate field can mutate comptime state" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try comptime std.testing.expect(fieldPtrTest() == 2);
|
||||
}
|
||||
|
||||
@ -162,7 +162,6 @@ test "reference to errdefer payload" {
|
||||
test "simple else prong doesn't emit an error for unreachable else prong" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn foo() error{Foo}!void {
|
||||
|
||||
@ -48,8 +48,6 @@ test "empty extern union" {
|
||||
}
|
||||
|
||||
test "empty union passed as argument" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const U = union(enum) {
|
||||
fn f(u: @This()) void {
|
||||
switch (u) {}
|
||||
@ -59,8 +57,6 @@ test "empty union passed as argument" {
|
||||
}
|
||||
|
||||
test "empty enum passed as argument" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const E = enum {
|
||||
fn f(e: @This()) void {
|
||||
switch (e) {}
|
||||
|
||||
@ -856,8 +856,6 @@ fn doALoopThing(id: EnumWithOneMember) void {
|
||||
}
|
||||
|
||||
test "comparison operator on enum with one member is comptime-known" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
doALoopThing(EnumWithOneMember.Eof);
|
||||
}
|
||||
|
||||
|
||||
@ -31,7 +31,6 @@ fn shouldBeNotEqual(a: anyerror, b: anyerror) void {
|
||||
|
||||
test "error binary operator" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const a = errBinaryOperatorG(true) catch 3;
|
||||
const b = errBinaryOperatorG(false) catch 3;
|
||||
@ -63,14 +62,12 @@ pub fn baz() anyerror!i32 {
|
||||
|
||||
test "error wrapping" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try expect((baz() catch unreachable) == 15);
|
||||
}
|
||||
|
||||
test "unwrap simple value from error" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const i = unwrapSimpleValueFromErrorDo() catch unreachable;
|
||||
try expect(i == 13);
|
||||
@ -81,7 +78,6 @@ fn unwrapSimpleValueFromErrorDo() anyerror!isize {
|
||||
|
||||
test "error return in assignment" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
doErrReturnInAssignment() catch unreachable;
|
||||
}
|
||||
@ -104,7 +100,6 @@ test "syntax: optional operator in front of error union operator" {
|
||||
test "widen cast integer payload of error union function call" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn errorable() !u64 {
|
||||
@ -241,8 +236,6 @@ fn testExplicitErrorSetCast(set1: Set1) !void {
|
||||
}
|
||||
|
||||
test "@errorCast on error unions" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn doTheTest() !void {
|
||||
{
|
||||
@ -270,7 +263,6 @@ test "@errorCast on error unions" {
|
||||
|
||||
test "comptime test error for empty error set" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try testComptimeTestErrorEmptySet(1234);
|
||||
try comptime testComptimeTestErrorEmptySet(1234);
|
||||
@ -306,8 +298,6 @@ test "inferred empty error set comptime catch" {
|
||||
}
|
||||
|
||||
test "error inference with an empty set" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
const Struct = struct {
|
||||
pub fn func() (error{})!usize {
|
||||
@ -362,7 +352,6 @@ fn quux_1() !i32 {
|
||||
|
||||
test "error: Zero sized error set returned with value payload crash" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
_ = try foo3(0);
|
||||
_ = try comptime foo3(0);
|
||||
@ -376,7 +365,6 @@ fn foo3(b: usize) Error!usize {
|
||||
test "error: Infer error set from literals" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
_ = nullLiteral("n") catch |err| handleErrors(err);
|
||||
_ = floatLiteral("n") catch |err| handleErrors(err);
|
||||
@ -498,7 +486,6 @@ test "optional error set is the same size as error set" {
|
||||
test "nested catch" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn entry() !void {
|
||||
@ -524,7 +511,6 @@ test "nested catch" {
|
||||
test "function pointer with return type that is error union with payload which is pointer of parent struct" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
const Foo = struct {
|
||||
@ -582,7 +568,6 @@ test "error payload type is correctly resolved" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const MyIntWrapper = struct {
|
||||
const Self = @This();
|
||||
@ -1039,7 +1024,6 @@ test "function called at runtime is properly analyzed for inferred error set" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn foo() !void {
|
||||
@ -1063,7 +1047,6 @@ test "generic type constructed from inferred error set of unresolved function" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn write(_: void, bytes: []const u8) !usize {
|
||||
@ -1079,8 +1062,6 @@ test "generic type constructed from inferred error set of unresolved function" {
|
||||
}
|
||||
|
||||
test "errorCast to adhoc inferred error set" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
inline fn baz() !i32 {
|
||||
return @errorCast(err());
|
||||
@ -1093,8 +1074,6 @@ test "errorCast to adhoc inferred error set" {
|
||||
}
|
||||
|
||||
test "errorCast from error sets to error unions" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const err_union: Set1!void = @errorCast(error.A);
|
||||
try expectError(error.A, err_union);
|
||||
}
|
||||
@ -1103,7 +1082,6 @@ test "result location initialization of error union with OPV payload" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
|
||||
@ -505,7 +505,6 @@ test "comptime shlWithOverflow" {
|
||||
test "const ptr to variable data changes at runtime" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try expect(foo_ref.name[0] == 'a');
|
||||
foo_ref.name = "b";
|
||||
@ -720,8 +719,6 @@ fn loopNTimes(comptime n: usize) void {
|
||||
}
|
||||
|
||||
test "variable inside inline loop that has different types on different iterations" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try testVarInsideInlineLoop(.{ true, @as(u32, 42) });
|
||||
}
|
||||
|
||||
@ -1643,8 +1640,6 @@ test "result of nested switch assigned to variable" {
|
||||
}
|
||||
|
||||
test "inline for loop of functions returning error unions" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const T1 = struct {
|
||||
fn v() error{}!usize {
|
||||
return 1;
|
||||
|
||||
@ -71,7 +71,6 @@ fn outer(y: u32) *const fn (u32) u32 {
|
||||
|
||||
test "return inner function which references comptime variable of outer function" {
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const func = outer(10);
|
||||
try expect(func(3) == 7);
|
||||
@ -81,7 +80,6 @@ test "discard the result of a function that returns a struct" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn entry() void {
|
||||
@ -191,7 +189,6 @@ test "function with complex callconv and return type expressions" {
|
||||
|
||||
test "pass by non-copying value" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try expect(addPointCoords(Point{ .x = 1, .y = 2 }) == 3);
|
||||
}
|
||||
@ -207,7 +204,6 @@ fn addPointCoords(pt: Point) i32 {
|
||||
|
||||
test "pass by non-copying value through var arg" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try expect((try addPointCoordsVar(Point{ .x = 1, .y = 2 })) == 3);
|
||||
}
|
||||
@ -219,7 +215,6 @@ fn addPointCoordsVar(pt: anytype) !i32 {
|
||||
|
||||
test "pass by non-copying value as method" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
var pt = Point2{ .x = 1, .y = 2 };
|
||||
try expect(pt.addPointCoords() == 3);
|
||||
@ -236,7 +231,6 @@ const Point2 = struct {
|
||||
|
||||
test "pass by non-copying value as method, which is generic" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
var pt = Point3{ .x = 1, .y = 2 };
|
||||
try expect(pt.addPointCoords(i32) == 3);
|
||||
@ -292,7 +286,6 @@ test "implicit cast fn call result to optional in field result" {
|
||||
test "void parameters" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try voidFun(1, void{}, 2, {});
|
||||
}
|
||||
@ -424,7 +417,6 @@ test "function with inferred error set but returning no error" {
|
||||
|
||||
test "import passed byref to function in return type" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn get() @import("std").ArrayListUnmanaged(i32) {
|
||||
@ -541,7 +533,6 @@ test "function returns function returning type" {
|
||||
|
||||
test "peer type resolution of inferred error set with non-void payload" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn openDataFile(mode: enum { read, write }) !u32 {
|
||||
@ -584,8 +575,6 @@ test "lazy values passed to anytype parameter" {
|
||||
}
|
||||
|
||||
test "pass and return comptime-only types" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn returnNull(comptime x: @Type(.Null)) @Type(.Null) {
|
||||
return x;
|
||||
|
||||
@ -34,7 +34,6 @@ fn custom(comptime T: type, comptime num: u64) fn (T) u64 {
|
||||
test "fn delegation" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const foo = Foo{};
|
||||
try expect(foo.one() == 11);
|
||||
|
||||
@ -438,7 +438,6 @@ test "inline for with counter as the comptime-known" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
var runtime_slice = "hello";
|
||||
var runtime_i: usize = 3;
|
||||
@ -471,7 +470,6 @@ test "inline for on tuple pointer" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct { u32, u32, u32 };
|
||||
var s: S = .{ 100, 200, 300 };
|
||||
|
||||
@ -286,7 +286,6 @@ test "generic function instantiation turns into comptime call" {
|
||||
|
||||
test "generic function with void and comptime parameter" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct { x: i32 };
|
||||
const namespace = struct {
|
||||
@ -303,7 +302,6 @@ test "generic function with void and comptime parameter" {
|
||||
test "anonymous struct return type referencing comptime parameter" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
pub fn extraData(comptime T: type, index: usize) struct { data: T, end: usize } {
|
||||
@ -394,7 +392,6 @@ test "extern function used as generic parameter" {
|
||||
|
||||
test "generic struct as parameter type" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn doTheTest(comptime Int: type, thing: struct { int: Int }) !void {
|
||||
@ -435,7 +432,6 @@ test "null sentinel pointer passed as generic argument" {
|
||||
|
||||
test "generic function passed as comptime argument" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn doMath(comptime f: fn (type, i32, i32) error{Overflow}!i32, a: i32, b: i32) !void {
|
||||
@ -461,7 +457,6 @@ test "return type of generic function is function pointer" {
|
||||
|
||||
test "coerced function body has inequal value with its uncoerced body" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
const A = B(i32, c);
|
||||
@ -546,7 +541,6 @@ test "call generic function with from function called by the generic function" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_llvm and
|
||||
builtin.cpu.arch == .aarch64 and builtin.os.tag == .windows) return error.SkipZigTest;
|
||||
|
||||
|
||||
@ -23,7 +23,6 @@ fn foo(a: A) i32 {
|
||||
|
||||
test "incomplete struct param top level declaration" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const a = A{
|
||||
.b = B{
|
||||
|
||||
@ -89,7 +89,6 @@ test "inline else bool" {
|
||||
test "inline else error" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const Err = error{ a, b, c };
|
||||
var a = Err.a;
|
||||
|
||||
@ -21,7 +21,6 @@ test "ir block deps" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try expect((foo(1) catch unreachable) == 0);
|
||||
try expect((foo(2) catch unreachable) == 0);
|
||||
|
||||
@ -1471,8 +1471,6 @@ fn testShrExact(x: u8) !void {
|
||||
}
|
||||
|
||||
test "shift left/right on u0 operand" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
fn doTheTest() !void {
|
||||
var x: u0 = 0;
|
||||
|
||||
@ -300,6 +300,7 @@ test "@min/@max notices bounds from vector types when element of comptime-known
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and
|
||||
!comptime std.Target.x86.featureSetHas(builtin.cpu.features, .avx)) return error.SkipZigTest;
|
||||
|
||||
|
||||
@ -31,7 +31,6 @@ test "standard field calls" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try expect(HasFuncs.one(0) == 1);
|
||||
try expect(HasFuncs.two(0) == 2);
|
||||
|
||||
@ -7,7 +7,6 @@ test "@memset on array pointers" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
try testMemsetArray();
|
||||
try comptime testMemsetArray();
|
||||
@ -167,7 +166,6 @@ test "zero keys with @memset" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const Keys = struct {
|
||||
up: bool,
|
||||
|
||||
@ -13,7 +13,6 @@ fn foo() C!void {
|
||||
|
||||
test "merge error sets" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
if (foo()) {
|
||||
@panic("unexpected");
|
||||
|
||||
@ -615,8 +615,6 @@ test "cast slice to const slice nested in error union and optional" {
|
||||
}
|
||||
|
||||
test "variable of optional of noreturn" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
var null_opv: ?noreturn = null;
|
||||
_ = &null_opv;
|
||||
try std.testing.expectEqual(@as(?noreturn, null), null_opv);
|
||||
|
||||
@ -238,7 +238,6 @@ test "regular in irregular packed struct" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const Irregular = packed struct {
|
||||
bar: Regular = Regular{},
|
||||
@ -435,6 +434,7 @@ test "nested packed struct field pointers" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // ubsan unaligned pointer access
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
if (native_endian != .little) return error.SkipZigTest; // Byte aligned packed struct field pointers have not been implemented yet
|
||||
|
||||
const S2 = packed struct {
|
||||
@ -1190,7 +1190,6 @@ test "packed struct field pointer aligned properly" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const Foo = packed struct {
|
||||
a: i32,
|
||||
|
||||
@ -3,8 +3,6 @@ const builtin = @import("builtin");
|
||||
const expectEqual = std.testing.expectEqual;
|
||||
|
||||
test "casting integer address to function pointer" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
addressToFunction();
|
||||
comptime addressToFunction();
|
||||
}
|
||||
@ -19,7 +17,6 @@ test "mutate through ptr initialized with constant ptrFromInt value" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
forceCompilerAnalyzeBranchHardCodedPtrDereference(false);
|
||||
}
|
||||
|
||||
@ -158,7 +158,6 @@ test "@TypeOf() has no runtime side effects" {
|
||||
|
||||
test "branching logic inside @TypeOf" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const S = struct {
|
||||
var data: i32 = 0;
|
||||
@ -412,7 +411,6 @@ test "Extern function calls, dereferences and field access in @TypeOf" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const Test = struct {
|
||||
fn test_fn_1(a: c_long) @TypeOf(c_fopen("test", "r").*) {
|
||||
|
||||
@ -124,7 +124,6 @@ test "slice of type" {
|
||||
|
||||
test "generic malloc free" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const a = memAlloc(u8, 10) catch unreachable;
|
||||
memFree(u8, a);
|
||||
@ -874,8 +873,6 @@ test "slice of void" {
|
||||
}
|
||||
|
||||
test "slice with dereferenced value" {
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
var a: usize = 0;
|
||||
const idx: *usize = &a;
|
||||
_ = blk: {
|
||||
@ -1004,7 +1001,6 @@ test "sentinel-terminated 0-length slices" {
|
||||
if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
const u32s: [4]u32 = [_]u32{ 0, 1, 2, 3 };
|
||||
|
||||
|
||||
@ -92,7 +92,6 @@ test "structs" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
|
||||
|
||||
var foo: StructFoo = undefined;
|
||||
@memset(@as([*]u8, @ptrCast(&foo))[0..@sizeOf(StructFoo)], 0);
|
||||
@ -175,7 +174,6 @@ const MemberFnTestFoo = struct {
|
||||
|
||||
test "call member function directly" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const instance = MemberFnTestFoo{ .x = 1234 };
const result = MemberFnTestFoo.member(instance);
@ -184,7 +182,6 @@ test "call member function directly" {

test "store member function in variable" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const instance = MemberFnTestFoo{ .x = 1234 };
const memberFn = MemberFnTestFoo.member;
@ -206,7 +203,6 @@ const MemberFnRand = struct {
test "return struct byval from function" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const Bar = struct {
x: i32,
@ -255,7 +251,6 @@ test "usingnamespace within struct scope" {
test "struct field init with catch" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
fn doTheTest() !void {
@ -631,7 +626,6 @@ fn getC(data: *const BitField1) u2 {
test "default struct initialization fields" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
a: i32 = 1234,
@ -807,7 +801,6 @@ test "fn with C calling convention returns struct by value" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
fn entry() !void {
@ -907,8 +900,6 @@ test "anonymous struct literal syntax" {
}

test "fully anonymous struct" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
fn doTheTest() !void {
try dump(.{
@ -931,8 +922,6 @@ test "fully anonymous struct" {
}

test "fully anonymous list literal" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
fn doTheTest() !void {
try dump(.{ @as(u32, 1234), @as(f64, 12.34), true, "hi" });
@ -980,7 +969,6 @@ test "tuple element initialized with fn call" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
fn doTheTest() !void {
@ -1041,7 +1029,6 @@ test "type coercion of anon struct literal to struct" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
const S2 = struct {
@ -1081,7 +1068,6 @@ test "type coercion of pointer to anon struct literal to pointer to struct" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
const S2 = struct {
@ -1296,7 +1282,6 @@ test "initialize struct with empty literal" {

test "loading a struct pointer perfoms a copy" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
a: i32,
@ -1558,7 +1543,6 @@ test "discarded struct initialization works as expected" {
test "function pointer in struct returns the struct" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const A = struct {
const A = @This();
@ -1766,8 +1750,6 @@ test "extern struct field pointer has correct alignment" {
}

test "packed struct field in anonymous struct" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const T = packed struct {
f1: bool = false,
};
@ -1779,8 +1761,6 @@ fn countFields(v: anytype) usize {
}

test "struct init with no result pointer sets field result types" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
// A function parameter has a result type, but no result pointer.
fn f(s: struct { x: u32 }) u32 {
@ -1922,8 +1902,6 @@ test "circular dependency through pointer field of a struct" {
}

test "field calls do not force struct field init resolution" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
x: u32 = blk: {
_ = @TypeOf(make().dummyFn()); // runtime field call - S not fully resolved - dummyFn call should not force field init resolution
@ -2057,7 +2035,6 @@ test "runtime value in nested initializer passed as pointer to function" {
if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const Bar = struct {
b: u32,
@ -2132,7 +2109,6 @@ test "assignment of field with padding" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const Mesh = extern struct {
id: u32,
@ -2163,7 +2139,6 @@ test "initiate global variable with runtime value" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
field: i32,

@ -397,7 +397,6 @@ fn switchWithUnreachable(x: i32) i32 {

test "capture value of switch with all unreachable prongs" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const x = return_a_number() catch |err| switch (err) {
else => unreachable,
@ -503,7 +502,6 @@ test "switch prongs with error set cases make a new error set type for capture v

test "return result loc and then switch with range implicit casted to error union" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
fn doTheTest() !void {

@ -4,7 +4,6 @@ const expect = std.testing.expect;

test "try on error union" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

try tryOnErrorUnionImpl();
try comptime tryOnErrorUnionImpl();

@ -10,7 +10,6 @@ test "tuple concatenation" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
fn doTheTest() !void {
@ -56,7 +55,6 @@ test "more tuple concatenation" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const T = struct {
fn consume_tuple(tuple: anytype, len: usize) !void {
@ -326,8 +324,6 @@ test "tuple type with void field" {
}

test "zero sized struct in tuple handled correctly" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const State = struct {
const Self = @This();
data: @Type(.{
@ -369,7 +365,6 @@ test "branching inside tuple literal" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
fn foo(a: anytype) !void {
@ -474,7 +469,6 @@ test "coerce anon tuple to tuple" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

var x: u8 = 1;
var y: u16 = 2;

@ -203,7 +203,6 @@ test "Type.Opaque" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const Opaque = @Type(.{
.Opaque = .{

@ -8,7 +8,6 @@ test "ignore lval with underscore" {

test "ignore lval with underscore (while loop)" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

while (optionalReturnError()) |_| {
while (optionalReturnError()) |_| {

@ -418,7 +418,6 @@ test "tagged union initialization with runtime void" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

try expect(testTaggedUnionInit({}));
}

@ -14,8 +14,6 @@ fn add(args: anytype) i32 {
}

test "add arbitrary args" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

try expect(add(.{ @as(i32, 1), @as(i32, 2), @as(i32, 3), @as(i32, 4) }) == 10);
try expect(add(.{@as(i32, 1234)}) == 1234);
try expect(add(.{}) == 0);
@ -26,15 +24,12 @@ fn readFirstVarArg(args: anytype) void {
}

test "send void arg to var args" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

readFirstVarArg(.{{}});
}

test "pass args directly" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

try expect(addSomeStuff(.{ @as(i32, 1), @as(i32, 2), @as(i32, 3), @as(i32, 4) }) == 10);
try expect(addSomeStuff(.{@as(i32, 1234)}) == 1234);
@ -48,7 +43,6 @@ fn addSomeStuff(args: anytype) i32 {
test "runtime parameter before var args" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

try expect((try extraFn(10, .{})) == 0);
try expect((try extraFn(10, .{false})) == 1);
@ -87,15 +81,11 @@ fn foo2(args: anytype) bool {
}

test "array of var args functions" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

try expect(foos[0](.{}));
try expect(!foos[1](.{}));
}

test "pass zero length array to var args param" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

doNothingWithFirstArg(.{""});
}

@ -258,7 +258,6 @@ fn returnWithImplicitCastFromWhileLoopTest() anyerror!void {

test "while on error union with else result follow else prong" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const result = while (returnError()) |value| {
break value;
@ -268,7 +267,6 @@ test "while on error union with else result follow else prong" {

test "while on error union with else result follow break prong" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const result = while (returnSuccess(10)) |value| {
break value;
@ -315,7 +313,6 @@ test "while error 2 break statements and an else" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
fn entry(opt_t: anyerror!bool, f: bool) !void {