riscv: rewrite "binOp"

Reorganize how the binOp and genBinOp functions work.

I've spent quite a while carefully reading through the spec, and many
tests are now enabled because several critical issues in the old design
have been fixed.

There are some regressions that will take a long time to figure out individually
so I will ignore them for now, and pray they get fixed by themselves. When
we're closer to 100% passing is when I will start diving into them one-by-one.
This commit is contained in:
David Rubin 2024-05-27 01:43:37 -07:00
parent a9ef016943
commit 4fd8900337
No known key found for this signature in database
GPG Key ID: C326E694CED89F6D
38 changed files with 1258 additions and 1116 deletions

File diff suppressed because it is too large Load Diff

View File

@ -4,6 +4,7 @@ data: Data,
const OpCode = enum(u7) {
OP = 0b0110011,
OP_IMM = 0b0010011,
OP_IMM_32 = 0b0011011,
OP_32 = 0b0111011,
BRANCH = 0b1100011,
@ -41,17 +42,22 @@ const Enc = struct {
funct3: u3,
funct7: u7,
},
/// funct3 + offset
fo: struct {
funct3: u3,
offset: u12 = 0,
},
/// funct5 + rm + fmt
fmt: struct {
funct5: u5,
rm: u3,
fmt: Fmt,
},
/// funct3
f: struct {
funct3: u3,
},
/// typ + funct3 + has_5
sh: struct {
typ: u6,
funct3: u3,
has_5: bool,
},
/// U-type
none,
},
@ -72,10 +78,14 @@ pub const Mnemonic = enum {
sltiu,
xori,
andi,
slli,
srli,
srai,
sllw,
slliw,
srliw,
sraiw,
addi,
jalr,
@ -98,19 +108,48 @@ pub const Mnemonic = enum {
// R Type
add,
addw,
sub,
subw,
@"and",
@"or",
sub,
slt,
mul,
sltu,
xor,
sll,
srl,
sra,
sllw,
srlw,
sraw,
// System
ecall,
ebreak,
unimp,
// M extension
mul,
mulw,
mulh,
mulhu,
mulhsu,
div,
divu,
divw,
divuw,
rem,
remu,
remw,
remuw,
// F extension (32-bit float)
fadds,
fsubs,
@ -170,19 +209,56 @@ pub const Mnemonic = enum {
.slt => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b010, .funct7 = 0b0000000 } } },
.mul => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b000, .funct7 = 0b0000001 } } },
.mulh => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b001, .funct7 = 0b0000001 } } },
.mulhsu => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b010, .funct7 = 0b0000001 } } },
.mulhu => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b011, .funct7 = 0b0000001 } } },
.div => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b100, .funct7 = 0b0000001 } } },
.divu => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b101, .funct7 = 0b0000001 } } },
.rem => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b110, .funct7 = 0b0000001 } } },
.remu => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b111, .funct7 = 0b0000001 } } },
.sll => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b001, .funct7 = 0b0000000 } } },
.srl => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b101, .funct7 = 0b0000000 } } },
.sra => .{ .opcode = .OP, .data = .{ .ff = .{ .funct3 = 0b101, .funct7 = 0b0100000 } } },
// OP_IMM
.addi => .{ .opcode = .OP_IMM, .data = .{ .fo = .{ .funct3 = 0b000 } } },
.andi => .{ .opcode = .OP_IMM, .data = .{ .fo = .{ .funct3 = 0b111 } } },
.xori => .{ .opcode = .OP_IMM, .data = .{ .fo = .{ .funct3 = 0b100 } } },
.addi => .{ .opcode = .OP_IMM, .data = .{ .f = .{ .funct3 = 0b000 } } },
.andi => .{ .opcode = .OP_IMM, .data = .{ .f = .{ .funct3 = 0b111 } } },
.xori => .{ .opcode = .OP_IMM, .data = .{ .f = .{ .funct3 = 0b100 } } },
.sltiu => .{ .opcode = .OP_IMM, .data = .{ .fo = .{ .funct3 = 0b011 } } },
.sltiu => .{ .opcode = .OP_IMM, .data = .{ .f = .{ .funct3 = 0b011 } } },
.slli => .{ .opcode = .OP_IMM, .data = .{ .fo = .{ .funct3 = 0b001 } } },
.srli => .{ .opcode = .OP_IMM, .data = .{ .fo = .{ .funct3 = 0b101 } } },
.srai => .{ .opcode = .OP_IMM, .data = .{ .fo = .{ .funct3 = 0b101, .offset = 1 << 10 } } },
.slli => .{ .opcode = .OP_IMM, .data = .{ .sh = .{ .typ = 0b000000, .funct3 = 0b001, .has_5 = true } } },
.srli => .{ .opcode = .OP_IMM, .data = .{ .sh = .{ .typ = 0b000000, .funct3 = 0b101, .has_5 = true } } },
.srai => .{ .opcode = .OP_IMM, .data = .{ .sh = .{ .typ = 0b010000, .funct3 = 0b101, .has_5 = true } } },
// OP_IMM_32
.slliw => .{ .opcode = .OP_IMM_32, .data = .{ .sh = .{ .typ = 0b000000, .funct3 = 0b001, .has_5 = false } } },
.srliw => .{ .opcode = .OP_IMM_32, .data = .{ .sh = .{ .typ = 0b000000, .funct3 = 0b101, .has_5 = false } } },
.sraiw => .{ .opcode = .OP_IMM_32, .data = .{ .sh = .{ .typ = 0b010000, .funct3 = 0b101, .has_5 = false } } },
// OP_32
.addw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b000, .funct7 = 0b0000000 } } },
.subw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b000, .funct7 = 0b0100000 } } },
.mulw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b000, .funct7 = 0b0000001 } } },
.divw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b100, .funct7 = 0b0000001 } } },
.divuw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b101, .funct7 = 0b0000001 } } },
.remw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b110, .funct7 = 0b0000001 } } },
.remuw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b111, .funct7 = 0b0000001 } } },
.sllw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b001, .funct7 = 0b0000000 } } },
.srlw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b101, .funct7 = 0b0000000 } } },
.sraw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b101, .funct7 = 0b0100000 } } },
// OP_FP
@ -226,43 +302,38 @@ pub const Mnemonic = enum {
// LOAD
.lb => .{ .opcode = .LOAD, .data = .{ .fo = .{ .funct3 = 0b000 } } },
.lh => .{ .opcode = .LOAD, .data = .{ .fo = .{ .funct3 = 0b001 } } },
.lw => .{ .opcode = .LOAD, .data = .{ .fo = .{ .funct3 = 0b010 } } },
.ld => .{ .opcode = .LOAD, .data = .{ .fo = .{ .funct3 = 0b011 } } },
.lbu => .{ .opcode = .LOAD, .data = .{ .fo = .{ .funct3 = 0b100 } } },
.lhu => .{ .opcode = .LOAD, .data = .{ .fo = .{ .funct3 = 0b101 } } },
.lwu => .{ .opcode = .LOAD, .data = .{ .fo = .{ .funct3 = 0b110 } } },
.lb => .{ .opcode = .LOAD, .data = .{ .f = .{ .funct3 = 0b000 } } },
.lh => .{ .opcode = .LOAD, .data = .{ .f = .{ .funct3 = 0b001 } } },
.lw => .{ .opcode = .LOAD, .data = .{ .f = .{ .funct3 = 0b010 } } },
.ld => .{ .opcode = .LOAD, .data = .{ .f = .{ .funct3 = 0b011 } } },
.lbu => .{ .opcode = .LOAD, .data = .{ .f = .{ .funct3 = 0b100 } } },
.lhu => .{ .opcode = .LOAD, .data = .{ .f = .{ .funct3 = 0b101 } } },
.lwu => .{ .opcode = .LOAD, .data = .{ .f = .{ .funct3 = 0b110 } } },
// STORE
.sb => .{ .opcode = .STORE, .data = .{ .fo = .{ .funct3 = 0b000 } } },
.sh => .{ .opcode = .STORE, .data = .{ .fo = .{ .funct3 = 0b001 } } },
.sw => .{ .opcode = .STORE, .data = .{ .fo = .{ .funct3 = 0b010 } } },
.sd => .{ .opcode = .STORE, .data = .{ .fo = .{ .funct3 = 0b011 } } },
.sb => .{ .opcode = .STORE, .data = .{ .f = .{ .funct3 = 0b000 } } },
.sh => .{ .opcode = .STORE, .data = .{ .f = .{ .funct3 = 0b001 } } },
.sw => .{ .opcode = .STORE, .data = .{ .f = .{ .funct3 = 0b010 } } },
.sd => .{ .opcode = .STORE, .data = .{ .f = .{ .funct3 = 0b011 } } },
// LOAD_FP
.flw => .{ .opcode = .LOAD_FP, .data = .{ .fo = .{ .funct3 = 0b010 } } },
.fld => .{ .opcode = .LOAD_FP, .data = .{ .fo = .{ .funct3 = 0b011 } } },
.flw => .{ .opcode = .LOAD_FP, .data = .{ .f = .{ .funct3 = 0b010 } } },
.fld => .{ .opcode = .LOAD_FP, .data = .{ .f = .{ .funct3 = 0b011 } } },
// STORE_FP
.fsw => .{ .opcode = .STORE_FP, .data = .{ .fo = .{ .funct3 = 0b010 } } },
.fsd => .{ .opcode = .STORE_FP, .data = .{ .fo = .{ .funct3 = 0b011 } } },
.fsw => .{ .opcode = .STORE_FP, .data = .{ .f = .{ .funct3 = 0b010 } } },
.fsd => .{ .opcode = .STORE_FP, .data = .{ .f = .{ .funct3 = 0b011 } } },
// JALR
.jalr => .{ .opcode = .JALR, .data = .{ .fo = .{ .funct3 = 0b000 } } },
// OP_32
.sllw => .{ .opcode = .OP_32, .data = .{ .ff = .{ .funct3 = 0b001, .funct7 = 0b0000000 } } },
.jalr => .{ .opcode = .JALR, .data = .{ .f = .{ .funct3 = 0b000 } } },
// LUI
@ -282,18 +353,18 @@ pub const Mnemonic = enum {
// BRANCH
.beq => .{ .opcode = .BRANCH, .data = .{ .fo = .{ .funct3 = 0b000 } } },
.beq => .{ .opcode = .BRANCH, .data = .{ .f = .{ .funct3 = 0b000 } } },
// SYSTEM
.ecall => .{ .opcode = .SYSTEM, .data = .{ .fo = .{ .funct3 = 0b000 } } },
.ebreak => .{ .opcode = .SYSTEM, .data = .{ .fo = .{ .funct3 = 0b000 } } },
.ecall => .{ .opcode = .SYSTEM, .data = .{ .f = .{ .funct3 = 0b000 } } },
.ebreak => .{ .opcode = .SYSTEM, .data = .{ .f = .{ .funct3 = 0b000 } } },
// NONE
.unimp => .{ .opcode = .NONE, .data = .{ .fo = .{ .funct3 = 0b000 } } },
.unimp => .{ .opcode = .NONE, .data = .{ .f = .{ .funct3 = 0b000 } } },
// zig fmt: on
@ -320,10 +391,15 @@ pub const InstEnc = enum {
.sltiu,
.xori,
.andi,
.slli,
.srli,
.srai,
.slliw,
.srliw,
.sraiw,
.ld,
.lw,
.lwu,
@ -357,14 +433,41 @@ pub const InstEnc = enum {
.slt,
.sltu,
.sll,
.srl,
.sra,
.sllw,
.mul,
.srlw,
.sraw,
.div,
.divu,
.divw,
.divuw,
.rem,
.remu,
.remw,
.remuw,
.xor,
.add,
.sub,
.@"and",
.@"or",
.add,
.addw,
.sub,
.subw,
.mul,
.mulw,
.mulh,
.mulhu,
.mulhsu,
.fadds,
.faddd,
@ -516,12 +619,12 @@ pub const Data = union(InstEnc) {
.imm0_11 = switch (mnem) {
.ecall => 0x000,
.ebreak => 0x001,
.unimp => 0,
.unimp => 0x000,
else => unreachable,
},
.opcode = @intFromEnum(enc.opcode),
.funct3 = enc.data.fo.funct3,
.funct3 = enc.data.f.funct3,
},
};
},
@ -567,20 +670,32 @@ pub const Data = union(InstEnc) {
.imm5_11 = @truncate(umm >> 5),
.opcode = @intFromEnum(enc.opcode),
.funct3 = enc.data.fo.funct3,
.funct3 = enc.data.f.funct3,
},
};
},
.I => {
assert(ops.len == 3);
return .{
.I = .{
.rd = ops[0].reg.encodeId(),
.rs1 = ops[1].reg.encodeId(),
.imm0_11 = ops[2].imm.asBits(u12) + enc.data.fo.offset,
.I = switch (enc.data) {
.f => |f| .{
.rd = ops[0].reg.encodeId(),
.rs1 = ops[1].reg.encodeId(),
.imm0_11 = ops[2].imm.asBits(u12),
.opcode = @intFromEnum(enc.opcode),
.funct3 = enc.data.fo.funct3,
.opcode = @intFromEnum(enc.opcode),
.funct3 = f.funct3,
},
.sh => |sh| .{
.rd = ops[0].reg.encodeId(),
.rs1 = ops[1].reg.encodeId(),
.imm0_11 = (@as(u12, sh.typ) << 6) |
if (sh.has_5) ops[2].imm.asBits(u6) else (@as(u6, 0) | ops[2].imm.asBits(u5)),
.opcode = @intFromEnum(enc.opcode),
.funct3 = sh.funct3,
},
else => unreachable,
},
};
},
@ -629,7 +744,7 @@ pub const Data = union(InstEnc) {
.imm12 = @truncate(umm >> 12),
.opcode = @intFromEnum(enc.opcode),
.funct3 = enc.data.fo.funct3,
.funct3 = enc.data.f.funct3,
},
};
},

View File

@ -44,6 +44,8 @@ pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index) Error!struct {
insts: []const Instruction,
relocs: []const Reloc,
} {
const zcu = lower.bin_file.comp.module.?;
lower.result_insts = undefined;
lower.result_relocs = undefined;
errdefer lower.result_insts = undefined;
@ -75,13 +77,14 @@ pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index) Error!struct {
const dest_reg_class = dest_reg.class();
const float = dest_reg_class == .float;
const src_size = rm.m.mod.size();
const src_size = rm.m.mod.size;
const unsigned = rm.m.mod.unsigned;
const tag: Encoding.Mnemonic = if (!float)
switch (src_size) {
.byte => .lb,
.hword => .lh,
.word => .lw,
.byte => if (unsigned) .lbu else .lb,
.hword => if (unsigned) .lhu else .lh,
.word => if (unsigned) .lwu else .lw,
.dword => .ld,
}
else switch (src_size) {
@ -103,7 +106,7 @@ pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index) Error!struct {
const float = src_reg_class == .float;
// TODO: do we actually need this? are all stores not usize?
const dest_size = rm.m.mod.size();
const dest_size = rm.m.mod.size;
const tag: Encoding.Mnemonic = if (!float)
switch (dest_size) {
@ -181,10 +184,12 @@ pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index) Error!struct {
try lower.emit(.lui, &.{
.{ .reg = dst_reg },
.{ .imm = lower.reloc(.{ .load_symbol_reloc = .{
.atom_index = data.atom_index,
.sym_index = data.sym_index,
} }) },
.{ .imm = lower.reloc(.{
.load_symbol_reloc = .{
.atom_index = data.atom_index,
.sym_index = data.sym_index,
},
}) },
});
// the above reloc implies this one
@ -237,7 +242,14 @@ pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index) Error!struct {
const rs2 = compare.rs2;
const class = rs1.class();
const size = compare.size.bitSize();
const ty = compare.ty;
const size = std.math.ceilPowerOfTwo(u64, ty.bitSize(zcu)) catch {
return lower.fail("pseudo_compare size {}", .{ty.bitSize(zcu)});
};
const is_unsigned = ty.isUnsignedInt(zcu);
const less_than: Encoding.Mnemonic = if (is_unsigned) .sltu else .slt;
switch (class) {
.int => switch (op) {
@ -268,14 +280,14 @@ pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index) Error!struct {
});
},
.gt => {
try lower.emit(.sltu, &.{
try lower.emit(less_than, &.{
.{ .reg = rd },
.{ .reg = rs1 },
.{ .reg = rs2 },
});
},
.gte => {
try lower.emit(.sltu, &.{
try lower.emit(less_than, &.{
.{ .reg = rd },
.{ .reg = rs1 },
.{ .reg = rs2 },
@ -288,14 +300,14 @@ pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index) Error!struct {
});
},
.lt => {
try lower.emit(.slt, &.{
try lower.emit(less_than, &.{
.{ .reg = rd },
.{ .reg = rs1 },
.{ .reg = rs2 },
});
},
.lte => {
try lower.emit(.slt, &.{
try lower.emit(less_than, &.{
.{ .reg = rd },
.{ .reg = rs2 },
.{ .reg = rs1 },

View File

@ -39,16 +39,28 @@ pub const Inst = struct {
unimp,
add,
addw,
sub,
subw,
sltu,
slt,
srli,
slli,
srli,
srai,
slliw,
srliw,
sraiw,
sll,
srl,
sra,
sllw,
srlw,
sraw,
jal,
@ -69,6 +81,17 @@ pub const Inst = struct {
// M extension
mul,
mulw,
div,
divu,
divw,
divuw,
rem,
remu,
remw,
remuw,
// F extension (32-bit float)
fadds,
@ -216,7 +239,7 @@ pub const Inst = struct {
lt,
lte,
},
size: Memory.Size,
ty: Type,
},
reloc: struct {
@ -408,6 +431,8 @@ pub const RegisterList = struct {
const Mir = @This();
const std = @import("std");
const builtin = @import("builtin");
const Type = @import("../../type.zig").Type;
const assert = std.debug.assert;
const bits = @import("bits.zig");

View File

@ -19,19 +19,10 @@ pub const Memory = struct {
reloc: Symbol,
};
pub const Mod = union(enum(u1)) {
rm: struct {
size: Size,
disp: i32 = 0,
},
off: i32,
pub fn size(mod: Mod) Size {
return switch (mod) {
.rm => |rm| rm.size,
.off => Size.dword, // assumed to be a register size
};
}
/// Addressing modifier for a memory operand.
pub const Mod = struct {
// Access width of the load/store.
size: Size,
// When true, loads select the zero-extending variants (lbu/lhu/lwu)
// instead of the sign-extending ones.
unsigned: bool,
// Byte displacement added to the base address.
disp: i32 = 0,
};
pub const Size = enum(u4) {
@ -76,10 +67,7 @@ pub const Memory = struct {
/// Asserts `mem` can be represented as a `FrameLoc`.
pub fn toFrameLoc(mem: Memory, mir: Mir) Mir.FrameLoc {
const offset: i32 = switch (mem.mod) {
.off => |off| off,
.rm => |rm| rm.disp,
};
const offset: i32 = mem.mod.disp;
switch (mem.base) {
.reg => |reg| {
@ -130,24 +118,6 @@ pub const Immediate = union(enum) {
};
}
pub fn asUnsigned(imm: Immediate, bit_size: u64) u64 {
return switch (imm) {
.signed => |x| switch (bit_size) {
1, 8 => @as(u8, @bitCast(@as(i8, @intCast(x)))),
16 => @as(u16, @bitCast(@as(i16, @intCast(x)))),
32, 64 => @as(u32, @bitCast(x)),
else => unreachable,
},
.unsigned => |x| switch (bit_size) {
1, 8 => @as(u8, @intCast(x)),
16 => @as(u16, @intCast(x)),
32 => @as(u32, @intCast(x)),
64 => x,
else => unreachable,
},
};
}
pub fn asBits(imm: Immediate, comptime T: type) T {
const int_info = @typeInfo(T).Int;
if (int_info.signedness != .unsigned) @compileError("Immediate.asBits needs unsigned T");

View File

@ -32,6 +32,31 @@ pub const Instruction = struct {
pub fn encode(inst: Instruction, writer: anytype) !void {
try writer.writeInt(u32, inst.encoding.data.toU32(), .little);
}
/// Pretty-prints the instruction as `mnemonic op0, op1, ...` for use with
/// `std.fmt` (e.g. `{}`); asserts that no format specifier is given.
pub fn format(
    inst: Instruction,
    comptime fmt: []const u8,
    _: std.fmt.FormatOptions,
    writer: anytype,
) !void {
    std.debug.assert(fmt.len == 0);
    const encoding = inst.encoding;
    try writer.print("{s} ", .{@tagName(encoding.mnemonic)});

    var i: u32 = 0;
    // Stop at the first `.none` operand — trailing slots are unused padding.
    while (i < inst.ops.len and inst.ops[i] != .none) : (i += 1) {
        // The loop condition already guarantees `i < inst.ops.len`, so only
        // the "not the first operand" check is needed for the separator.
        if (i != 0) try writer.writeAll(", ");
        switch (@as(Instruction.Operand, inst.ops[i])) {
            .none => unreachable, // it's sliced out above
            .reg => |reg| try writer.writeAll(@tagName(reg)),
            .imm => |imm| try writer.print("{d}", .{imm.asSigned(64)}),
            .mem => unreachable, // there is no "mem" operand in the actual instructions
        }
    }
}
};
const std = @import("std");

View File

@ -249,7 +249,6 @@ fn testBytesAlign(b: u8) !void {
}
test "@alignCast slices" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

View File

@ -50,7 +50,6 @@ fn getArrayLen(a: []const u32) usize {
test "array concat with undefined" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -89,7 +88,6 @@ test "array concat with tuple" {
test "array init with concat" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const a = 'a';
var i: [4]u8 = [2]u8{ a, 'b' } ++ [2]u8{ 'c', 'd' };
@ -99,7 +97,6 @@ test "array init with concat" {
test "array init with mult" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const a = 'a';
var i: [8]u8 = [2]u8{ a, 'b' } ** 4;
@ -244,7 +241,6 @@ fn plusOne(x: u32) u32 {
test "single-item pointer to array indexing and slicing" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try testSingleItemPtrArrayIndexSlice();
try comptime testSingleItemPtrArrayIndexSlice();
@ -270,7 +266,6 @@ fn doSomeMangling(array: *[4]u8) void {
test "implicit cast zero sized array ptr to slice" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
{
var b = "".*;
@ -309,7 +304,6 @@ const Str = struct { a: []Sub };
test "set global var array via slice embedded in struct" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var s = Str{ .a = s_array[0..] };
@ -376,7 +370,6 @@ test "comptime evaluating function that takes array by value" {
test "runtime initialize array elem and then implicit cast to slice" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var two: i32 = 2;
_ = &two;
@ -387,7 +380,6 @@ test "runtime initialize array elem and then implicit cast to slice" {
test "array literal as argument to function" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn entry(two: i32) !void {
@ -416,7 +408,6 @@ test "double nested array to const slice cast in array literal" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn entry(two: i32) !void {
@ -522,7 +513,6 @@ test "type deduction for array subscript expression" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -542,6 +532,7 @@ test "sentinel element count towards the ABI size calculation" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -956,7 +947,6 @@ test "array initialized with string literal" {
test "array initialized with array with sentinel" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
a: u32,

View File

@ -709,7 +709,6 @@ test "result location is optional inside error union" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const x = maybe(true) catch unreachable;
try expect(x.? == 42);
@ -1091,8 +1090,6 @@ test "orelse coercion as function argument" {
}
test "runtime-known globals initialized with undefined" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
var array: [10]u32 = [_]u32{ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
var vp: [*]u32 = undefined;

View File

@ -165,6 +165,7 @@ test "@bitCast packed structs at runtime and comptime" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const Full = packed struct {
number: u16,
@ -191,6 +192,7 @@ test "@bitCast packed structs at runtime and comptime" {
test "@bitCast extern structs at runtime and comptime" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const Full = extern struct {
number: u16,
@ -225,6 +227,7 @@ test "bitcast packed struct to integer and back" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const LevelUpMove = packed struct {
move_id: u9,

View File

@ -5,7 +5,6 @@ var result: []const u8 = "wrong";
test "pass string literal byvalue to a generic var param" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
start();

View File

@ -266,7 +266,6 @@ fn MakeType(comptime T: type) type {
test "implicit cast from *[N]T to [*c]T" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var x: [4]u16 = [4]u16{ 0, 1, 2, 3 };
var y: [*c]u16 = &x;
@ -343,7 +342,6 @@ test "array coercion to undefined at runtime" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
@setRuntimeSafety(true);
@ -409,7 +407,6 @@ test "peer type unsigned int to signed" {
test "expected [*c]const u8, found [*:0]const u8" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a: [*:0]const u8 = "hello";
_ = &a;
@ -496,7 +493,6 @@ test "peer type resolution: [0]u8, []const u8, and anyerror![]u8" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() anyerror!void {
@ -577,7 +573,6 @@ fn testPeerErrorAndArray2(x: u8) anyerror![]const u8 {
test "single-item pointer of array to slice to unknown length pointer" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try testCastPtrOfArrayToSliceAndPtr();
try comptime testCastPtrOfArrayToSliceAndPtr();
@ -673,6 +668,7 @@ test "@floatCast cast down" {
test "peer type resolution: unreachable, error set, unreachable" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const Error = error{
FileDescriptorAlreadyPresentInSet,
@ -834,7 +830,6 @@ test "peer cast *[0]T to E![]const T" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var buffer: [5]u8 = "abcde".*;
const buf: anyerror![]const u8 = buffer[0..];
@ -850,7 +845,6 @@ test "peer cast *[0]T to []const T" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var buffer: [5]u8 = "abcde".*;
const buf: []const u8 = buffer[0..];
@ -895,7 +889,6 @@ test "peer resolution of string literals" {
test "peer cast [:x]T to []T" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -912,7 +905,6 @@ test "peer cast [:x]T to []T" {
test "peer cast [N:x]T to [N]T" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -929,7 +921,6 @@ test "peer cast [N:x]T to [N]T" {
test "peer cast *[N:x]T to *[N]T" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -987,7 +978,6 @@ test "peer cast [:x]T to [*:x]T" {
test "peer type resolution implicit cast to return type" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -1008,7 +998,6 @@ test "peer type resolution implicit cast to return type" {
test "peer type resolution implicit cast to variable type" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -1052,7 +1041,6 @@ test "cast between C pointer with different but compatible types" {
test "peer type resolve string lit with sentinel-terminated mutable slice" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var array: [4:0]u8 = undefined;
array[4] = 0; // TODO remove this when #4372 is solved
@ -1119,7 +1107,6 @@ test "implicit cast from [*]T to ?*anyopaque" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a = [_]u8{ 3, 2, 1 };
var runtime_zero: usize = 0;
@ -1319,7 +1306,6 @@ test "*const [N]null u8 to ?[]const u8" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -1363,7 +1349,6 @@ test "assignment to optional pointer result loc" {
}
test "cast between *[N]void and []void" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
var a: [4]void = undefined;
@ -1618,8 +1603,6 @@ test "optional slice passed as parameter coerced to allowzero many pointer" {
}
test "single item pointer to pointer to array to slice" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var x: i32 = 1234;
try expect(@as([]const i32, @as(*[1]i32, &x))[0] == 1234);
const z1 = @as([]const i32, @as(*[1]i32, &x));
@ -1662,8 +1645,6 @@ test "@volatileCast without a result location" {
}
test "coercion from single-item pointer to @as to slice" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var x: u32 = 1;
// Why the following line gets a compile error?
@ -1728,7 +1709,6 @@ test "peer type resolution: same array type with sentinel" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a: [2:0]u32 = .{ 0, 1 };
var b: [2:0]u32 = .{ 2, 3 };
@ -1751,7 +1731,6 @@ test "peer type resolution: array with sentinel and array without sentinel" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a: [2:0]u32 = .{ 0, 1 };
var b: [2]u32 = .{ 2, 3 };
@ -1956,7 +1935,6 @@ test "peer type resolution: array and tuple" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var arr: [3]i32 = .{ 1, 2, 3 };
_ = &arr;
@ -2281,7 +2259,6 @@ test "peer type resolution: arrays of compatible types" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var e0: u8 = 3;
var e1: u8 = 2;

View File

@ -618,6 +618,7 @@ test "enum with specified tag values" {
test "non-exhaustive enum" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const E = enum(u8) { a, b, _ };

View File

@ -856,7 +856,6 @@ test "alignment of wrapping an error union payload" {
test "compare error union and error set" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a: anyerror = error.Foo;
var b: anyerror!u32 = error.Bar;

View File

@ -395,6 +395,7 @@ test "return 0 from function that has u0 return type" {
test "statically initialized struct" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
st_init_str_foo.x += 1;
try expect(st_init_str_foo.x == 14);
@ -740,7 +741,6 @@ test "array concatenation of function calls" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a = oneItem(3) ++ oneItem(4);
try expect(std.mem.eql(i32, &a, &[_]i32{ 3, 4 }));
@ -750,7 +750,6 @@ test "array multiplication of function calls" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a = oneItem(3) ** scalar(2);
try expect(std.mem.eql(i32, &a, &[_]i32{ 3, 3 }));
@ -768,7 +767,6 @@ test "array concatenation peer resolves element types - value" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a = [2]u3{ 1, 7 };
var b = [3]u8{ 200, 225, 255 };
@ -786,6 +784,7 @@ test "array concatenation peer resolves element types - pointer" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a = [2]u3{ 1, 7 };
var b = [3]u8{ 200, 225, 255 };

View File

@ -200,7 +200,6 @@ test "for on slice with allowzero ptr" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest(slice: []const u8) !void {
@ -216,7 +215,6 @@ test "for on slice with allowzero ptr" {
test "else continue outer for" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var i: usize = 6;
var buf: [5]u8 = undefined;
@ -313,7 +311,6 @@ test "slice and two counters, one is offset and one is runtime" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const slice: []const u8 = "blah";
var start: usize = 0;
@ -343,7 +340,6 @@ test "two slices, one captured by-ref" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var buf: [10]u8 = undefined;
const slice1: []const u8 = "blah";
@ -400,7 +396,6 @@ test "inline for with slice as the comptime-known" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const comptime_slice = "hello";
var runtime_i: usize = 3;

View File

@ -158,7 +158,6 @@ test "generic fn with implicit cast" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try expect(getFirstByte(u8, &[_]u8{13}) == 13);
try expect(getFirstByte(u16, &[_]u16{
@ -320,7 +319,6 @@ test "generic function instantiation non-duplicates" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.os.tag == .wasi) return error.SkipZigTest;
const S = struct {

View File

@ -29,7 +29,6 @@ test "slices pointing at the same address as global array." {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const a = [_]u8{ 1, 2, 3 };

View File

@ -45,7 +45,6 @@ var global_with_err: anyerror!u32 = error.SomeError;
test "unwrap mutable global var" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (global_with_val) |v| {
try expect(v == 0);

View File

@ -76,7 +76,6 @@ test "inline switch unions" {
test "inline else bool" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a = true;
_ = &a;

View File

@ -592,8 +592,6 @@ fn testSignedWrappingEval(x: i32) !void {
}
test "signed negation wrapping" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try testSignedNegationWrappingEval(minInt(i16));
try comptime testSignedNegationWrappingEval(minInt(i16));
}
@ -664,8 +662,6 @@ test "bit shift a u1" {
}
test "truncating shift right" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try testShrTrunc(maxInt(u16));
try comptime testShrTrunc(maxInt(u16));
}
@ -1455,8 +1451,6 @@ fn testShlExact(x: u8) !void {
}
test "exact shift right" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try testShrExact(0b10110100);
try comptime testShrExact(0b10110100);
}

View File

@ -26,7 +26,6 @@ test "nan memory equality" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
// signaled
try testing.expect(mem.eql(u8, mem.asBytes(&snan_u16), mem.asBytes(&snan_f16)));

View File

@ -488,7 +488,6 @@ const NoReturn = struct {
test "optional of noreturn used with if" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
NoReturn.a = 64;
if (NoReturn.loop()) |_| {
@ -500,7 +499,6 @@ test "optional of noreturn used with if" {
test "optional of noreturn used with orelse" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
NoReturn.a = 64;
const val = NoReturn.testOrelse();

View File

@ -258,6 +258,7 @@ test "nested packed struct unaligned" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (native_endian != .little) return error.SkipZigTest; // Byte aligned packed struct field pointers have not been implemented yet
const S1 = packed struct {
@ -330,6 +331,7 @@ test "byte-aligned field pointer offsets" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const A = packed struct {
@ -491,7 +493,6 @@ test "@intFromPtr on a packed struct field" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (native_endian != .little) return error.SkipZigTest;
const S = struct {
@ -515,7 +516,6 @@ test "@intFromPtr on a packed struct field unaligned and nested" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (native_endian != .little) return error.SkipZigTest; // Byte aligned packed struct field pointers have not been implemented yet
const S1 = packed struct {

View File

@ -201,7 +201,6 @@ test "allowzero pointer and slice" {
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var ptr: [*]allowzero i32 = @ptrFromInt(0);
const opt_ptr: ?[*]allowzero i32 = ptr;
@ -440,7 +439,6 @@ test "indexing array with sentinel returns correct type" {
test "element pointer to slice" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -488,7 +486,6 @@ test "element pointer arithmetic to slice" {
test "array slicing to slice" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {

View File

@ -232,7 +232,6 @@ test "implicit optional pointer to optional anyopaque pointer" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var buf: [4]u8 = "aoeu".*;
const x: ?[*]u8 = &buf;
@ -245,7 +244,6 @@ test "@ptrCast slice to slice" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn foo(slice: []u32) []i32 {

View File

@ -28,6 +28,7 @@ fn dummy(a: bool, b: i32, c: f32) i32 {
test "reflection: @field" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var f = Foo{
.one = 42,

View File

@ -81,7 +81,6 @@ const P = packed struct {
test "@offsetOf" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
// Packed structs have fixed memory layout
try expect(@offsetOf(P, "a") == 0);

View File

@ -67,7 +67,6 @@ test "comptime slice of undefined pointer of length 0" {
test "implicitly cast array of size 0 to slice" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var msg = [_]u8{};
try assertLenIsZero(&msg);
@ -233,7 +232,6 @@ test "runtime safety lets us slice from len..len" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var an_array = [_]u8{ 1, 2, 3 };
try expect(mem.eql(u8, sliceFromLenToLen(an_array[0..], 3, 3), ""));
@ -289,7 +287,6 @@ fn sliceSum(comptime q: []const u8) i32 {
test "slice type with custom alignment" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const LazilyResolvedType = struct {
anything: i32,
@ -347,7 +344,6 @@ test "empty array to slice" {
test "@ptrCast slice to pointer" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -402,7 +398,6 @@ test "slice syntax resulting in pointer-to-array" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const S = struct {
@ -622,7 +617,6 @@ test "slice syntax resulting in pointer-to-array" {
test "slice pointer-to-array null terminated" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
comptime {
var array = [5:0]u8{ 1, 2, 3, 4, 5 };
@ -641,7 +635,6 @@ test "slice pointer-to-array null terminated" {
test "slice pointer-to-array zero length" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
comptime {
{
@ -676,7 +669,6 @@ test "type coercion of pointer to anon struct literal to pointer to slice" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const U = union {
@ -786,7 +778,6 @@ test "slicing array with sentinel as end index" {
test "slicing slice with sentinel as end index" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn do() !void {
@ -857,7 +848,6 @@ test "global slice field access" {
}
test "slice of void" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
var n: usize = 10;
@ -981,7 +971,6 @@ test "get address of element of zero-sized slice" {
if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const S = struct {
@ -996,7 +985,6 @@ test "sentinel-terminated 0-length slices" {
if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const u32s: [4]u32 = [_]u32{ 0, 1, 2, 3 };

View File

@ -68,6 +68,7 @@ const SmallStruct = struct {
test "lower unnamed constants" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var foo = SmallStruct{ .a = 1, .b = 255 };
try expect(foo.first() == 1);
@ -874,6 +875,7 @@ test "packed struct field passed to generic function" {
test "anonymous struct literal syntax" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const Point = struct {
@ -1103,6 +1105,7 @@ test "packed struct with undefined initializers" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const P = packed struct {
@ -1365,6 +1368,7 @@ test "store to comptime field" {
test "struct field init value is size of the struct" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const namespace = struct {
const S = extern struct {

View File

@ -13,7 +13,6 @@ const NodeAligned = struct {
test "struct contains slice of itself" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var other_nodes = [_]Node{
Node{
@ -54,7 +53,6 @@ test "struct contains slice of itself" {
test "struct contains aligned slice of itself" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var other_nodes = [_]NodeAligned{
NodeAligned{

View File

@ -118,7 +118,6 @@ fn trueIfBoolFalseOtherwise(comptime T: type) bool {
test "switching on booleans" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try testSwitchOnBools();
try comptime testSwitchOnBools();
@ -277,7 +276,6 @@ fn testSwitchEnumPtrCapture() !void {
test "switch handles all cases of number" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try testSwitchHandleAllCases();
try comptime testSwitchHandleAllCases();
@ -647,7 +645,6 @@ test "switch prong pointer capture alignment" {
test "switch on pointer type" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const X = struct {
@ -912,7 +909,6 @@ test "peer type resolution on switch captures ignores unused payload bits" {
test "switch prong captures range" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn a(b: []u3, c: u3) void {

View File

@ -27,6 +27,7 @@ test "this refer to module call private fn" {
test "this refer to container" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var pt: Point(i32) = undefined;
pt.x = 12;

View File

@ -91,7 +91,6 @@ test "reslice of undefined global var slice" {
if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var stack_buf: [100]u8 = [_]u8{0} ** 100;
buf = &stack_buf;

View File

@ -2044,6 +2044,7 @@ test "extern union initialized via reintepreted struct field initializer" {
test "packed union initialized via reintepreted struct field initializer" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const bytes = [_]u8{ 0xaa, 0xbb, 0xcc, 0xdd };
@ -2064,6 +2065,7 @@ test "packed union initialized via reintepreted struct field initializer" {
test "store of comptime reinterpreted memory to extern union" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const bytes = [_]u8{ 0xaa, 0xbb, 0xcc, 0xdd };
@ -2086,6 +2088,7 @@ test "store of comptime reinterpreted memory to extern union" {
test "store of comptime reinterpreted memory to packed union" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const bytes = [_]u8{ 0xaa, 0xbb, 0xcc, 0xdd };

View File

@ -166,7 +166,6 @@ test "array to vector" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {

View File

@ -379,7 +379,6 @@ test "while loop with comptime true condition needs no else block to return valu
test "int returned from switch in while" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var x: u32 = 3;
const val: usize = while (true) switch (x) {

View File

@ -32,7 +32,6 @@ test "implicit unsigned integer to signed integer" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a: u8 = 250;
var b: i16 = a;
@ -80,7 +79,6 @@ test "cast small unsigned to larger signed" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try expect(castSmallUnsignedToLargerSigned1(200) == @as(i16, 200));
try expect(castSmallUnsignedToLargerSigned2(9999) == @as(i64, 9999));