Mirror of https://github.com/ziglang/zig.git (synced 2026-02-13 21:08:36 +00:00)

Merge pull request #15076 from jacobly0/x86_64-backend

x86_64: implement things

Commit 32591c1d99
@@ -29,8 +29,8 @@ pub const default_mode: ModeOverride = if (is_async) Mode.evented else .blocking
 fn getStdOutHandle() os.fd_t {
     if (builtin.os.tag == .windows) {
-        if (builtin.zig_backend == .stage2_x86_64 or builtin.zig_backend == .stage2_aarch64) {
-            // TODO: this is just a temporary workaround until we advance x86 backend further along.
+        if (builtin.zig_backend == .stage2_aarch64) {
+            // TODO: this is just a temporary workaround until we advance aarch64 backend further along.
             return os.windows.GetStdHandle(os.windows.STD_OUTPUT_HANDLE) catch os.windows.INVALID_HANDLE_VALUE;
         }
         return os.windows.peb().ProcessParameters.hStdOutput;
@@ -55,8 +55,8 @@ pub fn getStdOut() File {
 fn getStdErrHandle() os.fd_t {
     if (builtin.os.tag == .windows) {
-        if (builtin.zig_backend == .stage2_x86_64 or builtin.zig_backend == .stage2_aarch64) {
-            // TODO: this is just a temporary workaround until we advance x86 backend further along.
+        if (builtin.zig_backend == .stage2_aarch64) {
+            // TODO: this is just a temporary workaround until we advance aarch64 backend further along.
             return os.windows.GetStdHandle(os.windows.STD_ERROR_HANDLE) catch os.windows.INVALID_HANDLE_VALUE;
         }
         return os.windows.peb().ProcessParameters.hStdError;
@@ -81,8 +81,8 @@ pub fn getStdErr() File {
 fn getStdInHandle() os.fd_t {
     if (builtin.os.tag == .windows) {
-        if (builtin.zig_backend == .stage2_x86_64 or builtin.zig_backend == .stage2_aarch64) {
-            // TODO: this is just a temporary workaround until we advance x86 backend further along.
+        if (builtin.zig_backend == .stage2_aarch64) {
+            // TODO: this is just a temporary workaround until we advance aarch64 backend further along.
             return os.windows.GetStdHandle(os.windows.STD_INPUT_HANDLE) catch os.windows.INVALID_HANDLE_VALUE;
         }
         return os.windows.peb().ProcessParameters.hStdInput;
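These three hunks drop the GetStdHandle workaround for the self-hosted x86_64 backend, so stage2_x86_64 now resolves standard handles through the PEB like the other backends. A minimal smoke program of the kind this enables (my own sketch, not part of the diff):

    const std = @import("std");

    pub fn main() !void {
        // Exercises getStdOut(), which on Windows now goes through
        // peb().ProcessParameters for the self-hosted x86_64 backend as well.
        const stdout = std.io.getStdOut().writer();
        try stdout.writeAll("hello from the self-hosted backend\n");
    }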
(One file's diff is suppressed here because it is too large.)
@@ -121,7 +121,9 @@ pub fn lowerMir(emit: *Emit) InnerError!void {
        .sbb,
        .sfence,
        .shl,
+        .shld,
        .shr,
+        .shrd,
        .sub,
        .syscall,
        .@"test",
@@ -231,10 +233,10 @@ fn mirEncodeGeneric(emit: *Emit, tag: Mir.Inst.Tag, inst: Mir.Inst.Index) InnerE
    const prefix: Instruction.Prefix = switch (ops) {
        .lock_m_sib,
        .lock_m_rip,
-        .lock_mi_u_sib,
-        .lock_mi_u_rip,
-        .lock_mi_s_sib,
-        .lock_mi_s_rip,
+        .lock_mi_sib_u,
+        .lock_mi_rip_u,
+        .lock_mi_sib_s,
+        .lock_mi_rip_s,
        .lock_mr_sib,
        .lock_mr_rip,
        .lock_moffs_rax,
@@ -249,31 +251,36 @@ fn mirEncodeGeneric(emit: *Emit, tag: Mir.Inst.Tag, inst: Mir.Inst.Index) InnerE

    switch (ops) {
        .none => {},
-        .imm_s => op1 = .{ .imm = Immediate.s(@bitCast(i32, data.imm)) },
-        .imm_u => op1 = .{ .imm = Immediate.u(data.imm) },
+        .i_s => op1 = .{ .imm = Immediate.s(@bitCast(i32, data.i)) },
+        .i_u => op1 = .{ .imm = Immediate.u(data.i) },
        .r => op1 = .{ .reg = data.r },
        .rr => {
            op1 = .{ .reg = data.rr.r1 };
            op2 = .{ .reg = data.rr.r2 };
        },
+        .rrr => {
+            op1 = .{ .reg = data.rrr.r1 };
+            op2 = .{ .reg = data.rrr.r2 };
+            op3 = .{ .reg = data.rrr.r3 };
+        },
        .ri_s, .ri_u => {
            const imm = switch (ops) {
-                .ri_s => Immediate.s(@bitCast(i32, data.ri.imm)),
-                .ri_u => Immediate.u(data.ri.imm),
+                .ri_s => Immediate.s(@bitCast(i32, data.ri.i)),
+                .ri_u => Immediate.u(data.ri.i),
                else => unreachable,
            };
-            op1 = .{ .reg = data.ri.r1 };
+            op1 = .{ .reg = data.ri.r };
            op2 = .{ .imm = imm };
        },
        .ri64 => {
            const imm64 = emit.mir.extraData(Mir.Imm64, data.rx.payload).data;
-            op1 = .{ .reg = data.rx.r1 };
+            op1 = .{ .reg = data.rx.r };
            op2 = .{ .imm = Immediate.u(Mir.Imm64.decode(imm64)) };
        },
        .rri_s, .rri_u => {
            const imm = switch (ops) {
-                .rri_s => Immediate.s(@bitCast(i32, data.rri.imm)),
-                .rri_u => Immediate.u(data.rri.imm),
+                .rri_s => Immediate.s(@bitCast(i32, data.rri.i)),
+                .rri_u => Immediate.u(data.rri.i),
                else => unreachable,
            };
            op1 = .{ .reg = data.rri.r1 };
@@ -288,21 +295,21 @@ fn mirEncodeGeneric(emit: *Emit, tag: Mir.Inst.Tag, inst: Mir.Inst.Index) InnerE
            const mrip = emit.mir.extraData(Mir.MemoryRip, data.payload).data;
            op1 = .{ .mem = Mir.MemoryRip.decode(mrip) };
        },
-        .mi_s_sib, .mi_u_sib, .lock_mi_s_sib, .lock_mi_u_sib => {
-            const msib = emit.mir.extraData(Mir.MemorySib, data.xi.payload).data;
+        .mi_sib_s, .mi_sib_u, .lock_mi_sib_s, .lock_mi_sib_u => {
+            const msib = emit.mir.extraData(Mir.MemorySib, data.ix.payload).data;
            const imm = switch (ops) {
-                .mi_s_sib, .lock_mi_s_sib => Immediate.s(@bitCast(i32, data.xi.imm)),
-                .mi_u_sib, .lock_mi_u_sib => Immediate.u(data.xi.imm),
+                .mi_sib_s, .lock_mi_sib_s => Immediate.s(@bitCast(i32, data.ix.i)),
+                .mi_sib_u, .lock_mi_sib_u => Immediate.u(data.ix.i),
                else => unreachable,
            };
            op1 = .{ .mem = Mir.MemorySib.decode(msib) };
            op2 = .{ .imm = imm };
        },
-        .mi_u_rip, .mi_s_rip, .lock_mi_u_rip, .lock_mi_s_rip => {
-            const mrip = emit.mir.extraData(Mir.MemoryRip, data.xi.payload).data;
+        .mi_rip_u, .mi_rip_s, .lock_mi_rip_u, .lock_mi_rip_s => {
+            const mrip = emit.mir.extraData(Mir.MemoryRip, data.ix.payload).data;
            const imm = switch (ops) {
-                .mi_s_rip, .lock_mi_s_rip => Immediate.s(@bitCast(i32, data.xi.imm)),
-                .mi_u_rip, .lock_mi_u_rip => Immediate.u(data.xi.imm),
+                .mi_rip_s, .lock_mi_rip_s => Immediate.s(@bitCast(i32, data.ix.i)),
+                .mi_rip_u, .lock_mi_rip_u => Immediate.u(data.ix.i),
                else => unreachable,
            };
            op1 = .{ .mem = Mir.MemoryRip.decode(mrip) };
@@ -310,7 +317,7 @@ fn mirEncodeGeneric(emit: *Emit, tag: Mir.Inst.Tag, inst: Mir.Inst.Index) InnerE
        },
        .rm_sib, .mr_sib, .lock_mr_sib => {
            const msib = emit.mir.extraData(Mir.MemorySib, data.rx.payload).data;
-            const op_r = .{ .reg = data.rx.r1 };
+            const op_r = .{ .reg = data.rx.r };
            const op_m = .{ .mem = Mir.MemorySib.decode(msib) };
            switch (ops) {
                .rm_sib => {
@@ -326,7 +333,7 @@ fn mirEncodeGeneric(emit: *Emit, tag: Mir.Inst.Tag, inst: Mir.Inst.Index) InnerE
        },
        .rm_rip, .mr_rip, .lock_mr_rip => {
            const mrip = emit.mir.extraData(Mir.MemoryRip, data.rx.payload).data;
-            const op_r = .{ .reg = data.rx.r1 };
+            const op_r = .{ .reg = data.rx.r };
            const op_m = .{ .mem = Mir.MemoryRip.decode(mrip) };
            switch (ops) {
                .rm_rip => {
@@ -340,6 +347,30 @@ fn mirEncodeGeneric(emit: *Emit, tag: Mir.Inst.Tag, inst: Mir.Inst.Index) InnerE
                else => unreachable,
            }
        },
+        .mrr_sib => {
+            const msib = emit.mir.extraData(Mir.MemorySib, data.rrx.payload).data;
+            op1 = .{ .mem = Mir.MemorySib.decode(msib) };
+            op2 = .{ .reg = data.rrx.r1 };
+            op2 = .{ .reg = data.rrx.r2 };
+        },
+        .mrr_rip => {
+            const mrip = emit.mir.extraData(Mir.MemoryRip, data.rrx.payload).data;
+            op1 = .{ .mem = Mir.MemoryRip.decode(mrip) };
+            op2 = .{ .reg = data.rrx.r1 };
+            op2 = .{ .reg = data.rrx.r2 };
+        },
+        .mri_sib => {
+            const msib = emit.mir.extraData(Mir.MemorySib, data.rix.payload).data;
+            op1 = .{ .mem = Mir.MemorySib.decode(msib) };
+            op2 = .{ .reg = data.rix.r };
+            op3 = .{ .imm = Immediate.u(data.rix.i) };
+        },
+        .mri_rip => {
+            const mrip = emit.mir.extraData(Mir.MemoryRip, data.rix.payload).data;
+            op1 = .{ .mem = Mir.MemoryRip.decode(mrip) };
+            op2 = .{ .reg = data.rix.r };
+            op3 = .{ .imm = Immediate.u(data.rix.i) };
+        },
        else => return emit.fail("TODO handle generic encoding: {s}, {s}", .{
            @tagName(mnemonic),
            @tagName(ops),
@@ -451,12 +482,12 @@ fn mirMovsx(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
        },
        .rm_sib => {
            const msib = emit.mir.extraData(Mir.MemorySib, data.rx.payload).data;
-            op1 = .{ .reg = data.rx.r1 };
+            op1 = .{ .reg = data.rx.r };
            op2 = .{ .mem = Mir.MemorySib.decode(msib) };
        },
        .rm_rip => {
            const mrip = emit.mir.extraData(Mir.MemoryRip, data.rx.payload).data;
-            op1 = .{ .reg = data.rx.r1 };
+            op1 = .{ .reg = data.rx.r };
            op2 = .{ .mem = Mir.MemoryRip.decode(mrip) };
        },
        else => unreachable, // TODO
@@ -495,7 +526,7 @@ fn mirCmovcc(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
            const extra = emit.mir.extraData(Mir.MemorySib, data.payload).data;
            const mnemonic = mnemonicFromConditionCode("cmov", data.cc);
            return emit.encode(mnemonic, .{
-                .op1 = .{ .reg = data.r1 },
+                .op1 = .{ .reg = data.r },
                .op2 = .{ .mem = Mir.MemorySib.decode(extra) },
            });
        },
@@ -504,7 +535,7 @@ fn mirCmovcc(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
            const extra = emit.mir.extraData(Mir.MemoryRip, data.payload).data;
            const mnemonic = mnemonicFromConditionCode("cmov", data.cc);
            return emit.encode(mnemonic, .{
-                .op1 = .{ .reg = data.r1 },
+                .op1 = .{ .reg = data.r },
                .op2 = .{ .mem = Mir.MemoryRip.decode(extra) },
            });
        },
@@ -519,7 +550,7 @@ fn mirSetcc(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
            const data = emit.mir.instructions.items(.data)[inst].r_cc;
            const mnemonic = mnemonicFromConditionCode("set", data.cc);
            return emit.encode(mnemonic, .{
-                .op1 = .{ .reg = data.r1 },
+                .op1 = .{ .reg = data.r },
            });
        },
        .m_sib_cc => {
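Throughout mirEncodeGeneric the `_s` variants wrap the stored 32-bit payload in Immediate.s(@bitCast(i32, ...)) while the `_u` variants use Immediate.u. A standalone sketch (my own test, not compiler code) of why the split matters once the value is widened to 64 bits:

    const std = @import("std");

    test "signed vs unsigned view of the same 32-bit payload" {
        const raw: u32 = 0xFFFF_FFFF;
        // The `_s` path reinterprets the bits as signed, so widening sign-extends;
        // the `_u` path keeps the plain unsigned value.
        const signed = @bitCast(i32, raw);
        try std.testing.expectEqual(@as(i64, -1), @as(i64, signed));
        try std.testing.expectEqual(@as(u64, 0xFFFF_FFFF), @as(u64, raw));
    }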
@@ -262,15 +262,15 @@ pub fn format(
            try writer.print("+{s} ", .{tag});
        },
        .m, .mi, .m1, .mc => try writer.print("/{d} ", .{encoding.modRmExt()}),
-        .mr, .rm, .rmi => try writer.writeAll("/r "),
+        .mr, .rm, .rmi, .mri, .mrc => try writer.writeAll("/r "),
    }

    switch (encoding.op_en) {
-        .i, .d, .zi, .oi, .mi, .rmi => {
+        .i, .d, .zi, .oi, .mi, .rmi, .mri => {
            const op = switch (encoding.op_en) {
                .i, .d => encoding.op1,
                .zi, .oi, .mi => encoding.op2,
-                .rmi => encoding.op3,
+                .rmi, .mri => encoding.op3,
                else => unreachable,
            };
            const tag = switch (op) {
@@ -285,7 +285,7 @@ pub fn format(
            };
            try writer.print("{s} ", .{tag});
        },
-        .np, .fd, .td, .o, .m, .m1, .mc, .mr, .rm => {},
+        .np, .fd, .td, .o, .m, .m1, .mc, .mr, .rm, .mrc => {},
    }

    try writer.print("{s} ", .{@tagName(encoding.mnemonic)});
@@ -334,7 +334,7 @@ pub const Mnemonic = enum {
    rcl, rcr, ret, rol, ror,
    sal, sar, sbb,
    scas, scasb, scasd, scasq, scasw,
-    shl, shr, sub, syscall,
+    shl, shld, shr, shrd, sub, syscall,
    seta, setae, setb, setbe, setc, sete, setg, setge, setl, setle, setna, setnae,
    setnb, setnbe, setnc, setne, setng, setnge, setnl, setnle, setno, setnp, setns,
    setnz, seto, setp, setpe, setpo, sets, setz,
@@ -374,7 +374,8 @@ pub const OpEn = enum {
    i, zi,
    d, m,
    fd, td,
-    m1, mc, mi, mr, rm, rmi,
+    m1, mc, mi, mr, rm,
+    rmi, mri, mrc,
    // zig fmt: on
};
@@ -138,8 +138,12 @@ pub const Inst = struct {
        sfence,
        /// Logical shift left
        shl,
+        /// Double precision shift left
+        shld,
        /// Logical shift right
        shr,
+        /// Double precision shift right
+        shrd,
        /// Subtract
        sub,
        /// Syscall
@@ -284,10 +288,10 @@ pub const Inst = struct {
        ri64,
        /// Immediate (sign-extended) operand.
        /// Uses `imm` payload.
-        imm_s,
+        i_s,
        /// Immediate (unsigned) operand.
        /// Uses `imm` payload.
-        imm_u,
+        i_u,
        /// Relative displacement operand.
        /// Uses `imm` payload.
        rel,
@@ -316,23 +320,35 @@ pub const Inst = struct {
        /// Uses `x_cc` with extra data of type `MemoryRip`.
        m_rip_cc,
        /// Memory (SIB), immediate (unsigned) operands.
-        /// Uses `xi` payload with extra data of type `MemorySib`.
-        mi_u_sib,
+        /// Uses `ix` payload with extra data of type `MemorySib`.
+        mi_sib_u,
        /// Memory (RIP), immediate (unsigned) operands.
-        /// Uses `xi` payload with extra data of type `MemoryRip`.
-        mi_u_rip,
+        /// Uses `ix` payload with extra data of type `MemoryRip`.
+        mi_rip_u,
        /// Memory (SIB), immediate (sign-extend) operands.
-        /// Uses `xi` payload with extra data of type `MemorySib`.
-        mi_s_sib,
+        /// Uses `ix` payload with extra data of type `MemorySib`.
+        mi_sib_s,
        /// Memory (RIP), immediate (sign-extend) operands.
-        /// Uses `xi` payload with extra data of type `MemoryRip`.
-        mi_s_rip,
+        /// Uses `ix` payload with extra data of type `MemoryRip`.
+        mi_rip_s,
        /// Memory (SIB), register operands.
        /// Uses `rx` payload with extra data of type `MemorySib`.
        mr_sib,
        /// Memory (RIP), register operands.
        /// Uses `rx` payload with extra data of type `MemoryRip`.
        mr_rip,
+        /// Memory (SIB), register, register operands.
+        /// Uses `rrx` payload with extra data of type `MemorySib`.
+        mrr_sib,
+        /// Memory (RIP), register, register operands.
+        /// Uses `rrx` payload with extra data of type `MemoryRip`.
+        mrr_rip,
+        /// Memory (SIB), register, immediate (byte) operands.
+        /// Uses `rix` payload with extra data of type `MemorySib`.
+        mri_sib,
+        /// Memory (RIP), register, immediate (byte) operands.
+        /// Uses `rix` payload with extra data of type `MemoryRip`.
+        mri_rip,
        /// Rax, Memory moffs.
        /// Uses `payload` with extra data of type `MemoryMoffs`.
        rax_moffs,
@@ -347,16 +363,16 @@ pub const Inst = struct {
        lock_m_rip,
        /// Memory (SIB), immediate (unsigned) operands with lock prefix.
        /// Uses `xi` payload with extra data of type `MemorySib`.
-        lock_mi_u_sib,
+        lock_mi_sib_u,
        /// Memory (RIP), immediate (unsigned) operands with lock prefix.
        /// Uses `xi` payload with extra data of type `MemoryRip`.
-        lock_mi_u_rip,
+        lock_mi_rip_u,
        /// Memory (SIB), immediate (sign-extend) operands with lock prefix.
        /// Uses `xi` payload with extra data of type `MemorySib`.
-        lock_mi_s_sib,
+        lock_mi_sib_s,
        /// Memory (RIP), immediate (sign-extend) operands with lock prefix.
        /// Uses `xi` payload with extra data of type `MemoryRip`.
-        lock_mi_s_rip,
+        lock_mi_rip_s,
        /// Memory (SIB), register operands with lock prefix.
        /// Uses `rx` payload with extra data of type `MemorySib`.
        lock_mr_sib,
@@ -400,7 +416,7 @@ pub const Inst = struct {
            cc: bits.Condition,
        },
        /// A 32-bit immediate value.
-        imm: u32,
+        i: u32,
        r: Register,
        rr: struct {
            r1: Register,
@@ -414,16 +430,16 @@ pub const Inst = struct {
        rri: struct {
            r1: Register,
            r2: Register,
-            imm: u32,
+            i: u32,
        },
        /// Condition code (CC), followed by custom payload found in extra.
        x_cc: struct {
-            payload: u32,
            cc: bits.Condition,
+            payload: u32,
        },
        /// Register with condition code (CC).
        r_cc: struct {
-            r1: Register,
+            r: Register,
            cc: bits.Condition,
        },
        /// Register, register with condition code (CC).
@@ -434,24 +450,36 @@ pub const Inst = struct {
        },
        /// Register, immediate.
        ri: struct {
-            r1: Register,
-            imm: u32,
+            r: Register,
+            i: u32,
        },
        /// Register, followed by custom payload found in extra.
        rx: struct {
-            r1: Register,
+            r: Register,
            payload: u32,
        },
        /// Register with condition code (CC), followed by custom payload found in extra.
        rx_cc: struct {
-            r1: Register,
+            r: Register,
            cc: bits.Condition,
            payload: u32,
        },
-        /// Custom payload followed by an immediate.
-        xi: struct {
+        /// Immediate, followed by Custom payload found in extra.
+        ix: struct {
+            i: u32,
+            payload: u32,
+        },
+        /// Register, register, followed by Custom payload found in extra.
+        rrx: struct {
+            r1: Register,
+            r2: Register,
+            payload: u32,
+        },
+        /// Register, byte immediate, followed by Custom payload found in extra.
+        rix: struct {
+            r: Register,
+            i: u8,
            payload: u32,
-            imm: u32,
        },
        /// String instruction prefix and width.
        string: struct {
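The payload renames above (xi becomes ix, r1/imm become r/i) reorder and rename fields without growing the MIR data union. An illustrative stand-in for the new `ix` shape (my own sketch, not the compiler's type):

    const std = @import("std");

    // An `ix`-style payload: a 32-bit immediate stored inline plus an index
    // ("payload") into an `extra` array where the memory operand lives.
    const Ix = struct {
        i: u32,
        payload: u32,
    };

    comptime {
        // Two u32 fields, so the payload stays 8 bytes like the old `xi`.
        std.debug.assert(@sizeOf(Ix) == 8);
    }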
@@ -411,20 +411,17 @@ pub const Memory = union(enum) {
        dword,
        qword,
+        tbyte,
        dqword,

        pub fn fromSize(size: u32) PtrSize {
-            return if (size <= 1)
-                .byte
-            else if (size <= 2)
-                .word
-            else if (size <= 4)
-                .dword
-            else if (size <= 8)
-                .qword
-            else if (size == 10)
-                .tbyte
-            else
-                unreachable;
+            return switch (size) {
+                1...1 => .byte,
+                2...2 => .word,
+                3...4 => .dword,
+                5...8 => .qword,
+                9...16 => .dqword,
+                else => unreachable,
+            };
        }

        pub fn fromBitSize(bit_size: u64) PtrSize {
@@ -434,6 +431,7 @@ pub const Memory = union(enum) {
                32 => .dword,
                64 => .qword,
+                80 => .tbyte,
                128 => .dqword,
                else => unreachable,
            };
        }
@@ -445,6 +443,7 @@ pub const Memory = union(enum) {
                .dword => 32,
                .qword => 64,
+                .tbyte => 80,
                .dqword => 128,
            };
        }
    };
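fromSize now maps byte counts through a ranged switch and covers up to 16 bytes (dqword), while tbyte is only produced from an explicit 80-bit size. A self-contained re-creation for illustration; the enum below mirrors the diff but is not the compiler source:

    const std = @import("std");

    const PtrSize = enum {
        byte,
        word,
        dword,
        qword,
        tbyte,
        dqword,

        fn fromSize(size: u32) PtrSize {
            return switch (size) {
                1 => .byte,
                2 => .word,
                3...4 => .dword,
                5...8 => .qword,
                9...16 => .dqword,
                else => unreachable,
            };
        }
    };

    test "fromSize rounds a byte count up to a pointer size" {
        try std.testing.expectEqual(PtrSize.dword, PtrSize.fromSize(3));
        try std.testing.expectEqual(PtrSize.dqword, PtrSize.fromSize(16));
    }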
@@ -117,7 +117,7 @@ pub const Instruction = struct {

    pub fn new(mnemonic: Mnemonic, args: Init) !Instruction {
        const encoding = (try Encoding.findByMnemonic(mnemonic, args)) orelse {
-            log.debug("no encoding found for: {s} {s} {s} {s} {s} {s}", .{
+            log.err("no encoding found for: {s} {s} {s} {s} {s} {s}", .{
                @tagName(args.prefix),
                @tagName(mnemonic),
                @tagName(Encoding.Op.fromOperand(args.op1)),
@@ -174,7 +174,7 @@ pub const Instruction = struct {
            .td => try encoder.imm64(inst.op1.mem.moffs.offset),
            else => {
                const mem_op = switch (encoding.op_en) {
-                    .m, .mi, .m1, .mc, .mr => inst.op1,
+                    .m, .mi, .m1, .mc, .mr, .mri, .mrc => inst.op1,
                    .rm, .rmi => inst.op2,
                    else => unreachable,
                };
@@ -182,7 +182,7 @@ pub const Instruction = struct {
                    .reg => |reg| {
                        const rm = switch (encoding.op_en) {
                            .m, .mi, .m1, .mc => encoding.modRmExt(),
-                            .mr => inst.op2.reg.lowEnc(),
+                            .mr, .mri, .mrc => inst.op2.reg.lowEnc(),
                            .rm, .rmi => inst.op1.reg.lowEnc(),
                            else => unreachable,
                        };
@@ -191,7 +191,7 @@ pub const Instruction = struct {
                    .mem => |mem| {
                        const op = switch (encoding.op_en) {
                            .m, .mi, .m1, .mc => .none,
-                            .mr => inst.op2,
+                            .mr, .mri, .mrc => inst.op2,
                            .rm, .rmi => inst.op1,
                            else => unreachable,
                        };
@@ -202,7 +202,7 @@ pub const Instruction = struct {

                switch (encoding.op_en) {
                    .mi => try encodeImm(inst.op2.imm, encoding.op2, encoder),
-                    .rmi => try encodeImm(inst.op3.imm, encoding.op3, encoder),
+                    .rmi, .mri => try encodeImm(inst.op3.imm, encoding.op3, encoder),
                    else => {},
                }
            },
@@ -251,7 +251,7 @@ pub const Instruction = struct {
                else => unreachable,
            };
        } else null,
-        .m, .mi, .m1, .mc, .mr => if (inst.op1.isSegmentRegister()) blk: {
+        .m, .mi, .m1, .mc, .mr, .mri, .mrc => if (inst.op1.isSegmentRegister()) blk: {
            break :blk switch (inst.op1) {
                .reg => |r| r,
                .mem => |m| m.base().?,
@@ -275,13 +275,11 @@ pub const Instruction = struct {

        switch (op_en) {
            .np, .i, .zi, .fd, .td, .d => {},
-            .o, .oi => {
-                rex.b = inst.op1.reg.isExtended();
-            },
-            .m, .mi, .m1, .mc, .mr, .rm, .rmi => {
+            .o, .oi => rex.b = inst.op1.reg.isExtended(),
+            .m, .mi, .m1, .mc, .mr, .rm, .rmi, .mri, .mrc => {
                const r_op = switch (op_en) {
                    .rm, .rmi => inst.op1,
-                    .mr => inst.op2,
+                    .mr, .mri, .mrc => inst.op2,
                    else => null,
                };
                if (r_op) |op| {
@@ -290,7 +288,7 @@ pub const Instruction = struct {

                const b_x_op = switch (op_en) {
                    .rm, .rmi => inst.op2,
-                    .m, .mi, .m1, .mc, .mr => inst.op1,
+                    .m, .mi, .m1, .mc, .mr, .mri, .mrc => inst.op1,
                    else => unreachable,
                };
                switch (b_x_op) {
@@ -257,8 +257,8 @@ pub const table = &[_]Entry{

-    .{ .cmpxchg, .mr, .rm8, .r8, .none, .none, &.{ 0x0f, 0xb0 }, 0, .none },
+    .{ .cmpxchg, .mr, .rm8, .r8, .none, .none, &.{ 0x0f, 0xb0 }, 0, .rex },
-    .{ .cmpxchg, .mr, .rm16, .r16, .none, .none, &.{ 0x0f, 0xb1 }, 0, .rex },
-    .{ .cmpxchg, .mr, .rm32, .r32, .none, .none, &.{ 0x0f, 0xb1 }, 0, .rex },
+    .{ .cmpxchg, .mr, .rm16, .r16, .none, .none, &.{ 0x0f, 0xb1 }, 0, .none },
+    .{ .cmpxchg, .mr, .rm32, .r32, .none, .none, &.{ 0x0f, 0xb1 }, 0, .none },
    .{ .cmpxchg, .mr, .rm64, .r64, .none, .none, &.{ 0x0f, 0xb1 }, 0, .long },

    .{ .cmpxchg8b , .m, .m64, .none, .none, .none, &.{ 0x0f, 0xc7 }, 1, .none },
@@ -693,6 +693,13 @@ pub const table = &[_]Entry{
    .{ .shl, .mi, .rm32, .imm8, .none, .none, &.{ 0xc1 }, 4, .none },
    .{ .shl, .mi, .rm64, .imm8, .none, .none, &.{ 0xc1 }, 4, .long },

+    .{ .shld, .mri, .rm16, .r16, .imm8, .none, &.{ 0x0f, 0xa4 }, 0, .none },
+    .{ .shld, .mrc, .rm16, .r16, .cl, .none, &.{ 0x0f, 0xa5 }, 0, .none },
+    .{ .shld, .mri, .rm32, .r32, .imm8, .none, &.{ 0x0f, 0xa4 }, 0, .none },
+    .{ .shld, .mri, .rm64, .r64, .imm8, .none, &.{ 0x0f, 0xa4 }, 0, .long },
+    .{ .shld, .mrc, .rm32, .r32, .cl, .none, &.{ 0x0f, 0xa5 }, 0, .none },
+    .{ .shld, .mrc, .rm64, .r64, .cl, .none, &.{ 0x0f, 0xa5 }, 0, .long },
+
-    .{ .shr, .m1, .rm8, .unity, .none, .none, &.{ 0xd0 }, 5, .none },
+    .{ .shr, .m1, .rm8, .unity, .none, .none, &.{ 0xd0 }, 5, .rex },
    .{ .shr, .m1, .rm16, .unity, .none, .none, &.{ 0xd1 }, 5, .none },
@@ -709,6 +716,13 @@ pub const table = &[_]Entry{
    .{ .shr, .mi, .rm32, .imm8, .none, .none, &.{ 0xc1 }, 5, .none },
    .{ .shr, .mi, .rm64, .imm8, .none, .none, &.{ 0xc1 }, 5, .long },

+    .{ .shrd, .mri, .rm16, .r16, .imm8, .none, &.{ 0x0f, 0xac }, 0, .none },
+    .{ .shrd, .mrc, .rm16, .r16, .cl, .none, &.{ 0x0f, 0xad }, 0, .none },
+    .{ .shrd, .mri, .rm32, .r32, .imm8, .none, &.{ 0x0f, 0xac }, 0, .none },
+    .{ .shrd, .mri, .rm64, .r64, .imm8, .none, &.{ 0x0f, 0xac }, 0, .long },
+    .{ .shrd, .mrc, .rm32, .r32, .cl, .none, &.{ 0x0f, 0xad }, 0, .none },
+    .{ .shrd, .mrc, .rm64, .r64, .cl, .none, &.{ 0x0f, 0xad }, 0, .long },
+
    .{ .stos, .np, .m8, .none, .none, .none, &.{ 0xaa }, 0, .none },
    .{ .stos, .np, .m16, .none, .none, .none, &.{ 0xab }, 0, .none },
    .{ .stos, .np, .m32, .none, .none, .none, &.{ 0xab }, 0, .none },
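The new shld/shrd table entries use the mri (r/m, reg, imm8) and mrc (r/m, reg, CL) operand encodings introduced above. As a reference for what the instruction computes, here is a hedged software model of 32-bit SHLD; the function name shld32 is my own and is not tied to the encoder:

    const std = @import("std");

    // SHLD dst, src, amt: shift the 64-bit concatenation dst:src left by amt
    // and keep the high 32 bits, so bits shifted out of src fill dst's low end.
    fn shld32(dst: u32, src: u32, amt: u5) u32 {
        const wide = (@as(u64, dst) << 32) | src;
        return @truncate(u32, (wide << amt) >> 32);
    }

    test "shld32 matches the double-precision shift definition" {
        try std.testing.expectEqual(@as(u32, 0x2468_ACF1), shld32(0x1234_5678, 0x9ABC_DEF0, 1));
    }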
@@ -1063,13 +1063,12 @@ pub fn genTypedValue(
        },
        .Optional => {
            if (typed_value.ty.isPtrLikeOptional()) {
-                if (typed_value.val.isNull())
-                    return GenResult.mcv(.{ .immediate = 0 });
+                if (typed_value.val.tag() == .null_value) return GenResult.mcv(.{ .immediate = 0 });

                var buf: Type.Payload.ElemType = undefined;
                return genTypedValue(bin_file, src_loc, .{
                    .ty = typed_value.ty.optionalChild(&buf),
-                    .val = typed_value.val,
+                    .val = if (typed_value.val.castTag(.opt_payload)) |pl| pl.data else typed_value.val,
                }, owner_decl_index);
            } else if (typed_value.ty.abiSize(target) == 1) {
                return GenResult.mcv(.{ .immediate = @boolToInt(!typed_value.val.isNull()) });
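genTypedValue can lower a null pointer-like optional straight to `.{ .immediate = 0 }` because such optionals carry no separate flag: null is simply the zero address. A small behavior-style check of that layout fact (my own sketch, not the codegen test suite):

    const std = @import("std");

    test "pointer-like optional adds no storage" {
        // ?*T and *T have identical size, so codegen may encode null as 0.
        try std.testing.expect(@sizeOf(?*u32) == @sizeOf(*u32));
        const p: ?*u32 = null;
        try std.testing.expect(p == null);
    }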
@@ -149,17 +149,26 @@ pub fn RegisterManager(
            return RegisterLock{ .register = reg };
        }

+        /// Like `lockReg` but locks multiple registers.
+        pub fn lockRegs(
+            self: *Self,
+            comptime count: comptime_int,
+            regs: [count]Register,
+        ) [count]?RegisterLock {
+            var results: [count]?RegisterLock = undefined;
+            for (&results, regs) |*result, reg| result.* = self.lockReg(reg);
+            return results;
+        }
+
        /// Like `lockRegAssumeUnused` but locks multiple registers.
        pub fn lockRegsAssumeUnused(
            self: *Self,
            comptime count: comptime_int,
            regs: [count]Register,
        ) [count]RegisterLock {
-            var buf: [count]RegisterLock = undefined;
-            for (regs, 0..) |reg, i| {
-                buf[i] = self.lockRegAssumeUnused(reg);
-            }
-            return buf;
+            var results: [count]RegisterLock = undefined;
+            for (&results, regs) |*result, reg| result.* = self.lockRegAssumeUnused(reg);
+            return results;
        }

        /// Unlocks the register allowing its re-allocation and re-use.
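Both register-locking helpers now use a multi-object `for` that walks the result array and the input registers in lock step. A standalone illustration of the pattern (hypothetical data, not the register manager):

    const std = @import("std");

    test "multi-object for fills one array from another" {
        const regs = [3]u32{ 1, 2, 3 };
        var locks: [3]u32 = undefined;
        // Same shape as `for (&results, regs) |*result, reg| ...` above:
        // a pointer into the output iterated alongside each input element.
        for (&locks, regs) |*lock, reg| lock.* = reg * 10;
        try std.testing.expectEqualSlices(u32, &[_]u32{ 10, 20, 30 }, &locks);
    }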
@@ -70,7 +70,6 @@ test "array concat with undefined" {
test "array concat with tuple" {
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO

    const array: [2]u8 = .{ 1, 2 };
    {
@@ -641,7 +640,6 @@ test "tuple to array handles sentinel" {
}

test "array init of container level array variable" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -5,7 +5,6 @@ const expectEqual = std.testing.expectEqual;

test "cmpxchg" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -96,7 +95,6 @@ test "cmpxchg with ptr" {

test "cmpxchg with ignored result" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -143,7 +141,6 @@ var a_global_variable = @as(u32, 1234);

test "cmpxchg on a global variable" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -384,7 +381,6 @@ fn testAtomicRmwInt128(comptime signedness: std.builtin.Signedness) !void {

test "atomics with different types" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
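With the backend now handling these builtins, the stage2_x86_64 skips in the atomics tests are removed. The shape of what those tests exercise, as a minimal hedged example (not the test file itself):

    const std = @import("std");

    test "cmpxchg swaps only on an expected value" {
        var x: u32 = 1234;
        // Returns null on success, or the current value if the comparison failed.
        try std.testing.expect(@cmpxchgStrong(u32, &x, 1234, 5678, .SeqCst, .SeqCst) == null);
        try std.testing.expectEqual(@as(u32, 5678), x);
        try std.testing.expect(@cmpxchgStrong(u32, &x, 1234, 9999, .SeqCst, .SeqCst) != null);
    }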
@@ -157,7 +157,6 @@ test "bitcast generates a temporary value" {
}

test "@bitCast packed structs at runtime and comptime" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -215,7 +214,6 @@ test "@bitCast extern structs at runtime and comptime" {
}

test "bitcast packed struct to integer and back" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -4,7 +4,6 @@ const expectEqualStrings = std.testing.expectEqualStrings;

test "slicing slices" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -3,7 +3,6 @@ const builtin = @import("builtin");

test {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -15,7 +15,6 @@ fn takeFoo(foo: *const Foo) !void {

test {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -20,7 +20,6 @@ fn letter(e: Letter) u8 {

test {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -10,7 +10,6 @@ var buffer: [256]u8 = undefined;

test {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -14,7 +14,6 @@ fn bar(pointer: ?*anyopaque) void {
test "fixed" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    bar(t);
@@ -11,7 +11,6 @@ const text =
;

test "issue 6456" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -4,7 +4,6 @@ const builtin = @import("builtin");
test "@ptrCast from const to nullable" {
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    const c: u8 = 4;
@@ -15,7 +14,6 @@ test "@ptrCast from const to nullable" {
test "@ptrCast from var in empty struct to nullable" {
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    const container = struct {
@@ -8,7 +8,6 @@ const array = [_][]const []const u8{

test {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -44,7 +44,6 @@ pub fn b(x: *X) !void {
}

test {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -3,7 +3,6 @@ const builtin = @import("builtin");

test "empty file level struct" {
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO

    const T = @import("empty_file_level_struct.zig");
@@ -15,7 +14,6 @@ test "empty file level struct" {

test "empty file level union" {
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO

    const T = @import("empty_file_level_union.zig");
@@ -96,7 +96,6 @@ test "discard the result of a function that returns a struct" {
}

test "inline function call that calls optional function pointer, return pointer at callsite interacts correctly with callsite return type" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -2,7 +2,6 @@ const std = @import("std");
const builtin = @import("builtin");

test "strlit to vector" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -560,7 +560,6 @@ fn testUnsignedNegationWrappingEval(x: u16) !void {
test "negation wrapping" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO

    try expectEqual(@as(u1, 1), negateWrap(u1, 1));
}
@@ -431,7 +431,6 @@ test "alignment of wrapping an optional payload" {
test "Optional slice size is optimized" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    try expect(@sizeOf(?[]u8) == @sizeOf([]u8));
@@ -120,7 +120,6 @@ test "consistent size of packed structs" {
}

test "correct sizeOf and offsets in packed structs" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -188,7 +187,6 @@ test "correct sizeOf and offsets in packed structs" {
}

test "nested packed structs" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -254,7 +252,6 @@ test "regular in irregular packed struct" {
}

test "byte-aligned field pointer offsets" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -397,7 +394,6 @@ test "@ptrToInt on a packed struct field" {
}

test "optional pointer in packed struct" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -530,7 +526,6 @@ test "nested packed struct field access test" {
test "runtime init of unnamed packed struct type" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    var z: u8 = 123;
@@ -545,7 +540,6 @@ test "runtime init of unnamed packed struct type" {
test "packed struct passed to callconv(.C) function" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    const S = struct {
@@ -571,7 +565,6 @@ test "packed struct passed to callconv(.C) function" {
test "overaligned pointer to packed struct" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

    const S = packed struct { a: u32, b: u32 };
    var foo: S align(4) = .{ .a = 123, .b = 456 };
@@ -6,7 +6,6 @@ const native_endian = builtin.cpu.arch.endian();

test "packed struct explicit backing integer" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -412,7 +412,6 @@ test "@ptrToInt on null optional at comptime" {
test "indexing array with sentinel returns correct type" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    var s: [:0]const u8 = "abc";
@@ -497,7 +496,6 @@ test "pointer to constant decl preserves alignment" {
test "ptrCast comptime known slice to C pointer" {
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    const s: [:0]const u8 = "foo";
@@ -688,7 +688,6 @@ test "slice field ptr var" {
test "global slice field access" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    const S = struct {
@@ -733,7 +732,6 @@ test "empty slice ptr is non null" {
test "slice decays to many pointer" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

    var buf: [8]u8 = "abcdefg\x00".*;
    const p: [*:0]const u8 = buf[0..7 :0];
@@ -744,7 +742,6 @@ test "write through pointer to optional slice arg" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

    const S = struct {
        fn bar(foo: *?[]const u8) !void {
@@ -387,7 +387,6 @@ const APackedStruct = packed struct {
test "packed struct" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    var foo = APackedStruct{
@@ -496,7 +495,6 @@ const Bitfields = packed struct {
test "packed struct fields are ordered from LSB to MSB" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

    var all: u64 = 0x7765443322221111;
@@ -632,7 +630,6 @@ test "default struct initialization fields" {
}

test "packed array 24bits" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -827,7 +824,6 @@ test "non-packed struct with u128 entry in union" {
}

test "packed struct field passed to generic function" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -939,7 +935,6 @@ test "comptime struct field" {
}

test "tuple element initialized with fn call" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -620,7 +620,6 @@ test "switch on error set with single else" {
}

test "switch capture copies its payload" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -21,7 +21,6 @@ test "thread local variable" {

test "pointer to thread local array" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_llvm) switch (builtin.cpu.arch) {
@@ -23,7 +23,6 @@ test "casting to void with a macro" {
}

test "initializer list expression" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -52,7 +51,6 @@ test "reference to a struct type" {

test "cast negative integer to pointer" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -98,7 +96,6 @@ test "casting or calling a value with a paren-surrounded macro" {

test "nested comma operator" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -109,7 +106,6 @@ test "nested comma operator" {

test "cast functions" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -123,7 +119,6 @@ test "cast functions" {

test "large integer macro" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -133,7 +128,6 @@ test "large integer macro" {

test "string literal macro with embedded tab character" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -143,7 +137,6 @@ test "string literal macro with embedded tab character" {

test "string and char literals that are not UTF-8 encoded. Issue #12784" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -188,7 +181,6 @@ test "Macro that uses division operator. Issue #13162" {

test "Macro that uses remainder operator. Issue #13346" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -381,7 +381,6 @@ test "tuple of struct concatenation and coercion to array" {
test "nested runtime conditionals in tuple initializer" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

    var data: u8 = 0;
    const x = .{
@@ -7,7 +7,6 @@ const expectEqualStrings = testing.expectEqualStrings;
test "tuple declaration type info" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

    {
        const T = struct { comptime u32 align(2) = 1, []const u8 };
@@ -57,7 +56,6 @@ test "tuple declaration type info" {
test "Tuple declaration usage" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

    const T = struct { u32, []const u8 };
    var t: T = .{ 1, "foo" };
@@ -200,7 +200,6 @@ test "Type.ErrorUnion" {

test "Type.Opaque" {
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -348,7 +347,6 @@ test "Type.Struct" {
}

test "Type.Enum" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO

@@ -568,7 +568,6 @@ test "value from struct @typeInfo default_value can be loaded at comptime" {
test "@typeInfo decls and usingnamespace" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO

    const A = struct {
        const x = 5;
@@ -64,7 +64,6 @@ test "anon field init" {
}

test "basic" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -228,7 +227,6 @@ test "local variable" {
}

test "comptime parameters not converted to anytype in function type" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -1408,7 +1408,6 @@ test "union field ptr - zero sized field" {
}

test "packed union in packed struct" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
@@ -1494,7 +1493,6 @@ test "union reassignment can use previous value" {
}

test "packed union with zero-bit field" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
@@ -1267,7 +1267,6 @@ test "store to vector in slice" {
test "addition of vectors represented as strings" {
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO

    const V = @Vector(3, u8);
    const foo: V = "foo".*;
@@ -112,6 +112,7 @@ const test_targets = blk: {
            .os_tag = .windows,
            .abi = .gnu,
        },
+        .single_threaded = true, // https://github.com/ziglang/zig/issues/15075
        .backend = .stage2_x86_64,
    },