x86_64: implement and test unary float builtins
parent af40bce08a
commit 3bd1b9e15f
@@ -1871,8 +1871,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 comp.job_queued_compiler_rt_lib = true;
 } else if (options.output_mode != .Obj) {
 log.debug("queuing a job to build compiler_rt_obj", .{});
-// If build-obj with -fcompiler-rt is requested, that is handled specially
-// elsewhere. In this case we are making a static library, so we ask
+// In this case we are making a static library, so we ask
 // for a compiler-rt object to put in it.
 comp.job_queued_compiler_rt_obj = true;
 }

@@ -5407,19 +5407,19 @@ pub fn getAnonStructType(ip: *InternPool, gpa: Allocator, ini: AnonStructTypeIni

 /// This is equivalent to `Key.FuncType` but adjusted to have a slice for `param_types`.
 pub const GetFuncTypeKey = struct {
-param_types: []Index,
+param_types: []const Index,
 return_type: Index,
-comptime_bits: u32,
-noalias_bits: u32,
+comptime_bits: u32 = 0,
+noalias_bits: u32 = 0,
 /// `null` means generic.
-alignment: ?Alignment,
+alignment: ?Alignment = .none,
 /// `null` means generic.
-cc: ?std.builtin.CallingConvention,
-is_var_args: bool,
-is_generic: bool,
-is_noinline: bool,
-section_is_generic: bool,
-addrspace_is_generic: bool,
+cc: ?std.builtin.CallingConvention = .Unspecified,
+is_var_args: bool = false,
+is_generic: bool = false,
+is_noinline: bool = false,
+section_is_generic: bool = false,
+addrspace_is_generic: bool = false,
 };

 pub fn getFuncType(ip: *InternPool, gpa: Allocator, key: GetFuncTypeKey) Allocator.Error!Index {
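With the new defaults in GetFuncTypeKey, a call site only has to spell out the fields that differ from a plain, non-generic function. A minimal sketch of such a call, assuming `ip: *InternPool` and `gpa: Allocator` are in scope (the concrete types are placeholders, not taken from this commit):

// Omitted fields fall back to their defaults: .none alignment, .Unspecified
// calling convention, no varargs, not generic, not noinline.
const f64_unary_ty = try ip.getFuncType(gpa, .{
    .param_types = &.{.f64_type},
    .return_type = .f64_type,
    .cc = .C, // override only what differs from the defaults
});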
@@ -5754,15 +5754,10 @@ pub fn getFuncInstance(ip: *InternPool, gpa: Allocator, arg: GetFuncInstanceKey)
 const func_ty = try ip.getFuncType(gpa, .{
 .param_types = arg.param_types,
 .return_type = arg.bare_return_type,
-.comptime_bits = 0,
 .noalias_bits = arg.noalias_bits,
 .alignment = arg.alignment,
 .cc = arg.cc,
-.is_var_args = false,
-.is_generic = false,
 .is_noinline = arg.is_noinline,
-.section_is_generic = false,
-.addrspace_is_generic = false,
 });

 const generic_owner = unwrapCoercedFunc(ip, arg.generic_owner);

src/Sema.zig
@@ -7373,10 +7373,10 @@ fn analyzeCall(
 const memoized_arg_values = try sema.arena.alloc(InternPool.Index, func_ty_info.param_types.len);

 const owner_info = mod.typeToFunc(fn_owner_decl.ty).?;
+const new_param_types = try sema.arena.alloc(InternPool.Index, owner_info.param_types.len);
 var new_fn_info: InternPool.GetFuncTypeKey = .{
-.param_types = try sema.arena.alloc(InternPool.Index, owner_info.param_types.len),
+.param_types = new_param_types,
 .return_type = owner_info.return_type,
-.comptime_bits = 0,
 .noalias_bits = owner_info.noalias_bits,
 .alignment = if (owner_info.align_is_generic) null else owner_info.alignment,
 .cc = if (owner_info.cc_is_generic) null else owner_info.cc,
@@ -7403,7 +7403,7 @@ fn analyzeCall(
 block,
 &child_block,
 inst,
-new_fn_info.param_types,
+new_param_types,
 &arg_i,
 args_info,
 is_comptime_call,
@@ -21144,16 +21144,11 @@ fn zirReify(

 const ty = try mod.funcType(.{
 .param_types = param_types,
-.comptime_bits = 0,
 .noalias_bits = noalias_bits,
 .return_type = return_type.toIntern(),
 .alignment = alignment,
 .cc = cc,
 .is_var_args = is_var_args,
-.is_generic = false,
-.is_noinline = false,
-.section_is_generic = false,
-.addrspace_is_generic = false,
 });
 return Air.internedToRef(ty.toIntern());
 },

@@ -1807,7 +1807,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
 .log2,
 .log10,
 .round,
-=> try self.airUnaryMath(inst),
+=> |tag| try self.airUnaryMath(inst, tag),

 .floor => try self.airRound(inst, 0b1_0_01),
 .ceil => try self.airRound(inst, 0b1_0_10),
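For reference, the immediates passed to airRound (and later to genRound) follow the SSE4.1 roundss/roundsd imm8 layout. The named constants below are purely illustrative and do not exist in the source:

// imm8 layout for roundss/roundsd/roundps/roundpd:
//   bit 3    = 1 -> suppress the precision (inexact) exception
//   bit 2    = 0 -> use the rounding mode in bits 1:0 (1 would defer to MXCSR.RC)
//   bits 1:0 = 00 nearest, 01 down (floor), 10 up (ceil), 11 toward zero (trunc)
const round_down: u8 = 0b1_0_01; // the .floor / .div_floor immediate above
const round_up: u8 = 0b1_0_10; // the .ceil immediate above
const round_toward_zero: u8 = 0b1_0_11; // what a truncating round would use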
@@ -5280,13 +5280,35 @@ fn airSqrt(self: *Self, inst: Air.Inst.Index) !void {
 return self.finishAir(inst, result, .{ un_op, .none, .none });
 }

-fn airUnaryMath(self: *Self, inst: Air.Inst.Index) !void {
+fn airUnaryMath(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
 const un_op = self.air.instructions.items(.data)[inst].un_op;
-_ = un_op;
-return self.fail("TODO implement airUnaryMath for {}", .{
-self.air.instructions.items(.tag)[inst],
-});
-//return self.finishAir(inst, result, .{ un_op, .none, .none });
+const ty = self.typeOf(un_op).toIntern();
+const result = try self.genCall(.{ .lib = .{
+.return_type = ty,
+.param_types = &.{ty},
+.callee = switch (tag) {
+inline .sin,
+.cos,
+.tan,
+.exp,
+.exp2,
+.log,
+.log2,
+.log10,
+.round,
+=> |comptime_tag| switch (ty) {
+.f16_type => "__" ++ @tagName(comptime_tag) ++ "h",
+.f32_type => @tagName(comptime_tag) ++ "f",
+.f64_type => @tagName(comptime_tag),
+.f80_type => "__" ++ @tagName(comptime_tag) ++ "x",
+.f128_type => @tagName(comptime_tag) ++ "q",
+.c_longdouble_type => @tagName(comptime_tag) ++ "l",
+else => unreachable,
+},
+else => unreachable,
+},
+} }, &.{un_op});
+return self.finishAir(inst, result, .{ un_op, .none, .none });
 }

 fn reuseOperand(
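The callee switch above derives a libm/compiler-rt symbol from the AIR tag plus a per-type suffix. A self-contained sketch of that naming scheme; the helper name `libmSymbol` is hypothetical and not part of the commit:

const std = @import("std");

// f16 -> "__<name>h", f32 -> "<name>f", f64 -> "<name>",
// f80 -> "__<name>x", f128 -> "<name>q", c_longdouble -> "<name>l"
fn libmSymbol(comptime base: [:0]const u8, comptime T: type) []const u8 {
    return if (T == f16)
        "__" ++ base ++ "h"
    else if (T == f32)
        base ++ "f"
    else if (T == f64)
        base
    else if (T == f80)
        "__" ++ base ++ "x"
    else if (T == f128)
        base ++ "q"
        // c_longdouble aliases one of the other float types at the Zig type level,
        // so it is matched last here; the backend instead distinguishes it by its
        // interned .c_longdouble_type index.
    else if (T == c_longdouble)
        base ++ "l"
    else
        @compileError("unsupported float type");
}

test "libm symbol naming" {
    try std.testing.expectEqualStrings("__sinh", libmSymbol("sin", f16));
    try std.testing.expectEqualStrings("sinf", libmSymbol("sin", f32));
    try std.testing.expectEqualStrings("sin", libmSymbol("sin", f64));
    try std.testing.expectEqualStrings("sinq", libmSymbol("sin", f128));
}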
@@ -7290,7 +7312,7 @@ fn genBinOp(

 switch (air_tag) {
 .add, .add_wrap, .sub, .sub_wrap, .mul, .mul_wrap, .div_float, .div_exact => {},
-.div_trunc, .div_floor => if (self.hasFeature(.sse4_1)) try self.genRound(
+.div_trunc, .div_floor => try self.genRound(
 lhs_ty,
 dst_reg,
 .{ .register = dst_reg },
@@ -7299,9 +7321,7 @@ fn genBinOp(
 .div_floor => 0b1_0_01,
 else => unreachable,
 },
-) else return self.fail("TODO implement genBinOp for {s} {} without sse4_1 feature", .{
-@tagName(air_tag), lhs_ty.fmt(self.bin_file.options.module.?),
-}),
+),
 .bit_and, .bit_or, .xor => {},
 .max, .min => if (maybe_mask_reg) |mask_reg| if (self.hasFeature(.avx)) {
 const rhs_copy_reg = registerAlias(src_mcv.getReg().?, abi_size);
@ -8124,31 +8144,59 @@ fn airFence(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
if (modifier == .always_tail) return self.fail("TODO implement tail calls for x86_64", .{});
|
||||
|
||||
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
|
||||
const callee = pl_op.operand;
|
||||
const extra = self.air.extraData(Air.Call, pl_op.payload);
|
||||
const args: []const Air.Inst.Ref = @ptrCast(self.air.extra[extra.end..][0..extra.data.args_len]);
|
||||
const ty = self.typeOf(callee);
|
||||
|
||||
const fn_ty = switch (ty.zigTypeTag(mod)) {
|
||||
.Fn => ty,
|
||||
.Pointer => ty.childType(mod),
|
||||
else => unreachable,
|
||||
const ret = try self.genCall(.{ .air = pl_op.operand }, args);
|
||||
|
||||
var bt = self.liveness.iterateBigTomb(inst);
|
||||
self.feed(&bt, pl_op.operand);
|
||||
for (args) |arg| self.feed(&bt, arg);
|
||||
|
||||
const result = if (self.liveness.isUnused(inst)) .unreach else ret;
|
||||
return self.finishAirResult(inst, result);
|
||||
}
|
||||
|
||||
fn genCall(self: *Self, info: union(enum) {
|
||||
air: Air.Inst.Ref,
|
||||
lib: struct {
|
||||
return_type: InternPool.Index,
|
||||
param_types: []const InternPool.Index,
|
||||
lib: ?[]const u8 = null,
|
||||
callee: []const u8,
|
||||
},
|
||||
}, args: []const Air.Inst.Ref) !MCValue {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
|
||||
const fn_ty = switch (info) {
|
||||
.air => |callee| fn_info: {
|
||||
const callee_ty = self.typeOf(callee);
|
||||
break :fn_info switch (callee_ty.zigTypeTag(mod)) {
|
||||
.Fn => callee_ty,
|
||||
.Pointer => callee_ty.childType(mod),
|
||||
else => unreachable,
|
||||
};
|
||||
},
|
||||
.lib => |lib| try mod.funcType(.{
|
||||
.param_types = lib.param_types,
|
||||
.return_type = lib.return_type,
|
||||
.cc = .C,
|
||||
}),
|
||||
};
|
||||
|
||||
const fn_info = mod.typeToFunc(fn_ty).?;
|
||||
|
||||
var info = try self.resolveCallingConventionValues(fn_info, args[fn_info.param_types.len..], .call_frame);
|
||||
defer info.deinit(self);
|
||||
var call_info =
|
||||
try self.resolveCallingConventionValues(fn_info, args[fn_info.param_types.len..], .call_frame);
|
||||
defer call_info.deinit(self);
|
||||
|
||||
// We need a properly aligned and sized call frame to be able to call this function.
|
||||
{
|
||||
const needed_call_frame =
|
||||
FrameAlloc.init(.{
|
||||
.size = info.stack_byte_count,
|
||||
.alignment = info.stack_align,
|
||||
const needed_call_frame = FrameAlloc.init(.{
|
||||
.size = call_info.stack_byte_count,
|
||||
.alignment = call_info.stack_align,
|
||||
});
|
||||
const frame_allocs_slice = self.frame_allocs.slice();
|
||||
const stack_frame_size =
|
||||
@ -8164,24 +8212,20 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
|
||||
|
||||
// set stack arguments first because this can clobber registers
|
||||
// also clobber spill arguments as we go
|
||||
switch (info.return_value.long) {
|
||||
switch (call_info.return_value.long) {
|
||||
.none, .unreach => {},
|
||||
.indirect => |reg_off| try self.spillRegisters(&.{reg_off.reg}),
|
||||
else => unreachable,
|
||||
}
|
||||
for (args, info.args) |arg, mc_arg| {
|
||||
const arg_ty = self.typeOf(arg);
|
||||
const arg_mcv = try self.resolveInst(arg);
|
||||
switch (mc_arg) {
|
||||
.none => {},
|
||||
.register => |reg| try self.spillRegisters(&.{reg}),
|
||||
.load_frame => try self.genCopy(arg_ty, mc_arg, arg_mcv),
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
for (call_info.args, args) |dst_arg, src_arg| switch (dst_arg) {
|
||||
.none => {},
|
||||
.register => |reg| try self.spillRegisters(&.{reg}),
|
||||
.load_frame => try self.genCopy(self.typeOf(src_arg), dst_arg, try self.resolveInst(src_arg)),
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
// now we are free to set register arguments
|
||||
const ret_lock = switch (info.return_value.long) {
|
||||
const ret_lock = switch (call_info.return_value.long) {
|
||||
.none, .unreach => null,
|
||||
.indirect => |reg_off| lock: {
|
||||
const ret_ty = fn_info.return_type.toType();
|
||||
@ -8189,125 +8233,80 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
|
||||
try self.genSetReg(reg_off.reg, Type.usize, .{
|
||||
.lea_frame = .{ .index = frame_index, .off = -reg_off.off },
|
||||
});
|
||||
info.return_value.short = .{ .load_frame = .{ .index = frame_index } };
|
||||
call_info.return_value.short = .{ .load_frame = .{ .index = frame_index } };
|
||||
break :lock self.register_manager.lockRegAssumeUnused(reg_off.reg);
|
||||
},
|
||||
else => unreachable,
|
||||
};
|
||||
defer if (ret_lock) |lock| self.register_manager.unlockReg(lock);
|
||||
|
||||
for (args, info.args) |arg, mc_arg| {
|
||||
const arg_ty = self.typeOf(arg);
|
||||
const arg_mcv = try self.resolveInst(arg);
|
||||
switch (mc_arg) {
|
||||
for (call_info.args, args) |dst_arg, src_arg| {
|
||||
switch (dst_arg) {
|
||||
.none, .load_frame => {},
|
||||
.register => try self.genCopy(arg_ty, mc_arg, arg_mcv),
|
||||
.register => try self.genCopy(self.typeOf(src_arg), dst_arg, try self.resolveInst(src_arg)),
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
|
||||
// Due to incremental compilation, how function calls are generated depends
|
||||
// on linking.
|
||||
if (try self.air.value(callee, mod)) |func_value| {
|
||||
const func_key = mod.intern_pool.indexToKey(func_value.ip_index);
|
||||
if (switch (func_key) {
|
||||
.func => |func| func.owner_decl,
|
||||
.ptr => |ptr| switch (ptr.addr) {
|
||||
.decl => |decl| decl,
|
||||
switch (info) {
|
||||
.air => |callee| if (try self.air.value(callee, mod)) |func_value| {
|
||||
const func_key = mod.intern_pool.indexToKey(func_value.ip_index);
|
||||
if (switch (func_key) {
|
||||
.func => |func| func.owner_decl,
|
||||
.ptr => |ptr| switch (ptr.addr) {
|
||||
.decl => |decl| decl,
|
||||
else => null,
|
||||
},
|
||||
else => null,
|
||||
},
|
||||
else => null,
|
||||
}) |owner_decl| {
|
||||
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
|
||||
const sym_index = try elf_file.getOrCreateMetadataForDecl(owner_decl);
|
||||
const sym = elf_file.symbol(sym_index);
|
||||
sym.flags.needs_got = true;
|
||||
_ = try sym.getOrCreateGotEntry(sym_index, elf_file);
|
||||
_ = try self.addInst(.{
|
||||
.tag = .call,
|
||||
.ops = .direct_got_reloc,
|
||||
.data = .{ .reloc = .{
|
||||
.atom_index = try self.owner.getSymbolIndex(self),
|
||||
.sym_index = sym.esym_index,
|
||||
} },
|
||||
});
|
||||
} else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
|
||||
const atom = try coff_file.getOrCreateAtomForDecl(owner_decl);
|
||||
const sym_index = coff_file.getAtom(atom).getSymbolIndex().?;
|
||||
try self.genSetReg(.rax, Type.usize, .{ .lea_got = sym_index });
|
||||
try self.asmRegister(.{ ._, .call }, .rax);
|
||||
} else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
|
||||
const atom = try macho_file.getOrCreateAtomForDecl(owner_decl);
|
||||
const sym_index = macho_file.getAtom(atom).getSymbolIndex().?;
|
||||
try self.genSetReg(.rax, Type.usize, .{ .lea_got = sym_index });
|
||||
try self.asmRegister(.{ ._, .call }, .rax);
|
||||
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
|
||||
const atom_index = try p9.seeDecl(owner_decl);
|
||||
const atom = p9.getAtom(atom_index);
|
||||
try self.asmMemory(.{ ._, .call }, Memory.sib(.qword, .{
|
||||
.base = .{ .reg = .ds },
|
||||
.disp = @intCast(atom.getOffsetTableAddress(p9)),
|
||||
}));
|
||||
} else unreachable;
|
||||
} else if (func_value.getExternFunc(mod)) |extern_func| {
|
||||
const decl_name = mod.intern_pool.stringToSlice(mod.declPtr(extern_func.decl).name);
|
||||
const lib_name = mod.intern_pool.stringToSliceUnwrap(extern_func.lib_name);
|
||||
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
|
||||
const atom_index = try self.owner.getSymbolIndex(self);
|
||||
const sym_index = try elf_file.getGlobalSymbol(decl_name, lib_name);
|
||||
_ = try self.addInst(.{
|
||||
.tag = .call,
|
||||
.ops = .extern_fn_reloc,
|
||||
.data = .{ .reloc = .{
|
||||
.atom_index = atom_index,
|
||||
.sym_index = sym_index,
|
||||
} },
|
||||
});
|
||||
} else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
|
||||
const atom_index = try self.owner.getSymbolIndex(self);
|
||||
const sym_index = try coff_file.getGlobalSymbol(decl_name, lib_name);
|
||||
_ = try self.addInst(.{
|
||||
.tag = .mov,
|
||||
.ops = .import_reloc,
|
||||
.data = .{ .rx = .{
|
||||
.r1 = .rax,
|
||||
.payload = try self.addExtra(Mir.Reloc{
|
||||
.atom_index = atom_index,
|
||||
.sym_index = sym_index,
|
||||
}),
|
||||
} },
|
||||
});
|
||||
try self.asmRegister(.{ ._, .call }, .rax);
|
||||
} else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
|
||||
const atom_index = try self.owner.getSymbolIndex(self);
|
||||
const sym_index = try macho_file.getGlobalSymbol(decl_name, lib_name);
|
||||
_ = try self.addInst(.{
|
||||
.tag = .call,
|
||||
.ops = .extern_fn_reloc,
|
||||
.data = .{ .reloc = .{
|
||||
.atom_index = atom_index,
|
||||
.sym_index = sym_index,
|
||||
} },
|
||||
});
|
||||
}) |owner_decl| {
|
||||
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
|
||||
const sym_index = try elf_file.getOrCreateMetadataForDecl(owner_decl);
|
||||
const sym = elf_file.symbol(sym_index);
|
||||
sym.flags.needs_got = true;
|
||||
_ = try sym.getOrCreateGotEntry(sym_index, elf_file);
|
||||
_ = try self.addInst(.{
|
||||
.tag = .call,
|
||||
.ops = .direct_got_reloc,
|
||||
.data = .{ .reloc = .{
|
||||
.atom_index = try self.owner.getSymbolIndex(self),
|
||||
.sym_index = sym.esym_index,
|
||||
} },
|
||||
});
|
||||
} else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
|
||||
const atom = try coff_file.getOrCreateAtomForDecl(owner_decl);
|
||||
const sym_index = coff_file.getAtom(atom).getSymbolIndex().?;
|
||||
try self.genSetReg(.rax, Type.usize, .{ .lea_got = sym_index });
|
||||
try self.asmRegister(.{ ._, .call }, .rax);
|
||||
} else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
|
||||
const atom = try macho_file.getOrCreateAtomForDecl(owner_decl);
|
||||
const sym_index = macho_file.getAtom(atom).getSymbolIndex().?;
|
||||
try self.genSetReg(.rax, Type.usize, .{ .lea_got = sym_index });
|
||||
try self.asmRegister(.{ ._, .call }, .rax);
|
||||
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
|
||||
const atom_index = try p9.seeDecl(owner_decl);
|
||||
const atom = p9.getAtom(atom_index);
|
||||
try self.asmMemory(.{ ._, .call }, Memory.sib(.qword, .{
|
||||
.base = .{ .reg = .ds },
|
||||
.disp = @intCast(atom.getOffsetTableAddress(p9)),
|
||||
}));
|
||||
} else unreachable;
|
||||
} else if (func_value.getExternFunc(mod)) |extern_func| {
|
||||
const lib_name = mod.intern_pool.stringToSliceUnwrap(extern_func.lib_name);
|
||||
const decl_name = mod.intern_pool.stringToSlice(mod.declPtr(extern_func.decl).name);
|
||||
try self.genExternSymbolRef(.call, lib_name, decl_name);
|
||||
} else {
|
||||
return self.fail("TODO implement calling extern functions", .{});
|
||||
return self.fail("TODO implement calling bitcasted functions", .{});
|
||||
}
|
||||
} else {
|
||||
return self.fail("TODO implement calling bitcasted functions", .{});
|
||||
}
|
||||
} else {
|
||||
assert(ty.zigTypeTag(mod) == .Pointer);
|
||||
const mcv = try self.resolveInst(callee);
|
||||
try self.genSetReg(.rax, Type.usize, mcv);
|
||||
try self.asmRegister(.{ ._, .call }, .rax);
|
||||
assert(self.typeOf(callee).zigTypeTag(mod) == .Pointer);
|
||||
try self.genSetReg(.rax, Type.usize, try self.resolveInst(callee));
|
||||
try self.asmRegister(.{ ._, .call }, .rax);
|
||||
},
|
||||
.lib => |lib| try self.genExternSymbolRef(.call, lib.lib, lib.callee),
|
||||
}
|
||||
|
||||
var bt = self.liveness.iterateBigTomb(inst);
|
||||
self.feed(&bt, callee);
|
||||
for (args) |arg| self.feed(&bt, arg);
|
||||
|
||||
const result = if (self.liveness.isUnused(inst)) .unreach else info.return_value.short;
|
||||
return self.finishAirResult(inst, result);
|
||||
return call_info.return_value.short;
|
||||
}
|
||||
|
||||
fn airRet(self: *Self, inst: Air.Inst.Index) !void {
|
||||
@ -10281,6 +10280,51 @@ fn genInlineMemset(self: *Self, dst_ptr: MCValue, value: MCValue, len: MCValue)
|
||||
try self.asmOpOnly(.{ .@"rep _sb", .sto });
|
||||
}
|
||||
|
||||
fn genExternSymbolRef(
|
||||
self: *Self,
|
||||
comptime tag: Mir.Inst.Tag,
|
||||
lib: ?[]const u8,
|
||||
callee: []const u8,
|
||||
) InnerError!void {
|
||||
const atom_index = try self.owner.getSymbolIndex(self);
|
||||
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
|
||||
_ = try self.addInst(.{
|
||||
.tag = tag,
|
||||
.ops = .extern_fn_reloc,
|
||||
.data = .{ .reloc = .{
|
||||
.atom_index = atom_index,
|
||||
.sym_index = try elf_file.getGlobalSymbol(callee, lib),
|
||||
} },
|
||||
});
|
||||
} else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
|
||||
_ = try self.addInst(.{
|
||||
.tag = .mov,
|
||||
.ops = .import_reloc,
|
||||
.data = .{ .rx = .{
|
||||
.r1 = .rax,
|
||||
.payload = try self.addExtra(Mir.Reloc{
|
||||
.atom_index = atom_index,
|
||||
.sym_index = try coff_file.getGlobalSymbol(callee, lib),
|
||||
}),
|
||||
} },
|
||||
});
|
||||
switch (tag) {
|
||||
.mov => {},
|
||||
.call => try self.asmRegister(.{ ._, .call }, .rax),
|
||||
else => unreachable,
|
||||
}
|
||||
} else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
|
||||
_ = try self.addInst(.{
|
||||
.tag = .call,
|
||||
.ops = .extern_fn_reloc,
|
||||
.data = .{ .reloc = .{
|
||||
.atom_index = atom_index,
|
||||
.sym_index = try macho_file.getGlobalSymbol(callee, lib),
|
||||
} },
|
||||
});
|
||||
} else return self.fail("TODO implement calling extern functions", .{});
|
||||
}
|
||||
|
||||
fn genLazySymbolRef(
|
||||
self: *Self,
|
||||
comptime tag: Mir.Inst.Tag,
|
||||
|
||||
@@ -6394,15 +6394,6 @@ pub const FuncGen = struct {
 const fn_ty = try mod.funcType(.{
 .param_types = &.{},
 .return_type = .void_type,
-.alignment = .none,
-.noalias_bits = 0,
-.comptime_bits = 0,
-.cc = .Unspecified,
-.is_var_args = false,
-.is_generic = false,
-.is_noinline = false,
-.section_is_generic = false,
-.addrspace_is_generic = false,
 });
 const fn_di_ty = try o.lowerDebugType(fn_ty, .full);
 const subprogram = dib.createFunction(

@@ -2,8 +2,6 @@ const std = @import("std");
 const builtin = @import("builtin");
 const expect = std.testing.expect;
 const math = std.math;
-const pi = std.math.pi;
-const e = std.math.e;
 const has_f80_rt = switch (builtin.cpu.arch) {
 .x86_64, .x86 => true,
 else => false,
@@ -11,7 +9,7 @@ const has_f80_rt = switch (builtin.cpu.arch) {
 const no_x86_64_hardware_f16_support = builtin.zig_backend == .stage2_x86_64 and
 !std.Target.x86.featureSetHas(builtin.cpu.features, .f16c);

-const epsilon_16 = 0.001;
+const epsilon_16 = 0.002;
 const epsilon = 0.000001;

 fn epsForType(comptime T: type) T {
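The constants above feed epsForType, which the tests below use to pick a tolerance per float type. A hedged guess at what that helper reduces to (the hunk above ends at its signature, so the real body sits just outside it); the bump of epsilon_16 to 0.002 presumably leaves headroom for the slightly less precise libm-backed f16 paths exercised by the new x86_64 lowering:

fn epsForType(comptime T: type) T {
    // Assumed implementation: f16 gets the looser tolerance, everything else the tight one.
    return if (T == f16) epsilon_16 else epsilon;
}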
@ -29,10 +27,10 @@ test "floating point comparisons" {
|
||||
}
|
||||
|
||||
fn testFloatComparisons() !void {
|
||||
inline for ([_]type{ f16, f32, f64, f128 }) |ty| {
|
||||
inline for ([_]type{ f16, f32, f64, f128 }) |T| {
|
||||
// No decimal part
|
||||
{
|
||||
const x: ty = 1.0;
|
||||
const x: T = 1.0;
|
||||
try expect(x == 1);
|
||||
try expect(x != 0);
|
||||
try expect(x > 0);
|
||||
@ -42,7 +40,7 @@ fn testFloatComparisons() !void {
|
||||
}
|
||||
// Non-zero decimal part
|
||||
{
|
||||
const x: ty = 1.5;
|
||||
const x: T = 1.5;
|
||||
try expect(x != 1);
|
||||
try expect(x != 2);
|
||||
try expect(x > 1);
|
||||
@ -54,11 +52,11 @@ fn testFloatComparisons() !void {
|
||||
}
|
||||
|
||||
test "different sized float comparisons" {
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
try testDifferentSizedFloatComparisons();
|
||||
try comptime testDifferentSizedFloatComparisons();
|
||||
@ -73,9 +71,9 @@ fn testDifferentSizedFloatComparisons() !void {
|
||||
test "f80 comparisons" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try expect(compareF80(0.0, .eq, -0.0));
|
||||
try expect(compareF80(0.0, .lte, -0.0));
|
||||
@ -125,8 +123,8 @@ test "@sqrt" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
|
||||
try comptime testSqrt();
|
||||
try testSqrt();
|
||||
try comptime testSqrt();
|
||||
}
|
||||
|
||||
fn testSqrt() !void {
|
||||
@ -163,8 +161,8 @@ test "@sqrt with vectors" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
|
||||
try comptime testSqrtWithVectors();
|
||||
try testSqrtWithVectors();
|
||||
try comptime testSqrtWithVectors();
|
||||
}
|
||||
|
||||
fn testSqrtWithVectors() !void {
|
||||
@ -177,11 +175,11 @@ fn testSqrtWithVectors() !void {
|
||||
}
|
||||
|
||||
test "more @sqrt f16 tests" {
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
// TODO these are not all passing at comptime
|
||||
try expect(@sqrt(@as(f16, 0.0)) == 0.0);
|
||||
@ -205,8 +203,8 @@ test "more @sqrt f16 tests" {
|
||||
test "another, possibly redundant @sqrt test" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
try testSqrtLegacy(f64, 12.0);
|
||||
try comptime testSqrtLegacy(f64, 12.0);
|
||||
@ -228,36 +226,61 @@ fn testSqrtLegacy(comptime T: type, x: T) !void {
|
||||
try expect(@sqrt(x * x) == x);
|
||||
}
|
||||
|
||||
test "@sin" {
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO
|
||||
test "@sin f16" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
try comptime testSin();
|
||||
try testSin();
|
||||
try testSin(&.{f16});
|
||||
try comptime testSin(&.{f16});
|
||||
}
|
||||
|
||||
fn testSin() !void {
|
||||
inline for ([_]type{ f16, f32, f64 }) |ty| {
|
||||
const eps = epsForType(ty);
|
||||
try expect(@sin(@as(ty, 0)) == 0);
|
||||
try expect(math.approxEqAbs(ty, @sin(@as(ty, std.math.pi)), 0, eps));
|
||||
try expect(math.approxEqAbs(ty, @sin(@as(ty, std.math.pi / 2.0)), 1, eps));
|
||||
try expect(math.approxEqAbs(ty, @sin(@as(ty, std.math.pi / 4.0)), 0.7071067811865475, eps));
|
||||
test "@sin f32/f64" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
|
||||
try testSin(&.{ f32, f64 });
|
||||
try comptime testSin(&.{ f32, f64 });
|
||||
}
|
||||
|
||||
test "@sin f80/f128/c_longdouble" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try testSin(&.{ f80, f128, c_longdouble });
|
||||
try comptime testSin(&.{ f80, f128, c_longdouble });
|
||||
}
|
||||
|
||||
fn testSin(comptime Ts: []const type) !void {
|
||||
inline for (Ts) |T| {
|
||||
const eps = epsForType(T);
|
||||
var zero: T = 0;
|
||||
try expect(@sin(zero) == 0);
|
||||
var pi: T = std.math.pi;
|
||||
try expect(math.approxEqAbs(T, @sin(pi), 0, eps));
|
||||
try expect(math.approxEqAbs(T, @sin(pi / 2.0), 1, eps));
|
||||
try expect(math.approxEqAbs(T, @sin(pi / 4.0), 0.7071067811865475, eps));
|
||||
}
|
||||
}
|
||||
|
||||
test "@sin with vectors" {
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try comptime testSinWithVectors();
|
||||
try testSinWithVectors();
|
||||
try comptime testSinWithVectors();
|
||||
}
|
||||
|
||||
fn testSinWithVectors() !void {
|
||||
@ -269,36 +292,61 @@ fn testSinWithVectors() !void {
|
||||
try expect(math.approxEqAbs(f32, @sin(@as(f32, 4.4)), result[3], epsilon));
|
||||
}
|
||||
|
||||
test "@cos" {
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO
|
||||
test "@cos f16" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
try comptime testCos();
|
||||
try testCos();
|
||||
try testCos(&.{f16});
|
||||
try comptime testCos(&.{f16});
|
||||
}
|
||||
|
||||
fn testCos() !void {
|
||||
inline for ([_]type{ f16, f32, f64 }) |ty| {
|
||||
const eps = epsForType(ty);
|
||||
try expect(@cos(@as(ty, 0)) == 1);
|
||||
try expect(math.approxEqAbs(ty, @cos(@as(ty, std.math.pi)), -1, eps));
|
||||
try expect(math.approxEqAbs(ty, @cos(@as(ty, std.math.pi / 2.0)), 0, eps));
|
||||
try expect(math.approxEqAbs(ty, @cos(@as(ty, std.math.pi / 4.0)), 0.7071067811865475, eps));
|
||||
test "@cos f32/f64" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
|
||||
try testCos(&.{ f32, f64 });
|
||||
try comptime testCos(&.{ f32, f64 });
|
||||
}
|
||||
|
||||
test "@cos f80/f128/c_longdouble" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try testCos(&.{ f80, f128, c_longdouble });
|
||||
try comptime testCos(&.{ f80, f128, c_longdouble });
|
||||
}
|
||||
|
||||
fn testCos(comptime Ts: []const type) !void {
|
||||
inline for (Ts) |T| {
|
||||
const eps = epsForType(T);
|
||||
var zero: T = 0;
|
||||
try expect(@cos(zero) == 1);
|
||||
var pi: T = std.math.pi;
|
||||
try expect(math.approxEqAbs(T, @cos(pi), -1, eps));
|
||||
try expect(math.approxEqAbs(T, @cos(pi / 2.0), 0, eps));
|
||||
try expect(math.approxEqAbs(T, @cos(pi / 4.0), 0.7071067811865475, eps));
|
||||
}
|
||||
}
|
||||
|
||||
test "@cos with vectors" {
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try comptime testCosWithVectors();
|
||||
try testCosWithVectors();
|
||||
try comptime testCosWithVectors();
|
||||
}
|
||||
|
||||
fn testCosWithVectors() !void {
|
||||
@ -310,35 +358,127 @@ fn testCosWithVectors() !void {
|
||||
try expect(math.approxEqAbs(f32, @cos(@as(f32, 4.4)), result[3], epsilon));
|
||||
}
|
||||
|
||||
test "@exp" {
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO
|
||||
test "@tan f16" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
try comptime testExp();
|
||||
try testExp();
|
||||
try testTan(&.{f16});
|
||||
try comptime testTan(&.{f16});
|
||||
}
|
||||
|
||||
fn testExp() !void {
|
||||
inline for ([_]type{ f16, f32, f64 }) |ty| {
|
||||
const eps = epsForType(ty);
|
||||
try expect(@exp(@as(ty, 0)) == 1);
|
||||
try expect(math.approxEqAbs(ty, @exp(@as(ty, 2)), 7.389056098930650, eps));
|
||||
try expect(math.approxEqAbs(ty, @exp(@as(ty, 5)), 148.4131591025766, eps));
|
||||
test "@tan f32/f64" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
|
||||
try testTan(&.{ f32, f64 });
|
||||
try comptime testTan(&.{ f32, f64 });
|
||||
}
|
||||
|
||||
test "@tan f80/f128/c_longdouble" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try testTan(&.{ f80, f128, c_longdouble });
|
||||
try comptime testTan(&.{ f80, f128, c_longdouble });
|
||||
}
|
||||
|
||||
fn testTan(comptime Ts: []const type) !void {
|
||||
inline for (Ts) |T| {
|
||||
const eps = epsForType(T);
|
||||
var zero: T = 0;
|
||||
try expect(@tan(zero) == 0);
|
||||
var pi: T = std.math.pi;
|
||||
try expect(math.approxEqAbs(T, @tan(pi), 0, eps));
|
||||
try expect(math.approxEqAbs(T, @tan(pi / 3.0), 1.732050807568878, eps));
|
||||
try expect(math.approxEqAbs(T, @tan(pi / 4.0), 1, eps));
|
||||
}
|
||||
}
|
||||
|
||||
test "@tan with vectors" {
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try testTanWithVectors();
|
||||
try comptime testTanWithVectors();
|
||||
}
|
||||
|
||||
fn testTanWithVectors() !void {
|
||||
var v: @Vector(4, f32) = [_]f32{ 1.1, 2.2, 3.3, 4.4 };
|
||||
var result = @tan(v);
|
||||
try expect(math.approxEqAbs(f32, @tan(@as(f32, 1.1)), result[0], epsilon));
|
||||
try expect(math.approxEqAbs(f32, @tan(@as(f32, 2.2)), result[1], epsilon));
|
||||
try expect(math.approxEqAbs(f32, @tan(@as(f32, 3.3)), result[2], epsilon));
|
||||
try expect(math.approxEqAbs(f32, @tan(@as(f32, 4.4)), result[3], epsilon));
|
||||
}
|
||||
|
||||
test "@exp f16" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
try testExp(&.{f16});
|
||||
try comptime testExp(&.{f16});
|
||||
}
|
||||
|
||||
test "@exp f32/f64" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
|
||||
try testExp(&.{ f32, f64 });
|
||||
try comptime testExp(&.{ f32, f64 });
|
||||
}
|
||||
|
||||
test "@exp f80/f128/c_longdouble" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try testExp(&.{ f80, f128, c_longdouble });
|
||||
try comptime testExp(&.{ f80, f128, c_longdouble });
|
||||
}
|
||||
|
||||
fn testExp(comptime Ts: []const type) !void {
|
||||
inline for (Ts) |T| {
|
||||
const eps = epsForType(T);
|
||||
var zero: T = 0;
|
||||
try expect(@exp(zero) == 1);
|
||||
var two: T = 2;
|
||||
try expect(math.approxEqAbs(T, @exp(two), 7.389056098930650, eps));
|
||||
var five: T = 5;
|
||||
try expect(math.approxEqAbs(T, @exp(five), 148.4131591025766, eps));
|
||||
}
|
||||
}
|
||||
|
||||
test "@exp with vectors" {
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try comptime testExpWithVectors();
|
||||
try testExpWithVectors();
|
||||
try comptime testExpWithVectors();
|
||||
}
|
||||
|
||||
fn testExpWithVectors() !void {
|
||||
@ -350,35 +490,61 @@ fn testExpWithVectors() !void {
|
||||
try expect(math.approxEqAbs(f32, @exp(@as(f32, 0.4)), result[3], epsilon));
|
||||
}
|
||||
|
||||
test "@exp2" {
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO
|
||||
test "@exp2 f16" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
try comptime testExp2();
|
||||
try testExp2();
|
||||
try testExp2(&.{f16});
|
||||
try comptime testExp2(&.{f16});
|
||||
}
|
||||
|
||||
fn testExp2() !void {
|
||||
inline for ([_]type{ f16, f32, f64 }) |ty| {
|
||||
const eps = epsForType(ty);
|
||||
try expect(@exp2(@as(ty, 2)) == 4);
|
||||
try expect(math.approxEqAbs(ty, @exp2(@as(ty, 1.5)), 2.8284271247462, eps));
|
||||
try expect(math.approxEqAbs(ty, @exp2(@as(ty, 4.5)), 22.627416997969, eps));
|
||||
test "@exp2 f32/f64" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
|
||||
try testExp2(&.{ f32, f64 });
|
||||
try comptime testExp2(&.{ f32, f64 });
|
||||
}
|
||||
|
||||
test "@exp2 f80/f128/c_longdouble" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try testExp2(&.{ f80, f128, c_longdouble });
|
||||
try comptime testExp2(&.{ f80, f128, c_longdouble });
|
||||
}
|
||||
|
||||
fn testExp2(comptime Ts: []const type) !void {
|
||||
inline for (Ts) |T| {
|
||||
const eps = epsForType(T);
|
||||
var two: T = 2;
|
||||
try expect(@exp2(two) == 4);
|
||||
var one_point_five: T = 1.5;
|
||||
try expect(math.approxEqAbs(T, @exp2(one_point_five), 2.8284271247462, eps));
|
||||
var four_point_five: T = 4.5;
|
||||
try expect(math.approxEqAbs(T, @exp2(four_point_five), 22.627416997969, eps));
|
||||
}
|
||||
}
|
||||
|
||||
test "@exp2 with @vectors" {
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try comptime testExp2WithVectors();
|
||||
try testExp2WithVectors();
|
||||
try comptime testExp2WithVectors();
|
||||
}
|
||||
|
||||
fn testExp2WithVectors() !void {
|
||||
@ -390,44 +556,59 @@ fn testExp2WithVectors() !void {
|
||||
try expect(math.approxEqAbs(f32, @exp2(@as(f32, 0.4)), result[3], epsilon));
|
||||
}
|
||||
|
||||
test "@log" {
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
test "@log f16" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
try comptime testLog();
|
||||
try testLog();
|
||||
try testLog(&.{f16});
|
||||
try comptime testLog(&.{f16});
|
||||
}
|
||||
|
||||
fn testLog() !void {
|
||||
{
|
||||
var a: f16 = e;
|
||||
try expect(math.approxEqAbs(f16, @log(a), 1, epsilon));
|
||||
}
|
||||
{
|
||||
var a: f32 = e;
|
||||
try expect(@log(a) == 1 or @log(a) == @as(f32, @bitCast(@as(u32, 0x3f7fffff))));
|
||||
}
|
||||
{
|
||||
var a: f64 = e;
|
||||
try expect(@log(a) == 1 or @log(a) == @as(f64, @bitCast(@as(u64, 0x3ff0000000000000))));
|
||||
}
|
||||
inline for ([_]type{ f16, f32, f64 }) |ty| {
|
||||
const eps = epsForType(ty);
|
||||
try expect(math.approxEqAbs(ty, @log(@as(ty, 2)), 0.6931471805599, eps));
|
||||
try expect(math.approxEqAbs(ty, @log(@as(ty, 5)), 1.6094379124341, eps));
|
||||
test "@log f32/f64" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
|
||||
try testLog(&.{ f32, f64 });
|
||||
try comptime testLog(&.{ f32, f64 });
|
||||
}
|
||||
|
||||
test "@log f80/f128/c_longdouble" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try testLog(&.{ f80, f128, c_longdouble });
|
||||
try comptime testLog(&.{ f80, f128, c_longdouble });
|
||||
}
|
||||
|
||||
fn testLog(comptime Ts: []const type) !void {
|
||||
inline for (Ts) |T| {
|
||||
const eps = epsForType(T);
|
||||
var e: T = std.math.e;
|
||||
try expect(math.approxEqAbs(T, @log(e), 1, eps));
|
||||
var two: T = 2;
|
||||
try expect(math.approxEqAbs(T, @log(two), 0.6931471805599, eps));
|
||||
var five: T = 5;
|
||||
try expect(math.approxEqAbs(T, @log(five), 1.6094379124341, eps));
|
||||
}
|
||||
}
|
||||
|
||||
test "@log with @vectors" {
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
{
|
||||
var v: @Vector(4, f32) = [_]f32{ 1.1, 2.2, 0.3, 0.4 };
|
||||
@ -439,29 +620,54 @@ test "@log with @vectors" {
|
||||
}
|
||||
}
|
||||
|
||||
test "@log2" {
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO
|
||||
test "@log2 f16" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
try comptime testLog2();
|
||||
try testLog2();
|
||||
try testLog2(&.{f16});
|
||||
try comptime testLog2(&.{f16});
|
||||
}
|
||||
|
||||
fn testLog2() !void {
|
||||
inline for ([_]type{ f16, f32, f64 }) |ty| {
|
||||
const eps = epsForType(ty);
|
||||
try expect(@log2(@as(ty, 4)) == 2);
|
||||
try expect(math.approxEqAbs(ty, @log2(@as(ty, 6)), 2.5849625007212, eps));
|
||||
try expect(math.approxEqAbs(ty, @log2(@as(ty, 10)), 3.3219280948874, eps));
|
||||
test "@log2 f32/f64" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
|
||||
try testLog2(&.{ f32, f64 });
|
||||
try comptime testLog2(&.{ f32, f64 });
|
||||
}
|
||||
|
||||
test "@log2 f80/f128/c_longdouble" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try testLog2(&.{ f80, f128, c_longdouble });
|
||||
try comptime testLog2(&.{ f80, f128, c_longdouble });
|
||||
}
|
||||
|
||||
fn testLog2(comptime Ts: []const type) !void {
|
||||
inline for (Ts) |T| {
|
||||
const eps = epsForType(T);
|
||||
var four: T = 4;
|
||||
try expect(@log2(four) == 2);
|
||||
var six: T = 6;
|
||||
try expect(math.approxEqAbs(T, @log2(six), 2.5849625007212, eps));
|
||||
var ten: T = 10;
|
||||
try expect(math.approxEqAbs(T, @log2(ten), 3.3219280948874, eps));
|
||||
}
|
||||
}
|
||||
|
||||
test "@log2 with vectors" {
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
@ -469,9 +675,10 @@ test "@log2 with vectors" {
|
||||
if (builtin.zig_backend == .stage2_llvm and
|
||||
builtin.cpu.arch == .aarch64 and
|
||||
builtin.os.tag == .windows) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try comptime testLog2WithVectors();
|
||||
try testLog2WithVectors();
|
||||
try comptime testLog2WithVectors();
|
||||
}
|
||||
|
||||
fn testLog2WithVectors() !void {
|
||||
@ -483,35 +690,61 @@ fn testLog2WithVectors() !void {
|
||||
try expect(@log2(@as(f32, 0.4)) == result[3]);
|
||||
}
|
||||
|
||||
test "@log10" {
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO
|
||||
test "@log10 f16" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;
|
||||
|
||||
try comptime testLog10();
|
||||
try testLog10();
|
||||
try testLog10(&.{f16});
|
||||
try comptime testLog10(&.{f16});
|
||||
}
|
||||
|
||||
fn testLog10() !void {
|
||||
inline for ([_]type{ f16, f32, f64 }) |ty| {
|
||||
const eps = epsForType(ty);
|
||||
try expect(@log10(@as(ty, 100)) == 2);
|
||||
try expect(math.approxEqAbs(ty, @log10(@as(ty, 15)), 1.176091259056, eps));
|
||||
try expect(math.approxEqAbs(ty, @log10(@as(ty, 50)), 1.698970004336, eps));
|
||||
test "@log10 f32/f64" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
|
||||
|
||||
try testLog10(&.{ f32, f64 });
|
||||
try comptime testLog10(&.{ f32, f64 });
|
||||
}
|
||||
|
||||
test "@log10 f80/f128/c_longdouble" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try testLog10(&.{ f80, f128, c_longdouble });
|
||||
try comptime testLog10(&.{ f80, f128, c_longdouble });
|
||||
}
|
||||
|
||||
fn testLog10(comptime Ts: []const type) !void {
|
||||
inline for (Ts) |T| {
|
||||
const eps = epsForType(T);
|
||||
var hundred: T = 100;
|
||||
try expect(@log10(hundred) == 2);
|
||||
var fifteen: T = 15;
|
||||
try expect(math.approxEqAbs(T, @log10(fifteen), 1.176091259056, eps));
|
||||
var fifty: T = 50;
|
||||
try expect(math.approxEqAbs(T, @log10(fifty), 1.698970004336, eps));
|
||||
}
|
||||
}
|
||||
|
||||
test "@log10 with vectors" {
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
|
||||
|
||||
try comptime testLog10WithVectors();
|
||||
try testLog10WithVectors();
|
||||
try comptime testLog10WithVectors();
|
||||
}
|
||||
|
||||
fn testLog10WithVectors() !void {
|
||||
@ -528,8 +761,8 @@ test "@abs" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
|
||||
try comptime testFabs();
|
||||
try testFabs();
|
||||
try comptime testFabs();
|
||||
}
|
||||
|
||||
fn testFabs() !void {
|
||||
@ -556,8 +789,8 @@ test "@abs with vectors" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
|
||||
|
||||
try comptime testFabsWithVectors();
|
||||
try testFabsWithVectors();
|
||||
try comptime testFabsWithVectors();
|
||||
}
|
||||
|
||||
fn testFabsWithVectors() !void {
|
||||
@@ -573,9 +806,9 @@ test "another, possibly redundant, @abs test" {
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

try testFabsLegacy(f128, 12.0);
try comptime testFabsLegacy(f128, 12.0);
@@ -596,9 +829,9 @@ test "@abs f80" {
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

try testFabsLegacy(f80, 12.0);
try comptime testFabsLegacy(f80, 12.0);
@@ -614,9 +847,9 @@ test "a third @abs test, surely there should not be three fabs tests" {
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

inline for ([_]type{ f16, f32, f64, f80, f128, c_longdouble }) |T| {
// normals
@@ -645,8 +878,8 @@ test "@floor" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

try comptime testFloor();
try testFloor();
try comptime testFloor();
}

fn testFloor() !void {
@@ -664,14 +897,14 @@ fn testFloor() !void {

test "@floor with vectors" {
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64 and
!comptime std.Target.x86.featureSetHas(builtin.cpu.features, .sse4_1)) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and
!comptime std.Target.x86.featureSetHas(builtin.cpu.features, .sse4_1)) return error.SkipZigTest;

try comptime testFloorWithVectors();
try testFloorWithVectors();
try comptime testFloorWithVectors();
}

fn testFloorWithVectors() !void {
@@ -686,8 +919,8 @@ fn testFloorWithVectors() !void {
test "another, possibly redundant, @floor test" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

try testFloorLegacy(f64, 12.0);
try comptime testFloorLegacy(f64, 12.0);
@@ -705,9 +938,9 @@ test "another, possibly redundant, @floor test" {
test "@floor f80" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

if (builtin.zig_backend == .stage2_llvm and builtin.os.tag == .windows) {
// https://github.com/ziglang/zig/issues/12602
@@ -721,9 +954,9 @@ test "@floor f80" {
test "@floor f128" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

try testFloorLegacy(f128, 12.0);
try comptime testFloorLegacy(f128, 12.0);
@@ -740,8 +973,8 @@ test "@ceil" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

try comptime testCeil();
try testCeil();
try comptime testCeil();
}

fn testCeil() !void {
@@ -759,14 +992,14 @@ fn testCeil() !void {
test "@ceil with vectors" {
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64 and
!comptime std.Target.x86.featureSetHas(builtin.cpu.features, .sse4_1)) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and
!comptime std.Target.x86.featureSetHas(builtin.cpu.features, .sse4_1)) return error.SkipZigTest;

try comptime testCeilWithVectors();
try testCeilWithVectors();
try comptime testCeilWithVectors();
}

fn testCeilWithVectors() !void {
@@ -781,8 +1014,8 @@ fn testCeilWithVectors() !void {
test "another, possibly redundant, @ceil test" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

try testCeilLegacy(f64, 12.0);
try comptime testCeilLegacy(f64, 12.0);
@@ -800,9 +1033,9 @@ test "another, possibly redundant, @ceil test" {
test "@ceil f80" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

if (builtin.zig_backend == .stage2_llvm and builtin.os.tag == .windows) {
// https://github.com/ziglang/zig/issues/12602
@@ -816,9 +1049,9 @@ test "@ceil f80" {
test "@ceil f128" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

try testCeilLegacy(f128, 12.0);
try comptime testCeilLegacy(f128, 12.0);
@@ -835,8 +1068,8 @@ test "@trunc" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

try comptime testTrunc();
try testTrunc();
try comptime testTrunc();
}

fn testTrunc() !void {
@@ -854,14 +1087,14 @@ fn testTrunc() !void {

test "@trunc with vectors" {
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64 and
!comptime std.Target.x86.featureSetHas(builtin.cpu.features, .sse4_1)) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and
!comptime std.Target.x86.featureSetHas(builtin.cpu.features, .sse4_1)) return error.SkipZigTest;

try comptime testTruncWithVectors();
try testTruncWithVectors();
try comptime testTruncWithVectors();
}

fn testTruncWithVectors() !void {
@@ -876,8 +1109,8 @@ fn testTruncWithVectors() !void {
test "another, possibly redundant, @trunc test" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

if (builtin.zig_backend == .stage2_llvm and builtin.cpu.arch.isMIPS()) {
// https://github.com/ziglang/zig/issues/16846
@@ -900,9 +1133,9 @@ test "another, possibly redundant, @trunc test" {
test "@trunc f80" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

if (builtin.zig_backend == .stage2_llvm and builtin.os.tag == .windows) {
// https://github.com/ziglang/zig/issues/12602
@@ -922,9 +1155,9 @@ test "@trunc f80" {
test "@trunc f128" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

try testTruncLegacy(f128, 12.0);
try comptime testTruncLegacy(f128, 12.0);
@@ -945,11 +1178,11 @@ fn testTruncLegacy(comptime T: type, x: T) !void {
}

test "negation f16" {
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (no_x86_64_hardware_f16_support) return error.SkipZigTest;

if (builtin.os.tag == .freebsd) {
// TODO file issue to track this failure
@@ -1011,11 +1244,11 @@ test "negation f64" {
}

test "negation f80" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

const S = struct {
fn doTheTest() !void {
@@ -1032,11 +1265,11 @@ test "negation f80" {
}

test "negation f128" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

const S = struct {
fn doTheTest() !void {
@@ -1075,11 +1308,11 @@ test "f128 at compile time is lossy" {
}

test "comptime fixed-width float zero divided by zero produces NaN" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

inline for (.{ f16, f32, f64, f80, f128 }) |F| {
try expect(math.isNan(@as(F, 0) / @as(F, 0)));
@@ -1182,11 +1415,11 @@ test "nan negation f128" {
}

test "nan negation f80" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

const nan_comptime = comptime math.nan(f80);
const neg_nan_comptime = -nan_comptime;
@@ -7,6 +7,8 @@ const maxInt = std.math.maxInt;
const minInt = std.math.minInt;
const mem = std.mem;
const math = std.math;
const no_x86_64_hardware_f16_support = builtin.zig_backend == .stage2_x86_64 and
!std.Target.x86.featureSetHas(builtin.cpu.features, .f16c);

test "assignment operators" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -1437,19 +1439,29 @@ fn fmodOne(comptime T: type, a: T, b: T, c: T, epsilon: T) !void {
try expect(@abs(@mod(@as(T, a), @as(T, b)) - @as(T, c)) < epsilon);
}

test "@round" {
test "@round f16" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;
if (no_x86_64_hardware_f16_support) return error.SkipZigTest; // TODO

try testRound(f16, 12.0);
try comptime testRound(f16, 12.0);
}
test "@round f32/f64" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;

try testRound(f64, 12.0);
try comptime testRound(f64, 12.0);
try testRound(f32, 12.0);
try comptime testRound(f32, 12.0);
try testRound(f16, 12.0);
try comptime testRound(f16, 12.0);

const x = 14.0;
const y = x + 0.4;
@@ -1464,6 +1476,7 @@ test "@round f80" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;

try testRound(f80, 12.0);
try comptime testRound(f80, 12.0);
@@ -1476,6 +1489,7 @@ test "@round f128" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf) return error.SkipZigTest;

try testRound(f128, 12.0);
try comptime testRound(f128, 12.0);
@@ -1608,11 +1622,6 @@ test "signed zeros are represented properly" {
try comptime S.doTheTest();
}

test "comptime sin and ln" {
const v = comptime (@sin(@as(f32, 1)) + @log(@as(f32, 5)));
try expect(v == @sin(@as(f32, 1)) + @log(@as(f32, 5)));
}

test "absFloat" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO