x86_64: rewrite scalar @addWithOverflow

This commit is contained in:
Jacob Young 2025-03-25 07:45:52 -04:00
parent 426684b2f8
commit 80068b6e59
4 changed files with 998 additions and 20 deletions

File diff suppressed because it is too large. [Load Diff]

View File

@ -266,7 +266,8 @@ pub fn classifySystemV(ty: Type, zcu: *Zcu, target: *const std.Target, ctx: Cont
// separately.".
const ty_size = ty.abiSize(zcu);
switch (ty.containerLayout(zcu)) {
.auto, .@"extern" => {},
.auto => unreachable,
.@"extern" => {},
.@"packed" => {
assert(ty_size <= 16);
result[0] = .integer;
@ -345,7 +346,8 @@ fn classifySystemVStruct(
);
if (zcu.typeToStruct(field_ty)) |field_loaded_struct| {
switch (field_loaded_struct.layout) {
.auto, .@"extern" => {
.auto => unreachable,
.@"extern" => {
byte_offset = classifySystemVStruct(result, byte_offset, field_loaded_struct, zcu, target);
continue;
},
@ -353,7 +355,8 @@ fn classifySystemVStruct(
}
} else if (zcu.typeToUnion(field_ty)) |field_loaded_union| {
switch (field_loaded_union.flagsUnordered(ip).layout) {
.auto, .@"extern" => {
.auto => unreachable,
.@"extern" => {
byte_offset = classifySystemVUnion(result, byte_offset, field_loaded_union, zcu, target);
continue;
},
@ -386,7 +389,8 @@ fn classifySystemVUnion(
const field_ty = Type.fromInterned(loaded_union.field_types.get(ip)[field_index]);
if (zcu.typeToStruct(field_ty)) |field_loaded_struct| {
switch (field_loaded_struct.layout) {
.auto, .@"extern" => {
.auto => unreachable,
.@"extern" => {
_ = classifySystemVStruct(result, starting_byte_offset, field_loaded_struct, zcu, target);
continue;
},
@ -394,7 +398,8 @@ fn classifySystemVUnion(
}
} else if (zcu.typeToUnion(field_ty)) |field_loaded_union| {
switch (field_loaded_union.flagsUnordered(ip).layout) {
.auto, .@"extern" => {
.auto => unreachable,
.@"extern" => {
_ = classifySystemVUnion(result, starting_byte_offset, field_loaded_union, zcu, target);
continue;
},

View File

@ -5262,6 +5262,17 @@ test addUnsafe {
try test_add_unsafe.testFloatVectors();
}
inline fn addSafe(comptime Type: type, lhs: Type, rhs: Type) AddOneBit(Type) {
@setRuntimeSafety(true);
return @as(AddOneBit(Type), lhs) + rhs;
}
test addSafe {
const test_add_safe = binary(addSafe, .{});
try test_add_safe.testInts();
try test_add_safe.testFloats();
try test_add_safe.testFloatVectors();
}
inline fn addWrap(comptime Type: type, lhs: Type, rhs: Type) Type {
return lhs +% rhs;
}
@ -5416,6 +5427,14 @@ test min {
try test_min.testFloatVectors();
}
inline fn addWithOverflow(comptime Type: type, lhs: Type, rhs: Type) struct { Type, u1 } {
return @addWithOverflow(lhs, rhs);
}
test addWithOverflow {
const test_add_with_overflow = binary(addWithOverflow, .{});
try test_add_with_overflow.testInts();
}
inline fn equal(comptime Type: type, lhs: Type, rhs: Type) @TypeOf(lhs == rhs) {
return lhs == rhs;
}
@ -5519,15 +5538,16 @@ test shl {
try test_shl.testInts();
}
inline fn shlExact(comptime Type: type, lhs: Type, rhs: Type) Type {
inline fn shlExactUnsafe(comptime Type: type, lhs: Type, rhs: Type) Type {
@setRuntimeSafety(false);
const bit_cast_rhs: @Type(.{ .int = .{ .signedness = .unsigned, .bits = @bitSizeOf(Type) } }) = @bitCast(rhs);
const truncate_rhs: Log2Int(Type) = @truncate(bit_cast_rhs);
const final_rhs = if (comptime cast(Log2Int(Type), @bitSizeOf(Type))) |bits| truncate_rhs % bits else truncate_rhs;
return @shlExact(lhs << final_rhs >> final_rhs, final_rhs);
}
test shlExact {
const test_shl_exact = binary(shlExact, .{});
try test_shl_exact.testInts();
test shlExactUnsafe {
const test_shl_exact_unsafe = binary(shlExactUnsafe, .{});
try test_shl_exact_unsafe.testInts();
}
inline fn bitXor(comptime Type: type, lhs: Type, rhs: Type) @TypeOf(lhs ^ rhs) {

View File

@ -153,6 +153,9 @@ pub noinline fn checkExpected(expected: anytype, actual: @TypeOf(expected), comp
);
},
},
.@"struct" => |@"struct"| inline for (@"struct".fields) |field| {
try checkExpected(@field(expected, field.name), @field(actual, field.name), compare);
} else return,
};
if (switch (@typeInfo(Expected)) {
else => unexpected,