Merge pull request #18067 from ziglang/use-BoundedArray-less
std: use BoundedArray less
This commit is contained in:
commit e4977f3e89
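Across these diffs the pattern is the same: a std.BoundedArray is replaced either by a stack buffer formatted with std.fmt.bufPrint or by a std.ArrayListUnmanaged backed by caller-owned memory via the initBuffer method added below. A minimal, illustrative sketch of the two replacements (not code from the diff; the string values are made up):

    const std = @import("std");

    test "BoundedArray vs fixed buffer (illustrative)" {
        // Before: BoundedArray bundles fixed storage with a writer.
        var old_style = std.BoundedArray(u8, 32).init(0) catch unreachable;
        try old_style.writer().print("__{s}_", .{"FLT"});
        try std.testing.expectEqual(@as(usize, 6), old_style.len);

        // After (pattern 1): a plain stack buffer plus std.fmt.bufPrint.
        var buf: [32]u8 = undefined;
        const printed = try std.fmt.bufPrint(&buf, "__{s}_", .{"FLT"});
        try std.testing.expectEqualStrings("__FLT_", printed);

        // After (pattern 2): an ArrayListUnmanaged over caller-owned memory.
        var list_buf: [32]u8 = undefined;
        var list = std.ArrayListUnmanaged(u8).initBuffer(&list_buf);
        list.appendSliceAssumeCapacity(printed);
        try std.testing.expectEqualStrings("__FLT_", list.items);
    }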
deps/aro/aro/Compilation.zig (vendored, 35 changed lines)

@@ -582,10 +582,9 @@ fn generateFloatMacros(w: anytype, prefix: []const u8, semantics: target_util.FP
},
);

-var defPrefix = std.BoundedArray(u8, 32).init(0) catch unreachable;
-defPrefix.writer().print("__{s}_", .{prefix}) catch return error.OutOfMemory;

-const prefix_slice = defPrefix.constSlice();
+var def_prefix_buf: [32]u8 = undefined;
+const prefix_slice = std.fmt.bufPrint(&def_prefix_buf, "__{s}_", .{prefix}) catch
+return error.OutOfMemory;

try w.print("#define {s}DENORM_MIN__ {s}{s}\n", .{ prefix_slice, denormMin, ext });
try w.print("#define {s}HAS_DENORM__\n", .{prefix_slice});

@@ -770,18 +769,18 @@ fn generateExactWidthType(comp: *const Compilation, w: anytype, mapper: StrInt.T
ty = if (unsigned) comp.types.int64.makeIntegerUnsigned() else comp.types.int64;
}

-var prefix = std.BoundedArray(u8, 16).init(0) catch unreachable;
-prefix.writer().print("{s}{d}", .{ if (unsigned) "__UINT" else "__INT", width }) catch return error.OutOfMemory;
+var buffer: [16]u8 = undefined;
+const suffix = "_TYPE__";
+const full = std.fmt.bufPrint(&buffer, "{s}{d}{s}", .{
+if (unsigned) "__UINT" else "__INT", width, suffix,
+}) catch return error.OutOfMemory;

-{
-const len = prefix.len;
-defer prefix.resize(len) catch unreachable; // restoring previous size
-prefix.appendSliceAssumeCapacity("_TYPE__");
-try generateTypeMacro(w, mapper, prefix.constSlice(), ty, comp.langopts);
-}
+try generateTypeMacro(w, mapper, full, ty, comp.langopts);

-try comp.generateFmt(prefix.constSlice(), w, ty);
-try comp.generateSuffixMacro(prefix.constSlice(), w, ty);
+const prefix = full[0 .. full.len - suffix.len]; // remove "_TYPE__"

+try comp.generateFmt(prefix, w, ty);
+try comp.generateSuffixMacro(prefix, w, ty);
}

pub fn hasFloat128(comp: *const Compilation) bool {

@@ -908,10 +907,12 @@ fn generateExactWidthIntMax(comp: *const Compilation, w: anytype, specifier: Typ
ty = if (unsigned) comp.types.int64.makeIntegerUnsigned() else comp.types.int64;
}

-var name = std.BoundedArray(u8, 6).init(0) catch unreachable;
-name.writer().print("{s}{d}", .{ if (unsigned) "UINT" else "INT", bit_count }) catch return error.OutOfMemory;
+var name_buffer: [6]u8 = undefined;
+const name = std.fmt.bufPrint(&name_buffer, "{s}{d}", .{
+if (unsigned) "UINT" else "INT", bit_count,
+}) catch return error.OutOfMemory;

-return comp.generateIntMax(w, name.constSlice(), ty);
+return comp.generateIntMax(w, name, ty);
}

fn generateIntWidth(comp: *Compilation, w: anytype, name: []const u8, ty: Type) !void {
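The generateExactWidthType hunk above formats the full macro name once and then recovers the prefix by slicing the suffix back off. A detached sketch of that trick, with the width hard-coded purely for illustration:

    const std = @import("std");

    test "format once, slice off the suffix (illustrative)" {
        var buffer: [16]u8 = undefined;
        const suffix = "_TYPE__";
        const full = try std.fmt.bufPrint(&buffer, "{s}{d}{s}", .{ "__UINT", 32, suffix });
        // Dropping the suffix yields the prefix that the other macros are built from.
        const prefix = full[0 .. full.len - suffix.len];
        try std.testing.expectEqualStrings("__UINT32_TYPE__", full);
        try std.testing.expectEqualStrings("__UINT32", prefix);
    }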
deps/aro/aro/Driver/GCCDetector.zig (vendored, 41 changed lines)

@@ -29,7 +29,7 @@ pub fn appendToolPath(self: *const GCCDetector, tc: *Toolchain) !void {
}, .program);
}

-fn addDefaultGCCPrefixes(prefixes: *PathPrefixes, tc: *const Toolchain) !void {
+fn addDefaultGCCPrefixes(prefixes: *std.ArrayListUnmanaged([]const u8), tc: *const Toolchain) !void {
const sysroot = tc.getSysroot();
const target = tc.getTarget();
if (sysroot.len == 0 and target.os.tag == .linux and tc.filesystem.exists("/opt/rh")) {

@@ -57,14 +57,12 @@ fn addDefaultGCCPrefixes(prefixes: *PathPrefixes, tc: *const Toolchain) !void {
}
}

-const PathPrefixes = std.BoundedArray([]const u8, 16);

fn collectLibDirsAndTriples(
tc: *Toolchain,
-lib_dirs: *PathPrefixes,
-triple_aliases: *PathPrefixes,
-biarch_libdirs: *PathPrefixes,
-biarch_triple_aliases: *PathPrefixes,
+lib_dirs: *std.ArrayListUnmanaged([]const u8),
+triple_aliases: *std.ArrayListUnmanaged([]const u8),
+biarch_libdirs: *std.ArrayListUnmanaged([]const u8),
+biarch_triple_aliases: *std.ArrayListUnmanaged([]const u8),
) !void {
const AArch64LibDirs: [2][]const u8 = .{ "/lib64", "/lib" };
const AArch64Triples: [4][]const u8 = .{ "aarch64-none-linux-gnu", "aarch64-linux-gnu", "aarch64-redhat-linux", "aarch64-suse-linux" };

@@ -408,10 +406,18 @@ pub fn discover(self: *GCCDetector, tc: *Toolchain) !void {
else
target_util.get32BitArchVariant(target);

-var candidate_lib_dirs: PathPrefixes = .{};
-var candidate_triple_aliases: PathPrefixes = .{};
-var candidate_biarch_lib_dirs: PathPrefixes = .{};
-var candidate_biarch_triple_aliases: PathPrefixes = .{};
+var candidate_lib_dirs_buffer: [16][]const u8 = undefined;
+var candidate_lib_dirs = std.ArrayListUnmanaged([]const u8).initBuffer(&candidate_lib_dirs_buffer);

+var candidate_triple_aliases_buffer: [16][]const u8 = undefined;
+var candidate_triple_aliases = std.ArrayListUnmanaged([]const u8).initBuffer(&candidate_triple_aliases_buffer);

+var candidate_biarch_lib_dirs_buffer: [16][]const u8 = undefined;
+var candidate_biarch_lib_dirs = std.ArrayListUnmanaged([]const u8).initBuffer(&candidate_biarch_lib_dirs_buffer);

+var candidate_biarch_triple_aliases_buffer: [16][]const u8 = undefined;
+var candidate_biarch_triple_aliases = std.ArrayListUnmanaged([]const u8).initBuffer(&candidate_biarch_triple_aliases_buffer);

try collectLibDirsAndTriples(
tc,
&candidate_lib_dirs,

@@ -433,7 +439,8 @@ pub fn discover(self: *GCCDetector, tc: *Toolchain) !void {
}
}

-var prefixes: PathPrefixes = .{};
+var prefixes_buf: [16][]const u8 = undefined;
+var prefixes = std.ArrayListUnmanaged([]const u8).initBuffer(&prefixes_buf);
const gcc_toolchain_dir = gccToolchainDir(tc);
if (gcc_toolchain_dir.len != 0) {
const adjusted = if (gcc_toolchain_dir[gcc_toolchain_dir.len - 1] == '/')

@@ -455,10 +462,10 @@ pub fn discover(self: *GCCDetector, tc: *Toolchain) !void {
}

const v0 = GCCVersion.parse("0.0.0");
-for (prefixes.constSlice()) |prefix| {
+for (prefixes.items) |prefix| {
if (!tc.filesystem.exists(prefix)) continue;

-for (candidate_lib_dirs.constSlice()) |suffix| {
+for (candidate_lib_dirs.items) |suffix| {
defer fib.reset();
const lib_dir = std.fs.path.join(fib.allocator(), &.{ prefix, suffix }) catch continue;
if (!tc.filesystem.exists(lib_dir)) continue;

@@ -467,17 +474,17 @@ pub fn discover(self: *GCCDetector, tc: *Toolchain) !void {
const gcc_cross_dir_exists = tc.filesystem.joinedExists(&.{ lib_dir, "/gcc-cross" });

try self.scanLibDirForGCCTriple(tc, target, lib_dir, triple_str, false, gcc_dir_exists, gcc_cross_dir_exists);
-for (candidate_triple_aliases.constSlice()) |candidate| {
+for (candidate_triple_aliases.items) |candidate| {
try self.scanLibDirForGCCTriple(tc, target, lib_dir, candidate, false, gcc_dir_exists, gcc_cross_dir_exists);
}
}
-for (candidate_biarch_lib_dirs.constSlice()) |suffix| {
+for (candidate_biarch_lib_dirs.items) |suffix| {
const lib_dir = std.fs.path.join(fib.allocator(), &.{ prefix, suffix }) catch continue;
if (!tc.filesystem.exists(lib_dir)) continue;

const gcc_dir_exists = tc.filesystem.joinedExists(&.{ lib_dir, "/gcc" });
const gcc_cross_dir_exists = tc.filesystem.joinedExists(&.{ lib_dir, "/gcc-cross" });
-for (candidate_biarch_triple_aliases.constSlice()) |candidate| {
+for (candidate_biarch_triple_aliases.items) |candidate| {
try self.scanLibDirForGCCTriple(tc, target, lib_dir, candidate, true, gcc_dir_exists, gcc_cross_dir_exists);
}
}
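GCCDetector now collects its candidate prefixes and lib dirs into ArrayListUnmanaged values backed by 16-element stack arrays instead of a PathPrefixes BoundedArray. A reduced sketch of that setup (the paths here are placeholders, not ones the detector actually probes):

    const std = @import("std");

    test "fixed-capacity prefix list (illustrative)" {
        var prefixes_buf: [16][]const u8 = undefined;
        var prefixes = std.ArrayListUnmanaged([]const u8).initBuffer(&prefixes_buf);

        // appendAssumeCapacity never allocates; capacity is the buffer length.
        prefixes.appendAssumeCapacity("/usr");
        prefixes.appendAssumeCapacity("/usr/local");

        var total: usize = 0;
        for (prefixes.items) |prefix| total += prefix.len;
        try std.testing.expectEqual(@as(usize, 14), total);
    }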
deps/aro/aro/Parser.zig (vendored, 4 changed lines)

@@ -7796,7 +7796,7 @@ fn stringLiteral(p: *Parser) Error!Result {
}
},
};
-for (char_literal_parser.errors.constSlice()) |item| {
+for (char_literal_parser.errors()) |item| {
try p.errExtra(item.tag, p.tok_i, item.extra);
}
}

@@ -7911,7 +7911,7 @@ fn charLiteral(p: *Parser) Error!Result {
char_literal_parser.err(.char_lit_too_wide, .{ .none = {} });
}

-for (char_literal_parser.errors.constSlice()) |item| {
+for (char_literal_parser.errors()) |item| {
try p.errExtra(item.tag, p.tok_i, item.extra);
}
}
deps/aro/aro/text_literal.zig (vendored, 24 changed lines)

@@ -157,7 +157,8 @@ pub const Parser = struct {
max_codepoint: u21,
/// We only want to issue a max of 1 error per char literal
errored: bool = false,
-errors: std.BoundedArray(CharDiagnostic, 4) = .{},
+errors_buffer: [4]CharDiagnostic,
+errors_len: usize,
comp: *const Compilation,

pub fn init(literal: []const u8, kind: Kind, max_codepoint: u21, comp: *const Compilation) Parser {

@@ -166,6 +167,8 @@ pub const Parser = struct {
.comp = comp,
.kind = kind,
.max_codepoint = max_codepoint,
+.errors_buffer = undefined,
+.errors_len = 0,
};
}

@@ -178,19 +181,28 @@ pub const Parser = struct {
};
}

+pub fn errors(p: *Parser) []CharDiagnostic {
+return p.errors_buffer[0..p.errors_len];
+}

pub fn err(self: *Parser, tag: Diagnostics.Tag, extra: Diagnostics.Message.Extra) void {
if (self.errored) return;
self.errored = true;
const diagnostic = .{ .tag = tag, .extra = extra };
-self.errors.append(diagnostic) catch {
-_ = self.errors.pop();
-self.errors.append(diagnostic) catch unreachable;
-};
+if (self.errors_len == self.errors_buffer.len) {
+self.errors_buffer[self.errors_buffer.len - 1] = diagnostic;
+} else {
+self.errors_buffer[self.errors_len] = diagnostic;
+self.errors_len += 1;
+}
}

pub fn warn(self: *Parser, tag: Diagnostics.Tag, extra: Diagnostics.Message.Extra) void {
if (self.errored) return;
-self.errors.append(.{ .tag = tag, .extra = extra }) catch {};
+if (self.errors_len < self.errors_buffer.len) {
+self.errors_buffer[self.errors_len] = .{ .tag = tag, .extra = extra };
+self.errors_len += 1;
+}
}

pub fn next(self: *Parser) ?Item {
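The text_literal.Parser change stores at most four diagnostics in errors_buffer/errors_len, overwriting the final slot once the buffer is full. A standalone sketch of that policy using a stand-in Diag type (the names here are hypothetical):

    const std = @import("std");

    const Diag = struct { tag: u32 };

    const Sink = struct {
        buffer: [4]Diag = undefined,
        len: usize = 0,

        // Mirrors the err() policy above: when full, replace the final entry.
        fn push(self: *Sink, d: Diag) void {
            if (self.len == self.buffer.len) {
                self.buffer[self.buffer.len - 1] = d;
            } else {
                self.buffer[self.len] = d;
                self.len += 1;
            }
        }

        fn slice(self: *Sink) []Diag {
            return self.buffer[0..self.len];
        }
    };

    test "bounded diagnostic sink (illustrative)" {
        var sink: Sink = .{};
        for (0..6) |i| sink.push(.{ .tag = @intCast(i) });
        try std.testing.expectEqual(@as(usize, 4), sink.slice().len);
        try std.testing.expectEqual(@as(u32, 5), sink.slice()[3].tag);
    }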
@@ -633,6 +633,17 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?u29) typ
return self;
}

+/// Initialize with externally-managed memory. The buffer determines the
+/// capacity, and the length is set to zero.
+/// When initialized this way, all methods that accept an Allocator
+/// argument are illegal to call.
+pub fn initBuffer(buffer: Slice) Self {
+return .{
+.items = buffer[0..0],
+.capacity = buffer.len,
+};
+}

/// Release all allocated memory.
pub fn deinit(self: *Self, allocator: Allocator) void {
allocator.free(self.allocatedSlice());
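The initBuffer doc comment above spells out the contract: the buffer fixes the capacity, the length starts at zero, and allocator-taking methods must not be called. A small usage sketch (element type and values are arbitrary):

    const std = @import("std");

    test "ArrayListUnmanaged.initBuffer usage (illustrative)" {
        var storage: [8]u32 = undefined;
        var list = std.ArrayListUnmanaged(u32).initBuffer(&storage);
        try std.testing.expectEqual(@as(usize, 8), list.capacity);

        // Only the *AssumeCapacity methods may be used; allocator-taking
        // methods are off limits for a buffer-backed list.
        list.appendAssumeCapacity(1);
        list.appendSliceAssumeCapacity(&.{ 2, 3 });
        try std.testing.expectEqualSlices(u32, &.{ 1, 2, 3 }, list.items);
    }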
@@ -12,7 +12,6 @@ const math = std.math;
const mem = std.mem;
const meta = std.meta;
const testing = std.testing;
-const BoundedArray = std.BoundedArray;
const assert = std.debug.assert;
const Endian = std.builtin.Endian;

@@ -63,46 +62,54 @@ pub fn Uint(comptime max_bits: comptime_int) type {

return struct {
const Self = @This();

const max_limbs_count = math.divCeil(usize, max_bits, t_bits) catch unreachable;
-const Limbs = BoundedArray(Limb, max_limbs_count);
-limbs: Limbs,

+limbs_buffer: [max_limbs_count]Limb,
+/// The number of active limbs.
+limbs_len: usize,

/// Number of bytes required to serialize an integer.
pub const encoded_bytes = math.divCeil(usize, max_bits, 8) catch unreachable;

-// Returns the number of active limbs.
-fn limbs_count(self: Self) usize {
-return self.limbs.len;
+/// Constant slice of active limbs.
+fn limbsConst(self: *const Self) []const Limb {
+return self.limbs_buffer[0..self.limbs_len];
}

+/// Mutable slice of active limbs.
+fn limbs(self: *Self) []Limb {
+return self.limbs_buffer[0..self.limbs_len];
+}

// Removes limbs whose value is zero from the active limbs.
fn normalize(self: Self) Self {
var res = self;
-if (self.limbs_count() < 2) {
+if (self.limbs_len < 2) {
return res;
}
-var i = self.limbs_count() - 1;
-while (i > 0 and res.limbs.get(i) == 0) : (i -= 1) {}
-res.limbs.resize(i + 1) catch unreachable;
+var i = self.limbs_len - 1;
+while (i > 0 and res.limbsConst()[i] == 0) : (i -= 1) {}
+res.limbs_len = i + 1;
+assert(res.limbs_len <= res.limbs_buffer.len);
return res;
}

/// The zero integer.
-pub const zero = zero: {
-var limbs = Limbs.init(0) catch unreachable;
-limbs.appendNTimesAssumeCapacity(0, max_limbs_count);
-break :zero Self{ .limbs = limbs };
+pub const zero: Self = .{
+.limbs_buffer = [1]Limb{0} ** max_limbs_count,
+.limbs_len = max_limbs_count,
};

/// Creates a new big integer from a primitive type.
/// This function may not run in constant time.
-pub fn fromPrimitive(comptime T: type, x_: T) OverflowError!Self {
-var x = x_;
-var out = Self.zero;
-for (0..out.limbs.capacity()) |i| {
-const t = if (@bitSizeOf(T) > t_bits) @as(TLimb, @truncate(x)) else x;
-out.limbs.set(i, t);
+pub fn fromPrimitive(comptime T: type, init_value: T) OverflowError!Self {
+var x = init_value;
+var out: Self = .{
+.limbs_buffer = undefined,
+.limbs_len = max_limbs_count,
+};
+for (&out.limbs_buffer) |*limb| {
+limb.* = if (@bitSizeOf(T) > t_bits) @as(TLimb, @truncate(x)) else x;
x = math.shr(T, x, t_bits);
}
if (x != 0) {

@@ -115,13 +122,13 @@ pub fn Uint(comptime max_bits: comptime_int) type {
/// This function may not run in constant time.
pub fn toPrimitive(self: Self, comptime T: type) OverflowError!T {
var x: T = 0;
-var i = self.limbs_count() - 1;
+var i = self.limbs_len - 1;
while (true) : (i -= 1) {
if (@bitSizeOf(T) >= t_bits and math.shr(T, x, @bitSizeOf(T) - t_bits) != 0) {
return error.Overflow;
}
x = math.shl(T, x, t_bits);
-const v = math.cast(T, self.limbs.get(i)) orelse return error.Overflow;
+const v = math.cast(T, self.limbsConst()[i]) orelse return error.Overflow;
x |= v;
if (i == 0) break;
}

@@ -140,9 +147,9 @@ pub fn Uint(comptime max_bits: comptime_int) type {
.big => bytes.len - 1,
.little => 0,
};
-for (0..self.limbs.len) |i| {
+for (0..self.limbs_len) |i| {
var remaining_bits = t_bits;
-var limb = self.limbs.get(i);
+var limb = self.limbsConst()[i];
while (remaining_bits >= 8) {
bytes[out_i] |= math.shl(u8, @as(u8, @truncate(limb)), shift);
const consumed = 8 - shift;

@@ -152,7 +159,7 @@ pub fn Uint(comptime max_bits: comptime_int) type {
switch (endian) {
.big => {
if (out_i == 0) {
-if (i != self.limbs.len - 1 or limb != 0) {
+if (i != self.limbs_len - 1 or limb != 0) {
return error.Overflow;
}
return;

@@ -162,7 +169,7 @@ pub fn Uint(comptime max_bits: comptime_int) type {
.little => {
out_i += 1;
if (out_i == bytes.len) {
-if (i != self.limbs.len - 1 or limb != 0) {
+if (i != self.limbs_len - 1 or limb != 0) {
return error.Overflow;
}
return;

@@ -187,20 +194,20 @@ pub fn Uint(comptime max_bits: comptime_int) type {
};
while (true) {
const bi = bytes[i];
-out.limbs.set(out_i, out.limbs.get(out_i) | math.shl(Limb, bi, shift));
+out.limbs()[out_i] |= math.shl(Limb, bi, shift);
shift += 8;
if (shift >= t_bits) {
shift -= t_bits;
-out.limbs.set(out_i, @as(TLimb, @truncate(out.limbs.get(out_i))));
+out.limbs()[out_i] = @as(TLimb, @truncate(out.limbs()[out_i]));
const overflow = math.shr(Limb, bi, 8 - shift);
out_i += 1;
-if (out_i >= out.limbs.len) {
+if (out_i >= out.limbs_len) {
if (overflow != 0 or i != 0) {
return error.Overflow;
}
break;
}
-out.limbs.set(out_i, overflow);
+out.limbs()[out_i] = overflow;
}
switch (endian) {
.big => {

@@ -218,32 +225,31 @@ pub fn Uint(comptime max_bits: comptime_int) type {

/// Returns `true` if both integers are equal.
pub fn eql(x: Self, y: Self) bool {
-return crypto.utils.timingSafeEql([max_limbs_count]Limb, x.limbs.buffer, y.limbs.buffer);
+return crypto.utils.timingSafeEql([max_limbs_count]Limb, x.limbs_buffer, y.limbs_buffer);
}

/// Compares two integers.
pub fn compare(x: Self, y: Self) math.Order {
return crypto.utils.timingSafeCompare(
Limb,
-x.limbs.constSlice(),
-y.limbs.constSlice(),
+x.limbsConst(),
+y.limbsConst(),
.little,
);
}

/// Returns `true` if the integer is zero.
pub fn isZero(x: Self) bool {
-const x_limbs = x.limbs.constSlice();
var t: Limb = 0;
-for (0..x.limbs_count()) |i| {
-t |= x_limbs[i];
+for (x.limbsConst()) |elem| {
+t |= elem;
}
return ct.eql(t, 0);
}

/// Returns `true` if the integer is odd.
pub fn isOdd(x: Self) bool {
-return @as(bool, @bitCast(@as(u1, @truncate(x.limbs.get(0)))));
+return @as(u1, @truncate(x.limbsConst()[0])) != 0;
}

/// Adds `y` to `x`, and returns `true` if the operation overflowed.

@@ -258,39 +264,31 @@ pub fn Uint(comptime max_bits: comptime_int) type {

// Replaces the limbs of `x` with the limbs of `y` if `on` is `true`.
fn cmov(x: *Self, on: bool, y: Self) void {
-const x_limbs = x.limbs.slice();
-const y_limbs = y.limbs.constSlice();
-for (0..y.limbs_count()) |i| {
-x_limbs[i] = ct.select(on, y_limbs[i], x_limbs[i]);
+for (x.limbs(), y.limbsConst()) |*x_limb, y_limb| {
+x_limb.* = ct.select(on, y_limb, x_limb.*);
}
}

-// Adds `y` to `x` if `on` is `true`, and returns `true` if the operation overflowed.
+// Adds `y` to `x` if `on` is `true`, and returns `true` if the
+// operation overflowed.
fn conditionalAddWithOverflow(x: *Self, on: bool, y: Self) u1 {
-assert(x.limbs_count() == y.limbs_count()); // Operands must have the same size.
-const x_limbs = x.limbs.slice();
-const y_limbs = y.limbs.constSlice();

var carry: u1 = 0;
-for (0..x.limbs_count()) |i| {
-const res = x_limbs[i] + y_limbs[i] + carry;
-x_limbs[i] = ct.select(on, @as(TLimb, @truncate(res)), x_limbs[i]);
-carry = @as(u1, @truncate(res >> t_bits));
+for (x.limbs(), y.limbsConst()) |*x_limb, y_limb| {
+const res = x_limb.* + y_limb + carry;
+x_limb.* = ct.select(on, @as(TLimb, @truncate(res)), x_limb.*);
+carry = @truncate(res >> t_bits);
}
return carry;
}

-// Subtracts `y` from `x` if `on` is `true`, and returns `true` if the operation overflowed.
+// Subtracts `y` from `x` if `on` is `true`, and returns `true` if the
+// operation overflowed.
fn conditionalSubWithOverflow(x: *Self, on: bool, y: Self) u1 {
-assert(x.limbs_count() == y.limbs_count()); // Operands must have the same size.
-const x_limbs = x.limbs.slice();
-const y_limbs = y.limbs.constSlice();

var borrow: u1 = 0;
-for (0..x.limbs_count()) |i| {
-const res = x_limbs[i] -% y_limbs[i] -% borrow;
-x_limbs[i] = ct.select(on, @as(TLimb, @truncate(res)), x_limbs[i]);
-borrow = @as(u1, @truncate(res >> t_bits));
+for (x.limbs(), y.limbsConst()) |*x_limb, y_limb| {
+const res = x_limb.* -% y_limb -% borrow;
+x_limb.* = ct.select(on, @as(TLimb, @truncate(res)), x_limb.*);
+borrow = @truncate(res >> t_bits);
}
return borrow;
}

@@ -315,7 +313,7 @@ fn Fe_(comptime bits: comptime_int) type {

// The number of active limbs to represent the field element.
fn limbs_count(self: Self) usize {
-return self.v.limbs_count();
+return self.v.limbs_len;
}

/// Creates a field element from a primitive.

@@ -398,7 +396,7 @@ pub fn Modulus(comptime max_bits: comptime_int) type {

// Number of active limbs in the modulus.
fn limbs_count(self: Self) usize {
-return self.v.limbs_count();
+return self.v.limbs_len;
}

/// Actual size of the modulus, in bits.

@@ -409,7 +407,7 @@ pub fn Modulus(comptime max_bits: comptime_int) type {
/// Returns the element `1`.
pub fn one(self: Self) Fe {
var fe = self.zero;
-fe.v.limbs.set(0, 1);
+fe.v.limbs()[0] = 1;
return fe;
}

@@ -419,10 +417,10 @@ pub fn Modulus(comptime max_bits: comptime_int) type {
if (!v_.isOdd()) return error.EvenModulus;

var v = v_.normalize();
-const hi = v.limbs.get(v.limbs_count() - 1);
-const lo = v.limbs.get(0);
+const hi = v.limbsConst()[v.limbs_len - 1];
+const lo = v.limbsConst()[0];

-if (v.limbs_count() < 2 and lo < 3) {
+if (v.limbs_len < 2 and lo < 3) {
return error.ModulusTooSmall;
}

@@ -481,18 +479,19 @@ pub fn Modulus(comptime max_bits: comptime_int) type {
const new_len = self.limbs_count();
if (fe.limbs_count() < new_len) return error.Overflow;
var acc: Limb = 0;
-for (fe.v.limbs.constSlice()[new_len..]) |limb| {
+for (fe.v.limbsConst()[new_len..]) |limb| {
acc |= limb;
}
if (acc != 0) return error.Overflow;
-try fe.v.limbs.resize(new_len);
+if (new_len > fe.v.limbs_buffer.len) return error.Overflow;
+fe.v.limbs_len = new_len;
}

// Computes R^2 for the Montgomery representation.
fn computeRR(self: *Self) void {
self.rr = self.zero;
const n = self.rr.limbs_count();
-self.rr.v.limbs.set(n - 1, 1);
+self.rr.v.limbs()[n - 1] = 1;
for ((n - 1)..(2 * n)) |_| {
self.shiftIn(&self.rr, 0);
}

@@ -502,9 +501,9 @@ pub fn Modulus(comptime max_bits: comptime_int) type {
/// Computes x << t_bits + y (mod m)
fn shiftIn(self: Self, x: *Fe, y: Limb) void {
var d = self.zero;
-const x_limbs = x.v.limbs.slice();
-const d_limbs = d.v.limbs.slice();
-const m_limbs = self.v.limbs.constSlice();
+const x_limbs = x.v.limbs();
+const d_limbs = d.v.limbs();
+const m_limbs = self.v.limbsConst();

var need_sub = false;
var i: usize = t_bits - 1;

@@ -569,18 +568,18 @@ pub fn Modulus(comptime max_bits: comptime_int) type {
/// Reduces an arbitrary `Uint`, converting it to a field element.
pub fn reduce(self: Self, x: anytype) Fe {
var out = self.zero;
-var i = x.limbs_count() - 1;
+var i = x.limbs_len - 1;
if (self.limbs_count() >= 2) {
const start = @min(i, self.limbs_count() - 2);
var j = start;
while (true) : (j -= 1) {
-out.v.limbs.set(j, x.limbs.get(i));
+out.v.limbs()[j] = x.limbsConst()[i];
i -= 1;
if (j == 0) break;
}
}
while (true) : (i -= 1) {
-self.shiftIn(&out, x.limbs.get(i));
+self.shiftIn(&out, x.limbsConst()[i]);
if (i == 0) break;
}
return out;

@@ -591,10 +590,10 @@ pub fn Modulus(comptime max_bits: comptime_int) type {
assert(d.limbs_count() == y.limbs_count());
assert(d.limbs_count() == self.limbs_count());

-const a_limbs = x.v.limbs.constSlice();
-const b_limbs = y.v.limbs.constSlice();
-const d_limbs = d.v.limbs.slice();
-const m_limbs = self.v.limbs.constSlice();
+const a_limbs = x.v.limbsConst();
+const b_limbs = y.v.limbsConst();
+const d_limbs = d.v.limbs();
+const m_limbs = self.v.limbsConst();

var overflow: u1 = 0;
for (0..self.limbs_count()) |i| {

@@ -685,7 +684,7 @@ pub fn Modulus(comptime max_bits: comptime_int) type {
const k: u1 = @truncate(b >> j);
if (k != 0) {
const t = self.montgomeryMul(out, x_m);
-@memcpy(out.v.limbs.slice(), t.v.limbs.constSlice());
+@memcpy(out.v.limbs(), t.v.limbsConst());
}
if (j == 0) break;
}

@@ -731,7 +730,7 @@ pub fn Modulus(comptime max_bits: comptime_int) type {
}
const t1 = self.montgomeryMul(out, t0);
if (public) {
-@memcpy(out.v.limbs.slice(), t1.v.limbs.constSlice());
+@memcpy(out.v.limbs(), t1.v.limbsConst());
} else {
out.v.cmov(!ct.eql(k, 0), t1.v);
}

@@ -790,9 +789,9 @@ pub fn Modulus(comptime max_bits: comptime_int) type {
pub fn powPublic(self: Self, x: Fe, e: Fe) NullExponentError!Fe {
var e_normalized = Fe{ .v = e.v.normalize() };
var buf_: [Fe.encoded_bytes]u8 = undefined;
-var buf = buf_[0 .. math.divCeil(usize, e_normalized.v.limbs_count() * t_bits, 8) catch unreachable];
+var buf = buf_[0 .. math.divCeil(usize, e_normalized.v.limbs_len * t_bits, 8) catch unreachable];
e_normalized.toBytes(buf, .little) catch unreachable;
-const leading = @clz(e_normalized.v.limbs.get(e_normalized.v.limbs_count() - carry_bits));
+const leading = @clz(e_normalized.v.limbsConst()[e_normalized.v.limbs_len - carry_bits]);
buf = buf[0 .. buf.len - leading / 8];
return self.powWithEncodedPublicExponent(x, buf, .little);
}

@@ -835,20 +834,16 @@ const ct_protected = struct {

// Compares two big integers in constant time, returning true if x < y.
fn limbsCmpLt(x: anytype, y: @TypeOf(x)) bool {
-assert(x.limbs_count() == y.limbs_count());
-const x_limbs = x.limbs.constSlice();
-const y_limbs = y.limbs.constSlice();

var c: u1 = 0;
-for (0..x.limbs_count()) |i| {
-c = @as(u1, @truncate((x_limbs[i] -% y_limbs[i] -% c) >> t_bits));
+for (x.limbsConst(), y.limbsConst()) |x_limb, y_limb| {
+c = @truncate((x_limb -% y_limb -% c) >> t_bits);
}
-return @as(bool, @bitCast(c));
+return c != 0;
}

// Compares two big integers in constant time, returning true if x >= y.
fn limbsCmpGeq(x: anytype, y: @TypeOf(x)) bool {
-return @as(bool, @bitCast(1 - @intFromBool(ct.limbsCmpLt(x, y))));
+return !ct.limbsCmpLt(x, y);
}

// Multiplies two limbs and returns the result as a wide limb.
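In the big-integer code above, Uint drops its BoundedArray of limbs in favor of a raw limbs_buffer plus a limbs_len count, exposed through limbs() and limbsConst(). A toy version of that layout (limb type and sizes simplified; none of the real Limb/TLimb machinery):

    const std = @import("std");

    fn ToyUint(comptime max_limbs: usize) type {
        return struct {
            const Self = @This();

            limbs_buffer: [max_limbs]u64,
            /// The number of active limbs.
            limbs_len: usize,

            // Mutable view of the active limbs.
            fn limbs(self: *Self) []u64 {
                return self.limbs_buffer[0..self.limbs_len];
            }

            // Read-only view of the active limbs.
            fn limbsConst(self: *const Self) []const u64 {
                return self.limbs_buffer[0..self.limbs_len];
            }
        };
    }

    test "buffer + length in place of BoundedArray (illustrative)" {
        var x: ToyUint(4) = .{ .limbs_buffer = .{ 0, 0, 0, 0 }, .limbs_len = 2 };
        x.limbs()[0] = 7;
        try std.testing.expectEqual(@as(u64, 7), x.limbsConst()[0]);
        try std.testing.expectEqual(@as(usize, 2), x.limbsConst().len);
    }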
@@ -2204,28 +2204,29 @@ pub const Dir = struct {
name: []const u8,
parent_dir: Dir,
iter: IterableDir.Iterator,

+fn closeAll(items: []@This()) void {
+for (items) |*item| item.iter.dir.close();
+}
};

-var stack = std.BoundedArray(StackItem, 16){};
-defer {
-for (stack.slice()) |*item| {
-item.iter.dir.close();
-}
-}
+var stack_buffer: [16]StackItem = undefined;
+var stack = std.ArrayListUnmanaged(StackItem).initBuffer(&stack_buffer);
+defer StackItem.closeAll(stack.items);

-stack.appendAssumeCapacity(StackItem{
+stack.appendAssumeCapacity(.{
.name = sub_path,
.parent_dir = self,
.iter = initial_iterable_dir.iterateAssumeFirstIteration(),
});

-process_stack: while (stack.len != 0) {
-var top = &(stack.slice()[stack.len - 1]);
+process_stack: while (stack.items.len != 0) {
+var top = &stack.items[stack.items.len - 1];
while (try top.iter.next()) |entry| {
var treat_as_dir = entry.kind == .directory;
handle_entry: while (true) {
if (treat_as_dir) {
-if (stack.ensureUnusedCapacity(1)) {
+if (stack.unusedCapacitySlice().len >= 1) {
var iterable_dir = top.iter.dir.openIterableDir(entry.name, .{ .no_follow = true }) catch |err| switch (err) {
error.NotDir => {
treat_as_dir = false;

@@ -2251,13 +2252,13 @@ pub const Dir = struct {
error.DeviceBusy,
=> |e| return e,
};
-stack.appendAssumeCapacity(StackItem{
+stack.appendAssumeCapacity(.{
.name = entry.name,
.parent_dir = top.iter.dir,
.iter = iterable_dir.iterateAssumeFirstIteration(),
});
continue :process_stack;
-} else |_| {
+} else {
try top.iter.dir.deleteTreeMinStackSizeWithKindHint(entry.name, entry.kind);
break :handle_entry;
}

@@ -2301,7 +2302,7 @@ pub const Dir = struct {
// pop the value from the stack.
const parent_dir = top.parent_dir;
const name = top.name;
-_ = stack.pop();
+stack.items.len -= 1;

var need_to_retry: bool = false;
parent_dir.deleteDir(name) catch |err| switch (err) {

@@ -2374,7 +2375,7 @@ pub const Dir = struct {
};
// We know there is room on the stack since we are just re-adding
// the StackItem that we previously popped.
-stack.appendAssumeCapacity(StackItem{
+stack.appendAssumeCapacity(.{
.name = name,
.parent_dir = parent_dir,
.iter = iterable_dir.iterateAssumeFirstIteration(),
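deleteTree above keeps its directory stack in a 16-element buffer and pushes only when unusedCapacitySlice() shows room, falling back to the slower path otherwise. A skeletal version of that capacity check, with integers standing in for StackItem:

    const std = @import("std");

    test "push only when the fixed stack has room (illustrative)" {
        var stack_buffer: [16]u8 = undefined;
        var stack = std.ArrayListUnmanaged(u8).initBuffer(&stack_buffer);

        var pushed: usize = 0;
        for (0..32) |i| {
            // Stand-in for the ensureUnusedCapacity() check, minus the allocator:
            // when the buffer is full, a real caller would take another strategy.
            if (stack.unusedCapacitySlice().len >= 1) {
                stack.appendAssumeCapacity(@intCast(i));
                pushed += 1;
            }
        }
        try std.testing.expectEqual(@as(usize, 16), pushed);
        try std.testing.expectEqual(@as(usize, 16), stack.items.len);
    }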
@@ -1246,13 +1246,16 @@ pub const Parser = struct {
self.nextToken(.normal) catch unreachable;
switch (statement_type) {
.file_version, .product_version => {
-var parts = std.BoundedArray(*Node, 4){};
+var parts_buffer: [4]*Node = undefined;
+var parts = std.ArrayListUnmanaged(*Node).initBuffer(&parts_buffer);

-while (parts.len < 4) {
+while (true) {
const value = try self.parseExpression(.{ .allowed_types = .{ .number = true } });
parts.addOneAssumeCapacity().* = value;

-if (parts.len == 4 or !(try self.parseOptionalToken(.comma))) {
+if (parts.unusedCapacitySlice().len == 0 or
+!(try self.parseOptionalToken(.comma)))
+{
break;
}
}

@@ -1260,7 +1263,7 @@ pub const Parser = struct {
const node = try self.state.arena.create(Node.VersionStatement);
node.* = .{
.type = type_token,
-.parts = try self.state.arena.dupe(*Node, parts.slice()),
+.parts = try self.state.arena.dupe(*Node, parts.items),
};
return &node.base;
},
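The version-statement parser above gathers at most four parts, stopping when the buffer is full or no comma follows. A simplified stand-in that parses numbers from a comma-separated string instead of resource-script tokens:

    const std = @import("std");

    test "collect at most four parts (illustrative)" {
        var parts_buffer: [4]u32 = undefined;
        var parts = std.ArrayListUnmanaged(u32).initBuffer(&parts_buffer);

        var it = std.mem.tokenizeScalar(u8, "1,2,3,4,5", ',');
        while (true) {
            const tok = it.next() orelse break;
            parts.appendAssumeCapacity(try std.fmt.parseInt(u32, tok, 10));
            // Stop once capacity is exhausted, mirroring the parser's check.
            if (parts.unusedCapacitySlice().len == 0) break;
        }
        try std.testing.expectEqualSlices(u32, &.{ 1, 2, 3, 4 }, parts.items);
    }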
@@ -601,9 +601,8 @@ fn renderFieldName(writer: anytype, operands: []const g.Operand, field_index: us
const operand = operands[field_index];

// Should be enough for all names - adjust as needed.
-var name_buffer = std.BoundedArray(u8, 64){
-.buffer = undefined,
-};
+var name_backing_buffer: [64]u8 = undefined;
+var name_buffer = std.ArrayListUnmanaged(u8).initBuffer(&name_backing_buffer);

derive_from_kind: {
// Operand names are often in the json encoded as "'Name'" (with two sets of quotes).

@@ -617,33 +616,33 @@ fn renderFieldName(writer: anytype, operands: []const g.Operand, field_index: us
// Use the same loop to transform to snake-case.
for (name) |c| {
switch (c) {
-'a'...'z', '0'...'9' => try name_buffer.append(c),
-'A'...'Z' => try name_buffer.append(std.ascii.toLower(c)),
-' ', '~' => try name_buffer.append('_'),
+'a'...'z', '0'...'9' => name_buffer.appendAssumeCapacity(c),
+'A'...'Z' => name_buffer.appendAssumeCapacity(std.ascii.toLower(c)),
+' ', '~' => name_buffer.appendAssumeCapacity('_'),
else => break :derive_from_kind,
}
}

// Assume there are no duplicate 'name' fields.
-try writer.print("{}", .{std.zig.fmtId(name_buffer.slice())});
+try writer.print("{}", .{std.zig.fmtId(name_buffer.items)});
return;
}

// Translate to snake case.
-name_buffer.len = 0;
+name_buffer.items.len = 0;
for (operand.kind, 0..) |c, i| {
switch (c) {
-'a'...'z', '0'...'9' => try name_buffer.append(c),
+'a'...'z', '0'...'9' => name_buffer.appendAssumeCapacity(c),
'A'...'Z' => if (i > 0 and std.ascii.isLower(operand.kind[i - 1])) {
-try name_buffer.appendSlice(&[_]u8{ '_', std.ascii.toLower(c) });
+name_buffer.appendSliceAssumeCapacity(&[_]u8{ '_', std.ascii.toLower(c) });
} else {
-try name_buffer.append(std.ascii.toLower(c));
+name_buffer.appendAssumeCapacity(std.ascii.toLower(c));
},
else => unreachable, // Assume that the name is valid C-syntax (and contains no underscores).
}
}

-try writer.print("{}", .{std.zig.fmtId(name_buffer.slice())});
+try writer.print("{}", .{std.zig.fmtId(name_buffer.items)});

// For fields derived from type name, there could be any amount.
// Simply check against all other fields, and if another similar one exists, add a number.
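renderFieldName above lowercases and snake_cases operand kinds into a 64-byte stack buffer with the AssumeCapacity appenders. A self-contained sketch of that conversion on an arbitrary input string:

    const std = @import("std");

    test "snake_case into a fixed buffer (illustrative)" {
        var backing: [64]u8 = undefined;
        var out = std.ArrayListUnmanaged(u8).initBuffer(&backing);

        const kind = "IdResultType";
        for (kind, 0..) |c, i| {
            switch (c) {
                'a'...'z', '0'...'9' => out.appendAssumeCapacity(c),
                'A'...'Z' => if (i > 0 and std.ascii.isLower(kind[i - 1])) {
                    out.appendSliceAssumeCapacity(&[_]u8{ '_', std.ascii.toLower(c) });
                } else {
                    out.appendAssumeCapacity(std.ascii.toLower(c));
                },
                else => unreachable,
            }
        }
        try std.testing.expectEqualStrings("id_result_type", out.items);
    }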