mirror of
https://github.com/ziglang/zig.git
synced 2025-12-24 15:13:08 +00:00
std.mem.Allocator: keep the undefined memset
Reversal of the decision: the Allocator interface is the correct place for the memset to undefined, because it allows Allocator implementations to bypass the interface and use a backing allocator directly — skipping the performance penalty of memsetting the entire allocation, which may be very large, or which may contain valuable zeroes. Closes #4298
This commit is contained in:
parent
2c5113f6d1
commit
36e9b0f026
@ -157,6 +157,9 @@ pub const Config = struct {
|
|||||||
|
|
||||||
/// Enables emitting info messages with the size and address of every allocation.
|
/// Enables emitting info messages with the size and address of every allocation.
|
||||||
verbose_log: bool = false,
|
verbose_log: bool = false,
|
||||||
|
|
||||||
|
/// Tell whether the backing allocator returns already-zeroed memory.
|
||||||
|
backing_allocator_zeroes: bool = true,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const Check = enum { ok, leak };
|
pub const Check = enum { ok, leak };
|
||||||
@ -179,7 +182,8 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
|
|||||||
|
|
||||||
const Self = @This();
|
const Self = @This();
|
||||||
|
|
||||||
/// The initial state of a `GeneralPurposeAllocator`, containing no allocations and backed by the system page allocator.
|
/// The initial state of a `GeneralPurposeAllocator`, containing no
|
||||||
|
/// allocations and backed by the system page allocator.
|
||||||
pub const init: Self = .{
|
pub const init: Self = .{
|
||||||
.backing_allocator = std.heap.page_allocator,
|
.backing_allocator = std.heap.page_allocator,
|
||||||
.buckets = [1]Buckets{.{}} ** small_bucket_count,
|
.buckets = [1]Buckets{.{}} ** small_bucket_count,
|
||||||
@ -508,7 +512,7 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
|
|||||||
fn collectStackTrace(first_trace_addr: usize, addresses: *[stack_n]usize) void {
|
fn collectStackTrace(first_trace_addr: usize, addresses: *[stack_n]usize) void {
|
||||||
if (stack_n == 0) return;
|
if (stack_n == 0) return;
|
||||||
@memset(addresses, 0);
|
@memset(addresses, 0);
|
||||||
var stack_trace = StackTrace{
|
var stack_trace: StackTrace = .{
|
||||||
.instruction_addresses = addresses,
|
.instruction_addresses = addresses,
|
||||||
.index = 0,
|
.index = 0,
|
||||||
};
|
};
|
||||||
@ -1092,23 +1096,30 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn createBucket(self: *Self, size_class: usize) Error!*BucketHeader {
|
fn createBucket(self: *Self, size_class: usize) Error!*BucketHeader {
|
||||||
const page = try self.backing_allocator.alignedAlloc(u8, page_size, page_size);
|
const alignment: mem.Alignment = .fromByteUnits(page_size);
|
||||||
errdefer self.backing_allocator.free(page);
|
const page = self.backing_allocator.rawAlloc(page_size, alignment, @returnAddress()) orelse
|
||||||
|
return error.OutOfMemory;
|
||||||
|
errdefer self.backing_allocator.rawFree(page[0..page_size], alignment, @returnAddress());
|
||||||
|
|
||||||
const bucket_size = bucketSize(size_class);
|
const bucket_size = bucketSize(size_class);
|
||||||
const bucket_bytes = try self.backing_allocator.alignedAlloc(u8, @alignOf(BucketHeader), bucket_size);
|
const header_align: mem.Alignment = .fromByteUnits(@alignOf(BucketHeader));
|
||||||
const ptr: *BucketHeader = @ptrCast(bucket_bytes.ptr);
|
const ptr: *BucketHeader = @alignCast(@ptrCast(self.backing_allocator.rawAlloc(
|
||||||
|
bucket_size,
|
||||||
|
header_align,
|
||||||
|
@returnAddress(),
|
||||||
|
) orelse return error.OutOfMemory));
|
||||||
ptr.* = .{
|
ptr.* = .{
|
||||||
.page = page.ptr,
|
.page = @alignCast(page),
|
||||||
.alloc_cursor = 0,
|
.alloc_cursor = 0,
|
||||||
.used_count = 0,
|
.used_count = 0,
|
||||||
};
|
};
|
||||||
// Set the used bits to all zeroes
|
if (!config.backing_allocator_zeroes) {
|
||||||
@memset(@as([*]u8, @as(*[1]u8, ptr.usedBits(0)))[0..usedBitsCount(size_class)], 0);
|
@memset(@as([*]u8, @as(*[1]u8, ptr.usedBits(0)))[0..usedBitsCount(size_class)], 0);
|
||||||
if (config.safety) {
|
if (config.safety) {
|
||||||
// Set the requested sizes to zeroes
|
// Set the requested sizes to zeroes
|
||||||
@memset(mem.sliceAsBytes(ptr.requestedSizes(size_class)), 0);
|
@memset(mem.sliceAsBytes(ptr.requestedSizes(size_class)), 0);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
return ptr;
|
return ptr;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|||||||
@ -128,25 +128,25 @@ pub fn noFree(
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// This function is not intended to be called except from within the
|
/// This function is not intended to be called except from within the
|
||||||
/// implementation of an Allocator
|
/// implementation of an `Allocator`.
|
||||||
pub inline fn rawAlloc(a: Allocator, len: usize, alignment: Alignment, ret_addr: usize) ?[*]u8 {
|
pub inline fn rawAlloc(a: Allocator, len: usize, alignment: Alignment, ret_addr: usize) ?[*]u8 {
|
||||||
return a.vtable.alloc(a.ptr, len, alignment, ret_addr);
|
return a.vtable.alloc(a.ptr, len, alignment, ret_addr);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This function is not intended to be called except from within the
|
/// This function is not intended to be called except from within the
|
||||||
/// implementation of an Allocator.
|
/// implementation of an `Allocator`.
|
||||||
pub inline fn rawResize(a: Allocator, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) bool {
|
pub inline fn rawResize(a: Allocator, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) bool {
|
||||||
return a.vtable.resize(a.ptr, memory, alignment, new_len, ret_addr);
|
return a.vtable.resize(a.ptr, memory, alignment, new_len, ret_addr);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This function is not intended to be called except from within the
|
/// This function is not intended to be called except from within the
|
||||||
/// implementation of an Allocator.
|
/// implementation of an `Allocator`.
|
||||||
pub inline fn rawRemap(a: Allocator, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) ?[*]u8 {
|
pub inline fn rawRemap(a: Allocator, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) ?[*]u8 {
|
||||||
return a.vtable.remap(a.ptr, memory, alignment, new_len, ret_addr);
|
return a.vtable.remap(a.ptr, memory, alignment, new_len, ret_addr);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This function is not intended to be called except from within the
|
/// This function is not intended to be called except from within the
|
||||||
/// implementation of an Allocator
|
/// implementation of an `Allocator`.
|
||||||
pub inline fn rawFree(a: Allocator, memory: []u8, alignment: Alignment, ret_addr: usize) void {
|
pub inline fn rawFree(a: Allocator, memory: []u8, alignment: Alignment, ret_addr: usize) void {
|
||||||
return a.vtable.free(a.ptr, memory, alignment, ret_addr);
|
return a.vtable.free(a.ptr, memory, alignment, ret_addr);
|
||||||
}
|
}
|
||||||
@ -271,7 +271,6 @@ fn allocBytesWithAlignment(self: Allocator, comptime alignment: u29, byte_count:
|
|||||||
}
|
}
|
||||||
|
|
||||||
const byte_ptr = self.rawAlloc(byte_count, .fromByteUnits(alignment), return_address) orelse return Error.OutOfMemory;
|
const byte_ptr = self.rawAlloc(byte_count, .fromByteUnits(alignment), return_address) orelse return Error.OutOfMemory;
|
||||||
// TODO: https://github.com/ziglang/zig/issues/4298
|
|
||||||
@memset(byte_ptr[0..byte_count], undefined);
|
@memset(byte_ptr[0..byte_count], undefined);
|
||||||
return @alignCast(byte_ptr);
|
return @alignCast(byte_ptr);
|
||||||
}
|
}
|
||||||
@ -391,7 +390,6 @@ pub fn reallocAdvanced(
|
|||||||
return error.OutOfMemory;
|
return error.OutOfMemory;
|
||||||
const copy_len = @min(byte_count, old_byte_slice.len);
|
const copy_len = @min(byte_count, old_byte_slice.len);
|
||||||
@memcpy(new_mem[0..copy_len], old_byte_slice[0..copy_len]);
|
@memcpy(new_mem[0..copy_len], old_byte_slice[0..copy_len]);
|
||||||
// TODO https://github.com/ziglang/zig/issues/4298
|
|
||||||
@memset(old_byte_slice, undefined);
|
@memset(old_byte_slice, undefined);
|
||||||
self.rawFree(old_byte_slice, .fromByteUnits(Slice.alignment), return_address);
|
self.rawFree(old_byte_slice, .fromByteUnits(Slice.alignment), return_address);
|
||||||
|
|
||||||
@ -408,7 +406,6 @@ pub fn free(self: Allocator, memory: anytype) void {
|
|||||||
const bytes_len = bytes.len + if (Slice.sentinel() != null) @sizeOf(Slice.child) else 0;
|
const bytes_len = bytes.len + if (Slice.sentinel() != null) @sizeOf(Slice.child) else 0;
|
||||||
if (bytes_len == 0) return;
|
if (bytes_len == 0) return;
|
||||||
const non_const_ptr = @constCast(bytes.ptr);
|
const non_const_ptr = @constCast(bytes.ptr);
|
||||||
// TODO: https://github.com/ziglang/zig/issues/4298
|
|
||||||
@memset(non_const_ptr[0..bytes_len], undefined);
|
@memset(non_const_ptr[0..bytes_len], undefined);
|
||||||
self.rawFree(non_const_ptr[0..bytes_len], .fromByteUnits(Slice.alignment), @returnAddress());
|
self.rawFree(non_const_ptr[0..bytes_len], .fromByteUnits(Slice.alignment), @returnAddress());
|
||||||
}
|
}
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user