mirror of https://github.com/ziglang/zig.git

self-hosted: test all out of memory conditions

parent c4e7d05ce3
commit ed4d94a5d5
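The testing pattern this commit introduces, in outline: run the code under test once with a FailingAllocator whose fail_index is effectively unlimited, and record how many allocations it performed; then re-run it once per allocation index with the allocator rigged to fail at exactly that point, requiring every such run to report error.OutOfMemory and nothing else. Below is a minimal sketch of that pattern in the same 2018-era Zig dialect as the diff; sketch_mem, doWork, and testEveryOomPoint are hypothetical stand-ins for the fixed_buffer_mem, testParse, and testCanonical in the actual change.

const std = @import("std");
const mem = std.mem;
const assert = std.debug.assert;

var sketch_mem: [100 * 1024]u8 = undefined;

// Hypothetical function under test; stands in for testParse. It allocates by
// calling through the allocFn field, the same way FailingAllocator itself does.
fn doWork(allocator: &mem.Allocator) -> %[]u8 {
    return allocator.allocFn(allocator, 64, 8);
}

fn testEveryOomPoint() {
    // Pass 1: an effectively unlimited fail_index, just to count allocations.
    const needed_alloc_count = {
        var fixed_allocator = mem.FixedBufferAllocator.init(sketch_mem[0..]);
        var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, @maxValue(usize));
        const result = doWork(&failing_allocator.allocator) %% @panic("test failed");
        failing_allocator.allocator.free(result);
        failing_allocator.index
    };

    // Pass 2: force a failure at every allocation index in turn; each run must
    // surface error.OutOfMemory rather than crash.
    var fail_index = needed_alloc_count;
    while (fail_index != 0) {
        fail_index -= 1;
        var fixed_allocator = mem.FixedBufferAllocator.init(sketch_mem[0..]);
        var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, fail_index);
        if (doWork(&failing_allocator.allocator)) |_| {
            @panic("non-deterministic memory usage");
        } else |err| {
            assert(err == error.OutOfMemory);
        }
    }
}

Because the FixedBufferAllocator is re-initialized for every iteration, each failing run starts from a clean slate; a run that unexpectedly succeeds indicates the code under test does not allocate deterministically.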
@@ -8,6 +8,7 @@ const warn = std.debug.warn;
 const Tokenizer = @import("tokenizer.zig").Tokenizer;
 const Token = @import("tokenizer.zig").Token;
 const Parser = @import("parser.zig").Parser;
+const assert = std.debug.assert;
 
 pub fn main() -> %void {
     main2() %% |err| {
@@ -68,28 +69,48 @@ pub fn main2() -> %void {
 
 var fixed_buffer_mem: [100 * 1024]u8 = undefined;
 
-fn testCanonical(source: []const u8) {
-    var fixed_allocator = mem.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
-    const allocator = &fixed_allocator.allocator;
-
+fn testParse(source: []const u8, allocator: &mem.Allocator) -> %[]u8 {
     var tokenizer = Tokenizer.init(source);
     var parser = Parser.init(&tokenizer, allocator, "(memory buffer)");
     defer parser.deinit();
 
-    const root_node = parser.parse() %% unreachable;
+    const root_node = %return parser.parse();
     defer parser.freeAst(root_node);
 
-    var buffer = std.Buffer.initSize(allocator, 0) %% unreachable;
+    var buffer = %return std.Buffer.initSize(allocator, 0);
     var buffer_out_stream = io.BufferOutStream.init(&buffer);
-    parser.renderSource(&buffer_out_stream.stream, root_node) %% unreachable;
+    %return parser.renderSource(&buffer_out_stream.stream, root_node);
+    return buffer.toOwnedSlice();
+}
 
-    if (!mem.eql(u8, buffer.toSliceConst(), source)) {
-        warn("\n====== expected this output: =========\n");
-        warn("{}", source);
-        warn("\n======== instead found this: =========\n");
-        warn("{}", buffer.toSliceConst());
-        warn("\n======================================\n");
-        @panic("test failed");
+fn testCanonical(source: []const u8) {
+    const needed_alloc_count = {
+        // Try it once with unlimited memory, make sure it works
+        var fixed_allocator = mem.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
+        var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, @maxValue(usize));
+        const result_source = testParse(source, &failing_allocator.allocator) %% @panic("test failed");
+        if (!mem.eql(u8, result_source, source)) {
+            warn("\n====== expected this output: =========\n");
+            warn("{}", source);
+            warn("\n======== instead found this: =========\n");
+            warn("{}", result_source);
+            warn("\n======================================\n");
+            @panic("test failed");
+        }
+        failing_allocator.allocator.free(result_source);
+        failing_allocator.index
+    };
+
+    var fail_index = needed_alloc_count;
+    while (fail_index != 0) {
+        fail_index -= 1;
+        var fixed_allocator = mem.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
+        var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, fail_index);
+        if (testParse(source, &failing_allocator.allocator)) |_| {
+            @panic("non-deterministic memory usage");
+        } else |err| {
+            assert(err == error.OutOfMemory);
+        }
     }
 }
 
@@ -18,6 +18,7 @@ pub const Parser = struct {
     put_back_tokens: [2]Token,
     put_back_count: usize,
     source_file_name: []const u8,
+    cleanup_root_node: ?&ast.NodeRoot,
 
     // This memory contents are used only during a function call. It's used to repurpose memory;
     // specifically so that freeAst can be guaranteed to succeed.
@@ -32,10 +33,12 @@
             .put_back_count = 0,
             .source_file_name = source_file_name,
             .utility_bytes = []align(utility_bytes_align) u8{},
+            .cleanup_root_node = null,
         };
     }
 
     pub fn deinit(self: &Parser) {
+        assert(self.cleanup_root_node == null);
         self.allocator.free(self.utility_bytes);
     }
 
@@ -115,13 +118,29 @@
     }
 
     pub fn parse(self: &Parser) -> %&ast.NodeRoot {
+        const result = self.parseInner() %% |err| {
+            if (self.cleanup_root_node) |root_node| {
+                self.freeAst(root_node);
+            }
+            err
+        };
+        self.cleanup_root_node = null;
+        return result;
+    }
+
+    pub fn parseInner(self: &Parser) -> %&ast.NodeRoot {
         var stack = self.initUtilityArrayList(State);
         defer self.deinitUtilityArrayList(stack);
 
-        const root_node = %return self.createRoot();
-        %defer self.allocator.destroy(root_node);
-        %return stack.append(State.TopLevel);
-        %defer self.freeAst(root_node);
+        const root_node = {
+            const root_node = %return self.createRoot();
+            %defer self.allocator.destroy(root_node);
+            // This stack append has to succeed for freeAst to work
+            %return stack.append(State.TopLevel);
+            root_node
+        };
+        assert(self.cleanup_root_node == null);
+        self.cleanup_root_node = root_node;
 
         while (true) {
             //{
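The parse()/parseInner() split in the hunk above is a general shape for making a multi-step builder survive out-of-memory errors: the inner function registers the partially built result on the struct as soon as it is safe to free, the outer wrapper frees whatever was registered if a later step fails, and the registration is cleared on success (deinit then asserts that nothing was left pending). A condensed, hypothetical sketch of that shape in the same dialect, assuming the era's mem.Allocator create/destroy calls; Builder and Thing are invented names, not part of the commit.

const std = @import("std");
const mem = std.mem;
const assert = std.debug.assert;

// Hypothetical partial result; stands in for ast.NodeRoot.
const Thing = struct {
    value: usize,
};

const Builder = struct {
    allocator: &mem.Allocator,
    cleanup_thing: ?&Thing,

    // On any error from buildInner, free whatever was registered for cleanup,
    // then let the error propagate to the caller.
    pub fn build(self: &Builder) -> %&Thing {
        const result = self.buildInner() %% |err| {
            if (self.cleanup_thing) |thing| {
                self.allocator.destroy(thing);
            }
            return err;
        };
        self.cleanup_thing = null;
        return result;
    }

    fn buildInner(self: &Builder) -> %&Thing {
        const thing = %return self.allocator.create(Thing);
        %defer self.allocator.destroy(thing);
        assert(self.cleanup_thing == null);
        self.cleanup_thing = thing;
        // Any further fallible steps can now rely on build() to release `thing`.
        return thing;
    }
};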
@@ -1063,11 +1082,15 @@ pub const Parser = struct {
         const new_byte_count = self.utility_bytes.len - self.utility_bytes.len % @sizeOf(T);
         self.utility_bytes = self.allocator.alignedShrink(u8, utility_bytes_align, self.utility_bytes, new_byte_count);
         const typed_slice = ([]T)(self.utility_bytes);
-        return ArrayList(T).fromOwnedSlice(self.allocator, typed_slice);
+        return ArrayList(T) {
+            .allocator = self.allocator,
+            .items = typed_slice,
+            .len = 0,
+        };
     }
 
     fn deinitUtilityArrayList(self: &Parser, list: var) {
-        self.utility_bytes = ([]align(utility_bytes_align) u8)(list.toOwnedSlice());
+        self.utility_bytes = ([]align(utility_bytes_align) u8)(list.items);
     }
 
 };
@@ -968,3 +968,59 @@ fn readILeb128(in_stream: &io.InStream) -> %i64 {
 pub const global_allocator = &global_fixed_allocator.allocator;
 var global_fixed_allocator = mem.FixedBufferAllocator.init(global_allocator_mem[0..]);
 var global_allocator_mem: [100 * 1024]u8 = undefined;
+
+/// Allocator that fails after N allocations, useful for making sure out of
+/// memory conditions are handled correctly.
+pub const FailingAllocator = struct {
+    allocator: mem.Allocator,
+    index: usize,
+    fail_index: usize,
+    internal_allocator: &mem.Allocator,
+    allocated_bytes: usize,
+
+    pub fn init(allocator: &mem.Allocator, fail_index: usize) -> FailingAllocator {
+        return FailingAllocator {
+            .internal_allocator = allocator,
+            .fail_index = fail_index,
+            .index = 0,
+            .allocated_bytes = 0,
+            .allocator = mem.Allocator {
+                .allocFn = alloc,
+                .reallocFn = realloc,
+                .freeFn = free,
+            },
+        };
+    }
+
+    fn alloc(allocator: &mem.Allocator, n: usize, alignment: u29) -> %[]u8 {
+        const self = @fieldParentPtr(FailingAllocator, "allocator", allocator);
+        if (self.index == self.fail_index) {
+            return error.OutOfMemory;
+        }
+        self.index += 1;
+        const result = %return self.internal_allocator.allocFn(self.internal_allocator, n, alignment);
+        self.allocated_bytes += result.len;
+        return result;
+    }
+
+    fn realloc(allocator: &mem.Allocator, old_mem: []u8, new_size: usize, alignment: u29) -> %[]u8 {
+        const self = @fieldParentPtr(FailingAllocator, "allocator", allocator);
+        if (new_size <= old_mem.len) {
+            self.allocated_bytes -= old_mem.len - new_size;
+            return self.internal_allocator.reallocFn(self.internal_allocator, old_mem, new_size, alignment);
+        }
+        if (self.index == self.fail_index) {
+            return error.OutOfMemory;
+        }
+        self.index += 1;
+        const result = %return self.internal_allocator.reallocFn(self.internal_allocator, old_mem, new_size, alignment);
+        self.allocated_bytes += new_size - old_mem.len;
+        return result;
+    }
+
+    fn free(allocator: &mem.Allocator, bytes: []u8) {
+        const self = @fieldParentPtr(FailingAllocator, "allocator", allocator);
+        self.allocated_bytes -= bytes.len;
+        return self.internal_allocator.freeFn(self.internal_allocator, bytes);
+    }
+};
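Besides driving the counting loop above, FailingAllocator can be used directly to script a failure at a chosen point. A hypothetical snippet, not part of the commit, that calls through the allocFn field exactly as the wrapper itself does (the buffer size and the alignment value 8 are arbitrary): allow two allocations, then check that the third fails with error.OutOfMemory and that the bookkeeping fields read as expected.

const std = @import("std");
const mem = std.mem;
const assert = std.debug.assert;

var demo_buf: [4096]u8 = undefined;

fn demoFailingAllocator() {
    var fixed_allocator = mem.FixedBufferAllocator.init(demo_buf[0..]);
    var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, 2);
    const allocator = &failing_allocator.allocator;

    // The first two allocations pass through to the wrapped FixedBufferAllocator.
    const a = allocator.allocFn(allocator, 16, 8) %% unreachable;
    const b = allocator.allocFn(allocator, 16, 8) %% unreachable;

    // The third allocation hits fail_index and is rejected.
    if (allocator.allocFn(allocator, 16, 8)) |_| {
        unreachable;
    } else |err| {
        assert(err == error.OutOfMemory);
        assert(failing_allocator.index == 2);
        assert(failing_allocator.allocated_bytes == a.len + b.len);
    }
}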