mirror of https://github.com/ziglang/zig.git, synced 2025-12-06 06:13:07 +00:00

allocgate: stage 1 and 2 building

This commit is contained in:
parent 47bc13bc59
commit 75548b50ff
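The diff below migrates call sites from the pointer-based allocator interface to the value-based one: *Allocator parameters become Allocator, and code that reached into a wrapper's allocator field (for example &arena.allocator) now calls getAllocator() on the wrapper. The following is a minimal sketch of that pattern, written only as an illustration of the post-change API used in the hunks below; the helper function work() is hypothetical and not part of this commit.

const std = @import("std");
const Allocator = std.mem.Allocator;

// Old style (before this commit):
//   fn work(allocator: *Allocator, name: []const u8) ![]u8
//   const allocator = &arena.allocator;
// New style (this commit): the interface is passed by value and obtained
// from the wrapping allocator via getAllocator().
fn work(allocator: Allocator, name: []const u8) ![]u8 {
    // dupe allocates a copy that the caller owns.
    return allocator.dupe(u8, name);
}

pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.getAllocator();

    const copy = try work(allocator, "allocgate");
    defer allocator.free(copy); // redundant with the arena, shown for the general pattern
    std.debug.print("{s}\n", .{copy});
}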
@@ -6,7 +6,7 @@ pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();

-const allocator = &arena.allocator;
+const allocator = arena.getAllocator();

const out_dir = "out";
try std.fs.cwd().makePath(out_dir);

@@ -21,7 +21,7 @@ pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();

-const allocator = &arena.allocator;
+const allocator = arena.getAllocator();

var args_it = process.args();

@@ -7362,7 +7362,7 @@ fn amain() !void {
}

var global_download_frame: anyframe = undefined;
-fn fetchUrl(allocator: *Allocator, url: []const u8) ![]u8 {
+fn fetchUrl(allocator: Allocator, url: []const u8) ![]u8 {
_ = url; // this is just an example, we don't actually do it!
const result = try allocator.dupe(u8, "this is the downloaded url contents");
errdefer allocator.free(result);
@@ -7374,7 +7374,7 @@ fn fetchUrl(allocator: *Allocator, url: []const u8) ![]u8 {
}

var global_file_frame: anyframe = undefined;
-fn readFile(allocator: *Allocator, filename: []const u8) ![]u8 {
+fn readFile(allocator: Allocator, filename: []const u8) ![]u8 {
_ = filename; // this is just an example, we don't actually do it!
const result = try allocator.dupe(u8, "this is the file contents");
errdefer allocator.free(result);
@@ -7433,7 +7433,7 @@ fn amain() !void {
std.debug.print("file_text: {s}\n", .{file_text});
}

-fn fetchUrl(allocator: *Allocator, url: []const u8) ![]u8 {
+fn fetchUrl(allocator: Allocator, url: []const u8) ![]u8 {
_ = url; // this is just an example, we don't actually do it!
const result = try allocator.dupe(u8, "this is the downloaded url contents");
errdefer allocator.free(result);
@@ -7441,7 +7441,7 @@ fn fetchUrl(allocator: *Allocator, url: []const u8) ![]u8 {
return result;
}

-fn readFile(allocator: *Allocator, filename: []const u8) ![]u8 {
+fn readFile(allocator: Allocator, filename: []const u8) ![]u8 {
_ = filename; // this is just an example, we don't actually do it!
const result = try allocator.dupe(u8, "this is the file contents");
errdefer allocator.free(result);
@@ -10050,8 +10050,8 @@ pub fn main() void {
C has a default allocator - <code>malloc</code>, <code>realloc</code>, and <code>free</code>.
When linking against libc, Zig exposes this allocator with {#syntax#}std.heap.c_allocator{#endsyntax#}.
However, by convention, there is no default allocator in Zig. Instead, functions which need to
-allocate accept an {#syntax#}*Allocator{#endsyntax#} parameter. Likewise, data structures such as
-{#syntax#}std.ArrayList{#endsyntax#} accept an {#syntax#}*Allocator{#endsyntax#} parameter in
+allocate accept an {#syntax#}Allocator{#endsyntax#} parameter. Likewise, data structures such as
+{#syntax#}std.ArrayList{#endsyntax#} accept an {#syntax#}Allocator{#endsyntax#} parameter in
their initialization functions:
</p>
{#code_begin|test|allocator#}
@@ -10061,12 +10061,12 @@ const expect = std.testing.expect;

test "using an allocator" {
var buffer: [100]u8 = undefined;
-const allocator = &std.heap.FixedBufferAllocator.init(&buffer).allocator;
+const allocator = std.heap.FixedBufferAllocator.init(&buffer).getAllocator();
const result = try concat(allocator, "foo", "bar");
try expect(std.mem.eql(u8, "foobar", result));
}

-fn concat(allocator: *Allocator, a: []const u8, b: []const u8) ![]u8 {
+fn concat(allocator: Allocator, a: []const u8, b: []const u8) ![]u8 {
const result = try allocator.alloc(u8, a.len + b.len);
std.mem.copy(u8, result, a);
std.mem.copy(u8, result[a.len..], b);
@@ -10091,7 +10091,7 @@ fn concat(allocator: *Allocator, a: []const u8, b: []const u8) ![]u8 {
</p>
<ol>
<li>
-Are you making a library? In this case, best to accept an {#syntax#}*Allocator{#endsyntax#}
+Are you making a library? In this case, best to accept an {#syntax#}Allocator{#endsyntax#}
as a parameter and allow your library's users to decide what allocator to use.
</li>
<li>Are you linking libc? In this case, {#syntax#}std.heap.c_allocator{#endsyntax#} is likely
@@ -10114,7 +10114,7 @@ pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();

-const allocator = &arena.allocator;
+const allocator = arena.getAllocator();

const ptr = try allocator.create(i32);
std.debug.print("ptr={*}\n", .{ptr});
@@ -10281,7 +10281,7 @@ test "string literal to constant slice" {
<p>
For example, the function's documentation may say "caller owns the returned memory", in which case
the code that calls the function must have a plan for when to free that memory. Probably in this situation,
-the function will accept an {#syntax#}*Allocator{#endsyntax#} parameter.
+the function will accept an {#syntax#}Allocator{#endsyntax#} parameter.
</p>
<p>
Sometimes the lifetime of a pointer may be more complicated. For example, the
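As an aside, a minimal sketch of the "caller owns the returned memory" convention described in the documentation text above; makeGreeting is a hypothetical helper (not part of this diff) and the test is illustrative only.

const std = @import("std");
const Allocator = std.mem.Allocator;

// Hypothetical helper whose documentation would say
// "caller owns the returned memory".
fn makeGreeting(allocator: Allocator, name: []const u8) ![]u8 {
    return std.fmt.allocPrint(allocator, "hello, {s}!", .{name});
}

test "caller frees the returned memory" {
    const allocator = std.testing.allocator;
    const msg = try makeGreeting(allocator, "zig");
    defer allocator.free(msg); // the caller's plan for freeing the memory
    try std.testing.expect(std.mem.eql(u8, msg, "hello, zig!"));
}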
@@ -10820,7 +10820,7 @@ const std = @import("std");

pub fn main() !void {
var general_purpose_allocator = std.heap.GeneralPurposeAllocator(.{}){};
-const gpa = &general_purpose_allocator.allocator;
+const gpa = general_purpose_allocator.getAllocator();
const args = try std.process.argsAlloc(gpa);
defer std.process.argsFree(gpa, args);

@@ -10842,7 +10842,7 @@ const PreopenList = std.fs.wasi.PreopenList;

pub fn main() !void {
var general_purpose_allocator = std.heap.GeneralPurposeAllocator(.{}){};
-const gpa = &general_purpose_allocator.allocator;
+const gpa = general_purpose_allocator.getAllocator();

var preopens = PreopenList.init(gpa);
defer preopens.deinit();

@@ -173,12 +173,12 @@ pub const Loop = struct {
// We need at least one of these in case the fs thread wants to use onNextTick
const extra_thread_count = thread_count - 1;
const resume_node_count = std.math.max(extra_thread_count, 1);
-self.eventfd_resume_nodes = try self.arena.allocator.alloc(
+self.eventfd_resume_nodes = try self.arena.getAllocator().alloc(
std.atomic.Stack(ResumeNode.EventFd).Node,
resume_node_count,
);

-self.extra_threads = try self.arena.allocator.alloc(Thread, extra_thread_count);
+self.extra_threads = try self.arena.getAllocator().alloc(Thread, extra_thread_count);

try self.initOsData(extra_thread_count);
errdefer self.deinitOsData();

@@ -98,7 +98,7 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {

var astgen: AstGen = .{
.gpa = gpa,
-.arena = &arena.allocator,
+.arena = arena.getAllocator(),
.tree = &tree,
};
defer astgen.deinit(gpa);
@@ -1939,6 +1939,7 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod

var block_arena = std.heap.ArenaAllocator.init(gz.astgen.gpa);
defer block_arena.deinit();
+const block_arena_allocator = block_arena.getAllocator();

var noreturn_src_node: Ast.Node.Index = 0;
var scope = parent_scope;
@@ -1959,13 +1960,13 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod
}
switch (node_tags[statement]) {
// zig fmt: off
-.global_var_decl => scope = try varDecl(gz, scope, statement, &block_arena.allocator, tree.globalVarDecl(statement)),
-.local_var_decl => scope = try varDecl(gz, scope, statement, &block_arena.allocator, tree.localVarDecl(statement)),
-.simple_var_decl => scope = try varDecl(gz, scope, statement, &block_arena.allocator, tree.simpleVarDecl(statement)),
-.aligned_var_decl => scope = try varDecl(gz, scope, statement, &block_arena.allocator, tree.alignedVarDecl(statement)),
+.global_var_decl => scope = try varDecl(gz, scope, statement, block_arena_allocator, tree.globalVarDecl(statement)),
+.local_var_decl => scope = try varDecl(gz, scope, statement, block_arena_allocator, tree.localVarDecl(statement)),
+.simple_var_decl => scope = try varDecl(gz, scope, statement, block_arena_allocator, tree.simpleVarDecl(statement)),
+.aligned_var_decl => scope = try varDecl(gz, scope, statement, block_arena_allocator, tree.alignedVarDecl(statement)),

-.@"defer" => scope = try makeDeferScope(gz.astgen, scope, statement, &block_arena.allocator, .defer_normal),
-.@"errdefer" => scope = try makeDeferScope(gz.astgen, scope, statement, &block_arena.allocator, .defer_error),
+.@"defer" => scope = try makeDeferScope(gz.astgen, scope, statement, block_arena_allocator, .defer_normal),
+.@"errdefer" => scope = try makeDeferScope(gz.astgen, scope, statement, block_arena_allocator, .defer_error),

.assign => try assign(gz, scope, statement),
@@ -412,28 +412,29 @@ pub const AllErrors = struct {
errors: *std.ArrayList(Message),
module_err_msg: Module.ErrorMsg,
) !void {
-const notes = try arena.allocator.alloc(Message, module_err_msg.notes.len);
+const allocator = arena.getAllocator();
+const notes = try allocator.alloc(Message, module_err_msg.notes.len);
for (notes) |*note, i| {
const module_note = module_err_msg.notes[i];
const source = try module_note.src_loc.file_scope.getSource(module.gpa);
const byte_offset = try module_note.src_loc.byteOffset(module.gpa);
const loc = std.zig.findLineColumn(source, byte_offset);
-const file_path = try module_note.src_loc.file_scope.fullPath(&arena.allocator);
+const file_path = try module_note.src_loc.file_scope.fullPath(allocator);
note.* = .{
.src = .{
.src_path = file_path,
-.msg = try arena.allocator.dupe(u8, module_note.msg),
+.msg = try allocator.dupe(u8, module_note.msg),
.byte_offset = byte_offset,
.line = @intCast(u32, loc.line),
.column = @intCast(u32, loc.column),
-.source_line = try arena.allocator.dupe(u8, loc.source_line),
+.source_line = try allocator.dupe(u8, loc.source_line),
},
};
}
if (module_err_msg.src_loc.lazy == .entire_file) {
try errors.append(.{
.plain = .{
-.msg = try arena.allocator.dupe(u8, module_err_msg.msg),
+.msg = try allocator.dupe(u8, module_err_msg.msg),
},
});
return;
@@ -441,16 +442,16 @@ pub const AllErrors = struct {
const source = try module_err_msg.src_loc.file_scope.getSource(module.gpa);
const byte_offset = try module_err_msg.src_loc.byteOffset(module.gpa);
const loc = std.zig.findLineColumn(source, byte_offset);
-const file_path = try module_err_msg.src_loc.file_scope.fullPath(&arena.allocator);
+const file_path = try module_err_msg.src_loc.file_scope.fullPath(allocator);
try errors.append(.{
.src = .{
.src_path = file_path,
-.msg = try arena.allocator.dupe(u8, module_err_msg.msg),
+.msg = try allocator.dupe(u8, module_err_msg.msg),
.byte_offset = byte_offset,
.line = @intCast(u32, loc.line),
.column = @intCast(u32, loc.column),
.notes = notes,
-.source_line = try arena.allocator.dupe(u8, loc.source_line),
+.source_line = try allocator.dupe(u8, loc.source_line),
},
});
}
@@ -548,11 +549,12 @@ pub const AllErrors = struct {
msg: []const u8,
optional_children: ?AllErrors,
) !void {
-const duped_msg = try arena.allocator.dupe(u8, msg);
+const allocator = arena.getAllocator();
+const duped_msg = try allocator.dupe(u8, msg);
if (optional_children) |*children| {
try errors.append(.{ .plain = .{
.msg = duped_msg,
-.notes = try dupeList(children.list, &arena.allocator),
+.notes = try dupeList(children.list, allocator),
} });
} else {
try errors.append(.{ .plain = .{ .msg = duped_msg } });
@@ -786,7 +788,7 @@ fn addPackageTableToCacheHash(
seen_table: *std.AutoHashMap(*Package, void),
hash_type: union(enum) { path_bytes, files: *Cache.Manifest },
) (error{OutOfMemory} || std.os.GetCwdError)!void {
-const allocator = &arena.allocator;
+const allocator = arena.getAllocator();

const packages = try allocator.alloc(Package.Table.KV, pkg_table.count());
{
@@ -850,7 +852,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
// initialization and then is freed in deinit().
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
errdefer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

// We put the `Compilation` itself in the arena. Freeing the arena will free the module.
// It's initialized later after we prepare the initialization options.
@@ -1208,7 +1210,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
{
var local_arena = std.heap.ArenaAllocator.init(gpa);
defer local_arena.deinit();
-var seen_table = std.AutoHashMap(*Package, void).init(&local_arena.allocator);
+var seen_table = std.AutoHashMap(*Package, void).init(local_arena.getAllocator());
try addPackageTableToCacheHash(&hash, &local_arena, main_pkg.table, &seen_table, .path_bytes);
}
hash.add(valgrind);
@@ -2011,6 +2013,7 @@ pub fn totalErrorCount(self: *Compilation) usize {
pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
var arena = std.heap.ArenaAllocator.init(self.gpa);
errdefer arena.deinit();
+const arena_allocator = arena.getAllocator();

var errors = std.ArrayList(AllErrors.Message).init(self.gpa);
defer errors.deinit();
@@ -2024,8 +2027,8 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
// C error reporting bubbling up.
try errors.append(.{
.src = .{
-.src_path = try arena.allocator.dupe(u8, c_object.src.src_path),
-.msg = try std.fmt.allocPrint(&arena.allocator, "unable to build C object: {s}", .{
+.src_path = try arena_allocator.dupe(u8, c_object.src.src_path),
+.msg = try std.fmt.allocPrint(arena_allocator, "unable to build C object: {s}", .{
err_msg.msg,
}),
.byte_offset = 0,
@@ -2050,7 +2053,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
// must have completed successfully.
const tree = try entry.key_ptr.*.getTree(module.gpa);
assert(tree.errors.len == 0);
-try AllErrors.addZir(&arena.allocator, &errors, entry.key_ptr.*);
+try AllErrors.addZir(arena_allocator, &errors, entry.key_ptr.*);
}
}
}
@@ -2089,7 +2092,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
if (errors.items.len == 0 and self.link_error_flags.no_entry_point_found) {
try errors.append(.{
.plain = .{
-.msg = try std.fmt.allocPrint(&arena.allocator, "no entry point found", .{}),
+.msg = try std.fmt.allocPrint(arena_allocator, "no entry point found", .{}),
},
});
}
@@ -2121,7 +2124,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
assert(errors.items.len == self.totalErrorCount());

return AllErrors{
-.list = try arena.allocator.dupe(AllErrors.Message, errors.items),
+.list = try arena_allocator.dupe(AllErrors.Message, errors.items),
.arena = arena.state,
};
}
@@ -2292,7 +2295,7 @@ fn processOneJob(comp: *Compilation, job: Job, main_progress_node: *std.Progress

var tmp_arena = std.heap.ArenaAllocator.init(gpa);
defer tmp_arena.deinit();
-const sema_arena = &tmp_arena.allocator;
+const sema_arena = tmp_arena.getAllocator();

const sema_frame = tracy.namedFrame("sema");
var sema_frame_ended = false;
@@ -2387,7 +2390,7 @@ fn processOneJob(comp: *Compilation, job: Job, main_progress_node: *std.Progress
.decl = decl,
.fwd_decl = fwd_decl.toManaged(gpa),
.typedefs = c_codegen.TypedefMap.init(gpa),
-.typedefs_arena = &typedefs_arena.allocator,
+.typedefs_arena = typedefs_arena.getAllocator(),
};
defer dg.fwd_decl.deinit();
defer dg.typedefs.deinit();
@@ -2841,7 +2844,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
const digest = if (!actual_hit) digest: {
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

const tmp_digest = man.hash.peek();
const tmp_dir_sub_path = try std.fs.path.join(arena, &[_][]const u8{ "o", &tmp_digest });
@@ -3096,7 +3099,7 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_obj_prog_node: *std.P

var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

const c_source_basename = std.fs.path.basename(c_object.src.src_path);

@@ -4417,7 +4420,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node

var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

// Here we use the legacy stage1 C++ compiler to compile Zig code.
const mod = comp.bin_file.options.module.?;
@@ -4454,7 +4457,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node

_ = try man.addFile(main_zig_file, null);
{
-var seen_table = std.AutoHashMap(*Package, void).init(&arena_allocator.allocator);
+var seen_table = std.AutoHashMap(*Package, void).init(arena_allocator.getAllocator());
try addPackageTableToCacheHash(&man.hash, &arena_allocator, mod.main_pkg.table, &seen_table, .{ .files = &man });
}
man.hash.add(comp.bin_file.options.valgrind);
@@ -878,7 +878,7 @@ test "error prereq - continuation expecting end-of-line" {
// - tokenize input, emit textual representation, and compare to expect
fn depTokenizer(input: []const u8, expect: []const u8) !void {
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();
defer arena_allocator.deinit();

var it: Tokenizer = .{ .bytes = input };
@@ -517,7 +517,7 @@ pub const Decl = struct {

pub fn finalizeNewArena(decl: *Decl, arena: *std.heap.ArenaAllocator) !void {
assert(decl.value_arena == null);
-const arena_state = try arena.allocator.create(std.heap.ArenaAllocator.State);
+const arena_state = try arena.getAllocator().create(std.heap.ArenaAllocator.State);
arena_state.* = arena.state;
decl.value_arena = arena_state;
}
@@ -3159,10 +3159,11 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
const gpa = mod.gpa;
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
+const new_decl_arena_allocator = new_decl_arena.getAllocator();

-const struct_obj = try new_decl_arena.allocator.create(Module.Struct);
-const struct_ty = try Type.Tag.@"struct".create(&new_decl_arena.allocator, struct_obj);
-const struct_val = try Value.Tag.ty.create(&new_decl_arena.allocator, struct_ty);
+const struct_obj = try new_decl_arena_allocator.create(Module.Struct);
+const struct_ty = try Type.Tag.@"struct".create(new_decl_arena_allocator, struct_obj);
+const struct_val = try Value.Tag.ty.create(new_decl_arena_allocator, struct_ty);
const ty_ty = comptime Type.initTag(.type);
struct_obj.* = .{
.owner_decl = undefined, // set below
@@ -3202,12 +3203,13 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {

var sema_arena = std.heap.ArenaAllocator.init(gpa);
defer sema_arena.deinit();
+const sema_arena_allocator = sema_arena.getAllocator();

var sema: Sema = .{
.mod = mod,
.gpa = gpa,
-.arena = &sema_arena.allocator,
-.perm_arena = &new_decl_arena.allocator,
+.arena = sema_arena_allocator,
+.perm_arena = new_decl_arena_allocator,
.code = file.zir,
.owner_decl = new_decl,
.func = null,
@@ -3216,7 +3218,7 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
};
defer sema.deinit();

-var wip_captures = try WipCaptureScope.init(gpa, &new_decl_arena.allocator, null);
+var wip_captures = try WipCaptureScope.init(gpa, new_decl_arena_allocator, null);
defer wip_captures.deinit();

var block_scope: Sema.Block = .{
@@ -3265,15 +3267,17 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
// We need the memory for the Type to go into the arena for the Decl
var decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer decl_arena.deinit();
+const decl_arena_allocator = decl_arena.getAllocator();

var analysis_arena = std.heap.ArenaAllocator.init(gpa);
defer analysis_arena.deinit();
+const analysis_arena_allocator = analysis_arena.getAllocator();

var sema: Sema = .{
.mod = mod,
.gpa = gpa,
-.arena = &analysis_arena.allocator,
-.perm_arena = &decl_arena.allocator,
+.arena = analysis_arena_allocator,
+.perm_arena = decl_arena_allocator,
.code = zir,
.owner_decl = decl,
.func = null,
@@ -3296,7 +3300,7 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
}
log.debug("semaDecl {*} ({s})", .{ decl, decl.name });

-var wip_captures = try WipCaptureScope.init(gpa, &decl_arena.allocator, decl.src_scope);
+var wip_captures = try WipCaptureScope.init(gpa, decl_arena_allocator, decl.src_scope);
defer wip_captures.deinit();

var block_scope: Sema.Block = .{
@@ -3356,7 +3360,7 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
// not the struct itself.
try sema.resolveTypeLayout(&block_scope, src, decl_tv.ty);

-const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
+const decl_arena_state = try decl_arena_allocator.create(std.heap.ArenaAllocator.State);

if (decl.is_usingnamespace) {
const ty_ty = Type.initTag(.type);
@@ -3370,7 +3374,7 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
}

decl.ty = ty_ty;
-decl.val = try Value.Tag.ty.create(&decl_arena.allocator, ty);
+decl.val = try Value.Tag.ty.create(decl_arena_allocator, ty);
decl.align_val = Value.initTag(.null_value);
decl.linksection_val = Value.initTag(.null_value);
decl.has_tv = true;
@@ -3400,10 +3404,10 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
decl.clearValues(gpa);
}

-decl.ty = try decl_tv.ty.copy(&decl_arena.allocator);
-decl.val = try decl_tv.val.copy(&decl_arena.allocator);
-decl.align_val = try align_val.copy(&decl_arena.allocator);
-decl.linksection_val = try linksection_val.copy(&decl_arena.allocator);
+decl.ty = try decl_tv.ty.copy(decl_arena_allocator);
+decl.val = try decl_tv.val.copy(decl_arena_allocator);
+decl.align_val = try align_val.copy(decl_arena_allocator);
+decl.linksection_val = try linksection_val.copy(decl_arena_allocator);
decl.@"addrspace" = address_space;
decl.has_tv = true;
decl.owns_tv = owns_tv;
@@ -3453,7 +3457,7 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
decl.owns_tv = true;
queue_linker_work = true;

-const copied_init = try variable.init.copy(&decl_arena.allocator);
+const copied_init = try variable.init.copy(decl_arena_allocator);
variable.init = copied_init;
}
},
@@ -3476,10 +3480,10 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
},
}

-decl.ty = try decl_tv.ty.copy(&decl_arena.allocator);
-decl.val = try decl_tv.val.copy(&decl_arena.allocator);
-decl.align_val = try align_val.copy(&decl_arena.allocator);
-decl.linksection_val = try linksection_val.copy(&decl_arena.allocator);
+decl.ty = try decl_tv.ty.copy(decl_arena_allocator);
+decl.val = try decl_tv.val.copy(decl_arena_allocator);
+decl.align_val = try align_val.copy(decl_arena_allocator);
+decl.linksection_val = try linksection_val.copy(decl_arena_allocator);
decl.@"addrspace" = address_space;
decl.has_tv = true;
decl_arena_state.* = decl_arena.state;
@@ -4128,12 +4132,13 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn, arena: Allocator) Sem
// Use the Decl's arena for captured values.
var decl_arena = decl.value_arena.?.promote(gpa);
defer decl.value_arena.?.* = decl_arena.state;
+const decl_arena_allocator = decl_arena.getAllocator();

var sema: Sema = .{
.mod = mod,
.gpa = gpa,
.arena = arena,
-.perm_arena = &decl_arena.allocator,
+.perm_arena = decl_arena_allocator,
.code = decl.getFileScope().zir,
.owner_decl = decl,
.func = func,
@@ -4147,7 +4152,7 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn, arena: Allocator) Sem
try sema.air_extra.ensureTotalCapacity(gpa, reserved_count);
sema.air_extra.items.len += reserved_count;

-var wip_captures = try WipCaptureScope.init(gpa, &decl_arena.allocator, decl.src_scope);
+var wip_captures = try WipCaptureScope.init(gpa, decl_arena_allocator, decl.src_scope);
defer wip_captures.deinit();

var inner_block: Sema.Block = .{
@@ -4751,7 +4756,7 @@ pub fn populateTestFunctions(mod: *Module) !void {
// decl reference it as a slice.
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
-const arena = &new_decl_arena.allocator;
+const arena = new_decl_arena.getAllocator();

const test_fn_vals = try arena.alloc(Value, mod.test_functions.count());
const array_decl = try mod.createAnonymousDeclFromDecl(decl, decl.src_namespace, null, .{
@@ -4770,10 +4775,10 @@ pub fn populateTestFunctions(mod: *Module) !void {
const test_name_decl = n: {
var name_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer name_decl_arena.deinit();
-const bytes = try name_decl_arena.allocator.dupe(u8, test_name_slice);
+const bytes = try arena.dupe(u8, test_name_slice);
const test_name_decl = try mod.createAnonymousDeclFromDecl(array_decl, array_decl.src_namespace, null, .{
-.ty = try Type.Tag.array_u8.create(&name_decl_arena.allocator, bytes.len),
-.val = try Value.Tag.bytes.create(&name_decl_arena.allocator, bytes),
+.ty = try Type.Tag.array_u8.create(arena, bytes.len),
+.val = try Value.Tag.bytes.create(arena, bytes),
});
try test_name_decl.finalizeNewArena(&name_decl_arena);
break :n test_name_decl;
@@ -4802,7 +4807,7 @@ pub fn populateTestFunctions(mod: *Module) !void {
{
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
-const arena = &new_decl_arena.allocator;
+const arena = new_decl_arena.getAllocator();

// This copy accesses the old Decl Type/Value so it must be done before `clearValues`.
const new_ty = try Type.Tag.const_slice.create(arena, try tmp_test_fn_ty.copy(arena));
126  src/Sema.zig
@@ -418,7 +418,7 @@ pub const Block = struct {
finished: bool,

pub fn arena(wad: *WipAnonDecl) Allocator {
-return &wad.new_decl_arena.allocator;
+return wad.new_decl_arena.getAllocator();
}

pub fn deinit(wad: *WipAnonDecl) void {
@@ -1594,10 +1594,11 @@ fn zirStructDecl(

var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
+const new_decl_arena_allocator = new_decl_arena.getAllocator();

-const struct_obj = try new_decl_arena.allocator.create(Module.Struct);
-const struct_ty = try Type.Tag.@"struct".create(&new_decl_arena.allocator, struct_obj);
-const struct_val = try Value.Tag.ty.create(&new_decl_arena.allocator, struct_ty);
+const struct_obj = try new_decl_arena_allocator.create(Module.Struct);
+const struct_ty = try Type.Tag.@"struct".create(new_decl_arena_allocator, struct_obj);
+const struct_val = try Value.Tag.ty.create(new_decl_arena_allocator, struct_ty);
const type_name = try sema.createTypeName(block, small.name_strategy);
const new_decl = try sema.mod.createAnonymousDeclNamed(block, .{
.ty = Type.type,
@@ -1698,15 +1699,16 @@ fn zirEnumDecl(

var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
+const new_decl_arena_allocator = new_decl_arena.getAllocator();

-const enum_obj = try new_decl_arena.allocator.create(Module.EnumFull);
-const enum_ty_payload = try new_decl_arena.allocator.create(Type.Payload.EnumFull);
+const enum_obj = try new_decl_arena_allocator.create(Module.EnumFull);
+const enum_ty_payload = try new_decl_arena_allocator.create(Type.Payload.EnumFull);
enum_ty_payload.* = .{
.base = .{ .tag = if (small.nonexhaustive) .enum_nonexhaustive else .enum_full },
.data = enum_obj,
};
const enum_ty = Type.initPayload(&enum_ty_payload.base);
-const enum_val = try Value.Tag.ty.create(&new_decl_arena.allocator, enum_ty);
+const enum_val = try Value.Tag.ty.create(new_decl_arena_allocator, enum_ty);
const type_name = try sema.createTypeName(block, small.name_strategy);
const new_decl = try mod.createAnonymousDeclNamed(block, .{
.ty = Type.type,
@@ -1790,17 +1792,17 @@ fn zirEnumDecl(
break :blk try sema.resolveType(block, src, tag_type_ref);
}
const bits = std.math.log2_int_ceil(usize, fields_len);
-break :blk try Type.Tag.int_unsigned.create(&new_decl_arena.allocator, bits);
+break :blk try Type.Tag.int_unsigned.create(new_decl_arena_allocator, bits);
};
enum_obj.tag_ty = tag_ty;
}

-try enum_obj.fields.ensureTotalCapacity(&new_decl_arena.allocator, fields_len);
+try enum_obj.fields.ensureTotalCapacity(new_decl_arena_allocator, fields_len);
const any_values = for (sema.code.extra[body_end..][0..bit_bags_count]) |bag| {
if (bag != 0) break true;
} else false;
if (any_values) {
-try enum_obj.values.ensureTotalCapacityContext(&new_decl_arena.allocator, fields_len, .{
+try enum_obj.values.ensureTotalCapacityContext(new_decl_arena_allocator, fields_len, .{
.ty = enum_obj.tag_ty,
});
}
@@ -1820,7 +1822,7 @@ fn zirEnumDecl(
extra_index += 1;

// This string needs to outlive the ZIR code.
-const field_name = try new_decl_arena.allocator.dupe(u8, field_name_zir);
+const field_name = try new_decl_arena_allocator.dupe(u8, field_name_zir);

const gop = enum_obj.fields.getOrPutAssumeCapacity(field_name);
if (gop.found_existing) {
@@ -1843,12 +1845,12 @@ fn zirEnumDecl(
// that points to this default value expression rather than the struct.
// But only resolve the source location if we need to emit a compile error.
const tag_val = (try sema.resolveInstConst(block, src, tag_val_ref)).val;
-const copied_tag_val = try tag_val.copy(&new_decl_arena.allocator);
+const copied_tag_val = try tag_val.copy(new_decl_arena_allocator);
enum_obj.values.putAssumeCapacityNoClobberContext(copied_tag_val, {}, .{
.ty = enum_obj.tag_ty,
});
} else if (any_values) {
-const tag_val = try Value.Tag.int_u64.create(&new_decl_arena.allocator, field_i);
+const tag_val = try Value.Tag.int_u64.create(new_decl_arena_allocator, field_i);
enum_obj.values.putAssumeCapacityNoClobberContext(tag_val, {}, .{ .ty = enum_obj.tag_ty });
}
}
@@ -1887,16 +1889,17 @@ fn zirUnionDecl(

var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
+const new_decl_arena_allocator = new_decl_arena.getAllocator();

-const union_obj = try new_decl_arena.allocator.create(Module.Union);
+const union_obj = try new_decl_arena_allocator.create(Module.Union);
const type_tag: Type.Tag = if (small.has_tag_type or small.auto_enum_tag) .union_tagged else .@"union";
-const union_payload = try new_decl_arena.allocator.create(Type.Payload.Union);
+const union_payload = try new_decl_arena_allocator.create(Type.Payload.Union);
union_payload.* = .{
.base = .{ .tag = type_tag },
.data = union_obj,
};
const union_ty = Type.initPayload(&union_payload.base);
-const union_val = try Value.Tag.ty.create(&new_decl_arena.allocator, union_ty);
+const union_val = try Value.Tag.ty.create(new_decl_arena_allocator, union_ty);
const type_name = try sema.createTypeName(block, small.name_strategy);
const new_decl = try sema.mod.createAnonymousDeclNamed(block, .{
.ty = Type.type,
@@ -1955,15 +1958,16 @@ fn zirOpaqueDecl(

var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
+const new_decl_arena_allocator = new_decl_arena.getAllocator();

-const opaque_obj = try new_decl_arena.allocator.create(Module.Opaque);
-const opaque_ty_payload = try new_decl_arena.allocator.create(Type.Payload.Opaque);
+const opaque_obj = try new_decl_arena_allocator.create(Module.Opaque);
+const opaque_ty_payload = try new_decl_arena_allocator.create(Type.Payload.Opaque);
opaque_ty_payload.* = .{
.base = .{ .tag = .@"opaque" },
.data = opaque_obj,
};
const opaque_ty = Type.initPayload(&opaque_ty_payload.base);
-const opaque_val = try Value.Tag.ty.create(&new_decl_arena.allocator, opaque_ty);
+const opaque_val = try Value.Tag.ty.create(new_decl_arena_allocator, opaque_ty);
const type_name = try sema.createTypeName(block, small.name_strategy);
const new_decl = try mod.createAnonymousDeclNamed(block, .{
.ty = Type.type,
@@ -2008,10 +2012,11 @@ fn zirErrorSetDecl(

var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
+const new_decl_arena_allocator = new_decl_arena.getAllocator();

-const error_set = try new_decl_arena.allocator.create(Module.ErrorSet);
-const error_set_ty = try Type.Tag.error_set.create(&new_decl_arena.allocator, error_set);
-const error_set_val = try Value.Tag.ty.create(&new_decl_arena.allocator, error_set_ty);
+const error_set = try new_decl_arena_allocator.create(Module.ErrorSet);
+const error_set_ty = try Type.Tag.error_set.create(new_decl_arena_allocator, error_set);
+const error_set_val = try Value.Tag.ty.create(new_decl_arena_allocator, error_set_ty);
const type_name = try sema.createTypeName(block, name_strategy);
const new_decl = try sema.mod.createAnonymousDeclNamed(block, .{
.ty = Type.type,
@@ -2019,9 +2024,9 @@ fn zirErrorSetDecl(
}, type_name);
new_decl.owns_tv = true;
errdefer sema.mod.abortAnonDecl(new_decl);
-const names = try new_decl_arena.allocator.alloc([]const u8, fields.len);
+const names = try new_decl_arena_allocator.alloc([]const u8, fields.len);
for (fields) |str_index, i| {
-names[i] = try new_decl_arena.allocator.dupe(u8, sema.code.nullTerminatedString(str_index));
+names[i] = try new_decl_arena_allocator.dupe(u8, sema.code.nullTerminatedString(str_index));
}
error_set.* = .{
.owner_decl = new_decl,
@@ -3935,7 +3940,7 @@ fn analyzeCall(
{
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
errdefer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

for (memoized_call_key.args) |*arg| {
arg.* = try arg.*.copy(arena);
@@ -4069,6 +4074,7 @@ fn analyzeCall(

var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
+const new_decl_arena_allocator = new_decl_arena.getAllocator();

// Re-run the block that creates the function, with the comptime parameters
// pre-populated inside `inst_map`. This causes `param_comptime` and
@@ -4078,13 +4084,13 @@ fn analyzeCall(
.mod = mod,
.gpa = gpa,
.arena = sema.arena,
-.perm_arena = &new_decl_arena.allocator,
+.perm_arena = new_decl_arena_allocator,
.code = fn_zir,
.owner_decl = new_decl,
.func = null,
.fn_ret_ty = Type.void,
.owner_func = null,
-.comptime_args = try new_decl_arena.allocator.alloc(TypedValue, uncasted_args.len),
+.comptime_args = try new_decl_arena_allocator.alloc(TypedValue, uncasted_args.len),
.comptime_args_fn_inst = module_fn.zir_body_inst,
.preallocated_new_func = new_module_func,
};
@@ -4168,7 +4174,7 @@ fn analyzeCall(
else => continue,
}
const arg = child_sema.inst_map.get(inst).?;
-const copied_arg_ty = try child_sema.typeOf(arg).copy(&new_decl_arena.allocator);
+const copied_arg_ty = try child_sema.typeOf(arg).copy(new_decl_arena_allocator);
if (child_sema.resolveMaybeUndefValAllowVariables(
&child_block,
.unneeded,
@@ -4176,7 +4182,7 @@ fn analyzeCall(
) catch unreachable) |arg_val| {
child_sema.comptime_args[arg_i] = .{
.ty = copied_arg_ty,
-.val = try arg_val.copy(&new_decl_arena.allocator),
+.val = try arg_val.copy(new_decl_arena_allocator),
};
} else {
child_sema.comptime_args[arg_i] = .{
@@ -4191,8 +4197,8 @@ fn analyzeCall(
try wip_captures.finalize();

// Populate the Decl ty/val with the function and its type.
-new_decl.ty = try child_sema.typeOf(new_func_inst).copy(&new_decl_arena.allocator);
-new_decl.val = try Value.Tag.function.create(&new_decl_arena.allocator, new_func);
+new_decl.ty = try child_sema.typeOf(new_func_inst).copy(new_decl_arena_allocator);
+new_decl.val = try Value.Tag.function.create(new_decl_arena_allocator, new_func);
new_decl.analysis = .complete;

log.debug("generic function '{s}' instantiated with type {}", .{
@@ -6047,8 +6053,8 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
defer arena.deinit();

const target = sema.mod.getTarget();
-const min_int = try operand_ty.minInt(&arena.allocator, target);
-const max_int = try operand_ty.maxInt(&arena.allocator, target);
+const min_int = try operand_ty.minInt(arena.getAllocator(), target);
+const max_int = try operand_ty.maxInt(arena.getAllocator(), target);
if (try range_set.spans(min_int, max_int, operand_ty)) {
if (special_prong == .@"else") {
return sema.fail(
@@ -12795,7 +12801,7 @@ const ComptimePtrMutationKit = struct {

fn beginArena(self: *ComptimePtrMutationKit, gpa: Allocator) Allocator {
self.decl_arena = self.decl_ref_mut.decl.value_arena.?.promote(gpa);
-return &self.decl_arena.allocator;
+return self.decl_arena.getAllocator();
}

fn finishArena(self: *ComptimePtrMutationKit) void {
@@ -14287,6 +14293,7 @@ fn semaStructFields(

var decl_arena = decl.value_arena.?.promote(gpa);
defer decl.value_arena.?.* = decl_arena.state;
+const decl_arena_allocator = decl_arena.getAllocator();

var analysis_arena = std.heap.ArenaAllocator.init(gpa);
defer analysis_arena.deinit();
@@ -14294,8 +14301,8 @@ fn semaStructFields(
var sema: Sema = .{
.mod = mod,
.gpa = gpa,
-.arena = &analysis_arena.allocator,
-.perm_arena = &decl_arena.allocator,
+.arena = analysis_arena.getAllocator(),
+.perm_arena = decl_arena_allocator,
.code = zir,
.owner_decl = decl,
.func = null,
@@ -14304,7 +14311,7 @@ fn semaStructFields(
};
defer sema.deinit();

-var wip_captures = try WipCaptureScope.init(gpa, &decl_arena.allocator, decl.src_scope);
+var wip_captures = try WipCaptureScope.init(gpa, decl_arena_allocator, decl.src_scope);
defer wip_captures.deinit();

var block_scope: Block = .{
@@ -14328,7 +14335,7 @@ fn semaStructFields(

try wip_captures.finalize();

-try struct_obj.fields.ensureTotalCapacity(&decl_arena.allocator, fields_len);
+try struct_obj.fields.ensureTotalCapacity(decl_arena_allocator, fields_len);

const bits_per_field = 4;
const fields_per_u32 = 32 / bits_per_field;
@@ -14359,7 +14366,7 @@ fn semaStructFields(
extra_index += 1;

// This string needs to outlive the ZIR code.
-const field_name = try decl_arena.allocator.dupe(u8, field_name_zir);
+const field_name = try decl_arena_allocator.dupe(u8, field_name_zir);
const field_ty: Type = if (field_type_ref == .none)
Type.initTag(.noreturn)
else
@@ -14371,7 +14378,7 @@ fn semaStructFields(
const gop = struct_obj.fields.getOrPutAssumeCapacity(field_name);
assert(!gop.found_existing);
gop.value_ptr.* = .{
-.ty = try field_ty.copy(&decl_arena.allocator),
+.ty = try field_ty.copy(decl_arena_allocator),
.abi_align = Value.initTag(.abi_align_default),
.default_val = Value.initTag(.unreachable_value),
.is_comptime = is_comptime,
@@ -14385,7 +14392,7 @@ fn semaStructFields(
// that points to this alignment expression rather than the struct.
// But only resolve the source location if we need to emit a compile error.
const abi_align_val = (try sema.resolveInstConst(&block_scope, src, align_ref)).val;
-gop.value_ptr.abi_align = try abi_align_val.copy(&decl_arena.allocator);
+gop.value_ptr.abi_align = try abi_align_val.copy(decl_arena_allocator);
}
if (has_default) {
const default_ref = @intToEnum(Zir.Inst.Ref, zir.extra[extra_index]);
@@ -14396,7 +14403,7 @@ fn semaStructFields(
// But only resolve the source location if we need to emit a compile error.
const default_val = (try sema.resolveMaybeUndefVal(&block_scope, src, default_inst)) orelse
return sema.failWithNeededComptime(&block_scope, src);
-gop.value_ptr.default_val = try default_val.copy(&decl_arena.allocator);
+gop.value_ptr.default_val = try default_val.copy(decl_arena_allocator);
}
}
}
@@ -14454,6 +14461,7 @@ fn semaUnionFields(mod: *Module, union_obj: *Module.Union) CompileError!void {

var decl_arena = union_obj.owner_decl.value_arena.?.promote(gpa);
defer union_obj.owner_decl.value_arena.?.* = decl_arena.state;
+const decl_arena_allocator = decl_arena.getAllocator();

var analysis_arena = std.heap.ArenaAllocator.init(gpa);
defer analysis_arena.deinit();
@@ -14461,8 +14469,8 @@ fn semaUnionFields(mod: *Module, union_obj: *Module.Union) CompileError!void {
var sema: Sema = .{
.mod = mod,
.gpa = gpa,
-.arena = &analysis_arena.allocator,
-.perm_arena = &decl_arena.allocator,
+.arena = analysis_arena.getAllocator(),
+.perm_arena = decl_arena_allocator,
.code = zir,
.owner_decl = decl,
.func = null,
@@ -14471,7 +14479,7 @@ fn semaUnionFields(mod: *Module, union_obj: *Module.Union) CompileError!void {
};
defer sema.deinit();

-var wip_captures = try WipCaptureScope.init(gpa, &decl_arena.allocator, decl.src_scope);
+var wip_captures = try WipCaptureScope.init(gpa, decl_arena_allocator, decl.src_scope);
defer wip_captures.deinit();

var block_scope: Block = .{
@@ -14495,7 +14503,7 @@ fn semaUnionFields(mod: *Module, union_obj: *Module.Union) CompileError!void {

try wip_captures.finalize();

-try union_obj.fields.ensureTotalCapacity(&decl_arena.allocator, fields_len);
+try union_obj.fields.ensureTotalCapacity(decl_arena_allocator, fields_len);

var int_tag_ty: Type = undefined;
var enum_field_names: ?*Module.EnumNumbered.NameMap = null;
@@ -14571,7 +14579,7 @@ fn semaUnionFields(mod: *Module, union_obj: *Module.Union) CompileError!void {
}

// This string needs to outlive the ZIR code.
-const field_name = try decl_arena.allocator.dupe(u8, field_name_zir);
+const field_name = try decl_arena_allocator.dupe(u8, field_name_zir);
if (enum_field_names) |set| {
set.putAssumeCapacity(field_name, {});
}
@@ -14589,7 +14597,7 @@ fn semaUnionFields(mod: *Module, union_obj: *Module.Union) CompileError!void {
const gop = union_obj.fields.getOrPutAssumeCapacity(field_name);
assert(!gop.found_existing);
gop.value_ptr.* = .{
-.ty = try field_ty.copy(&decl_arena.allocator),
+.ty = try field_ty.copy(decl_arena_allocator),
.abi_align = Value.initTag(.abi_align_default),
};

@@ -14598,7 +14606,7 @@ fn semaUnionFields(mod: *Module, union_obj: *Module.Union) CompileError!void {
// that points to this alignment expression rather than the struct.
// But only resolve the source location if we need to emit a compile error.
const abi_align_val = (try sema.resolveInstConst(&block_scope, src, align_ref)).val;
-gop.value_ptr.abi_align = try abi_align_val.copy(&decl_arena.allocator);
+gop.value_ptr.abi_align = try abi_align_val.copy(decl_arena_allocator);
} else {
gop.value_ptr.abi_align = Value.initTag(.abi_align_default);
}
@@ -14615,15 +14623,16 @@ fn generateUnionTagTypeNumbered(

var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
+const new_decl_arena_allocator = new_decl_arena.getAllocator();

-const enum_obj = try new_decl_arena.allocator.create(Module.EnumNumbered);
-const enum_ty_payload = try new_decl_arena.allocator.create(Type.Payload.EnumNumbered);
+const enum_obj = try new_decl_arena_allocator.create(Module.EnumNumbered);
+const enum_ty_payload = try new_decl_arena_allocator.create(Type.Payload.EnumNumbered);
enum_ty_payload.* = .{
.base = .{ .tag = .enum_numbered },
.data = enum_obj,
};
const enum_ty = Type.initPayload(&enum_ty_payload.base);
-const enum_val = try Value.Tag.ty.create(&new_decl_arena.allocator, enum_ty);
+const enum_val = try Value.Tag.ty.create(new_decl_arena_allocator, enum_ty);
// TODO better type name
const new_decl = try mod.createAnonymousDecl(block, .{
.ty = Type.type,
@@ -14640,8 +14649,8 @@ fn generateUnionTagTypeNumbered(
.node_offset = 0,
};
// Here we pre-allocate the maps using the decl arena.
-try enum_obj.fields.ensureTotalCapacity(&new_decl_arena.allocator, fields_len);
-try enum_obj.values.ensureTotalCapacityContext(&new_decl_arena.allocator, fields_len, .{ .ty = int_ty });
+try enum_obj.fields.ensureTotalCapacity(new_decl_arena_allocator, fields_len);
+try enum_obj.values.ensureTotalCapacityContext(new_decl_arena_allocator, fields_len, .{ .ty = int_ty });
try new_decl.finalizeNewArena(&new_decl_arena);
return enum_ty;
}
@@ -14651,15 +14660,16 @@ fn generateUnionTagTypeSimple(sema: *Sema, block: *Block, fields_len: u32) !Type

var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
+const new_decl_arena_allocator = new_decl_arena.getAllocator();

-const enum_obj = try new_decl_arena.allocator.create(Module.EnumSimple);
-const enum_ty_payload = try new_decl_arena.allocator.create(Type.Payload.EnumSimple);
+const enum_obj = try new_decl_arena_allocator.create(Module.EnumSimple);
+const enum_ty_payload = try new_decl_arena_allocator.create(Type.Payload.EnumSimple);
enum_ty_payload.* = .{
.base = .{ .tag = .enum_simple },
.data = enum_obj,
};
const enum_ty = Type.initPayload(&enum_ty_payload.base);
-const enum_val = try Value.Tag.ty.create(&new_decl_arena.allocator, enum_ty);
+const enum_val = try Value.Tag.ty.create(new_decl_arena_allocator, enum_ty);
// TODO better type name
const new_decl = try mod.createAnonymousDecl(block, .{
.ty = Type.type,
@@ -14674,7 +14684,7 @@ fn generateUnionTagTypeSimple(sema: *Sema, block: *Block, fields_len: u32) !Type
.node_offset = 0,
};
// Here we pre-allocate the maps using the decl arena.
-try enum_obj.fields.ensureTotalCapacity(&new_decl_arena.allocator, fields_len);
+try enum_obj.fields.ensureTotalCapacity(new_decl_arena_allocator, fields_len);
try new_decl.finalizeNewArena(&new_decl_arena);
return enum_ty;
}
@@ -390,6 +390,7 @@ pub const DeclGen = struct {
// Fall back to generic implementation.
var arena = std.heap.ArenaAllocator.init(dg.module.gpa);
defer arena.deinit();
+const arena_allocator = arena.getAllocator();

try writer.writeAll("{");
var index: usize = 0;
@@ -397,7 +398,7 @@ pub const DeclGen = struct {
const elem_ty = ty.elemType();
while (index < len) : (index += 1) {
if (index != 0) try writer.writeAll(",");
-const elem_val = try val.elemValue(&arena.allocator, index);
+const elem_val = try val.elemValue(arena_allocator, index);
try dg.renderValue(writer, elem_ty, elem_val);
}
if (ty.sentinel()) |sentinel_val| {

@@ -331,7 +331,7 @@ pub const Object = struct {

var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

const mod = comp.bin_file.options.module.?;
const cache_dir = mod.zig_cache_artifact_directory;
@@ -779,7 +779,7 @@ pub const DeclGen = struct {

// The Type memory is ephemeral; since we want to store a longer-lived
// reference, we need to copy it here.
-gop.key_ptr.* = try t.copy(&dg.object.type_map_arena.allocator);
+gop.key_ptr.* = try t.copy(dg.object.type_map_arena.getAllocator());

const opaque_obj = t.castTag(.@"opaque").?.data;
const name = try opaque_obj.getFullyQualifiedName(gpa);
@@ -837,7 +837,7 @@ pub const DeclGen = struct {

// The Type memory is ephemeral; since we want to store a longer-lived
// reference, we need to copy it here.
-gop.key_ptr.* = try t.copy(&dg.object.type_map_arena.allocator);
+gop.key_ptr.* = try t.copy(dg.object.type_map_arena.getAllocator());

const struct_obj = t.castTag(.@"struct").?.data;

@@ -871,7 +871,7 @@ pub const DeclGen = struct {

// The Type memory is ephemeral; since we want to store a longer-lived
// reference, we need to copy it here.
-gop.key_ptr.* = try t.copy(&dg.object.type_map_arena.allocator);
+gop.key_ptr.* = try t.copy(dg.object.type_map_arena.getAllocator());

const union_obj = t.cast(Type.Payload.Union).?.data;
const target = dg.module.getTarget();
@@ -2485,7 +2485,7 @@ pub const FuncGen = struct {

var arena_allocator = std.heap.ArenaAllocator.init(self.gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

const llvm_params_len = args.len;
const llvm_param_types = try arena.alloc(*const llvm.Type, llvm_params_len);

@@ -85,7 +85,7 @@ fn dumpStatusReport() !void {
const anal = zir_state orelse return;
// Note: We have the panic mutex here, so we can safely use the global crash heap.
var fba = std.heap.FixedBufferAllocator.init(&crash_heap);
-const allocator = &fba.allocator;
+const allocator = fba.getAllocator();

const stderr = io.getStdErr().writer();
const block: *Sema.Block = anal.block;
@@ -65,7 +65,7 @@ pub fn loadMetaData(gpa: Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!*

var arena_allocator = std.heap.ArenaAllocator.init(gpa);
errdefer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

var all_versions = std.ArrayListUnmanaged(std.builtin.Version){};
var all_functions = std.ArrayListUnmanaged(Fn){};
@@ -256,7 +256,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
const gpa = comp.gpa;
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

switch (crt_file) {
.crti_o => {
@@ -711,7 +711,7 @@ pub fn buildSharedObjects(comp: *Compilation) !void {

var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

const target = comp.getTarget();
const target_version = target.os.version_range.linux.glibc;
@@ -89,7 +89,7 @@ pub fn buildLibCXX(comp: *Compilation) !void {

var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

const root_name = "c++";
const output_mode = .Lib;
@@ -236,7 +236,7 @@ pub fn buildLibCXXABI(comp: *Compilation) !void {

var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

const root_name = "c++abi";
const output_mode = .Lib;
@@ -15,7 +15,7 @@ pub fn buildTsan(comp: *Compilation) !void {

var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

const root_name = "tsan";
const output_mode = .Lib;
@@ -17,7 +17,7 @@ pub fn buildStaticLib(comp: *Compilation) !void {

var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
-const arena = &arena_allocator.allocator;
+const arena = arena_allocator.getAllocator();

const root_name = "unwind";
const output_mode = .Lib;
@ -628,7 +628,7 @@ pub const File = struct {
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(base.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const arena = &arena_allocator.allocator;
|
||||
const arena = arena_allocator.getAllocator();
|
||||
|
||||
const directory = base.options.emit.?.directory; // Just an alias to make it shorter to type.
|
||||
|
||||
|
||||
@ -128,7 +128,7 @@ pub fn updateFunc(self: *C, module: *Module, func: *Module.Fn, air: Air, livenes
|
||||
.decl = decl,
|
||||
.fwd_decl = fwd_decl.toManaged(module.gpa),
|
||||
.typedefs = typedefs.promote(module.gpa),
|
||||
.typedefs_arena = &self.arena.allocator,
|
||||
.typedefs_arena = self.arena.getAllocator(),
|
||||
},
|
||||
.code = code.toManaged(module.gpa),
|
||||
.indent_writer = undefined, // set later so we can get a pointer to object.code
|
||||
@ -193,7 +193,7 @@ pub fn updateDecl(self: *C, module: *Module, decl: *Module.Decl) !void {
|
||||
.decl = decl,
|
||||
.fwd_decl = fwd_decl.toManaged(module.gpa),
|
||||
.typedefs = typedefs.promote(module.gpa),
|
||||
.typedefs_arena = &self.arena.allocator,
|
||||
.typedefs_arena = self.arena.getAllocator(),
|
||||
},
|
||||
.code = code.toManaged(module.gpa),
|
||||
.indent_writer = undefined, // set later so we can get a pointer to object.code
|
||||
|
||||
@ -877,7 +877,7 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const arena = &arena_allocator.allocator;
|
||||
const arena = arena_allocator.getAllocator();
|
||||
|
||||
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
|
||||
|
||||
|
||||
@ -1243,7 +1243,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const arena = &arena_allocator.allocator;
|
||||
const arena = arena_allocator.getAllocator();
|
||||
|
||||
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
|
||||
|
||||
|
||||
@ -412,7 +412,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const arena = &arena_allocator.allocator;
|
||||
const arena = arena_allocator.getAllocator();
|
||||
|
||||
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
|
||||
|
||||
@ -5379,7 +5379,7 @@ fn snapshotState(self: *MachO) !void {
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const arena = &arena_allocator.allocator;
|
||||
const arena = arena_allocator.getAllocator();
|
||||
|
||||
const out_file = try emit.directory.handle.createFile("snapshots.json", .{
|
||||
.truncate = self.cold_start,
|
||||
|
||||
@ -168,7 +168,7 @@ fn putFn(self: *Plan9, decl: *Module.Decl, out: FnDeclOutput) !void {
|
||||
try fn_map_res.value_ptr.functions.put(gpa, decl, out);
|
||||
} else {
|
||||
const file = decl.getFileScope();
|
||||
const arena = &self.path_arena.allocator;
|
||||
const arena = self.path_arena.getAllocator();
|
||||
// each file gets a symbol
|
||||
fn_map_res.value_ptr.* = .{
|
||||
.sym_index = blk: {
|
||||
|
||||
@ -950,7 +950,7 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const arena = &arena_allocator.allocator;
|
||||
const arena = arena_allocator.getAllocator();
|
||||
|
||||
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
|
||||
|
||||
|
||||
@ -120,7 +120,7 @@ pub const LibStub = struct {
|
||||
err: {
|
||||
log.debug("trying to parse as []TbdV4", .{});
|
||||
const inner = lib_stub.yaml.parse([]TbdV4) catch break :err;
|
||||
var out = try lib_stub.yaml.arena.allocator.alloc(Tbd, inner.len);
|
||||
var out = try lib_stub.yaml.arena.getAllocator().alloc(Tbd, inner.len);
|
||||
for (inner) |doc, i| {
|
||||
out[i] = .{ .v4 = doc };
|
||||
}
|
||||
@ -130,7 +130,7 @@ pub const LibStub = struct {
|
||||
err: {
|
||||
log.debug("trying to parse as TbdV4", .{});
|
||||
const inner = lib_stub.yaml.parse(TbdV4) catch break :err;
|
||||
var out = try lib_stub.yaml.arena.allocator.alloc(Tbd, 1);
|
||||
var out = try lib_stub.yaml.arena.getAllocator().alloc(Tbd, 1);
|
||||
out[0] = .{ .v4 = inner };
|
||||
break :blk out;
|
||||
}
|
||||
@ -148,7 +148,7 @@ pub const LibStub = struct {
|
||||
err: {
|
||||
log.debug("trying to parse as TbdV3", .{});
|
||||
const inner = lib_stub.yaml.parse(TbdV3) catch break :err;
|
||||
var out = try lib_stub.yaml.arena.allocator.alloc(Tbd, 1);
|
||||
var out = try lib_stub.yaml.arena.getAllocator().alloc(Tbd, 1);
|
||||
out[0] = .{ .v3 = inner };
|
||||
break :blk out;
|
||||
}
|
||||
|
||||
@ -248,15 +248,16 @@ pub const Yaml = struct {
|
||||
|
||||
pub fn load(allocator: Allocator, source: []const u8) !Yaml {
|
||||
var arena = ArenaAllocator.init(allocator);
|
||||
const arena_allocator = arena.getAllocator();
|
||||
|
||||
var tree = Tree.init(&arena.allocator);
|
||||
var tree = Tree.init(arena_allocator);
|
||||
try tree.parse(source);
|
||||
|
||||
var docs = std.ArrayList(Value).init(&arena.allocator);
|
||||
var docs = std.ArrayList(Value).init(arena_allocator);
|
||||
try docs.ensureUnusedCapacity(tree.docs.items.len);
|
||||
|
||||
for (tree.docs.items) |node| {
|
||||
const value = try Value.fromNode(&arena.allocator, &tree, node, null);
|
||||
const value = try Value.fromNode(arena_allocator, &tree, node, null);
|
||||
docs.appendAssumeCapacity(value);
|
||||
}
|
||||
|
||||
@ -299,7 +300,7 @@ pub const Yaml = struct {
|
||||
.Pointer => |info| {
|
||||
switch (info.size) {
|
||||
.Slice => {
|
||||
var parsed = try self.arena.allocator.alloc(info.child, self.docs.items.len);
|
||||
var parsed = try self.arena.getAllocator().alloc(info.child, self.docs.items.len);
|
||||
for (self.docs.items) |doc, i| {
|
||||
parsed[i] = try self.parseValue(info.child, doc);
|
||||
}
|
||||
@ -361,7 +362,7 @@ pub const Yaml = struct {
|
||||
|
||||
inline for (struct_info.fields) |field| {
|
||||
const value: ?Value = map.get(field.name) orelse blk: {
|
||||
const field_name = try mem.replaceOwned(u8, &self.arena.allocator, field.name, "_", "-");
|
||||
const field_name = try mem.replaceOwned(u8, self.arena.getAllocator(), field.name, "_", "-");
|
||||
break :blk map.get(field_name);
|
||||
};
|
||||
|
||||
@ -382,7 +383,7 @@ pub const Yaml = struct {
|
||||
|
||||
fn parsePointer(self: *Yaml, comptime T: type, value: Value) Error!T {
|
||||
const ptr_info = @typeInfo(T).Pointer;
|
||||
const arena = &self.arena.allocator;
|
||||
const arena = self.arena.getAllocator();
|
||||
|
||||
switch (ptr_info.size) {
|
||||
.Slice => {
|
||||
|
||||
@ -139,7 +139,7 @@ pub fn main() anyerror!void {
|
||||
const gpa = gpa: {
|
||||
if (!builtin.link_libc) {
|
||||
gpa_need_deinit = true;
|
||||
break :gpa &general_purpose_allocator.allocator;
|
||||
break :gpa general_purpose_allocator.getAllocator();
|
||||
}
|
||||
// We would prefer to use raw libc allocator here, but cannot
|
||||
// use it if it won't support the alignment we need.
|
||||
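std.heap.GeneralPurposeAllocator follows the same convention, as the main() hunk above shows; a minimal sketch (not part of the diff, getAllocator() name assumed):

    const std = @import("std");

    pub fn main() !void {
        var gpa = std.heap.GeneralPurposeAllocator(.{}){};
        defer _ = gpa.deinit(); // reports leaks on deinit

        const allocator = gpa.getAllocator(); // previously: &gpa.allocator

        // Containers now take the Allocator interface by value.
        var list = std.ArrayList(u32).init(allocator);
        defer list.deinit();
        try list.append(42);
    }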
@ -153,7 +153,7 @@ pub fn main() anyerror!void {
};
var arena_instance = std.heap.ArenaAllocator.init(gpa);
defer arena_instance.deinit();
const arena = &arena_instance.allocator;
const arena = arena_instance.getAllocator();

const args = try process.argsAlloc(arena);

@ -3619,7 +3619,7 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
var errors = std.ArrayList(Compilation.AllErrors.Message).init(gpa);
defer errors.deinit();

try Compilation.AllErrors.addZir(&arena_instance.allocator, &errors, &file);
try Compilation.AllErrors.addZir(arena_instance.getAllocator(), &errors, &file);
const ttyconf: std.debug.TTY.Config = switch (color) {
.auto => std.debug.detectTTYConfig(),
.on => .escape_codes,

@ -3818,7 +3818,7 @@ fn fmtPathFile(
var errors = std.ArrayList(Compilation.AllErrors.Message).init(fmt.gpa);
defer errors.deinit();

try Compilation.AllErrors.addZir(&arena_instance.allocator, &errors, &file);
try Compilation.AllErrors.addZir(arena_instance.getAllocator(), &errors, &file);
const ttyconf: std.debug.TTY.Config = switch (fmt.color) {
.auto => std.debug.detectTTYConfig(),
.on => .escape_codes,

@ -25,7 +25,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
}
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
const arena = &arena_allocator.allocator;
const arena = arena_allocator.getAllocator();

switch (crt_file) {
.crt2_o => {

@ -281,7 +281,7 @@ fn add_cc_args(
pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void {
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
const arena = &arena_allocator.allocator;
const arena = arena_allocator.getAllocator();

const def_file_path = findDef(comp, arena, lib_name) catch |err| switch (err) {
error.FileNotFound => {

@ -25,7 +25,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
const gpa = comp.gpa;
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = &arena_allocator.allocator;
const arena = arena_allocator.getAllocator();

switch (crt_file) {
.crti_o => {

@ -47,7 +47,7 @@ pub fn dump(gpa: Allocator, air: Air, zir: Zir, liveness: Liveness) void {

var writer: Writer = .{
.gpa = gpa,
.arena = &arena.allocator,
.arena = arena.getAllocator(),
.air = air,
.zir = zir,
.liveness = liveness,

@ -19,7 +19,7 @@ pub fn renderAsTextToFile(

var writer: Writer = .{
.gpa = gpa,
.arena = &arena.allocator,
.arena = arena.getAllocator(),
.file = scope_file,
.code = scope_file.zir,
.indent = 0,

@ -74,7 +74,7 @@ pub fn renderInstructionContext(

var writer: Writer = .{
.gpa = gpa,
.arena = &arena.allocator,
.arena = arena.getAllocator(),
.file = scope_file,
.code = scope_file.zir,
.indent = if (indent < 2) 2 else indent,

@ -106,7 +106,7 @@ pub fn renderSingleInstruction(

var writer: Writer = .{
.gpa = gpa,
.arena = &arena.allocator,
.arena = arena.getAllocator(),
.file = scope_file,
.code = scope_file.zir,
.indent = indent,

@ -38,7 +38,7 @@ pub fn main(argc: c_int, argv: [*][*:0]u8) callconv(.C) c_int {
const gpa = std.heap.c_allocator;
var arena_instance = std.heap.ArenaAllocator.init(gpa);
defer arena_instance.deinit();
const arena = &arena_instance.allocator;
const arena = arena_instance.getAllocator();

const args = arena.alloc([]const u8, @intCast(usize, argc)) catch fatal("{s}", .{"OutOfMemory"});
for (args) |*arg, i| {

@ -692,7 +692,7 @@ pub const TestContext = struct {

var arena_allocator = std.heap.ArenaAllocator.init(allocator);
defer arena_allocator.deinit();
const arena = &arena_allocator.allocator;
const arena = arena_allocator.getAllocator();

var tmp = std.testing.tmpDir(.{});
defer tmp.cleanup();

@ -373,13 +373,14 @@ pub fn translate(
// from this function.
var arena = std.heap.ArenaAllocator.init(gpa);
errdefer arena.deinit();
const arena_allocator = arena.getAllocator();

var context = Context{
.gpa = gpa,
.arena = &arena.allocator,
.arena = arena_allocator,
.source_manager = ast_unit.getSourceManager(),
.alias_list = AliasList.init(gpa),
.global_scope = try arena.allocator.create(Scope.Root),
.global_scope = try arena_allocator.create(Scope.Root),
.clang_context = ast_unit.getASTContext(),
.pattern_list = try PatternList.init(gpa),
};

@ -67,7 +67,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
const gpa = comp.gpa;
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = &arena_allocator.allocator;
const arena = arena_allocator.getAllocator();

switch (crt_file) {
.crt1_reactor_o => {

@ -16,7 +16,7 @@ pub fn main() !void {
// skip my own exe name
_ = arg_it.skip();

a = &arena.allocator;
a = arena.getAllocator();

const zig_exe_rel = try (arg_it.next(a) orelse {
std.debug.print("Expected first argument to be path to zig compiler\n", .{});

@ -491,7 +491,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\pub fn main() !void {
\\ var allocator_buf: [10]u8 = undefined;
\\ var fixedBufferAllocator = std.mem.validationWrap(std.heap.FixedBufferAllocator.init(&allocator_buf));
\\ const allocator = &std.heap.loggingAllocator(&fixedBufferAllocator.allocator).allocator;
\\ const allocator = std.heap.loggingAllocator(fixedBufferAllocator.getAllocator()).getAllocator();
\\
\\ var a = try allocator.alloc(u8, 10);
\\ a = allocator.shrink(a, 5);
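Wrapping allocators change in the same way: the wrapper takes the parent's Allocator interface by value and hands its own out by value. A rough sketch (not part of the diff; the post-allocgate loggingAllocator signature and getAllocator() name are assumed):

    const std = @import("std");

    pub fn main() !void {
        var buf: [64]u8 = undefined;
        var fba = std.heap.FixedBufferAllocator.init(&buf);

        // Wrap the fixed buffer allocator; both layers hand out Allocator by value.
        var logging = std.heap.loggingAllocator(fba.getAllocator());
        const allocator = logging.getAllocator();

        const bytes = try allocator.alloc(u8, 8);
        allocator.free(bytes);
    }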
@ -6550,9 +6550,9 @@ pub fn addCases(ctx: *TestContext) !void {
ctx.objErrStage1("method call with first arg type wrong container",
\\pub const List = struct {
\\ len: usize,
\\ allocator: *Allocator,
\\ allocator: Allocator,
\\
\\ pub fn init(allocator: *Allocator) List {
\\ pub fn init(allocator: Allocator) List {
\\ return List {
\\ .len = 0,
\\ .allocator = allocator,

@ -6573,7 +6573,7 @@ pub fn addCases(ctx: *TestContext) !void {
\\ x.init();
\\}
, &[_][]const u8{
"tmp.zig:23:5: error: expected type '*Allocator', found '*List'",
"tmp.zig:23:5: error: expected type 'Allocator', found '*List'",
});
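User code is updated the same way: Allocator is stored and passed by value rather than as *Allocator. A self-contained sketch based on the List test case above (not part of the diff):

    const std = @import("std");
    const Allocator = std.mem.Allocator;

    const List = struct {
        len: usize,
        allocator: Allocator, // previously: *Allocator

        pub fn init(allocator: Allocator) List {
            return List{
                .len = 0,
                .allocator = allocator,
            };
        }
    };

    test "store Allocator by value" {
        // std.testing.allocator is already an Allocator value after allocgate.
        const list = List.init(std.testing.allocator);
        try std.testing.expect(list.len == 0);
    }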
ctx.objErrStage1("binary not on number literal",
|
||||
@ -7569,7 +7569,7 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
\\
|
||||
\\export fn entry() void {
|
||||
\\ const a = MdNode.Header {
|
||||
\\ .text = MdText.init(&std.testing.allocator),
|
||||
\\ .text = MdText.init(std.testing.allocator),
|
||||
\\ .weight = HeaderWeight.H1,
|
||||
\\ };
|
||||
\\ _ = a;
|
||||
|
||||
@ -16,7 +16,7 @@ const Token = union(enum) {
|
||||
};
|
||||
|
||||
var gpa = std.heap.GeneralPurposeAllocator(.{}){};
|
||||
var global_allocator = &gpa.allocator;
|
||||
const global_allocator = gpa.getAllocator();
|
||||
|
||||
fn tokenize(input: []const u8) !ArrayList(Token) {
|
||||
const State = enum {
|
||||
|
||||
@ -8,7 +8,7 @@ const warn = std.log.warn;
|
||||
pub fn main() !void {
|
||||
var arena_instance = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena_instance.deinit();
|
||||
const arena = &arena_instance.allocator;
|
||||
const arena = arena_instance.getAllocator();
|
||||
|
||||
const args = try process.argsAlloc(arena);
|
||||
|
||||
|
||||
@ -4,7 +4,7 @@ const g = @import("spirv/grammar.zig");
|
||||
pub fn main() !void {
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena.deinit();
|
||||
const allocator = &arena.allocator;
|
||||
const allocator = arena.getAllocator();
|
||||
|
||||
const args = try std.process.argsAlloc(allocator);
|
||||
if (args.len != 2) {
|
||||
|
||||
@ -25,7 +25,7 @@ pub fn main() !void {
|
||||
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena.deinit();
|
||||
const ally = &arena.allocator;
|
||||
const ally = arena.getAllocator();
|
||||
|
||||
var symbols = std.ArrayList(Symbol).init(ally);
|
||||
var sections = std.ArrayList([]const u8).init(ally);
|
||||
|
||||
@ -9,7 +9,7 @@ pub fn main() anyerror!void {
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena.deinit();
|
||||
|
||||
const allocator = &arena.allocator;
|
||||
const allocator = arena.getAllocator();
|
||||
|
||||
const args = try std.process.argsAlloc(allocator);
|
||||
|
||||
|
||||
@ -284,7 +284,7 @@ const LibCVendor = enum {
|
||||
|
||||
pub fn main() !void {
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
const allocator = &arena.allocator;
|
||||
const allocator = arena.getAllocator();
|
||||
const args = try std.process.argsAlloc(allocator);
|
||||
var search_paths = std.ArrayList([]const u8).init(allocator);
|
||||
var opt_out_dir: ?[]const u8 = null;
|
||||
|
||||
@ -10,7 +10,7 @@ pub fn main() !void {
|
||||
defer root_node.end();
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
const arena = &arena_allocator.allocator;
|
||||
const arena = arena_allocator.getAllocator();
|
||||
|
||||
const args = try std.process.argsAlloc(arena);
|
||||
const path_to_walk = args[1];
|
||||
|
||||
@ -450,8 +450,13 @@ const cpu_targets = struct {
|
||||
pub fn main() anyerror!void {
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena.deinit();
|
||||
<<<<<<< HEAD
|
||||
const allocator = &arena.allocator;
|
||||
|
||||
=======
|
||||
|
||||
const allocator = arena.getAllocator();
|
||||
>>>>>>> 11157e318 (allocgate: stage 1 and 2 building)
|
||||
const args = try std.process.argsAlloc(allocator);
|
||||
|
||||
if (args.len <= 1) {
|
||||
|
||||
@ -769,7 +769,7 @@ const llvm_targets = [_]LlvmTarget{
|
||||
pub fn main() anyerror!void {
|
||||
var arena_state = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena_state.deinit();
|
||||
const arena = &arena_state.allocator;
|
||||
const arena = arena_state.getAllocator();
|
||||
|
||||
const args = try std.process.argsAlloc(arena);
|
||||
if (args.len <= 1) {
|
||||
@ -845,7 +845,7 @@ fn processOneTarget(job: Job) anyerror!void {
|
||||
|
||||
var arena_state = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena_state.deinit();
|
||||
const arena = &arena_state.allocator;
|
||||
const arena = arena_state.getAllocator();
|
||||
|
||||
var progress_node = job.root_progress.start(llvm_target.zig_name, 3);
|
||||
progress_node.activate();
|
||||
|
||||
@ -133,7 +133,7 @@ const Function = struct {
|
||||
|
||||
pub fn main() !void {
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
const allocator = &arena.allocator;
|
||||
const allocator = arena.getAllocator();
|
||||
const args = try std.process.argsAlloc(allocator);
|
||||
const in_glibc_dir = args[1]; // path to the unzipped tarball of glibc, e.g. ~/downloads/glibc-2.25
|
||||
const zig_src_dir = args[2]; // path to the source checkout of zig, lib dir, e.g. ~/zig-src/lib
|
||||
|
||||
@ -48,7 +48,7 @@ const Version = struct {
|
||||
pub fn main() !void {
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena.deinit();
|
||||
const allocator = &arena.allocator;
|
||||
const allocator = arena.getAllocator();
|
||||
|
||||
const args = try std.process.argsAlloc(allocator);
|
||||
|
||||
|
||||