Merge pull request #7647 from ziglang/stage2-comptime-fn-call

stage2: comptime function calls and inline function calls
Andrew Kelley 2021-01-02 22:01:51 -08:00 committed by GitHub
commit d8f3f14532
17 changed files with 884 additions and 590 deletions


@ -220,7 +220,6 @@ pub fn build(b: *Builder) !void {
}
const log_scopes = b.option([]const []const u8, "log", "Which log scopes to enable") orelse &[0][]const u8{};
const zir_dumps = b.option([]const []const u8, "dump-zir", "Which functions to dump ZIR for before codegen") orelse &[0][]const u8{};
const opt_version_string = b.option([]const u8, "version-string", "Override Zig version string. Default is to find out with git.");
const version = if (opt_version_string) |version| version else v: {
@ -277,7 +276,6 @@ pub fn build(b: *Builder) !void {
exe.addBuildOption(std.SemanticVersion, "semver", semver);
exe.addBuildOption([]const []const u8, "log_scopes", log_scopes);
exe.addBuildOption([]const []const u8, "zir_dumps", zir_dumps);
exe.addBuildOption(bool, "enable_tracy", tracy != null);
exe.addBuildOption(bool, "is_stage1", is_stage1);
if (tracy) |tracy_path| {


@ -1459,24 +1459,29 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
const module = self.bin_file.options.module.?;
if (decl.typed_value.most_recent.typed_value.val.castTag(.function)) |payload| {
const func = payload.data;
switch (func.analysis) {
switch (func.state) {
.queued => module.analyzeFnBody(decl, func) catch |err| switch (err) {
error.AnalysisFail => {
assert(func.analysis != .in_progress);
assert(func.state != .in_progress);
continue;
},
error.OutOfMemory => return error.OutOfMemory,
},
.in_progress => unreachable,
.inline_only => unreachable, // don't queue work for this
.sema_failure, .dependency_failure => continue,
.success => {},
}
// Here we tack on additional allocations to the Decl's arena. The allocations are
// lifetime annotations in the ZIR.
// Here we tack on additional allocations to the Decl's arena. The allocations
// are lifetime annotations in the ZIR.
var decl_arena = decl.typed_value.most_recent.arena.?.promote(module.gpa);
defer decl.typed_value.most_recent.arena.?.* = decl_arena.state;
log.debug("analyze liveness of {s}\n", .{decl.name});
try liveness.analyze(module.gpa, &decl_arena.allocator, func.analysis.success);
try liveness.analyze(module.gpa, &decl_arena.allocator, func.body);
if (std.builtin.mode == .Debug and self.verbose_ir) {
func.dump(module.*);
}
}
assert(decl.typed_value.most_recent.typed_value.ty.hasCodeGenBits());


@ -268,6 +268,11 @@ pub const Decl = struct {
}
}
/// Asserts that the `Decl` is part of AST and not ZIRModule.
pub fn getFileScope(self: *Decl) *Scope.File {
return self.scope.cast(Scope.Container).?.file_scope;
}
fn removeDependant(self: *Decl, other: *Decl) void {
self.dependants.removeAssertDiscard(other);
}
@ -281,46 +286,32 @@ pub const Decl = struct {
/// Extern functions do not have this data structure; they are represented by
/// the `Decl` only, with a `Value` tag of `extern_fn`.
pub const Fn = struct {
/// This memory owned by the Decl's TypedValue.Managed arena allocator.
analysis: union(enum) {
queued: *ZIR,
owner_decl: *Decl,
/// Contains un-analyzed ZIR instructions generated from Zig source AST.
/// Even after we finish analysis, the ZIR is kept in memory, so that
/// comptime and inline function calls can happen.
zir: zir.Module.Body,
/// undefined unless analysis state is `success`.
body: Body,
state: Analysis,
pub const Analysis = enum {
queued,
/// This function intentionally only has ZIR generated because it is marked
/// inline, which means no runtime version of the function will be generated.
inline_only,
in_progress,
/// There will be a corresponding ErrorMsg in Module.failed_decls
sema_failure,
/// This Fn might be OK but it depends on another Decl which did not successfully complete
/// semantic analysis.
/// This Fn might be OK but it depends on another Decl which did not
/// successfully complete semantic analysis.
dependency_failure,
success: Body,
},
owner_decl: *Decl,
/// This memory is temporary and points to stack memory for the duration
/// of Fn analysis.
pub const Analysis = struct {
inner_block: Scope.Block,
};
/// Contains un-analyzed ZIR instructions generated from Zig source AST.
pub const ZIR = struct {
body: zir.Module.Body,
arena: std.heap.ArenaAllocator.State,
success,
};
/// For debugging purposes.
pub fn dump(self: *Fn, mod: Module) void {
std.debug.print("Module.Function(name={s}) ", .{self.owner_decl.name});
switch (self.analysis) {
.queued => {
std.debug.print("queued\n", .{});
},
.in_progress => {
std.debug.print("in_progress\n", .{});
},
else => {
std.debug.print("\n", .{});
zir.dumpFn(mod, self);
},
}
zir.dumpFn(mod, self);
}
};
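A note on the new `inline_only` state: a function declared with the `inline` keyword only ever has ZIR generated for it, never machine code, so every call to it must be inlined during semantic analysis. A minimal user-level sketch of what this state corresponds to (hypothetical example code, not part of the diff):

    // `square` is marked inline, so stage2 keeps only its ZIR
    // (state == .inline_only) and never queues it for codegen.
    inline fn square(x: i32) i32 {
        return x * x;
    }

    pub fn main() void {
        // Analyzed as an inline call: the callee's ZIR body is re-analyzed
        // in the caller's scope instead of emitting a runtime `call`.
        const nine = square(3);
        _ = nine;
    }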
@ -761,21 +752,60 @@ pub const Scope = struct {
/// during semantic analysis of the block.
pub const Block = struct {
pub const base_tag: Tag = .block;
base: Scope = Scope{ .tag = base_tag },
parent: ?*Block,
/// Maps ZIR to TZIR. Shared to sub-blocks.
inst_table: *InstTable,
func: ?*Fn,
decl: *Decl,
instructions: ArrayListUnmanaged(*Inst),
/// Points to the arena allocator of DeclAnalysis
arena: *Allocator,
label: ?Label = null,
inlining: ?*Inlining,
is_comptime: bool,
pub const InstTable = std.AutoHashMap(*zir.Inst, *Inst);
/// This `Block` maps a block ZIR instruction to the corresponding
/// TZIR instruction for break instruction analysis.
pub const Label = struct {
zir_block: *zir.Inst.Block,
merges: Merges,
};
/// This `Block` indicates that an inline function call is happening
/// and return instructions should be analyzed as a break instruction
/// to this TZIR block instruction.
/// It is shared among all the blocks in an inline or comptime called
/// function.
pub const Inlining = struct {
/// Shared state among the entire inline/comptime call stack.
shared: *Shared,
/// We use this to count from 0 so that arg instructions know
/// which parameter index they are, without having to store
/// a parameter index with each arg instruction.
param_index: usize,
casted_args: []*Inst,
merges: Merges,
pub const Shared = struct {
caller: ?*Fn,
branch_count: u64,
branch_quota: u64,
};
};
pub const Merges = struct {
results: ArrayListUnmanaged(*Inst),
block_inst: *Inst.Block,
};
/// For debugging purposes.
pub fn dump(self: *Block, mod: Module) void {
zir.dumpBlock(mod, self);
}
};
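The `Label` and `Merges` structs above model labeled blocks: every `break :blk value` analyzed inside the block appends its operand to `merges.results`, and the block instruction's type is later resolved across those peers. A user-level sketch of the construct being modeled (hypothetical example):

    fn pick(flag: bool) u32 {
        // Each `break :blk ...` lands in the label's `merges.results`;
        // `resolvePeerTypes` then gives the block instruction its type.
        const x = blk: {
            if (flag) break :blk @as(u32, 1);
            break :blk 2;
        };
        return x;
    }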
/// This is a temporary structure, references to it are valid only
@ -992,11 +1022,11 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
defer tracy.end();
const container_scope = decl.scope.cast(Scope.Container).?;
const tree = try self.getAstTree(container_scope);
const tree = try self.getAstTree(container_scope.file_scope);
const ast_node = tree.root_node.decls()[decl.src_index];
switch (ast_node.tag) {
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", ast_node);
const fn_proto = ast_node.castTag(.FnProto).?;
decl.analysis = .in_progress;
@ -1062,7 +1092,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.param_types = param_types,
}, .{});
if (self.comp.verbose_ir) {
if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {};
}
@ -1071,12 +1101,17 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
errdefer decl_arena.deinit();
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
var inst_table = Scope.Block.InstTable.init(self.gpa);
defer inst_table.deinit();
var block_scope: Scope.Block = .{
.parent = null,
.inst_table = &inst_table,
.func = null,
.decl = decl,
.instructions = .{},
.arena = &decl_arena.allocator,
.inlining = null,
.is_comptime = false,
};
defer block_scope.instructions.deinit(self.gpa);
@ -1113,14 +1148,11 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const new_func = try decl_arena.allocator.create(Fn);
const fn_payload = try decl_arena.allocator.create(Value.Payload.Function);
const fn_zir = blk: {
// This scope's arena memory is discarded after the ZIR generation
// pass completes, and semantic analysis of it completes.
var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
errdefer gen_scope_arena.deinit();
const fn_zir: zir.Module.Body = blk: {
// We put the ZIR inside the Decl arena.
var gen_scope: Scope.GenZIR = .{
.decl = decl,
.arena = &gen_scope_arena.allocator,
.arena = &decl_arena.allocator,
.parent = decl.scope,
};
defer gen_scope.instructions.deinit(self.gpa);
@ -1131,8 +1163,8 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
for (fn_proto.params()) |param, i| {
const name_token = param.name_token.?;
const src = tree.token_locs[name_token].start;
const param_name = tree.tokenSlice(name_token); // TODO: call identifierTokenString
const arg = try gen_scope_arena.allocator.create(zir.Inst.Arg);
const param_name = try self.identifierTokenString(&gen_scope.base, name_token);
const arg = try decl_arena.allocator.create(zir.Inst.Arg);
arg.* = .{
.base = .{
.tag = .arg,
@ -1144,7 +1176,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.kw_args = .{},
};
gen_scope.instructions.items[i] = &arg.base;
const sub_scope = try gen_scope_arena.allocator.create(Scope.LocalVal);
const sub_scope = try decl_arena.allocator.create(Scope.LocalVal);
sub_scope.* = .{
.parent = params_scope,
.gen_zir = &gen_scope,
@ -1165,22 +1197,29 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
_ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .returnvoid);
}
if (self.comp.verbose_ir) {
if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {};
}
const fn_zir = try gen_scope_arena.allocator.create(Fn.ZIR);
fn_zir.* = .{
.body = .{
.instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items),
},
.arena = gen_scope_arena.state,
break :blk .{
.instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items),
};
break :blk fn_zir;
};
const is_inline = blk: {
if (fn_proto.getExternExportInlineToken()) |maybe_inline_token| {
if (tree.token_ids[maybe_inline_token] == .Keyword_inline) {
break :blk true;
}
}
break :blk false;
};
const anal_state = ([2]Fn.Analysis{ .queued, .inline_only })[@boolToInt(is_inline)];
new_func.* = .{
.analysis = .{ .queued = fn_zir },
.state = anal_state,
.zir = fn_zir,
.body = undefined,
.owner_decl = decl,
};
fn_payload.* = .{
@ -1189,11 +1228,16 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
};
var prev_type_has_bits = false;
var prev_is_inline = false;
var type_changed = true;
if (decl.typedValueManaged()) |tvm| {
prev_type_has_bits = tvm.typed_value.ty.hasCodeGenBits();
type_changed = !tvm.typed_value.ty.eql(fn_type);
if (tvm.typed_value.val.castTag(.function)) |payload| {
const prev_func = payload.data;
prev_is_inline = prev_func.state == .inline_only;
}
tvm.deinit(self.gpa);
}
@ -1211,18 +1255,26 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
decl.analysis = .complete;
decl.generation = self.generation;
if (fn_type.hasCodeGenBits()) {
if (!is_inline and fn_type.hasCodeGenBits()) {
// We don't fully codegen the decl until later, but we do need to reserve a global
// offset table index for it. This allows us to codegen decls out of dependency order,
// increasing how many computations can be done in parallel.
try self.comp.bin_file.allocateDeclIndexes(decl);
try self.comp.work_queue.writeItem(.{ .codegen_decl = decl });
} else if (prev_type_has_bits) {
} else if (!prev_is_inline and prev_type_has_bits) {
self.comp.bin_file.freeDecl(decl);
}
if (fn_proto.getExternExportInlineToken()) |maybe_export_token| {
if (tree.token_ids[maybe_export_token] == .Keyword_export) {
if (is_inline) {
return self.failTok(
&block_scope.base,
maybe_export_token,
"export of inline function",
.{},
);
}
const export_src = tree.token_locs[maybe_export_token].start;
const name_loc = tree.token_locs[fn_proto.getNameToken().?];
const name = tree.tokenSliceLoc(name_loc);
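The check above makes exporting a function marked `inline` a compile error, since no runtime symbol is ever generated for it. A hedged sketch of the kind of declaration this rejects (hypothetical, assuming the parser accepts both modifiers on one prototype):

    // error: export of inline function
    export inline fn add(a: i32, b: i32) i32 {
        return a + b;
    }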
@ -1230,7 +1282,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
try self.analyzeExport(&block_scope.base, export_src, name, decl);
}
}
return type_changed;
return type_changed or is_inline != prev_is_inline;
},
.VarDecl => {
const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", ast_node);
@ -1242,12 +1294,17 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
errdefer decl_arena.deinit();
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
var decl_inst_table = Scope.Block.InstTable.init(self.gpa);
defer decl_inst_table.deinit();
var block_scope: Scope.Block = .{
.parent = null,
.inst_table = &decl_inst_table,
.func = null,
.decl = decl,
.instructions = .{},
.arena = &decl_arena.allocator,
.inlining = null,
.is_comptime = true,
};
defer block_scope.instructions.deinit(self.gpa);
@ -1303,23 +1360,30 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const src = tree.token_locs[init_node.firstToken()].start;
const init_inst = try astgen.expr(self, &gen_scope.base, init_result_loc, init_node);
if (self.comp.verbose_ir) {
if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {};
}
var var_inst_table = Scope.Block.InstTable.init(self.gpa);
defer var_inst_table.deinit();
var inner_block: Scope.Block = .{
.parent = null,
.inst_table = &var_inst_table,
.func = null,
.decl = decl,
.instructions = .{},
.arena = &gen_scope_arena.allocator,
.inlining = null,
.is_comptime = true,
};
defer inner_block.instructions.deinit(self.gpa);
try zir_sema.analyzeBody(self, &inner_block.base, .{ .instructions = gen_scope.instructions.items });
try zir_sema.analyzeBody(self, &inner_block, .{
.instructions = gen_scope.instructions.items,
});
// The result location guarantees the type coercion.
const analyzed_init_inst = init_inst.analyzed_inst.?;
const analyzed_init_inst = var_inst_table.get(init_inst).?;
// The is_comptime in the Scope.Block guarantees the result is comptime-known.
const val = analyzed_init_inst.value().?;
@ -1347,7 +1411,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.val = Value.initTag(.type_type),
});
const var_type = try astgen.expr(self, &type_scope.base, .{ .ty = type_type }, type_node);
if (self.comp.verbose_ir) {
if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "var_type", decl.name, type_scope.instructions.items) catch {};
}
@ -1423,21 +1487,26 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
defer gen_scope.instructions.deinit(self.gpa);
_ = try astgen.comptimeExpr(self, &gen_scope.base, .none, comptime_decl.expr);
if (self.comp.verbose_ir) {
if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "comptime_block", decl.name, gen_scope.instructions.items) catch {};
}
var inst_table = Scope.Block.InstTable.init(self.gpa);
defer inst_table.deinit();
var block_scope: Scope.Block = .{
.parent = null,
.inst_table = &inst_table,
.func = null,
.decl = decl,
.instructions = .{},
.arena = &analysis_arena.allocator,
.inlining = null,
.is_comptime = true,
};
defer block_scope.instructions.deinit(self.gpa);
_ = try zir_sema.analyzeBody(self, &block_scope.base, .{
_ = try zir_sema.analyzeBody(self, &block_scope, .{
.instructions = gen_scope.instructions.items,
});
@ -1496,12 +1565,10 @@ fn getSrcModule(self: *Module, root_scope: *Scope.ZIRModule) !*zir.Module {
}
}
fn getAstTree(self: *Module, container_scope: *Scope.Container) !*ast.Tree {
pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree {
const tracy = trace(@src());
defer tracy.end();
const root_scope = container_scope.file_scope;
switch (root_scope.status) {
.never_loaded, .unloaded_success => {
try self.failed_files.ensureCapacity(self.gpa, self.failed_files.items().len + 1);
@ -1549,7 +1616,7 @@ pub fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void
// We may be analyzing it for the first time, or this may be
// an incremental update. This code handles both cases.
const tree = try self.getAstTree(container_scope);
const tree = try self.getAstTree(container_scope.file_scope);
const decls = tree.root_node.decls();
try self.comp.work_queue.ensureUnusedCapacity(decls.len);
@ -1806,25 +1873,28 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
// Use the Decl's arena for function memory.
var arena = decl.typed_value.most_recent.arena.?.promote(self.gpa);
defer decl.typed_value.most_recent.arena.?.* = arena.state;
var inst_table = Scope.Block.InstTable.init(self.gpa);
defer inst_table.deinit();
var inner_block: Scope.Block = .{
.parent = null,
.inst_table = &inst_table,
.func = func,
.decl = decl,
.instructions = .{},
.arena = &arena.allocator,
.inlining = null,
.is_comptime = false,
};
defer inner_block.instructions.deinit(self.gpa);
const fn_zir = func.analysis.queued;
defer fn_zir.arena.promote(self.gpa).deinit();
func.analysis = .{ .in_progress = {} };
func.state = .in_progress;
log.debug("set {s} to in_progress\n", .{decl.name});
try zir_sema.analyzeBody(self, &inner_block.base, fn_zir.body);
try zir_sema.analyzeBody(self, &inner_block, func.zir);
const instructions = try arena.allocator.dupe(*Inst, inner_block.instructions.items);
func.analysis = .{ .success = .{ .instructions = instructions } };
func.state = .success;
func.body = .{ .instructions = instructions };
log.debug("set {s} to success\n", .{decl.name});
}
@ -2321,7 +2391,7 @@ pub fn analyzeDeclRef(self: *Module, scope: *Scope, src: usize, decl: *Decl) Inn
self.ensureDeclAnalyzed(decl) catch |err| {
if (scope.cast(Scope.Block)) |block| {
if (block.func) |func| {
func.analysis = .dependency_failure;
func.state = .dependency_failure;
} else {
block.decl.analysis = .dependency_failure;
}
@ -3020,11 +3090,20 @@ fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, src: usize, err_msg: *Com
},
.block => {
const block = scope.cast(Scope.Block).?;
if (block.func) |func| {
func.analysis = .sema_failure;
if (block.inlining) |inlining| {
if (inlining.shared.caller) |func| {
func.state = .sema_failure;
} else {
block.decl.analysis = .sema_failure;
block.decl.generation = self.generation;
}
} else {
block.decl.analysis = .sema_failure;
block.decl.generation = self.generation;
if (block.func) |func| {
func.state = .sema_failure;
} else {
block.decl.analysis = .sema_failure;
block.decl.generation = self.generation;
}
}
self.failed_decls.putAssumeCapacityNoClobber(block.decl, err_msg);
},
@ -3380,10 +3459,12 @@ pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic
var fail_block: Scope.Block = .{
.parent = parent_block,
.inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
};
defer fail_block.instructions.deinit(mod.gpa);
@ -3427,3 +3508,34 @@ pub fn validateVarType(mod: *Module, scope: *Scope, src: usize, ty: Type) !void
return mod.fail(scope, src, "variable of type '{}' must be const or comptime", .{ty});
}
}
/// Identifier token -> String (allocated in scope.arena())
pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 {
const tree = scope.tree();
const ident_name = tree.tokenSlice(token);
if (mem.startsWith(u8, ident_name, "@")) {
const raw_string = ident_name[1..];
var bad_index: usize = undefined;
return std.zig.parseStringLiteral(scope.arena(), raw_string, &bad_index) catch |err| switch (err) {
error.InvalidCharacter => {
const bad_byte = raw_string[bad_index];
const src = tree.token_locs[token].start;
return mod.fail(scope, src + 1 + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
},
else => |e| return e,
};
}
return ident_name;
}
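identifierTokenString now lives on Module so that astgen and Module share one implementation. It unquotes @"..." identifiers by stripping the leading `@` and running the string-literal parser, reporting bad escapes at their exact byte offset. For reference, the syntax it handles (standard Zig, shown here only as an illustration):

    const @"two words" = 1; // quoted identifier: any string can name a decl
    const @"if" = 2;        // including keywords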
pub fn emitBackwardBranch(mod: *Module, block: *Scope.Block, src: usize) !void {
const shared = block.inlining.?.shared;
shared.branch_count += 1;
if (shared.branch_count > shared.branch_quota) {
// TODO show the "called from here" stack
return mod.fail(&block.base, src, "evaluation exceeded {d} backwards branches", .{
shared.branch_quota,
});
}
}
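emitBackwardBranch is charged once per inline or comptime call (see analyzeInstCall further down), so runaway comptime recursion hits the shared quota instead of hanging the compiler. A hedged sketch of code that trips it, assuming the default quota of 1000 set in this commit:

    fn count(n: u64) u64 {
        if (n == 0) return 0;
        return 1 + count(n - 1);
    }

    // Each recursive comptime call bumps `shared.branch_count`; past the
    // quota this fails with:
    //   error: evaluation exceeded 1000 backwards branches
    const big = comptime count(2000);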


@ -384,7 +384,7 @@ fn breakExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpr
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
else => if (node.getLabel()) |break_label| {
const label_name = try identifierTokenString(mod, parent_scope, break_label);
const label_name = try mod.identifierTokenString(parent_scope, break_label);
return mod.failTok(parent_scope, break_label, "label not found: '{s}'", .{label_name});
} else {
return mod.failTok(parent_scope, src, "break expression outside loop", .{});
@ -426,7 +426,7 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowE
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
else => if (node.getLabel()) |break_label| {
const label_name = try identifierTokenString(mod, parent_scope, break_label);
const label_name = try mod.identifierTokenString(parent_scope, break_label);
return mod.failTok(parent_scope, break_label, "label not found: '{s}'", .{label_name});
} else {
return mod.failTok(parent_scope, src, "continue expression outside loop", .{});
@ -551,7 +551,7 @@ fn varDecl(
}
const tree = scope.tree();
const name_src = tree.token_locs[node.name_token].start;
const ident_name = try identifierTokenString(mod, scope, node.name_token);
const ident_name = try mod.identifierTokenString(scope, node.name_token);
// Local variables shadowing detection, including function parameters.
{
@ -843,7 +843,7 @@ fn typeInixOp(mod: *Module, scope: *Scope, node: *ast.Node.SimpleInfixOp, op_ins
fn enumLiteral(mod: *Module, scope: *Scope, node: *ast.Node.EnumLiteral) !*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.name].start;
const name = try identifierTokenString(mod, scope, node.name);
const name = try mod.identifierTokenString(scope, node.name);
return addZIRInst(mod, scope, src, zir.Inst.EnumLiteral, .{ .name = name }, .{});
}
@ -864,7 +864,7 @@ fn errorSetDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Erro
for (decls) |decl, i| {
const tag = decl.castTag(.ErrorTag).?;
fields[i] = try identifierTokenString(mod, scope, tag.name_token);
fields[i] = try mod.identifierTokenString(scope, tag.name_token);
}
// analyzing the error set results in a decl ref, so we might need to dereference it
@ -988,36 +988,16 @@ fn orelseCatchExpr(
/// Return whether the identifier names of two tokens are equal. Resolves @"" tokens without allocating.
/// OK in theory it could do it without allocating. This implementation allocates when the @"" form is used.
fn tokenIdentEql(mod: *Module, scope: *Scope, token1: ast.TokenIndex, token2: ast.TokenIndex) !bool {
const ident_name_1 = try identifierTokenString(mod, scope, token1);
const ident_name_2 = try identifierTokenString(mod, scope, token2);
const ident_name_1 = try mod.identifierTokenString(scope, token1);
const ident_name_2 = try mod.identifierTokenString(scope, token2);
return mem.eql(u8, ident_name_1, ident_name_2);
}
/// Identifier token -> String (allocated in scope.arena())
fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 {
const tree = scope.tree();
const ident_name = tree.tokenSlice(token);
if (mem.startsWith(u8, ident_name, "@")) {
const raw_string = ident_name[1..];
var bad_index: usize = undefined;
return std.zig.parseStringLiteral(scope.arena(), raw_string, &bad_index) catch |err| switch (err) {
error.InvalidCharacter => {
const bad_byte = raw_string[bad_index];
const src = tree.token_locs[token].start;
return mod.fail(scope, src + 1 + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
},
else => |e| return e,
};
}
return ident_name;
}
pub fn identifierStringInst(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.token].start;
const ident_name = try identifierTokenString(mod, scope, node.token);
const ident_name = try mod.identifierTokenString(scope, node.token);
return addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = ident_name }, .{});
}
@ -1936,7 +1916,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
defer tracy.end();
const tree = scope.tree();
const ident_name = try identifierTokenString(mod, scope, ident.token);
const ident_name = try mod.identifierTokenString(scope, ident.token);
const src = tree.token_locs[ident.token].start;
if (mem.eql(u8, ident_name, "_")) {
return mod.failNode(scope, &ident.base, "TODO implement '_' identifier", .{});


@ -532,7 +532,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
self.code.items.len += 4;
try self.dbgSetPrologueEnd();
try self.genBody(self.mod_fn.analysis.success);
try self.genBody(self.mod_fn.body);
const stack_end = self.max_end_stack;
if (stack_end > math.maxInt(i32))
@ -576,7 +576,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
});
} else {
try self.dbgSetPrologueEnd();
try self.genBody(self.mod_fn.analysis.success);
try self.genBody(self.mod_fn.body);
try self.dbgSetEpilogueBegin();
}
},
@ -593,7 +593,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
try self.dbgSetPrologueEnd();
try self.genBody(self.mod_fn.analysis.success);
try self.genBody(self.mod_fn.body);
// Backpatch stack offset
const stack_end = self.max_end_stack;
@ -638,13 +638,13 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.pop(.al, .{ .fp, .pc }).toU32());
} else {
try self.dbgSetPrologueEnd();
try self.genBody(self.mod_fn.analysis.success);
try self.genBody(self.mod_fn.body);
try self.dbgSetEpilogueBegin();
}
},
else => {
try self.dbgSetPrologueEnd();
try self.genBody(self.mod_fn.analysis.success);
try self.genBody(self.mod_fn.body);
try self.dbgSetEpilogueBegin();
},
}


@ -275,7 +275,7 @@ pub fn generate(file: *C, module: *Module, decl: *Decl) !void {
try writer.writeAll(" {");
const func: *Module.Fn = func_payload.data;
const instructions = func.analysis.success.instructions;
const instructions = func.body.instructions;
if (instructions.len > 0) {
try writer.writeAll("\n");
for (instructions) |inst| {


@ -63,7 +63,7 @@ pub fn genCode(buf: *ArrayList(u8), decl: *Decl) !void {
// TODO: check for and handle death of instructions
const tv = decl.typed_value.most_recent.typed_value;
const mod_fn = tv.val.castTag(.function).?.data;
for (mod_fn.analysis.success.instructions) |inst| try genInst(buf, decl, inst);
for (mod_fn.body.instructions) |inst| try genInst(buf, decl, inst);
// Write 'end' opcode
try writer.writeByte(0x0B);


@ -2,7 +2,6 @@ pub const have_llvm = true;
pub const version: [:0]const u8 = "@ZIG_VERSION@";
pub const semver = try @import("std").SemanticVersion.parse(version);
pub const log_scopes: []const []const u8 = &[_][]const u8{};
pub const zir_dumps: []const []const u8 = &[_][]const u8{};
pub const enable_tracy = false;
pub const is_stage1 = true;
pub const skip_non_native = false;


@ -2178,16 +2178,6 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
else => false,
};
if (is_fn) {
const zir_dumps = if (std.builtin.is_test) &[0][]const u8{} else build_options.zir_dumps;
if (zir_dumps.len != 0) {
for (zir_dumps) |fn_name| {
if (mem.eql(u8, mem.spanZ(decl.name), fn_name)) {
std.debug.print("\n{s}\n", .{decl.name});
typed_value.val.castTag(.function).?.data.dump(module.*);
}
}
}
// For functions we need to add a prologue to the debug line program.
try dbg_line_buffer.ensureCapacity(26);


@ -936,16 +936,6 @@ pub fn initDeclDebugBuffers(
const typed_value = decl.typed_value.most_recent.typed_value;
switch (typed_value.ty.zigTypeTag()) {
.Fn => {
const zir_dumps = if (std.builtin.is_test) &[0][]const u8{} else build_options.zir_dumps;
if (zir_dumps.len != 0) {
for (zir_dumps) |fn_name| {
if (mem.eql(u8, mem.spanZ(decl.name), fn_name)) {
std.debug.print("\n{}\n", .{decl.name});
typed_value.val.cast(Value.Payload.Function).?.func.dump(module.*);
}
}
}
// For functions we need to add a prologue to the debug line program.
try dbg_line_buffer.ensureCapacity(26);


@ -294,7 +294,7 @@ pub const LLVMIRModule = struct {
const entry_block = llvm_func.appendBasicBlock("Entry");
self.builder.positionBuilderAtEnd(entry_block);
const instructions = func.analysis.success.instructions;
const instructions = func.body.instructions;
for (instructions) |inst| {
switch (inst.tag) {
.breakpoint => try self.genBreakpoint(inst.castTag(.breakpoint).?),

View File

@ -1818,7 +1818,7 @@ fn buildOutputType(
};
updateModule(gpa, comp, zir_out_path, hook) catch |err| switch (err) {
error.SemanticAnalyzeFail => process.exit(1),
error.SemanticAnalyzeFail => if (!watch) process.exit(1),
else => |e| return e,
};
try comp.makeBinFileExecutable();


@ -330,11 +330,14 @@ pub const Value = extern union {
.int_type => return self.copyPayloadShallow(allocator, Payload.IntType),
.int_u64 => return self.copyPayloadShallow(allocator, Payload.U64),
.int_i64 => return self.copyPayloadShallow(allocator, Payload.I64),
.int_big_positive => {
@panic("TODO implement copying of big ints");
},
.int_big_negative => {
@panic("TODO implement copying of big ints");
.int_big_positive, .int_big_negative => {
const old_payload = self.cast(Payload.BigInt).?;
const new_payload = try allocator.create(Payload.BigInt);
new_payload.* = .{
.base = .{ .tag = self.ptr_otherwise.tag },
.data = try allocator.dupe(std.math.big.Limb, old_payload.data),
};
return Value{ .ptr_otherwise = &new_payload.base };
},
.function => return self.copyPayloadShallow(allocator, Payload.Function),
.extern_fn => return self.copyPayloadShallow(allocator, Payload.Decl),


@ -25,12 +25,13 @@ pub const Decl = struct {
/// These are instructions that correspond to the ZIR text format. See `ir.Inst` for
/// in-memory, analyzed instructions with types and values.
/// We use a table to map these instruction to their respective semantically analyzed
/// instructions because it is possible to have multiple analyses on the same ZIR
/// happening at the same time.
pub const Inst = struct {
tag: Tag,
/// Byte offset into the source.
src: usize,
/// Pre-allocated field for mapping ZIR text instructions to post-analysis instructions.
analyzed_inst: ?*ir.Inst = null,
/// These names are used directly as the instruction names in the text format.
pub const Tag = enum {
@ -793,7 +794,9 @@ pub const Inst = struct {
fn_type: *Inst,
body: Module.Body,
},
kw_args: struct {},
kw_args: struct {
is_inline: bool = false,
},
};
pub const FnType = struct {
@ -1847,44 +1850,325 @@ pub fn emit(allocator: *Allocator, old_module: *IrModule) !Module {
/// For debugging purposes, prints a function representation to stderr.
pub fn dumpFn(old_module: IrModule, module_fn: *IrModule.Fn) void {
const allocator = old_module.gpa;
var ctx: EmitZIR = .{
var ctx: DumpTzir = .{
.allocator = allocator,
.decls = .{},
.arena = std.heap.ArenaAllocator.init(allocator),
.old_module = &old_module,
.next_auto_name = 0,
.names = std.StringArrayHashMap(void).init(allocator),
.primitive_table = std.AutoHashMap(Inst.Primitive.Builtin, *Decl).init(allocator),
.indent = 0,
.block_table = std.AutoHashMap(*ir.Inst.Block, *Inst.Block).init(allocator),
.loop_table = std.AutoHashMap(*ir.Inst.Loop, *Inst.Loop).init(allocator),
.metadata = std.AutoHashMap(*Inst, Module.MetaData).init(allocator),
.body_metadata = std.AutoHashMap(*Module.Body, Module.BodyMetaData).init(allocator),
.module_fn = module_fn,
.indent = 2,
.inst_table = DumpTzir.InstTable.init(allocator),
.partial_inst_table = DumpTzir.InstTable.init(allocator),
.const_table = DumpTzir.InstTable.init(allocator),
};
defer ctx.metadata.deinit();
defer ctx.body_metadata.deinit();
defer ctx.block_table.deinit();
defer ctx.loop_table.deinit();
defer ctx.decls.deinit(allocator);
defer ctx.names.deinit();
defer ctx.primitive_table.deinit();
defer ctx.inst_table.deinit();
defer ctx.partial_inst_table.deinit();
defer ctx.const_table.deinit();
defer ctx.arena.deinit();
const fn_ty = module_fn.owner_decl.typed_value.most_recent.typed_value.ty;
_ = ctx.emitFn(module_fn, 0, fn_ty) catch |err| {
std.debug.print("unable to dump function: {s}\n", .{@errorName(err)});
return;
};
var module = Module{
.decls = ctx.decls.items,
.arena = ctx.arena,
.metadata = ctx.metadata,
.body_metadata = ctx.body_metadata,
};
module.dump();
switch (module_fn.state) {
.queued => std.debug.print("(queued)", .{}),
.inline_only => std.debug.print("(inline_only)", .{}),
.in_progress => std.debug.print("(in_progress)", .{}),
.sema_failure => std.debug.print("(sema_failure)", .{}),
.dependency_failure => std.debug.print("(dependency_failure)", .{}),
.success => {
const writer = std.io.getStdErr().writer();
ctx.dump(module_fn.body, writer) catch @panic("failed to dump TZIR");
},
}
}
const DumpTzir = struct {
allocator: *Allocator,
arena: std.heap.ArenaAllocator,
old_module: *const IrModule,
module_fn: *IrModule.Fn,
indent: usize,
inst_table: InstTable,
partial_inst_table: InstTable,
const_table: InstTable,
next_index: usize = 0,
next_partial_index: usize = 0,
next_const_index: usize = 0,
const InstTable = std.AutoArrayHashMap(*ir.Inst, usize);
fn dump(dtz: *DumpTzir, body: ir.Body, writer: std.fs.File.Writer) !void {
// First pass to pre-populate the table so that we can show even invalid references.
// Must iterate the same order we iterate the second time.
// We also look for constants and put them in the const_table.
for (body.instructions) |inst| {
try dtz.inst_table.put(inst, dtz.next_index);
dtz.next_index += 1;
switch (inst.tag) {
.alloc,
.retvoid,
.unreach,
.breakpoint,
.dbg_stmt,
=> {},
.ref,
.ret,
.bitcast,
.not,
.isnonnull,
.isnull,
.iserr,
.ptrtoint,
.floatcast,
.intcast,
.load,
.unwrap_optional,
.wrap_optional,
=> {
const un_op = inst.cast(ir.Inst.UnOp).?;
try dtz.findConst(un_op.operand);
},
.add,
.sub,
.cmp_lt,
.cmp_lte,
.cmp_eq,
.cmp_gte,
.cmp_gt,
.cmp_neq,
.store,
.booland,
.boolor,
.bitand,
.bitor,
.xor,
=> {
const bin_op = inst.cast(ir.Inst.BinOp).?;
try dtz.findConst(bin_op.lhs);
try dtz.findConst(bin_op.rhs);
},
.arg => {},
.br => {
const br = inst.castTag(.br).?;
try dtz.findConst(&br.block.base);
try dtz.findConst(br.operand);
},
.brvoid => {
const brvoid = inst.castTag(.brvoid).?;
try dtz.findConst(&brvoid.block.base);
},
// TODO fill out this debug printing
.assembly,
.block,
.call,
.condbr,
.constant,
.loop,
.varptr,
.switchbr,
=> {},
}
}
std.debug.print("Module.Function(name={s}):\n", .{dtz.module_fn.owner_decl.name});
for (dtz.const_table.items()) |entry| {
const constant = entry.key.castTag(.constant).?;
try writer.print(" @{d}: {} = {};\n", .{
entry.value, constant.base.ty, constant.val,
});
}
return dtz.dumpBody(body, writer);
}
fn dumpBody(dtz: *DumpTzir, body: ir.Body, writer: std.fs.File.Writer) !void {
for (body.instructions) |inst| {
const my_index = dtz.next_partial_index;
try dtz.partial_inst_table.put(inst, my_index);
dtz.next_partial_index += 1;
try writer.writeByteNTimes(' ', dtz.indent);
try writer.print("%{d}: {} = {s}(", .{
my_index, inst.ty, @tagName(inst.tag),
});
switch (inst.tag) {
.alloc,
.retvoid,
.unreach,
.breakpoint,
.dbg_stmt,
=> try writer.writeAll(")\n"),
.ref,
.ret,
.bitcast,
.not,
.isnonnull,
.isnull,
.iserr,
.ptrtoint,
.floatcast,
.intcast,
.load,
.unwrap_optional,
.wrap_optional,
=> {
const un_op = inst.cast(ir.Inst.UnOp).?;
if (dtz.partial_inst_table.get(un_op.operand)) |operand_index| {
try writer.print("%{d})\n", .{operand_index});
} else if (dtz.const_table.get(un_op.operand)) |operand_index| {
try writer.print("@{d})\n", .{operand_index});
} else if (dtz.inst_table.get(un_op.operand)) |operand_index| {
try writer.print("%{d}) // Instruction does not dominate all uses!\n", .{
operand_index,
});
} else {
try writer.writeAll("!BADREF!)\n");
}
},
.add,
.sub,
.cmp_lt,
.cmp_lte,
.cmp_eq,
.cmp_gte,
.cmp_gt,
.cmp_neq,
.store,
.booland,
.boolor,
.bitand,
.bitor,
.xor,
=> {
var lhs_kinky: ?usize = null;
var rhs_kinky: ?usize = null;
const bin_op = inst.cast(ir.Inst.BinOp).?;
if (dtz.partial_inst_table.get(bin_op.lhs)) |operand_index| {
try writer.print("%{d}, ", .{operand_index});
} else if (dtz.const_table.get(bin_op.lhs)) |operand_index| {
try writer.print("@{d}, ", .{operand_index});
} else if (dtz.inst_table.get(bin_op.lhs)) |operand_index| {
lhs_kinky = operand_index;
try writer.print("%{d}, ", .{operand_index});
} else {
try writer.writeAll("!BADREF!, ");
}
if (dtz.partial_inst_table.get(bin_op.rhs)) |operand_index| {
try writer.print("%{d}", .{operand_index});
} else if (dtz.const_table.get(bin_op.rhs)) |operand_index| {
try writer.print("@{d}", .{operand_index});
} else if (dtz.inst_table.get(bin_op.rhs)) |operand_index| {
rhs_kinky = operand_index;
try writer.print("%{d}", .{operand_index});
} else {
try writer.writeAll("!BADREF!");
}
if (lhs_kinky != null or rhs_kinky != null) {
try writer.writeAll(") // Instruction does not dominate all uses!");
if (lhs_kinky) |lhs| {
try writer.print(" %{d}", .{lhs});
}
if (rhs_kinky) |rhs| {
try writer.print(" %{d}", .{rhs});
}
try writer.writeAll("\n");
} else {
try writer.writeAll(")\n");
}
},
.arg => {
const arg = inst.castTag(.arg).?;
try writer.print("{s})\n", .{arg.name});
},
.br => {
const br = inst.castTag(.br).?;
var lhs_kinky: ?usize = null;
var rhs_kinky: ?usize = null;
if (dtz.partial_inst_table.get(&br.block.base)) |operand_index| {
try writer.print("%{d}, ", .{operand_index});
} else if (dtz.const_table.get(&br.block.base)) |operand_index| {
try writer.print("@{d}, ", .{operand_index});
} else if (dtz.inst_table.get(&br.block.base)) |operand_index| {
lhs_kinky = operand_index;
try writer.print("%{d}, ", .{operand_index});
} else {
try writer.writeAll("!BADREF!, ");
}
if (dtz.partial_inst_table.get(br.operand)) |operand_index| {
try writer.print("%{d}", .{operand_index});
} else if (dtz.const_table.get(br.operand)) |operand_index| {
try writer.print("@{d}", .{operand_index});
} else if (dtz.inst_table.get(br.operand)) |operand_index| {
rhs_kinky = operand_index;
try writer.print("%{d}", .{operand_index});
} else {
try writer.writeAll("!BADREF!");
}
if (lhs_kinky != null or rhs_kinky != null) {
try writer.writeAll(") // Instruction does not dominate all uses!");
if (lhs_kinky) |lhs| {
try writer.print(" %{d}", .{lhs});
}
if (rhs_kinky) |rhs| {
try writer.print(" %{d}", .{rhs});
}
try writer.writeAll("\n");
} else {
try writer.writeAll(")\n");
}
},
.brvoid => {
const brvoid = inst.castTag(.brvoid).?;
if (dtz.partial_inst_table.get(&brvoid.block.base)) |operand_index| {
try writer.print("%{d})\n", .{operand_index});
} else if (dtz.const_table.get(&brvoid.block.base)) |operand_index| {
try writer.print("@{d})\n", .{operand_index});
} else if (dtz.inst_table.get(&brvoid.block.base)) |operand_index| {
try writer.print("%{d}) // Instruction does not dominate all uses!\n", .{
operand_index,
});
} else {
try writer.writeAll("!BADREF!)\n");
}
},
// TODO fill out this debug printing
.assembly,
.block,
.call,
.condbr,
.constant,
.loop,
.varptr,
.switchbr,
=> {
try writer.writeAll("!TODO!)\n");
},
}
}
}
fn findConst(dtz: *DumpTzir, operand: *ir.Inst) !void {
if (operand.tag == .constant) {
try dtz.const_table.put(operand, dtz.next_const_index);
dtz.next_const_index += 1;
}
}
};
const EmitZIR = struct {
allocator: *Allocator,
arena: std.heap.ArenaAllocator,
@ -2072,11 +2356,12 @@ const EmitZIR = struct {
var instructions = std.ArrayList(*Inst).init(self.allocator);
defer instructions.deinit();
switch (module_fn.analysis) {
switch (module_fn.state) {
.queued => unreachable,
.in_progress => unreachable,
.success => |body| {
try self.emitBody(body, &inst_table, &instructions);
.inline_only => unreachable,
.success => {
try self.emitBody(module_fn.body, &inst_table, &instructions);
},
.sema_failure => {
const err_msg = self.old_module.failed_decls.get(module_fn.owner_decl).?;
@ -2154,7 +2439,9 @@ const EmitZIR = struct {
.fn_type = fn_type.inst,
.body = .{ .instructions = arena_instrs },
},
.kw_args = .{},
.kw_args = .{
.is_inline = module_fn.state == .inline_only,
},
};
return self.emitUnnamedDecl(&fn_inst.base);
}


@ -159,16 +159,11 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
}
}
pub fn analyzeBody(mod: *Module, scope: *Scope, body: zir.Module.Body) !void {
for (body.instructions) |src_inst, i| {
const analyzed_inst = try analyzeInst(mod, scope, src_inst);
src_inst.analyzed_inst = analyzed_inst;
pub fn analyzeBody(mod: *Module, block: *Scope.Block, body: zir.Module.Body) !void {
for (body.instructions) |src_inst| {
const analyzed_inst = try analyzeInst(mod, &block.base, src_inst);
try block.inst_table.putNoClobber(src_inst, analyzed_inst);
if (analyzed_inst.ty.zigTypeTag() == .NoReturn) {
for (body.instructions[i..]) |unreachable_inst| {
if (unreachable_inst.castTag(.dbg_stmt)) |dbg_stmt| {
return mod.fail(scope, dbg_stmt.base.src, "unreachable code", .{});
}
}
break;
}
}
@ -180,8 +175,8 @@ pub fn analyzeBodyValueAsType(
zir_result_inst: *zir.Inst,
body: zir.Module.Body,
) !Type {
try analyzeBody(mod, &block_scope.base, body);
const result_inst = zir_result_inst.analyzed_inst.?;
try analyzeBody(mod, block_scope, body);
const result_inst = block_scope.inst_table.get(zir_result_inst).?;
const val = try mod.resolveConstValue(&block_scope.base, result_inst);
return val.toType(block_scope.base.arena());
}
@ -264,30 +259,9 @@ fn resolveCompleteZirDecl(mod: *Module, scope: *Scope, src_decl: *zir.Decl) Inne
return decl;
}
/// TODO Look into removing this function. The body is only needed for .zir files, not .zig files.
pub fn resolveInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*Inst {
if (old_inst.analyzed_inst) |inst| return inst;
// If this assert trips, the instruction that was referenced did not get properly
// analyzed before it was referenced.
const zir_module = scope.namespace().cast(Scope.ZIRModule).?;
const entry = if (old_inst.cast(zir.Inst.DeclVal)) |declval| blk: {
const decl_name = declval.positionals.name;
const entry = zir_module.contents.module.findDecl(decl_name) orelse
return mod.fail(scope, old_inst.src, "decl '{s}' not found", .{decl_name});
break :blk entry;
} else blk: {
// If this assert trips, the instruction that was referenced did not get
// properly analyzed by a previous instruction analysis before it was
// referenced by the current one.
break :blk zir_module.contents.module.findInstDecl(old_inst).?;
};
const decl = try resolveCompleteZirDecl(mod, scope, entry.decl);
const decl_ref = try mod.analyzeDeclRef(scope, old_inst.src, decl);
// Note: it would be tempting here to store the result into old_inst.analyzed_inst field,
// but this would prevent the analyzeDeclRef from happening, which is needed to properly
// detect Decl dependencies and dependency failures on updates.
return mod.analyzeDeref(scope, old_inst.src, decl_ref, old_inst.src);
pub fn resolveInst(mod: *Module, scope: *Scope, zir_inst: *zir.Inst) InnerError!*Inst {
const block = scope.cast(Scope.Block).?;
return block.inst_table.get(zir_inst).?; // Instruction does not dominate all uses!
}
fn resolveConstString(mod: *Module, scope: *Scope, old_inst: *zir.Inst) ![]u8 {
@ -575,7 +549,12 @@ fn analyzeInstCompileError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) In
}
fn analyzeInstArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
const b = try mod.requireFunctionBlock(scope, inst.base.src);
if (b.inlining) |inlining| {
const param_index = inlining.param_index;
inlining.param_index += 1;
return inlining.casted_args[param_index];
}
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const param_index = b.instructions.items.len;
const param_count = fn_ty.fnParamLen();
@ -608,15 +587,17 @@ fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError
var child_block: Scope.Block = .{
.parent = parent_block,
.inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
};
defer child_block.instructions.deinit(mod.gpa);
try analyzeBody(mod, &child_block.base, inst.positionals.body);
try analyzeBody(mod, &child_block, inst.positionals.body);
// Loop repetition is implied so the last instruction may or may not be a noreturn instruction.
@ -630,16 +611,18 @@ fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_c
var child_block: Scope.Block = .{
.parent = parent_block,
.inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
.label = null,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime or is_comptime,
};
defer child_block.instructions.deinit(mod.gpa);
try analyzeBody(mod, &child_block.base, inst.positionals.body);
try analyzeBody(mod, &child_block, inst.positionals.body);
try parent_block.instructions.appendSlice(mod.gpa, child_block.instructions.items);
@ -668,6 +651,7 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
var child_block: Scope.Block = .{
.parent = parent_block,
.inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
@ -675,38 +659,53 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
// TODO @as here is working around a stage1 miscompilation bug :(
.label = @as(?Scope.Block.Label, Scope.Block.Label{
.zir_block = inst,
.results = .{},
.block_inst = block_inst,
.merges = .{
.results = .{},
.block_inst = block_inst,
},
}),
.inlining = parent_block.inlining,
.is_comptime = is_comptime or parent_block.is_comptime,
};
const label = &child_block.label.?;
const merges = &child_block.label.?.merges;
defer child_block.instructions.deinit(mod.gpa);
defer label.results.deinit(mod.gpa);
defer merges.results.deinit(mod.gpa);
try analyzeBody(mod, &child_block.base, inst.positionals.body);
try analyzeBody(mod, &child_block, inst.positionals.body);
return analyzeBlockBody(mod, scope, &child_block, merges);
}
fn analyzeBlockBody(
mod: *Module,
scope: *Scope,
child_block: *Scope.Block,
merges: *Scope.Block.Merges,
) InnerError!*Inst {
const parent_block = scope.cast(Scope.Block).?;
// Blocks must terminate with noreturn instruction.
assert(child_block.instructions.items.len != 0);
assert(child_block.instructions.items[child_block.instructions.items.len - 1].ty.isNoReturn());
if (label.results.items.len == 0) {
// No need for a block instruction. We can put the new instructions directly into the parent block.
if (merges.results.items.len == 0) {
// No need for a block instruction. We can put the new instructions
// directly into the parent block.
const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items);
try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
return copied_instructions[copied_instructions.len - 1];
}
if (label.results.items.len == 1) {
if (merges.results.items.len == 1) {
const last_inst_index = child_block.instructions.items.len - 1;
const last_inst = child_block.instructions.items[last_inst_index];
if (last_inst.breakBlock()) |br_block| {
if (br_block == block_inst) {
if (br_block == merges.block_inst) {
// No need for a block instruction. We can put the new instructions directly into the parent block.
// Here we omit the break instruction.
const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items[0..last_inst_index]);
try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
return label.results.items[0];
return merges.results.items[0];
}
}
}
@ -715,10 +714,10 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
// Need to set the type and emit the Block instruction. This allows machine code generation
// to emit a jump instruction to after the block when it encounters the break.
try parent_block.instructions.append(mod.gpa, &block_inst.base);
block_inst.base.ty = try mod.resolvePeerTypes(scope, label.results.items);
block_inst.body = .{ .instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items) };
return &block_inst.base;
try parent_block.instructions.append(mod.gpa, &merges.block_inst.base);
merges.block_inst.base.ty = try mod.resolvePeerTypes(scope, merges.results.items);
merges.block_inst.body = .{ .instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items) };
return &merges.block_inst.base;
}
fn analyzeInstBreakpoint(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
@ -826,28 +825,108 @@ fn analyzeInstCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError
const ret_type = func.ty.fnReturnType();
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
const b = try mod.requireFunctionBlock(scope, inst.base.src);
const is_comptime_call = b.is_comptime or inst.kw_args.modifier == .compile_time;
const is_inline_call = is_comptime_call or inst.kw_args.modifier == .always_inline or blk: {
// This logic will get simplified by
// https://github.com/ziglang/zig/issues/6429
if (try mod.resolveDefinedValue(scope, func)) |func_val| {
const module_fn = switch (func_val.tag()) {
.function => func_val.castTag(.function).?.data,
else => break :blk false,
};
break :blk module_fn.state == .inline_only;
}
break :blk false;
};
if (is_inline_call) {
const func_val = try mod.resolveConstValue(scope, func);
const module_fn = switch (func_val.tag()) {
.function => func_val.castTag(.function).?.data,
.extern_fn => return mod.fail(scope, inst.base.src, "{s} call of extern function", .{
@as([]const u8, if (is_comptime_call) "comptime" else "inline"),
}),
else => unreachable,
};
// Analyze the ZIR. The same ZIR gets analyzed into a runtime function
// or an inlined call depending on what union tag the `label` field is
// set to in the `Scope.Block`.
// This block instruction will be used to capture the return value from the
// inlined function.
const block_inst = try scope.arena().create(Inst.Block);
block_inst.* = .{
.base = .{
.tag = Inst.Block.base_tag,
.ty = ret_type,
.src = inst.base.src,
},
.body = undefined,
};
// If this is the top of the inline/comptime call stack, we use this data.
// Otherwise we pass on the shared data from the parent scope.
var shared_inlining = Scope.Block.Inlining.Shared{
.branch_count = 0,
.branch_quota = 1000,
.caller = b.func,
};
// This one is shared among sub-blocks within the same callee, but not
// shared among the entire inline/comptime call stack.
var inlining = Scope.Block.Inlining{
.shared = if (b.inlining) |inlining| inlining.shared else &shared_inlining,
.param_index = 0,
.casted_args = casted_args,
.merges = .{
.results = .{},
.block_inst = block_inst,
},
};
var inst_table = Scope.Block.InstTable.init(mod.gpa);
defer inst_table.deinit();
var child_block: Scope.Block = .{
.parent = null,
.inst_table = &inst_table,
.func = module_fn,
// Note that we pass the caller's Decl, not the callee. This causes
// compile errors to be attached (correctly) to the caller's Decl.
.decl = scope.decl().?,
.instructions = .{},
.arena = scope.arena(),
.label = null,
.inlining = &inlining,
.is_comptime = is_comptime_call,
};
const merges = &child_block.inlining.?.merges;
defer child_block.instructions.deinit(mod.gpa);
defer merges.results.deinit(mod.gpa);
try mod.emitBackwardBranch(&child_block, inst.base.src);
// This will have return instructions analyzed as break instructions to
// the block_inst above.
try analyzeBody(mod, &child_block, module_fn.zir);
const result = try analyzeBlockBody(mod, scope, &child_block, merges);
if (result.castTag(.constant)) |constant| {
log.debug("inline call resulted in {}", .{constant.val});
} else {
log.debug("inline call resulted in {}", .{result});
}
return result;
}
return mod.addCall(b, inst.base.src, ret_type, func, casted_args);
}
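analyzeInstCall now reaches the inline path three ways: the enclosing block is comptime, the ZIR call carries a `compile_time` or `always_inline` modifier, or the callee itself is `inline_only`. A hedged user-level sketch using the `@call` builtin with the CallOptions-style API of this era (assumed; check the std.builtin of the matching Zig version):

    const std = @import("std");

    fn add(a: i32, b: i32) i32 {
        return a + b;
    }

    test "forcing the inline call path" {
        // .always_inline: analyzed as an inline call in the caller's scope.
        const x = @call(.{ .modifier = .always_inline }, add, .{ 1, 2 });
        // .compile_time: evaluated entirely during semantic analysis.
        const y = @call(.{ .modifier = .compile_time }, add, .{ 3, 4 });
        std.debug.assert(x == 3 and y == 7);
    }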
fn analyzeInstFn(mod: *Module, scope: *Scope, fn_inst: *zir.Inst.Fn) InnerError!*Inst {
const fn_type = try resolveType(mod, scope, fn_inst.positionals.fn_type);
const fn_zir = blk: {
var fn_arena = std.heap.ArenaAllocator.init(mod.gpa);
errdefer fn_arena.deinit();
const fn_zir = try scope.arena().create(Module.Fn.ZIR);
fn_zir.* = .{
.body = .{
.instructions = fn_inst.positionals.body.instructions,
},
.arena = fn_arena.state,
};
break :blk fn_zir;
};
const new_func = try scope.arena().create(Module.Fn);
new_func.* = .{
.analysis = .{ .queued = fn_zir },
.state = if (fn_inst.kw_args.is_inline) .inline_only else .queued,
.zir = fn_inst.positionals.body,
.body = undefined,
.owner_decl = scope.decl().?,
};
return mod.constInst(scope, fn_inst.base.src, .{
@ -1312,17 +1391,17 @@ fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) In
const item = try mod.resolveConstValue(scope, casted);
if (target_val.eql(item)) {
try analyzeBody(mod, scope, case.body);
try analyzeBody(mod, scope.cast(Scope.Block).?, case.body);
return mod.constNoReturn(scope, inst.base.src);
}
}
try analyzeBody(mod, scope, inst.positionals.else_body);
try analyzeBody(mod, scope.cast(Scope.Block).?, inst.positionals.else_body);
return mod.constNoReturn(scope, inst.base.src);
}
if (inst.positionals.cases.len == 0) {
// no cases just analyze else_branch
try analyzeBody(mod, scope, inst.positionals.else_body);
try analyzeBody(mod, scope.cast(Scope.Block).?, inst.positionals.else_body);
return mod.constNoReturn(scope, inst.base.src);
}
@ -1331,10 +1410,12 @@ fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) In
var case_block: Scope.Block = .{
.parent = parent_block,
.inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
};
defer case_block.instructions.deinit(mod.gpa);
@ -1347,7 +1428,7 @@ fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) In
const casted = try mod.coerce(scope, target.ty, resolved);
const item = try mod.resolveConstValue(scope, casted);
try analyzeBody(mod, &case_block.base, case.body);
try analyzeBody(mod, &case_block, case.body);
cases[i] = .{
.item = item,
@ -1356,7 +1437,7 @@ fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) In
}
case_block.instructions.items.len = 0;
try analyzeBody(mod, &case_block.base, inst.positionals.else_body);
try analyzeBody(mod, &case_block, inst.positionals.else_body);
const else_body: ir.Body = .{
.instructions = try parent_block.arena.dupe(*Inst, case_block.instructions.items),
@ -1509,7 +1590,7 @@ fn analyzeInstImport(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerErr
return mod.fail(scope, inst.base.src, "unable to find '{s}'", .{operand});
},
else => {
// TODO user friendly error to string
// TODO: make sure this gets retried and not cached
return mod.fail(scope, inst.base.src, "unable to open '{s}': {s}", .{ operand, @errorName(err) });
},
};
@ -1674,24 +1755,26 @@ fn analyzeInstComptimeOp(mod: *Module, scope: *Scope, res_type: Type, inst: *zir
}
const is_int = res_type.isInt() or res_type.zigTypeTag() == .ComptimeInt;
const value = try switch (inst.base.tag) {
const value = switch (inst.base.tag) {
.add => blk: {
const val = if (is_int)
Module.intAdd(scope.arena(), lhs_val, rhs_val)
try Module.intAdd(scope.arena(), lhs_val, rhs_val)
else
mod.floatAdd(scope, res_type, inst.base.src, lhs_val, rhs_val);
try mod.floatAdd(scope, res_type, inst.base.src, lhs_val, rhs_val);
break :blk val;
},
.sub => blk: {
const val = if (is_int)
Module.intSub(scope.arena(), lhs_val, rhs_val)
try Module.intSub(scope.arena(), lhs_val, rhs_val)
else
mod.floatSub(scope, res_type, inst.base.src, lhs_val, rhs_val);
try mod.floatSub(scope, res_type, inst.base.src, lhs_val, rhs_val);
break :blk val;
},
else => return mod.fail(scope, inst.base.src, "TODO Implement arithmetic operand '{s}'", .{@tagName(inst.base.tag)}),
};
log.debug("{s}({}, {}) result: {}", .{ @tagName(inst.base.tag), lhs_val, rhs_val, value });
return mod.constInst(scope, inst.base.src, .{
.ty = res_type,
.val = value,
@ -1860,35 +1943,39 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
const uncasted_cond = try resolveInst(mod, scope, inst.positionals.condition);
const cond = try mod.coerce(scope, Type.initTag(.bool), uncasted_cond);
const parent_block = scope.cast(Scope.Block).?;
if (try mod.resolveDefinedValue(scope, cond)) |cond_val| {
const body = if (cond_val.toBool()) &inst.positionals.then_body else &inst.positionals.else_body;
try analyzeBody(mod, scope, body.*);
try analyzeBody(mod, parent_block, body.*);
return mod.constNoReturn(scope, inst.base.src);
}
const parent_block = try mod.requireRuntimeBlock(scope, inst.base.src);
var true_block: Scope.Block = .{
.parent = parent_block,
.inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
};
defer true_block.instructions.deinit(mod.gpa);
try analyzeBody(mod, &true_block.base, inst.positionals.then_body);
try analyzeBody(mod, &true_block, inst.positionals.then_body);
var false_block: Scope.Block = .{
.parent = parent_block,
.inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
};
defer false_block.instructions.deinit(mod.gpa);
try analyzeBody(mod, &false_block.base, inst.positionals.else_body);
try analyzeBody(mod, &false_block, inst.positionals.else_body);
const then_body: ir.Body = .{ .instructions = try scope.arena().dupe(*Inst, true_block.instructions.items) };
const else_body: ir.Body = .{ .instructions = try scope.arena().dupe(*Inst, false_block.instructions.items) };
@ -1912,12 +1999,26 @@ fn analyzeInstUnreachable(
fn analyzeInstRet(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const operand = try resolveInst(mod, scope, inst.positionals.operand);
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
const b = try mod.requireFunctionBlock(scope, inst.base.src);
if (b.inlining) |inlining| {
// We are inlining a function call; rewrite the `ret` as a `break`.
try inlining.merges.results.append(mod.gpa, operand);
return mod.addBr(b, inst.base.src, inlining.merges.block_inst, operand);
}
return mod.addUnOp(b, inst.base.src, Type.initTag(.noreturn), .ret, operand);
}
fn analyzeInstRetVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
const b = try mod.requireFunctionBlock(scope, inst.base.src);
if (b.inlining) |inlining| {
// We are inlining a function call; rewrite the `retvoid` as a `breakvoid`.
const void_inst = try mod.constVoid(scope, inst.base.src);
try inlining.merges.results.append(mod.gpa, void_inst);
return mod.addBr(b, inst.base.src, inlining.merges.block_inst, void_inst);
}
if (b.func) |func| {
// Need to emit a compile error if returning void is not allowed.
const void_inst = try mod.constVoid(scope, inst.base.src);
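
These two hunks are the core of inline-call support: inside an inlined body, `ret`/`retvoid` no longer emit a function return but instead break to the merge block set up at the call site. At the source level the effect is roughly the following rewrite — a sketch of the semantics, not literal compiler output, and the `blk` label is invented:

    // inline fn add(a: usize, b: usize, c: usize) usize { return a + b + c; }
    // const y = add(1, 2, x);
    // ...is analyzed as if it were written:
    const y = blk: {
        const a: usize = 1;
        const b: usize = 2;
        const c: usize = x;
        break :blk a + b + c; // the callee's `return` becomes this `break`
    };
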
@ -1949,9 +2050,9 @@ fn analyzeBreak(
while (opt_block) |block| {
if (block.label) |*label| {
if (label.zir_block == zir_block) {
try label.results.append(mod.gpa, operand);
const b = try mod.requireRuntimeBlock(scope, src);
return mod.addBr(b, src, label.block_inst, operand);
try label.merges.results.append(mod.gpa, operand);
const b = try mod.requireFunctionBlock(scope, src);
return mod.addBr(b, src, label.merges.block_inst, operand);
}
}
opt_block = block.parent;

View File

@ -27,7 +27,6 @@ const wasi = std.zig.CrossTarget{
};
pub fn addCases(ctx: *TestContext) !void {
try @import("zir.zig").addCases(ctx);
try @import("cbe.zig").addCases(ctx);
try @import("spu-ii.zig").addCases(ctx);
try @import("arm.zig").addCases(ctx);
@ -318,7 +317,7 @@ pub fn addCases(ctx: *TestContext) !void {
}
{
var case = ctx.exe("adding numbers at runtime", linux_x64);
var case = ctx.exe("adding numbers at runtime and comptime", linux_x64);
case.addCompareOutput(
\\export fn _start() noreturn {
\\ add(3, 4);
@ -342,6 +341,54 @@ pub fn addCases(ctx: *TestContext) !void {
,
"",
);
// comptime function call
case.addCompareOutput(
\\export fn _start() noreturn {
\\ exit();
\\}
\\
\\fn add(a: u32, b: u32) u32 {
\\ return a + b;
\\}
\\
\\const x = add(3, 4);
\\
\\fn exit() noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (x - 7)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
"",
);
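
The `comptime` in this test is implicit: `x` is a container-level constant, and container-level initializers are always evaluated at compile time, which is what forces the call to `add` through the new comptime call path. The explicit spelling, equivalent for the purposes of this feature, would be:

    const x = comptime add(3, 4);
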
// Inline function call
case.addCompareOutput(
\\export fn _start() noreturn {
\\ var x: usize = 3;
\\ const y = add(1, 2, x);
\\ exit(y - 6);
\\}
\\
\\inline fn add(a: usize, b: usize, c: usize) usize {
\\ return a + b + c;
\\}
\\
\\fn exit(code: usize) noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (code)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
"",
);
}
{
@ -1331,4 +1378,102 @@ pub fn addCases(ctx: *TestContext) !void {
\\}
, &[_][]const u8{":2:9: error: variable of type '@Type(.Null)' must be const or comptime"});
}
{
var case = ctx.exe("compile error in inline fn call fixed", linux_x64);
case.addError(
\\export fn _start() noreturn {
\\ var x: usize = 3;
\\ const y = add(10, 2, x);
\\ exit(y - 6);
\\}
\\
\\inline fn add(a: usize, b: usize, c: usize) usize {
\\ if (a == 10) @compileError("bad");
\\ return a + b + c;
\\}
\\
\\fn exit(code: usize) noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (code)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
, &[_][]const u8{":8:18: error: bad"});
case.addCompareOutput(
\\export fn _start() noreturn {
\\ var x: usize = 3;
\\ const y = add(1, 2, x);
\\ exit(y - 6);
\\}
\\
\\inline fn add(a: usize, b: usize, c: usize) usize {
\\ if (a == 10) @compileError("bad");
\\ return a + b + c;
\\}
\\
\\fn exit(code: usize) noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (code)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
"",
);
}
{
var case = ctx.exe("recursive inline function", linux_x64);
case.addCompareOutput(
\\export fn _start() noreturn {
\\ const y = fibonacci(7);
\\ exit(y - 21);
\\}
\\
\\inline fn fibonacci(n: usize) usize {
\\ if (n <= 2) return n;
\\ return fibonacci(n - 2) + fibonacci(n - 1);
\\}
\\
\\fn exit(code: usize) noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (code)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
"",
);
case.addError(
\\export fn _start() noreturn {
\\ const y = fibonacci(999);
\\ exit(y - 21);
\\}
\\
\\inline fn fibonacci(n: usize) usize {
\\ if (n <= 2) return n;
\\ return fibonacci(n - 2) + fibonacci(n - 1);
\\}
\\
\\fn exit(code: usize) noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (code)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
, &[_][]const u8{":8:10: error: evaluation exceeded 1000 backwards branches"});
}
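
The limit tripped here is the default evaluation branch quota of 1000: `fibonacci(999)` is an inline call with a comptime-known argument, so the exponential recursion is unrolled during analysis until the quota runs out at the recursive call site (`:8:10`). Stage1 lets code raise the quota with `@setEvalBranchQuota`; assuming stage2 grows the same builtin, a passing variant of the failing program might start like this, reusing `fibonacci` and `exit` from above (hypothetical, not part of this test suite):

    export fn _start() noreturn {
        const y = comptime blk: {
            @setEvalBranchQuota(100000);
            break :blk fibonacci(15);
        };
        exit(y - 987); // fibonacci(15) == 987 with this definition
    }
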
}

View File

@ -1,316 +0,0 @@
const std = @import("std");
const TestContext = @import("../../src/test.zig").TestContext;
// self-hosted does not yet support PE executable files / COFF object files
// or mach-o files. So we do the ZIR transform test cases cross compiling for
// x86_64-linux.
const linux_x64 = std.zig.CrossTarget{
.cpu_arch = .x86_64,
.os_tag = .linux,
};
pub fn addCases(ctx: *TestContext) !void {
ctx.transformZIR("referencing decls which appear later in the file", linux_x64,
\\@void = primitive(void)
\\@fnty = fntype([], @void, cc=C)
\\
\\@9 = str("entry")
\\@11 = export(@9, "entry")
\\
\\@entry = fn(@fnty, {
\\ %11 = returnvoid()
\\})
,
\\@void = primitive(void)
\\@fnty = fntype([], @void, cc=C)
\\@9 = declref("9__anon_0")
\\@9__anon_0 = str("entry")
\\@unnamed$4 = str("entry")
\\@unnamed$5 = export(@unnamed$4, "entry")
\\@11 = primitive(void_value)
\\@unnamed$7 = fntype([], @void, cc=C)
\\@entry = fn(@unnamed$7, {
\\ %0 = returnvoid() ; deaths=0b1000000000000000
\\})
\\
);
ctx.transformZIR("elemptr, add, cmp, condbr, return, breakpoint", linux_x64,
\\@void = primitive(void)
\\@usize = primitive(usize)
\\@fnty = fntype([], @void, cc=C)
\\@0 = int(0)
\\@1 = int(1)
\\@2 = int(2)
\\@3 = int(3)
\\
\\@entry = fn(@fnty, {
\\ %a = str("\x32\x08\x01\x0a")
\\ %a_ref = ref(%a)
\\ %eptr0 = elemptr(%a_ref, @0)
\\ %eptr1 = elemptr(%a_ref, @1)
\\ %eptr2 = elemptr(%a_ref, @2)
\\ %eptr3 = elemptr(%a_ref, @3)
\\ %v0 = deref(%eptr0)
\\ %v1 = deref(%eptr1)
\\ %v2 = deref(%eptr2)
\\ %v3 = deref(%eptr3)
\\ %x0 = add(%v0, %v1)
\\ %x1 = add(%v2, %v3)
\\ %result = add(%x0, %x1)
\\
\\ %expected = int(69)
\\ %ok = cmp_eq(%result, %expected)
\\ %10 = condbr(%ok, {
\\ %11 = returnvoid()
\\ }, {
\\ %12 = breakpoint()
\\ })
\\})
\\
\\@9 = str("entry")
\\@11 = export(@9, "entry")
,
\\@void = primitive(void)
\\@fnty = fntype([], @void, cc=C)
\\@0 = int(0)
\\@1 = int(1)
\\@2 = int(2)
\\@3 = int(3)
\\@unnamed$6 = fntype([], @void, cc=C)
\\@entry = fn(@unnamed$6, {
\\ %0 = returnvoid() ; deaths=0b1000000000000000
\\})
\\@entry__anon_1 = str("2\x08\x01\n")
\\@9 = declref("9__anon_0")
\\@9__anon_0 = str("entry")
\\@unnamed$11 = str("entry")
\\@unnamed$12 = export(@unnamed$11, "entry")
\\@11 = primitive(void_value)
\\
);
{
var case = ctx.objZIR("reference cycle with compile error in the cycle", linux_x64);
case.addTransform(
\\@void = primitive(void)
\\@fnty = fntype([], @void, cc=C)
\\
\\@9 = str("entry")
\\@11 = export(@9, "entry")
\\
\\@entry = fn(@fnty, {
\\ %0 = call(@a, [])
\\ %1 = returnvoid()
\\})
\\
\\@a = fn(@fnty, {
\\ %0 = call(@b, [])
\\ %1 = returnvoid()
\\})
\\
\\@b = fn(@fnty, {
\\ %0 = call(@a, [])
\\ %1 = returnvoid()
\\})
,
\\@void = primitive(void)
\\@fnty = fntype([], @void, cc=C)
\\@9 = declref("9__anon_0")
\\@9__anon_0 = str("entry")
\\@unnamed$4 = str("entry")
\\@unnamed$5 = export(@unnamed$4, "entry")
\\@11 = primitive(void_value)
\\@unnamed$7 = fntype([], @void, cc=C)
\\@entry = fn(@unnamed$7, {
\\ %0 = call(@a, [], modifier=auto) ; deaths=0b1000000000000001
\\ %1 = returnvoid() ; deaths=0b1000000000000000
\\})
\\@unnamed$9 = fntype([], @void, cc=C)
\\@a = fn(@unnamed$9, {
\\ %0 = call(@b, [], modifier=auto) ; deaths=0b1000000000000001
\\ %1 = returnvoid() ; deaths=0b1000000000000000
\\})
\\@unnamed$11 = fntype([], @void, cc=C)
\\@b = fn(@unnamed$11, {
\\ %0 = call(@a, [], modifier=auto) ; deaths=0b1000000000000001
\\ %1 = returnvoid() ; deaths=0b1000000000000000
\\})
\\
);
// Now we introduce a compile error
case.addError(
\\@void = primitive(void)
\\@fnty = fntype([], @void, cc=C)
\\
\\@9 = str("entry")
\\@11 = export(@9, "entry")
\\
\\@entry = fn(@fnty, {
\\ %0 = call(@a, [])
\\ %1 = returnvoid()
\\})
\\
\\@a = fn(@fnty, {
\\ %0 = call(@c, [])
\\ %1 = returnvoid()
\\})
\\
\\@b = str("message")
\\
\\@c = fn(@fnty, {
\\ %9 = compileerror(@b)
\\ %0 = call(@a, [])
\\ %1 = returnvoid()
\\})
,
&[_][]const u8{
":20:21: error: message",
},
);
// Now we remove the call to `a`. `a` and `c` form a cycle, but no entry points are
// referencing either of them. This tests that the cycle is detected, and the error
// goes away.
case.addTransform(
\\@void = primitive(void)
\\@fnty = fntype([], @void, cc=C)
\\
\\@9 = str("entry")
\\@11 = export(@9, "entry")
\\
\\@entry = fn(@fnty, {
\\ %0 = returnvoid()
\\})
\\
\\@a = fn(@fnty, {
\\ %0 = call(@c, [])
\\ %1 = returnvoid()
\\})
\\
\\@b = str("message")
\\
\\@c = fn(@fnty, {
\\ %9 = compileerror(@b)
\\ %0 = call(@a, [])
\\ %1 = returnvoid()
\\})
,
\\@void = primitive(void)
\\@fnty = fntype([], @void, cc=C)
\\@9 = declref("9__anon_3")
\\@9__anon_3 = str("entry")
\\@unnamed$4 = str("entry")
\\@unnamed$5 = export(@unnamed$4, "entry")
\\@11 = primitive(void_value)
\\@unnamed$7 = fntype([], @void, cc=C)
\\@entry = fn(@unnamed$7, {
\\ %0 = returnvoid() ; deaths=0b1000000000000000
\\})
\\
);
}
if (std.Target.current.os.tag != .linux or
std.Target.current.cpu.arch != .x86_64)
{
// TODO implement self-hosted PE (.exe file) linking
// TODO implement more ZIR so we don't depend on x86_64-linux
return;
}
ctx.compareOutputZIR("hello world ZIR",
\\@noreturn = primitive(noreturn)
\\@void = primitive(void)
\\@usize = primitive(usize)
\\@0 = int(0)
\\@1 = int(1)
\\@2 = int(2)
\\@3 = int(3)
\\
\\@msg = str("Hello, world!\n")
\\
\\@start_fnty = fntype([], @noreturn, cc=Naked)
\\@start = fn(@start_fnty, {
\\ %SYS_exit_group = int(231)
\\ %exit_code = as(@usize, @0)
\\
\\ %syscall = str("syscall")
\\ %sysoutreg = str("={rax}")
\\ %rax = str("{rax}")
\\ %rdi = str("{rdi}")
\\ %rcx = str("rcx")
\\ %rdx = str("{rdx}")
\\ %rsi = str("{rsi}")
\\ %r11 = str("r11")
\\ %memory = str("memory")
\\
\\ %SYS_write = as(@usize, @1)
\\ %STDOUT_FILENO = as(@usize, @1)
\\
\\ %msg_addr = ptrtoint(@msg)
\\
\\ %len_name = str("len")
\\ %msg_len_ptr = fieldptr(@msg, %len_name)
\\ %msg_len = deref(%msg_len_ptr)
\\ %rc_write = asm(%syscall, @usize,
\\ volatile=1,
\\ output=%sysoutreg,
\\ inputs=[%rax, %rdi, %rsi, %rdx],
\\ clobbers=[%rcx, %r11, %memory],
\\ args=[%SYS_write, %STDOUT_FILENO, %msg_addr, %msg_len])
\\
\\ %rc_exit = asm(%syscall, @usize,
\\ volatile=1,
\\ output=%sysoutreg,
\\ inputs=[%rax, %rdi],
\\ clobbers=[%rcx, %r11, %memory],
\\ args=[%SYS_exit_group, %exit_code])
\\
\\ %99 = unreachable()
\\});
\\
\\@9 = str("_start")
\\@11 = export(@9, "start")
,
\\Hello, world!
\\
);
ctx.compareOutputZIR("function call with no args no return value",
\\@noreturn = primitive(noreturn)
\\@void = primitive(void)
\\@usize = primitive(usize)
\\@0 = int(0)
\\@1 = int(1)
\\@2 = int(2)
\\@3 = int(3)
\\
\\@exit0_fnty = fntype([], @noreturn)
\\@exit0 = fn(@exit0_fnty, {
\\ %SYS_exit_group = int(231)
\\ %exit_code = as(@usize, @0)
\\
\\ %syscall = str("syscall")
\\ %sysoutreg = str("={rax}")
\\ %rax = str("{rax}")
\\ %rdi = str("{rdi}")
\\ %rcx = str("rcx")
\\ %r11 = str("r11")
\\ %memory = str("memory")
\\
\\ %rc = asm(%syscall, @usize,
\\ volatile=1,
\\ output=%sysoutreg,
\\ inputs=[%rax, %rdi],
\\ clobbers=[%rcx, %r11, %memory],
\\ args=[%SYS_exit_group, %exit_code])
\\
\\ %99 = unreachable()
\\});
\\
\\@start_fnty = fntype([], @noreturn, cc=Naked)
\\@start = fn(@start_fnty, {
\\ %0 = call(@exit0, [])
\\})
\\@9 = str("_start")
\\@11 = export(@9, "start")
, "");
}