stage2: improvements to @setEvalBranchQuota

* extract magic number into a constant
* properly use result location casting for the operand
* naming convention for ZIR instructions
Andrew Kelley 2021-01-04 13:40:01 -07:00
parent 638f93ebdc
commit 7e64dc4221
4 changed files with 34 additions and 13 deletions
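
For context: @setEvalBranchQuota raises the limit on backwards branches that compile-time code execution may take before the compiler gives up with an error; the diff below names the default of 1000 as default_eval_branch_quota. A minimal user-level sketch (illustrative, not part of this commit):

    test "raising the eval branch quota" {
        comptime {
            // Each loop iteration is a backwards branch; 5000 of them
            // would blow the default quota of 1000.
            @setEvalBranchQuota(10_000);
            var i: u32 = 0;
            while (i < 5000) : (i += 1) {}
        }
    }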

View File

@@ -23,6 +23,8 @@ const trace = @import("tracy.zig").trace;
 const astgen = @import("astgen.zig");
 const zir_sema = @import("zir_sema.zig");
 
+const default_eval_branch_quota = 1000;
+
 /// General-purpose allocator. Used for both temporary and long-term storage.
 gpa: *Allocator,
 comp: *Compilation,
@@ -1105,7 +1107,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
 var inst_table = Scope.Block.InstTable.init(self.gpa);
 defer inst_table.deinit();
-var branch_quota: u32 = 1000;
+var branch_quota: u32 = default_eval_branch_quota;
 var block_scope: Scope.Block = .{
     .parent = null,
@@ -1301,7 +1303,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
 var decl_inst_table = Scope.Block.InstTable.init(self.gpa);
 defer decl_inst_table.deinit();
-var branch_quota: u32 = 1000;
+var branch_quota: u32 = default_eval_branch_quota;
 var block_scope: Scope.Block = .{
     .parent = null,
@@ -1374,7 +1376,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
 var var_inst_table = Scope.Block.InstTable.init(self.gpa);
 defer var_inst_table.deinit();
-var branch_quota_vi: u32 = 1000;
+var branch_quota_vi: u32 = default_eval_branch_quota;
 var inner_block: Scope.Block = .{
     .parent = null,
     .inst_table = &var_inst_table,
@@ -1503,7 +1505,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
 var inst_table = Scope.Block.InstTable.init(self.gpa);
 defer inst_table.deinit();
-var branch_quota: u32 = 1000;
+var branch_quota: u32 = default_eval_branch_quota;
 var block_scope: Scope.Block = .{
     .parent = null,
@@ -1887,7 +1889,7 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
 defer decl.typed_value.most_recent.arena.?.* = arena.state;
 var inst_table = Scope.Block.InstTable.init(self.gpa);
 defer inst_table.deinit();
-var branch_quota: u32 = 1000;
+var branch_quota: u32 = default_eval_branch_quota;
 var inner_block: Scope.Block = .{
     .parent = null,

View File

@@ -2322,12 +2322,12 @@ fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall)
     const tree = scope.tree();
     const src = tree.token_locs[call.builtin_token].start;
     const params = call.params();
-    const target = try expr(mod, scope, .none, params[0]);
     const u32_type = try addZIRInstConst(mod, scope, src, .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.u32_type),
     });
-    return addZIRUnOp(mod, scope, src, .setevalbranchquota, try rlWrap(mod, scope, .{ .ty = u32_type }, target));
+    const quota = try expr(mod, scope, .{ .ty = u32_type }, params[0]);
+    return addZIRUnOp(mod, scope, src, .set_eval_branch_quota, quota);
 }
 
 fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
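
The change above is the "result location casting" bullet: instead of lowering the operand with no result type (.none) and casting afterward via rlWrap, astgen now passes the u32 result type straight into expr, so the operand is coerced at its result location. A rough user-level analogy of the two styles (illustrative only):

    // Result location: the destination type flows into the expression,
    // so the literal is created as a u32 directly.
    const a: u32 = 1000;
    // No result location: the value exists as a comptime_int first and
    // is converted afterward.
    const b = @as(u32, 1000);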

View File

@@ -127,8 +127,9 @@ pub const Inst = struct {
         coerce_to_ptr_elem,
         /// Emit an error message and fail compilation.
         compileerror,
-        /// Changes the maximum number of backwards branches that compile-time code execution can use before giving up and making a compile error.
-        setevalbranchquota,
+        /// Changes the maximum number of backwards branches that compile-time
+        /// code execution can use before giving up and making a compile error.
+        set_eval_branch_quota,
         /// Conditional branch. Splits control flow based on a boolean condition value.
         condbr,
         /// Special case, has no textual representation.
@@ -349,7 +350,7 @@ pub const Inst = struct {
         .anyframe_type,
         .bitnot,
         .import,
-        .setevalbranchquota,
+        .set_eval_branch_quota,
         => UnOp,
 
         .add,
@@ -538,7 +539,7 @@ pub const Inst = struct {
         .switch_range,
         .typeof_peer,
         .resolve_inferred_alloc,
-        .setevalbranchquota,
+        .set_eval_branch_quota,
         => false,
 
         .@"break",
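
The rename above is the "naming convention" bullet: multi-word ZIR instruction tags use snake_case. A hypothetical sketch of the convention (tags other than set_eval_branch_quota are invented for illustration; neighbors like compileerror and condbr were not yet renamed in this commit):

    const Tag = enum {
        add, // single words stay bare
        set_eval_branch_quota, // multi-word names get underscores
    };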

View File

@@ -81,7 +81,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
         .mut_slice_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.mut_slice_type).?, true, .Slice),
         .ptr_type => return analyzeInstPtrType(mod, scope, old_inst.castTag(.ptr_type).?),
         .store => return analyzeInstStore(mod, scope, old_inst.castTag(.store).?),
-        .setevalbranchquota => return analyzeInstSetEvalBranchQuota(mod, scope, old_inst.castTag(.setevalbranchquota).?),
+        .set_eval_branch_quota => return analyzeInstSetEvalBranchQuota(mod, scope, old_inst.castTag(.set_eval_branch_quota).?),
         .str => return analyzeInstStr(mod, scope, old_inst.castTag(.str).?),
         .int => {
             const big_int = old_inst.castTag(.int).?.positionals.int;
@@ -281,6 +281,24 @@ fn resolveType(mod: *Module, scope: *Scope, old_inst: *zir.Inst) !Type {
     return val.toType(scope.arena());
 }
 
+/// Appropriate to call when the coercion has already been done by result
+/// location semantics. Asserts the value fits in the provided `Int` type.
+/// Only supports `Int` types 64 bits or less.
+fn resolveAlreadyCoercedInt(
+    mod: *Module,
+    scope: *Scope,
+    old_inst: *zir.Inst,
+    comptime Int: type,
+) !Int {
+    comptime assert(@typeInfo(Int).Int.bits <= 64);
+    const new_inst = try resolveInst(mod, scope, old_inst);
+    const val = try mod.resolveConstValue(scope, new_inst);
+    switch (@typeInfo(Int).Int.signedness) {
+        .signed => return @intCast(Int, val.toSignedInt()),
+        .unsigned => return @intCast(Int, val.toUnsignedInt()),
+    }
+}
+
 fn resolveInt(mod: *Module, scope: *Scope, old_inst: *zir.Inst, dest_type: Type) !u64 {
     const new_inst = try resolveInst(mod, scope, old_inst);
     const coerced = try mod.coerce(scope, dest_type, new_inst);
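
Unlike resolveInt, the new resolveAlreadyCoercedInt performs no coerce step: result location semantics already gave the operand the right type, so it only resolves the comptime-known value and casts, asserting it fits. A standalone sketch of that cast logic in this era's Zig (two-argument @intCast, capitalized @typeInfo fields; the helper name is invented):

    const std = @import("std");

    /// Chooses the signed or unsigned path at comptime from the
    /// destination type, mirroring the switch in the diff above.
    fn castKnownFit(comptime Int: type, val: i128) Int {
        comptime std.debug.assert(@typeInfo(Int).Int.bits <= 64);
        return switch (@typeInfo(Int).Int.signedness) {
            .signed => @intCast(Int, val),
            .unsigned => @intCast(Int, @intCast(u64, val)),
        };
    }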
@@ -493,7 +511,7 @@ fn analyzeInstSetEvalBranchQuota(
     inst: *zir.Inst.UnOp,
 ) InnerError!*Inst {
     const b = try mod.requireFunctionBlock(scope, inst.base.src);
-    const quota = @truncate(u32, try resolveInt(mod, scope, inst.positionals.operand, Type.initTag(.u32)));
+    const quota = try resolveAlreadyCoercedInt(mod, scope, inst.positionals.operand, u32);
     if (b.branch_quota.* < quota)
         b.branch_quota.* = quota;
     return mod.constVoid(scope, inst.base.src);
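
Note the max-style update at the end: the quota is only ever raised, never lowered, so a smaller request after a larger one is a no-op. A user-level illustration (the test itself is not part of the commit):

    test "the eval branch quota only increases" {
        comptime {
            @setEvalBranchQuota(10_000);
            @setEvalBranchQuota(100); // no effect: 100 < current quota
            var i: u32 = 0;
            while (i < 5000) : (i += 1) {} // still within the raised quota
        }
    }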