Merge branch 'g-w1-stage2-evalbranch'

closes #7682
Andrew Kelley
2021-01-04 13:40:51 -07:00
5 changed files with 97 additions and 5 deletions

View File

@@ -23,6 +23,8 @@ const trace = @import("tracy.zig").trace;
const astgen = @import("astgen.zig");
const zir_sema = @import("zir_sema.zig");
const default_eval_branch_quota = 1000;
/// General-purpose allocator. Used for both temporary and long-term storage.
gpa: *Allocator,
comp: *Compilation,
@@ -765,6 +767,8 @@ pub const Scope = struct {
label: ?Label = null,
inlining: ?*Inlining,
is_comptime: bool,
/// Shared to sub-blocks.
branch_quota: *u32,
pub const InstTable = std.AutoHashMap(*zir.Inst, *Inst);
@@ -792,8 +796,7 @@ pub const Scope = struct {
pub const Shared = struct {
caller: ?*Fn,
- branch_count: u64,
- branch_quota: u64,
+ branch_count: u32,
};
};
@@ -1104,6 +1107,8 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
var inst_table = Scope.Block.InstTable.init(self.gpa);
defer inst_table.deinit();
var branch_quota: u32 = default_eval_branch_quota;
var block_scope: Scope.Block = .{
.parent = null,
.inst_table = &inst_table,
@@ -1113,6 +1118,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.arena = &decl_arena.allocator,
.inlining = null,
.is_comptime = false,
.branch_quota = &branch_quota,
};
defer block_scope.instructions.deinit(self.gpa);
@@ -1297,6 +1303,8 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
var decl_inst_table = Scope.Block.InstTable.init(self.gpa);
defer decl_inst_table.deinit();
var branch_quota: u32 = default_eval_branch_quota;
var block_scope: Scope.Block = .{
.parent = null,
.inst_table = &decl_inst_table,
@@ -1306,6 +1314,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.arena = &decl_arena.allocator,
.inlining = null,
.is_comptime = true,
.branch_quota = &branch_quota,
};
defer block_scope.instructions.deinit(self.gpa);
@@ -1367,6 +1376,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
var var_inst_table = Scope.Block.InstTable.init(self.gpa);
defer var_inst_table.deinit();
var branch_quota_vi: u32 = default_eval_branch_quota;
var inner_block: Scope.Block = .{
.parent = null,
.inst_table = &var_inst_table,
@@ -1376,6 +1386,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.arena = &gen_scope_arena.allocator,
.inlining = null,
.is_comptime = true,
.branch_quota = &branch_quota_vi,
};
defer inner_block.instructions.deinit(self.gpa);
try zir_sema.analyzeBody(self, &inner_block, .{
@@ -1494,6 +1505,8 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
var inst_table = Scope.Block.InstTable.init(self.gpa);
defer inst_table.deinit();
var branch_quota: u32 = default_eval_branch_quota;
var block_scope: Scope.Block = .{
.parent = null,
.inst_table = &inst_table,
@@ -1503,6 +1516,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.arena = &analysis_arena.allocator,
.inlining = null,
.is_comptime = true,
.branch_quota = &branch_quota,
};
defer block_scope.instructions.deinit(self.gpa);
@@ -1875,6 +1889,8 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
defer decl.typed_value.most_recent.arena.?.* = arena.state;
var inst_table = Scope.Block.InstTable.init(self.gpa);
defer inst_table.deinit();
var branch_quota: u32 = default_eval_branch_quota;
var inner_block: Scope.Block = .{
.parent = null,
.inst_table = &inst_table,
@@ -1884,6 +1900,7 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
.arena = &arena.allocator,
.inlining = null,
.is_comptime = false,
.branch_quota = &branch_quota,
};
defer inner_block.instructions.deinit(self.gpa);
@@ -3466,7 +3483,9 @@ pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
.branch_quota = parent_block.branch_quota,
};
defer fail_block.instructions.deinit(mod.gpa);
_ = try mod.safetyPanic(&fail_block, ok.src, panic_id);
@@ -3532,10 +3551,10 @@ pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex)
pub fn emitBackwardBranch(mod: *Module, block: *Scope.Block, src: usize) !void {
const shared = block.inlining.?.shared;
shared.branch_count += 1;
- if (shared.branch_count > shared.branch_quota) {
+ if (shared.branch_count > block.branch_quota.*) {
// TODO show the "called from here" stack
return mod.fail(&block.base, src, "evaluation exceeded {d} backwards branches", .{
- shared.branch_quota,
+ block.branch_quota.*,
});
}
}
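For context, this is the check the new per-block quota feeds into: once comptime execution takes more backwards branches than `branch_quota` allows (1000 by default, per `default_eval_branch_quota` above), compilation fails. A minimal sketch, not part of this diff, of code that trips it:

comptime {
    // With the default quota of 1000, these 2000 loop iterations exceed the
    // allowed number of backwards branches and compilation fails with
    // "evaluation exceeded 1000 backwards branches".
    var i: u32 = 0;
    while (i < 2000) : (i += 1) {}
}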

View File

@@ -2317,6 +2317,19 @@ fn compileError(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerE
return addZIRUnOp(mod, scope, src, .compileerror, target);
}
fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
try ensureBuiltinParamCount(mod, scope, call, 1);
const tree = scope.tree();
const src = tree.token_locs[call.builtin_token].start;
const params = call.params();
const u32_type = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.u32_type),
});
const quota = try expr(mod, scope, .{ .ty = u32_type }, params[0]);
return addZIRUnOp(mod, scope, src, .set_eval_branch_quota, quota);
}
fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
const tree = scope.tree();
const arena = scope.arena();
@@ -2362,6 +2375,8 @@ fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.Built
return rlWrap(mod, scope, rl, try import(mod, scope, call));
} else if (mem.eql(u8, builtin_name, "@compileError")) {
return compileError(mod, scope, call);
} else if (mem.eql(u8, builtin_name, "@setEvalBranchQuota")) {
return setEvalBranchQuota(mod, scope, call);
} else {
return mod.failTok(scope, call.builtin_token, "invalid builtin function: '{s}'", .{builtin_name});
}
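The operand is result-located as a `u32` here and later resolved to a constant during semantic analysis (see `resolveAlreadyCoercedInt` below), so it has to be comptime-known. A hedged sketch of the call-site constraint, not taken from this diff:

export fn entry(runtime_n: u32) void {
    // OK: the argument is a comptime-known integer that fits in u32.
    @setEvalBranchQuota(2000);
    // A runtime value would be rejected, since the quota must be comptime-known:
    // @setEvalBranchQuota(runtime_n);
    _ = runtime_n;
}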

View File

@@ -127,6 +127,9 @@ pub const Inst = struct {
coerce_to_ptr_elem,
/// Emit an error message and fail compilation.
compileerror,
/// Changes the maximum number of backwards branches that compile-time
/// code execution can use before giving up and making a compile error.
set_eval_branch_quota,
/// Conditional branch. Splits control flow based on a boolean condition value.
condbr,
/// Special case, has no textual representation.
@@ -347,6 +350,7 @@ pub const Inst = struct {
.anyframe_type,
.bitnot,
.import,
.set_eval_branch_quota,
=> UnOp,
.add,
@@ -535,6 +539,7 @@ pub const Inst = struct {
.switch_range,
.typeof_peer,
.resolve_inferred_alloc,
.set_eval_branch_quota,
=> false,
.@"break",

View File

@@ -81,6 +81,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
.mut_slice_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.mut_slice_type).?, true, .Slice),
.ptr_type => return analyzeInstPtrType(mod, scope, old_inst.castTag(.ptr_type).?),
.store => return analyzeInstStore(mod, scope, old_inst.castTag(.store).?),
.set_eval_branch_quota => return analyzeInstSetEvalBranchQuota(mod, scope, old_inst.castTag(.set_eval_branch_quota).?),
.str => return analyzeInstStr(mod, scope, old_inst.castTag(.str).?),
.int => {
const big_int = old_inst.castTag(.int).?.positionals.int;
@@ -280,6 +281,24 @@ fn resolveType(mod: *Module, scope: *Scope, old_inst: *zir.Inst) !Type {
return val.toType(scope.arena());
}
/// Appropriate to call when the coercion has already been done by result
/// location semantics. Asserts the value fits in the provided `Int` type.
/// Only supports `Int` types 64 bits or less.
fn resolveAlreadyCoercedInt(
mod: *Module,
scope: *Scope,
old_inst: *zir.Inst,
comptime Int: type,
) !Int {
comptime assert(@typeInfo(Int).Int.bits <= 64);
const new_inst = try resolveInst(mod, scope, old_inst);
const val = try mod.resolveConstValue(scope, new_inst);
switch (@typeInfo(Int).Int.signedness) {
.signed => return @intCast(Int, val.toSignedInt()),
.unsigned => return @intCast(Int, val.toUnsignedInt()),
}
}
fn resolveInt(mod: *Module, scope: *Scope, old_inst: *zir.Inst, dest_type: Type) !u64 {
const new_inst = try resolveInst(mod, scope, old_inst);
const coerced = try mod.coerce(scope, dest_type, new_inst);
@@ -486,6 +505,18 @@ fn analyzeInstStoreToInferredPtr(
return mod.storePtr(scope, inst.base.src, bitcasted_ptr, value);
}
fn analyzeInstSetEvalBranchQuota(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.UnOp,
) InnerError!*Inst {
const b = try mod.requireFunctionBlock(scope, inst.base.src);
const quota = try resolveAlreadyCoercedInt(mod, scope, inst.positionals.operand, u32);
if (b.branch_quota.* < quota)
b.branch_quota.* = quota;
return mod.constVoid(scope, inst.base.src);
}
fn analyzeInstStore(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
const value = try resolveInst(mod, scope, inst.positionals.rhs);
@@ -594,6 +625,7 @@ fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
.branch_quota = parent_block.branch_quota,
};
defer child_block.instructions.deinit(mod.gpa);
@@ -619,6 +651,7 @@ fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_c
.label = null,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime or is_comptime,
.branch_quota = parent_block.branch_quota,
};
defer child_block.instructions.deinit(mod.gpa);
@@ -666,6 +699,7 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
}),
.inlining = parent_block.inlining,
.is_comptime = is_comptime or parent_block.is_comptime,
.branch_quota = parent_block.branch_quota,
};
const merges = &child_block.label.?.merges;
@@ -867,7 +901,6 @@ fn analyzeInstCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError
// Otherwise we pass on the shared data from the parent scope.
var shared_inlining = Scope.Block.Inlining.Shared{
.branch_count = 0,
- .branch_quota = 1000,
.caller = b.func,
};
// This one is shared among sub-blocks within the same callee, but not
@@ -896,7 +929,9 @@ fn analyzeInstCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError
.label = null,
.inlining = &inlining,
.is_comptime = is_comptime_call,
.branch_quota = b.branch_quota,
};
const merges = &child_block.inlining.?.merges;
defer child_block.instructions.deinit(mod.gpa);
@@ -1417,6 +1452,7 @@ fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) In
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
.branch_quota = parent_block.branch_quota,
};
defer case_block.instructions.deinit(mod.gpa);
@@ -1960,6 +1996,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
.branch_quota = parent_block.branch_quota,
};
defer true_block.instructions.deinit(mod.gpa);
try analyzeBody(mod, &true_block, inst.positionals.then_body);
@@ -1973,6 +2010,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
.branch_quota = parent_block.branch_quota,
};
defer false_block.instructions.deinit(mod.gpa);
try analyzeBody(mod, &false_block, inst.positionals.else_body);
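Because `analyzeInstSetEvalBranchQuota` only writes through the pointer when the requested value is larger, the quota can be raised but never lowered within a block tree. A minimal sketch of that behavior, assuming the builtin works as wired up above (not part of this diff):

comptime {
    @setEvalBranchQuota(2000);
    // No effect: 100 is below the current quota of 2000.
    @setEvalBranchQuota(100);
    var i: u32 = 0;
    // 1500 iterations stay under the 2000-branch quota.
    while (i < 1500) : (i += 1) {}
}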

View File

@@ -67,7 +67,22 @@ pub fn addCases(ctx: *TestContext) !void {
\\}
, "");
}
{
var case = ctx.exeFromCompiledC("@setEvalBranchQuota", .{});
case.addCompareOutput(
\\export fn main() i32 {
\\ @setEvalBranchQuota(1001);
\\ const y = rec(1001);
\\ return y - 1;
\\}
\\
\\inline fn rec(n: usize) usize {
\\ if (n <= 1) return n;
\\ return rec(n - 1);
\\}
, "");
}
ctx.c("empty start function", linux_x64,
\\export fn _start() noreturn {
\\ unreachable;