From 638f93ebdceb860974aae54b6f8c2c9f52157305 Mon Sep 17 00:00:00 2001
From: g-w1
Date: Sun, 3 Jan 2021 15:45:22 -0500
Subject: [PATCH 1/2] stage2: implementation of `@setEvalBranchQuota`

`@setEvalBranchQuota` can be called before the comptime/inline call
stack is created. For example:

```zig
@setEvalBranchQuota(100);
comptime {
    while (true) {}
}
```

Here we need to set the branch_quota before the comptime block creates
a scope for the branch_count.
---
 src/Module.zig      | 25 +++++++++++++++++++++----
 src/astgen.zig      | 15 +++++++++++++++
 src/zir.zig         |  4 ++++
 src/zir_sema.zig    | 22 +++++++++++++++++++++-
 test/stage2/cbe.zig | 15 +++++++++++++++
 5 files changed, 76 insertions(+), 5 deletions(-)

diff --git a/src/Module.zig b/src/Module.zig
index 24ea48043b..ce4fd51bb9 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -765,6 +765,8 @@ pub const Scope = struct {
         label: ?Label = null,
         inlining: ?*Inlining,
         is_comptime: bool,
+        /// Shared to sub-blocks.
+        branch_quota: *u32,
 
         pub const InstTable = std.AutoHashMap(*zir.Inst, *Inst);
 
@@ -792,8 +794,7 @@ pub const Scope = struct {
 
             pub const Shared = struct {
                 caller: ?*Fn,
-                branch_count: u64,
-                branch_quota: u64,
+                branch_count: u32,
             };
         };
@@ -1104,6 +1105,8 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
     var inst_table = Scope.Block.InstTable.init(self.gpa);
     defer inst_table.deinit();
 
+    var branch_quota: u32 = 1000;
+
     var block_scope: Scope.Block = .{
         .parent = null,
         .inst_table = &inst_table,
@@ -1113,6 +1116,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
         .arena = &decl_arena.allocator,
         .inlining = null,
         .is_comptime = false,
+        .branch_quota = &branch_quota,
     };
     defer block_scope.instructions.deinit(self.gpa);
@@ -1297,6 +1301,8 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
     var decl_inst_table = Scope.Block.InstTable.init(self.gpa);
     defer decl_inst_table.deinit();
 
+    var branch_quota: u32 = 1000;
+
     var block_scope: Scope.Block = .{
         .parent = null,
         .inst_table = &decl_inst_table,
@@ -1306,6 +1312,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
         .arena = &decl_arena.allocator,
         .inlining = null,
         .is_comptime = true,
+        .branch_quota = &branch_quota,
     };
     defer block_scope.instructions.deinit(self.gpa);
@@ -1367,6 +1374,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
     var var_inst_table = Scope.Block.InstTable.init(self.gpa);
     defer var_inst_table.deinit();
 
+    var branch_quota_vi: u32 = 1000;
     var inner_block: Scope.Block = .{
         .parent = null,
         .inst_table = &var_inst_table,
@@ -1376,6 +1384,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
         .arena = &gen_scope_arena.allocator,
         .inlining = null,
         .is_comptime = true,
+        .branch_quota = &branch_quota_vi,
     };
     defer inner_block.instructions.deinit(self.gpa);
     try zir_sema.analyzeBody(self, &inner_block, .{
@@ -1494,6 +1503,8 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
     var inst_table = Scope.Block.InstTable.init(self.gpa);
     defer inst_table.deinit();
 
+    var branch_quota: u32 = 1000;
+
     var block_scope: Scope.Block = .{
         .parent = null,
         .inst_table = &inst_table,
@@ -1503,6 +1514,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
         .arena = &analysis_arena.allocator,
         .inlining = null,
         .is_comptime = true,
+        .branch_quota = &branch_quota,
     };
     defer block_scope.instructions.deinit(self.gpa);
@@ -1875,6 +1887,8 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
     defer decl.typed_value.most_recent.arena.?.* = arena.state;
     var inst_table = Scope.Block.InstTable.init(self.gpa);
     defer inst_table.deinit();
+    var branch_quota: u32 = 1000;
+
     var inner_block: Scope.Block = .{
         .parent = null,
         .inst_table = &inst_table,
@@ -1884,6 +1898,7 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
         .arena = &arena.allocator,
         .inlining = null,
         .is_comptime = false,
+        .branch_quota = &branch_quota,
     };
     defer inner_block.instructions.deinit(self.gpa);
@@ -3466,7 +3481,9 @@ pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic
         .arena = parent_block.arena,
         .inlining = parent_block.inlining,
         .is_comptime = parent_block.is_comptime,
+        .branch_quota = parent_block.branch_quota,
     };
+
     defer fail_block.instructions.deinit(mod.gpa);
 
     _ = try mod.safetyPanic(&fail_block, ok.src, panic_id);
@@ -3532,10 +3549,10 @@ pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex)
 pub fn emitBackwardBranch(mod: *Module, block: *Scope.Block, src: usize) !void {
     const shared = block.inlining.?.shared;
     shared.branch_count += 1;
-    if (shared.branch_count > shared.branch_quota) {
+    if (shared.branch_count > block.branch_quota.*) {
         // TODO show the "called from here" stack
         return mod.fail(&block.base, src, "evaluation exceeded {d} backwards branches", .{
-            shared.branch_quota,
+            block.branch_quota.*,
         });
     }
 }
diff --git a/src/astgen.zig b/src/astgen.zig
index 7e4e9e2271..a24470c304 100644
--- a/src/astgen.zig
+++ b/src/astgen.zig
@@ -2317,6 +2317,19 @@ fn compileError(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerE
     return addZIRUnOp(mod, scope, src, .compileerror, target);
 }
 
+fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
+    try ensureBuiltinParamCount(mod, scope, call, 1);
+    const tree = scope.tree();
+    const src = tree.token_locs[call.builtin_token].start;
+    const params = call.params();
+    const target = try expr(mod, scope, .none, params[0]);
+    const u32_type = try addZIRInstConst(mod, scope, src, .{
+        .ty = Type.initTag(.type),
+        .val = Value.initTag(.u32_type),
+    });
+    return addZIRUnOp(mod, scope, src, .setevalbranchquota, try rlWrap(mod, scope, .{ .ty = u32_type }, target));
+}
+
 fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
     const tree = scope.tree();
     const arena = scope.arena();
@@ -2362,6 +2375,8 @@ fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.Built
         return rlWrap(mod, scope, rl, try import(mod, scope, call));
     } else if (mem.eql(u8, builtin_name, "@compileError")) {
         return compileError(mod, scope, call);
+    } else if (mem.eql(u8, builtin_name, "@setEvalBranchQuota")) {
+        return setEvalBranchQuota(mod, scope, call);
     } else {
         return mod.failTok(scope, call.builtin_token, "invalid builtin function: '{s}'", .{builtin_name});
     }
diff --git a/src/zir.zig b/src/zir.zig
index 3fd2ac7c80..25984b665f 100644
--- a/src/zir.zig
+++ b/src/zir.zig
@@ -127,6 +127,8 @@ pub const Inst = struct {
         coerce_to_ptr_elem,
         /// Emit an error message and fail compilation.
         compileerror,
+        /// Changes the maximum number of backwards branches that compile-time code execution can use before giving up and making a compile error.
+        setevalbranchquota,
         /// Conditional branch. Splits control flow based on a boolean condition value.
         condbr,
         /// Special case, has no textual representation.
@@ -347,6 +349,7 @@ pub const Inst = struct {
             .anyframe_type,
             .bitnot,
             .import,
+            .setevalbranchquota,
             => UnOp,
 
             .add,
@@ -535,6 +538,7 @@ pub const Inst = struct {
             .switch_range,
             .typeof_peer,
             .resolve_inferred_alloc,
+            .setevalbranchquota,
             => false,
 
             .@"break",
diff --git a/src/zir_sema.zig b/src/zir_sema.zig
index a5627933e1..605dc7dcf4 100644
--- a/src/zir_sema.zig
+++ b/src/zir_sema.zig
@@ -81,6 +81,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
         .mut_slice_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.mut_slice_type).?, true, .Slice),
         .ptr_type => return analyzeInstPtrType(mod, scope, old_inst.castTag(.ptr_type).?),
         .store => return analyzeInstStore(mod, scope, old_inst.castTag(.store).?),
+        .setevalbranchquota => return analyzeInstSetEvalBranchQuota(mod, scope, old_inst.castTag(.setevalbranchquota).?),
         .str => return analyzeInstStr(mod, scope, old_inst.castTag(.str).?),
         .int => {
             const big_int = old_inst.castTag(.int).?.positionals.int;
@@ -486,6 +487,18 @@ fn analyzeInstStoreToInferredPtr(
     return mod.storePtr(scope, inst.base.src, bitcasted_ptr, value);
 }
 
+fn analyzeInstSetEvalBranchQuota(
+    mod: *Module,
+    scope: *Scope,
+    inst: *zir.Inst.UnOp,
+) InnerError!*Inst {
+    const b = try mod.requireFunctionBlock(scope, inst.base.src);
+    const quota = @truncate(u32, try resolveInt(mod, scope, inst.positionals.operand, Type.initTag(.u32)));
+    if (b.branch_quota.* < quota)
+        b.branch_quota.* = quota;
+    return mod.constVoid(scope, inst.base.src);
+}
+
 fn analyzeInstStore(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
     const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
     const value = try resolveInst(mod, scope, inst.positionals.rhs);
@@ -594,6 +607,7 @@ fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError
         .arena = parent_block.arena,
         .inlining = parent_block.inlining,
         .is_comptime = parent_block.is_comptime,
+        .branch_quota = parent_block.branch_quota,
     };
     defer child_block.instructions.deinit(mod.gpa);
@@ -619,6 +633,7 @@ fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_c
         .label = null,
         .inlining = parent_block.inlining,
         .is_comptime = parent_block.is_comptime or is_comptime,
+        .branch_quota = parent_block.branch_quota,
     };
     defer child_block.instructions.deinit(mod.gpa);
@@ -666,6 +681,7 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
         }),
         .inlining = parent_block.inlining,
         .is_comptime = is_comptime or parent_block.is_comptime,
+        .branch_quota = parent_block.branch_quota,
     };
 
     const merges = &child_block.label.?.merges;
@@ -867,7 +883,6 @@ fn analyzeInstCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError
         // Otherwise we pass on the shared data from the parent scope.
         var shared_inlining = Scope.Block.Inlining.Shared{
             .branch_count = 0,
-            .branch_quota = 1000,
             .caller = b.func,
         };
         // This one is shared among sub-blocks within the same callee, but not
@@ -896,7 +911,9 @@ fn analyzeInstCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError
             .label = null,
             .inlining = &inlining,
             .is_comptime = is_comptime_call,
+            .branch_quota = b.branch_quota,
         };
+
         const merges = &child_block.inlining.?.merges;
 
         defer child_block.instructions.deinit(mod.gpa);
@@ -1417,6 +1434,7 @@ fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) In
         .arena = parent_block.arena,
         .inlining = parent_block.inlining,
         .is_comptime = parent_block.is_comptime,
+        .branch_quota = parent_block.branch_quota,
     };
     defer case_block.instructions.deinit(mod.gpa);
@@ -1960,6 +1978,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
         .arena = parent_block.arena,
         .inlining = parent_block.inlining,
         .is_comptime = parent_block.is_comptime,
+        .branch_quota = parent_block.branch_quota,
     };
     defer true_block.instructions.deinit(mod.gpa);
     try analyzeBody(mod, &true_block, inst.positionals.then_body);
@@ -1973,6 +1992,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
         .arena = parent_block.arena,
         .inlining = parent_block.inlining,
         .is_comptime = parent_block.is_comptime,
+        .branch_quota = parent_block.branch_quota,
     };
     defer false_block.instructions.deinit(mod.gpa);
     try analyzeBody(mod, &false_block, inst.positionals.else_body);
diff --git a/test/stage2/cbe.zig b/test/stage2/cbe.zig
index a0a4587983..b227d6a783 100644
--- a/test/stage2/cbe.zig
+++ b/test/stage2/cbe.zig
@@ -67,7 +67,22 @@ pub fn addCases(ctx: *TestContext) !void {
         \\}
         , "");
     }
+    {
+        var case = ctx.exeFromCompiledC("@setEvalBranchQuota", .{});
+        case.addCompareOutput(
+            \\export fn main() i32 {
+            \\    @setEvalBranchQuota(1001);
+            \\    const y = rec(1001);
+            \\    return y - 1;
+            \\}
+            \\
+            \\inline fn rec(n: usize) usize {
+            \\    if (n <= 1) return n;
+            \\    return rec(n - 1);
+            \\}
+            , "");
+    }
 
     ctx.c("empty start function", linux_x64,
         \\export fn _start() noreturn {
         \\    unreachable;

From 7e64dc42215c93a2d1d6b7fa4f5e07b885788a7d Mon Sep 17 00:00:00 2001
From: Andrew Kelley
Date: Mon, 4 Jan 2021 13:40:01 -0700
Subject: [PATCH 2/2] stage2: improvements to `@setEvalBranchQuota`

 * extract magic number into a constant
 * properly use result location casting for the operand
 * naming convention for ZIR instructions
---
 src/Module.zig   | 12 +++++++-----
 src/astgen.zig   |  4 ++--
 src/zir.zig      |  9 +++++----
 src/zir_sema.zig | 22 ++++++++++++++++++++--
 4 files changed, 34 insertions(+), 13 deletions(-)

diff --git a/src/Module.zig b/src/Module.zig
index ce4fd51bb9..6a4575394a 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -23,6 +23,8 @@ const trace = @import("tracy.zig").trace;
 const astgen = @import("astgen.zig");
 const zir_sema = @import("zir_sema.zig");
 
+const default_eval_branch_quota = 1000;
+
 /// General-purpose allocator. Used for both temporary and long-term storage.
 gpa: *Allocator,
 comp: *Compilation,
@@ -1105,7 +1107,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
     var inst_table = Scope.Block.InstTable.init(self.gpa);
     defer inst_table.deinit();
 
-    var branch_quota: u32 = 1000;
+    var branch_quota: u32 = default_eval_branch_quota;
 
     var block_scope: Scope.Block = .{
         .parent = null,
@@ -1301,7 +1303,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
     var decl_inst_table = Scope.Block.InstTable.init(self.gpa);
     defer decl_inst_table.deinit();
 
-    var branch_quota: u32 = 1000;
+    var branch_quota: u32 = default_eval_branch_quota;
 
     var block_scope: Scope.Block = .{
         .parent = null,
@@ -1374,7 +1376,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
     var var_inst_table = Scope.Block.InstTable.init(self.gpa);
     defer var_inst_table.deinit();
 
-    var branch_quota_vi: u32 = 1000;
+    var branch_quota_vi: u32 = default_eval_branch_quota;
     var inner_block: Scope.Block = .{
         .parent = null,
         .inst_table = &var_inst_table,
@@ -1503,7 +1505,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
     var inst_table = Scope.Block.InstTable.init(self.gpa);
     defer inst_table.deinit();
 
-    var branch_quota: u32 = 1000;
+    var branch_quota: u32 = default_eval_branch_quota;
 
     var block_scope: Scope.Block = .{
         .parent = null,
@@ -1887,7 +1889,7 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
     defer decl.typed_value.most_recent.arena.?.* = arena.state;
     var inst_table = Scope.Block.InstTable.init(self.gpa);
     defer inst_table.deinit();
-    var branch_quota: u32 = 1000;
+    var branch_quota: u32 = default_eval_branch_quota;
 
     var inner_block: Scope.Block = .{
         .parent = null,
diff --git a/src/astgen.zig b/src/astgen.zig
index a24470c304..8275a05d77 100644
--- a/src/astgen.zig
+++ b/src/astgen.zig
@@ -2322,12 +2322,12 @@ fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall)
     const tree = scope.tree();
     const src = tree.token_locs[call.builtin_token].start;
     const params = call.params();
-    const target = try expr(mod, scope, .none, params[0]);
     const u32_type = try addZIRInstConst(mod, scope, src, .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.u32_type),
     });
-    return addZIRUnOp(mod, scope, src, .setevalbranchquota, try rlWrap(mod, scope, .{ .ty = u32_type }, target));
+    const quota = try expr(mod, scope, .{ .ty = u32_type }, params[0]);
+    return addZIRUnOp(mod, scope, src, .set_eval_branch_quota, quota);
 }
 
 fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
diff --git a/src/zir.zig b/src/zir.zig
index 25984b665f..8019d0e030 100644
--- a/src/zir.zig
+++ b/src/zir.zig
@@ -127,8 +127,9 @@ pub const Inst = struct {
         coerce_to_ptr_elem,
         /// Emit an error message and fail compilation.
         compileerror,
-        /// Changes the maximum number of backwards branches that compile-time code execution can use before giving up and making a compile error.
-        setevalbranchquota,
+        /// Changes the maximum number of backwards branches that compile-time
+        /// code execution can use before giving up and making a compile error.
+        set_eval_branch_quota,
         /// Conditional branch. Splits control flow based on a boolean condition value.
         condbr,
         /// Special case, has no textual representation.
@@ -349,7 +350,7 @@ pub const Inst = struct {
             .anyframe_type,
             .bitnot,
             .import,
-            .setevalbranchquota,
+            .set_eval_branch_quota,
             => UnOp,
 
             .add,
@@ -538,7 +539,7 @@ pub const Inst = struct {
             .switch_range,
             .typeof_peer,
             .resolve_inferred_alloc,
-            .setevalbranchquota,
+            .set_eval_branch_quota,
             => false,
 
             .@"break",
diff --git a/src/zir_sema.zig b/src/zir_sema.zig
index 605dc7dcf4..ef15a4bd45 100644
--- a/src/zir_sema.zig
+++ b/src/zir_sema.zig
@@ -81,7 +81,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
         .mut_slice_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.mut_slice_type).?, true, .Slice),
         .ptr_type => return analyzeInstPtrType(mod, scope, old_inst.castTag(.ptr_type).?),
         .store => return analyzeInstStore(mod, scope, old_inst.castTag(.store).?),
-        .setevalbranchquota => return analyzeInstSetEvalBranchQuota(mod, scope, old_inst.castTag(.setevalbranchquota).?),
+        .set_eval_branch_quota => return analyzeInstSetEvalBranchQuota(mod, scope, old_inst.castTag(.set_eval_branch_quota).?),
         .str => return analyzeInstStr(mod, scope, old_inst.castTag(.str).?),
         .int => {
             const big_int = old_inst.castTag(.int).?.positionals.int;
@@ -281,6 +281,24 @@ fn resolveType(mod: *Module, scope: *Scope, old_inst: *zir.Inst) !Type {
     return val.toType(scope.arena());
 }
 
+/// Appropriate to call when the coercion has already been done by result
+/// location semantics. Asserts the value fits in the provided `Int` type.
+/// Only supports `Int` types 64 bits or less.
+fn resolveAlreadyCoercedInt(
+    mod: *Module,
+    scope: *Scope,
+    old_inst: *zir.Inst,
+    comptime Int: type,
+) !Int {
+    comptime assert(@typeInfo(Int).Int.bits <= 64);
+    const new_inst = try resolveInst(mod, scope, old_inst);
+    const val = try mod.resolveConstValue(scope, new_inst);
+    switch (@typeInfo(Int).Int.signedness) {
+        .signed => return @intCast(Int, val.toSignedInt()),
+        .unsigned => return @intCast(Int, val.toUnsignedInt()),
+    }
+}
+
 fn resolveInt(mod: *Module, scope: *Scope, old_inst: *zir.Inst, dest_type: Type) !u64 {
     const new_inst = try resolveInst(mod, scope, old_inst);
     const coerced = try mod.coerce(scope, dest_type, new_inst);
@@ -493,7 +511,7 @@ fn analyzeInstSetEvalBranchQuota(
     inst: *zir.Inst.UnOp,
 ) InnerError!*Inst {
     const b = try mod.requireFunctionBlock(scope, inst.base.src);
-    const quota = @truncate(u32, try resolveInt(mod, scope, inst.positionals.operand, Type.initTag(.u32)));
+    const quota = try resolveAlreadyCoercedInt(mod, scope, inst.positionals.operand, u32);
     if (b.branch_quota.* < quota)
         b.branch_quota.* = quota;
     return mod.constVoid(scope, inst.base.src);
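
For reference, typical user-facing usage of the builtin these patches wire up looks like the sketch below. It mirrors the standard language-reference pattern rather than either patch's own test; the quota and loop bound are arbitrary illustrative values.

```zig
test "raise the eval branch quota for a long comptime loop" {
    comptime {
        // The default quota is 1000 backward branches (see
        // default_eval_branch_quota above); a 2000-iteration loop
        // would exceed it without raising the quota first.
        @setEvalBranchQuota(4000);
        var i: usize = 0;
        while (i < 2000) : (i += 1) {}
    }
}
```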