Merge pull request #12383 from ziglang/stage2-stack-traces
several improvements to error return tracing in the self-hosted compiler
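For context, error return tracing is the feature being refined here: every `try` or `return error.X` that propagates an error calls a small runtime helper that records the return address, so a failing program can print the path the error took. A minimal, illustrative Zig program (my own example, not part of this commit) that exercises that machinery:

const std = @import("std");

fn readConfig() ![]const u8 {
    // Returning an error makes the compiler insert a call to the hidden
    // returnError helper, which records @returnAddress() in the trace.
    return error.FileNotFound;
}

fn loadApp() ![]const u8 {
    // Each `try` that re-propagates the error appends another frame.
    return try readConfig();
}

pub fn main() !void {
    _ = try loadApp();
}

Run in Debug mode, the unhandled error prints both the error return trace (one frame per propagation point above) and a regular stack trace.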
@@ -867,10 +867,9 @@ pub fn panicOutOfBounds(index: usize, len: usize) noreturn {
    std.debug.panic("attempt to index out of bounds: index {d}, len {d}", .{ index, len });
}

pub noinline fn returnError(maybe_st: ?*StackTrace) void {
pub noinline fn returnError(st: *StackTrace) void {
    @setCold(true);
    @setRuntimeSafety(false);
    const st = maybe_st orelse return;
    addErrRetTraceAddr(st, @returnAddress());
}

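The helper above now requires a non-optional trace pointer; the old null check (`maybe_st orelse return`) is dropped and callers are expected to pass a valid trace. For reference, addErrRetTraceAddr, which returnError delegates to, lives next to it in std; a sketch written from memory of that implementation (treat the exact bounds handling as an assumption, it is not part of this hunk):

pub fn addErrRetTraceAddr(st: *StackTrace, addr: usize) void {
    // Store the frame if there is room; keep incrementing the index either
    // way so the printer can report how many frames were dropped.
    if (st.index < st.instruction_addresses.len)
        st.instruction_addresses[st.index] = addr;
    st.index += 1;
}
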
@@ -3070,6 +3070,19 @@ fn emitDbgNode(gz: *GenZir, node: Ast.Node.Index) !void {
    const line = astgen.source_line - gz.decl_line;
    const column = astgen.source_column;

    if (gz.instructions.items.len > 0) {
        const last = gz.instructions.items[gz.instructions.items.len - 1];
        const zir_tags = astgen.instructions.items(.tag);
        if (zir_tags[last] == .dbg_stmt) {
            const zir_datas = astgen.instructions.items(.data);
            zir_datas[last].dbg_stmt = .{
                .line = line,
                .column = column,
            };
            return;
        }
    }

    _ = try gz.add(.{ .tag = .dbg_stmt, .data = .{
        .dbg_stmt = .{
            .line = line,

@@ -5050,6 +5063,16 @@ fn tryExpr(

    if (parent_gz.in_defer) return astgen.failNode(node, "'try' not allowed inside defer expression", .{});

    // Ensure debug line/column information is emitted for this try expression.
    // Then we will save the line/column so that we can emit another one that goes
    // "backwards" because we want to evaluate the operand, but then put the debug
    // info back at the try keyword for error return tracing.
    if (!parent_gz.force_comptime) {
        try emitDbgNode(parent_gz, node);
    }
    const try_line = astgen.source_line - parent_gz.decl_line;
    const try_column = astgen.source_column;

    const operand_rl: ResultLoc = switch (rl) {
        .ref => .ref,
        else => .none,

@@ -5079,6 +5102,7 @@ fn tryExpr(
    };
    const err_code = try else_scope.addUnNode(err_tag, operand, node);
    try genDefers(&else_scope, &fn_block.base, scope, .{ .both = err_code });
    try emitDbgStmt(&else_scope, try_line, try_column);
    _ = try else_scope.addUnNode(.ret_node, err_code, node);

    try else_scope.setTryBody(try_inst, operand);
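The "backwards" debug statement described above is easiest to see with an operand that spans several lines. An illustrative snippet (my own example, not from the commit): after AstGen evaluates the call below, its source cursor sits at the closing parenthesis, so tryExpr re-emits a dbg_stmt at the saved try_line/try_column on the error path, making the error return trace point at the `try` rather than at the end of the operand.

const std = @import("std");

fn parsePort(text: []const u8) !u16 {
    const port = try std.fmt.parseInt(
        u16,
        text,
        10,
    );
    return port;
}
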
@@ -6585,6 +6609,16 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref

    if (gz.in_defer) return astgen.failNode(node, "cannot return from defer expression", .{});

    // Ensure debug line/column information is emitted for this return expression.
    // Then we will save the line/column so that we can emit another one that goes
    // "backwards" because we want to evaluate the operand, but then put the debug
    // info back at the return keyword for error return tracing.
    if (!gz.force_comptime) {
        try emitDbgNode(gz, node);
    }
    const ret_line = astgen.source_line - gz.decl_line;
    const ret_column = astgen.source_column;

    const defer_outer = &astgen.fn_block.?.base;

    const operand_node = node_datas[node].lhs;

@@ -6603,11 +6637,13 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
        const defer_counts = countDefers(astgen, defer_outer, scope);
        if (!defer_counts.need_err_code) {
            try genDefers(gz, defer_outer, scope, .both_sans_err);
            try emitDbgStmt(gz, ret_line, ret_column);
            _ = try gz.addStrTok(.ret_err_value, err_name_str_index, ident_token);
            return Zir.Inst.Ref.unreachable_value;
        }
        const err_code = try gz.addStrTok(.ret_err_value_code, err_name_str_index, ident_token);
        try genDefers(gz, defer_outer, scope, .{ .both = err_code });
        try emitDbgStmt(gz, ret_line, ret_column);
        _ = try gz.addUnNode(.ret_node, err_code, node);
        return Zir.Inst.Ref.unreachable_value;
    }
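The `.ret_err_value` / `.ret_err_value_code` paths above cover `return error.X` with a literal error value: defers (including errdefers, when present) are generated first, then the debug location is re-emitted at the `return`, then the error is returned. A hypothetical function that takes that path:

const std = @import("std");

fn initSubsystem() !void {
    errdefer std.debug.print("init failed, rolling back\n", .{});

    // A literal error value: the errdefer above runs, a dbg_stmt is
    // re-emitted at this line, and the error (plus a trace frame) is
    // returned to the caller.
    return error.DeviceBusy;
}
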
@@ -6626,6 +6662,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
        .never => {
            // Returning a value that cannot be an error; skip error defers.
            try genDefers(gz, defer_outer, scope, .normal_only);
            try emitDbgStmt(gz, ret_line, ret_column);
            try gz.addRet(rl, operand, node);
            return Zir.Inst.Ref.unreachable_value;
        },

@@ -6633,6 +6670,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
            // Value is always an error. Emit both error defers and regular defers.
            const err_code = if (rl == .ptr) try gz.addUnNode(.load, rl.ptr, node) else operand;
            try genDefers(gz, defer_outer, scope, .{ .both = err_code });
            try emitDbgStmt(gz, ret_line, ret_column);
            try gz.addRet(rl, operand, node);
            return Zir.Inst.Ref.unreachable_value;
        },

@@ -6641,6 +6679,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
    if (!defer_counts.have_err) {
        // Only regular defers; no branch needed.
        try genDefers(gz, defer_outer, scope, .normal_only);
        try emitDbgStmt(gz, ret_line, ret_column);
        try gz.addRet(rl, operand, node);
        return Zir.Inst.Ref.unreachable_value;
    }

@@ -6654,6 +6693,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
    defer then_scope.unstack();

    try genDefers(&then_scope, defer_outer, scope, .normal_only);
    try emitDbgStmt(&then_scope, ret_line, ret_column);
    try then_scope.addRet(rl, operand, node);

    var else_scope = gz.makeSubBlock(scope);

@@ -6663,6 +6703,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
        .both = try else_scope.addUnNode(.err_union_code, result, node),
    };
    try genDefers(&else_scope, defer_outer, scope, which_ones);
    try emitDbgStmt(&else_scope, ret_line, ret_column);
    try else_scope.addRet(rl, operand, node);

    try setCondBrPayload(condbr, is_non_err, &then_scope, 0, &else_scope, 0);

@@ -11702,3 +11743,14 @@ fn countBodyLenAfterFixups(astgen: *AstGen, body: []const Zir.Inst.Index) u32 {
    }
    return @intCast(u32, count);
}

fn emitDbgStmt(gz: *GenZir, line: u32, column: u32) !void {
    if (gz.force_comptime) return;

    _ = try gz.add(.{ .tag = .dbg_stmt, .data = .{
        .dbg_stmt = .{
            .line = line,
            .column = column,
        },
    } });
}

src/Sema.zig
@@ -14489,6 +14489,20 @@ fn zirBoolBr(
    const rhs_result = try sema.resolveBody(rhs_block, body, inst);
    _ = try rhs_block.addBr(block_inst, rhs_result);

    return finishCondBr(sema, parent_block, &child_block, &then_block, &else_block, lhs, block_inst);
}

fn finishCondBr(
    sema: *Sema,
    parent_block: *Block,
    child_block: *Block,
    then_block: *Block,
    else_block: *Block,
    cond: Air.Inst.Ref,
    block_inst: Air.Inst.Index,
) !Air.Inst.Ref {
    const gpa = sema.gpa;

    try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.CondBr).Struct.fields.len +
        then_block.instructions.items.len + else_block.instructions.items.len +
        @typeInfo(Air.Block).Struct.fields.len + child_block.instructions.items.len + 1);

@@ -14501,7 +14515,7 @@ fn zirBoolBr(
    sema.air_extra.appendSliceAssumeCapacity(else_block.instructions.items);

    _ = try child_block.addInst(.{ .tag = .cond_br, .data = .{ .pl_op = .{
        .operand = lhs,
        .operand = cond,
        .payload = cond_br_payload,
    } } });

@@ -14871,10 +14885,83 @@ fn zirRetLoad(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Zir
        const operand = try sema.analyzeLoad(block, src, ret_ptr, src);
        return sema.analyzeRet(block, operand, src);
    }

    if (sema.wantErrorReturnTracing()) {
        const is_non_err = try sema.analyzePtrIsNonErr(block, src, ret_ptr);
        return retWithErrTracing(sema, block, src, is_non_err, .ret_load, ret_ptr);
    }

    _ = try block.addUnOp(.ret_load, ret_ptr);
    return always_noreturn;
}

fn retWithErrTracing(
    sema: *Sema,
    block: *Block,
    src: LazySrcLoc,
    is_non_err: Air.Inst.Ref,
    ret_tag: Air.Inst.Tag,
    operand: Air.Inst.Ref,
) CompileError!Zir.Inst.Index {
    const need_check = switch (is_non_err) {
        .bool_true => {
            _ = try block.addUnOp(ret_tag, operand);
            return always_noreturn;
        },
        .bool_false => false,
        else => true,
    };
    const gpa = sema.gpa;
    const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace");
    const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty);
    const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty);
    const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty);
    const return_err_fn = try sema.getBuiltin(block, src, "returnError");
    const args: [1]Air.Inst.Ref = .{err_return_trace};

    if (!need_check) {
        _ = try sema.analyzeCall(block, return_err_fn, src, src, .never_inline, false, &args, null);
        _ = try block.addUnOp(ret_tag, operand);
        return always_noreturn;
    }

    var then_block = block.makeSubBlock();
    defer then_block.instructions.deinit(gpa);
    _ = try then_block.addUnOp(ret_tag, operand);

    var else_block = block.makeSubBlock();
    defer else_block.instructions.deinit(gpa);
    _ = try sema.analyzeCall(&else_block, return_err_fn, src, src, .never_inline, false, &args, null);
    _ = try else_block.addUnOp(ret_tag, operand);

    try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.CondBr).Struct.fields.len +
        then_block.instructions.items.len + else_block.instructions.items.len +
        @typeInfo(Air.Block).Struct.fields.len + 1);

    const cond_br_payload = sema.addExtraAssumeCapacity(Air.CondBr{
        .then_body_len = @intCast(u32, then_block.instructions.items.len),
        .else_body_len = @intCast(u32, else_block.instructions.items.len),
    });
    sema.air_extra.appendSliceAssumeCapacity(then_block.instructions.items);
    sema.air_extra.appendSliceAssumeCapacity(else_block.instructions.items);

    _ = try block.addInst(.{ .tag = .cond_br, .data = .{ .pl_op = .{
        .operand = is_non_err,
        .payload = cond_br_payload,
    } } });

    return always_noreturn;
}

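retWithErrTracing is the Sema-side counterpart of the std returnError helper at the top of this diff: when the operand might be an error, it branches on is_non_err and only calls returnError on the error path. From user code the accumulated trace is visible through @errorReturnTrace(); a small sketch (my own example, assuming a build where wantErrorReturnTracing below returns true, e.g. a Debug build with the LLVM backend):

const std = @import("std");

fn mayFail() !void {
    return error.Oops;
}

pub fn main() void {
    mayFail() catch {
        // Non-null only when the compiler inserted the tracing calls.
        if (@errorReturnTrace()) |trace| {
            std.debug.dumpStackTrace(trace.*);
        }
    };
}
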
fn wantErrorReturnTracing(sema: *Sema) bool {
    // TODO implement this feature in all the backends and then delete this check.
    const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm;

    return sema.fn_ret_ty.isError() and
        sema.mod.comp.bin_file.options.error_return_tracing and
        backend_supports_error_return_tracing;
}

fn addToInferredErrorSet(sema: *Sema, uncasted_operand: Air.Inst.Ref) !void {
    assert(sema.fn_ret_ty.zigTypeTag() == .ErrorUnion);

@@ -14920,27 +15007,15 @@ fn analyzeRet(
        return always_noreturn;
    }

    // TODO implement this feature in all the backends and then delete this check.
    const backend_supports_error_return_tracing =
        sema.mod.comp.bin_file.options.use_llvm;
    try sema.resolveTypeLayout(block, src, sema.fn_ret_ty);

    if (sema.fn_ret_ty.isError() and
        sema.mod.comp.bin_file.options.error_return_tracing and
        backend_supports_error_return_tracing)
    ret_err: {
        if (try sema.resolveMaybeUndefVal(block, src, operand)) |ret_val| {
            if (ret_val.tag() != .@"error") break :ret_err;
        }
        const return_err_fn = try sema.getBuiltin(block, src, "returnError");
        const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace");
        const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty);
        const ptr_stack_trace_ty = try Type.Tag.optional_single_mut_pointer.create(sema.arena, stack_trace_ty);
        const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty);
        const args: [1]Air.Inst.Ref = .{err_return_trace};
        _ = try sema.analyzeCall(block, return_err_fn, src, src, .never_inline, false, &args, null);
    if (sema.wantErrorReturnTracing()) {
        // Avoid adding a frame to the error return trace in case the value is comptime-known
        // to be not an error.
        const is_non_err = try sema.analyzeIsNonErr(block, src, operand);
        return retWithErrTracing(sema, block, src, is_non_err, .ret, operand);
    }

    try sema.resolveTypeLayout(block, src, sema.fn_ret_ty);
    _ = try block.addUnOp(.ret, operand);
    return always_noreturn;
}

@@ -25418,6 +25493,27 @@ fn analyzeIsNull(
    return block.addUnOp(air_tag, operand);
}

fn analyzePtrIsNonErrComptimeOnly(
    sema: *Sema,
    block: *Block,
    src: LazySrcLoc,
    operand: Air.Inst.Ref,
) CompileError!Air.Inst.Ref {
    const ptr_ty = sema.typeOf(operand);
    assert(ptr_ty.zigTypeTag() == .Pointer);
    const child_ty = ptr_ty.childType();

    const child_tag = child_ty.zigTypeTag();
    if (child_tag != .ErrorSet and child_tag != .ErrorUnion) return Air.Inst.Ref.bool_true;
    if (child_tag == .ErrorSet) return Air.Inst.Ref.bool_false;
    assert(child_tag == .ErrorUnion);

    _ = block;
    _ = src;

    return Air.Inst.Ref.none;
}

fn analyzeIsNonErrComptimeOnly(
    sema: *Sema,
    block: *Block,

@@ -25431,10 +25527,16 @@ fn analyzeIsNonErrComptimeOnly(
    assert(ot == .ErrorUnion);

    if (Air.refToIndex(operand)) |operand_inst| {
        const air_tags = sema.air_instructions.items(.tag);
        if (air_tags[operand_inst] == .wrap_errunion_payload) {
            return Air.Inst.Ref.bool_true;
        switch (sema.air_instructions.items(.tag)[operand_inst]) {
            .wrap_errunion_payload => return Air.Inst.Ref.bool_true,
            .wrap_errunion_err => return Air.Inst.Ref.bool_false,
            else => {},
        }
    } else if (operand == .undef) {
        return sema.addConstUndef(Type.bool);
    } else {
        // None of the ref tags can be errors.
        return Air.Inst.Ref.bool_true;
    }

    const maybe_operand_val = try sema.resolveMaybeUndefVal(block, src, operand);
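The switch above lets Sema fold the error check in both directions at comptime: an operand produced by wrap_errunion_payload is provably not an error, and one produced by wrap_errunion_err provably is, so neither case needs a runtime is_non_err instruction or a returnError call. In source terms (illustrative example; the mapping from source to these AIR tags is my reading of the diff):

const std = @import("std");

fn alwaysOk() anyerror!u32 {
    return 7; // payload wrapped into the error union: known non-error
}

fn alwaysErr() anyerror!u32 {
    return error.Nope; // error wrapped into the error union: known error
}

test "comptime-known error-ness" {
    try std.testing.expectEqual(@as(u32, 7), try alwaysOk());
    try std.testing.expectError(error.Nope, alwaysErr());
}
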
@@ -25510,6 +25612,21 @@ fn analyzeIsNonErr(
        }
    }

fn analyzePtrIsNonErr(
    sema: *Sema,
    block: *Block,
    src: LazySrcLoc,
    operand: Air.Inst.Ref,
) CompileError!Air.Inst.Ref {
    const result = try sema.analyzePtrIsNonErrComptimeOnly(block, src, operand);
    if (result == .none) {
        try sema.requireRuntimeBlock(block, src, null);
        return block.addUnOp(.is_non_err_ptr, operand);
    } else {
        return result;
    }
}

fn analyzeSlice(
    sema: *Sema,
    block: *Block,

@@ -22,6 +22,7 @@ pub fn addCases(cases: *tests.StackTracesContext) void {
        .ReleaseSafe = .{
            .exclude_os = .{
                .windows, // segfault
                .linux, // defeated by aggressive inlining
            },
            .expect =
            \\error: TheSkyIsFalling