stage2: implement function call inlining in the frontend
* remove the -Ddump-zir thing; that's handled through --verbose-ir
* rework Fn to have an is_inline flag without requiring any more memory
  on the heap per function
* implement a rough first version of dumping typed zir (tzir), which is
  a lot more helpful for debugging than what we had before. We don't
  have a way to parse it though.
* keep track of whether the inline-ness of a function changes, because
  if it does we have to go update callsites
* add a compile error for inline and export used together

inline function calls and comptime function calls are implemented the
same way: a block instruction is set up to capture the result, and then
a scope is set up that has a flag for is_comptime and some state for
whether the scope is being inlined. When analyzing `ret` instructions,
zig looks for inlining state in the scope, and if found, treats `ret`
as a `break` instruction instead, with the target block being the one
set up at the inline callsite.

Follow-up items:
* Complete the debug TZIR dumping code.
* Don't redundantly generate ZIR for each inline/comptime function
  call. Instead we should add a new state enum tag to Fn.
* comptime and inlining branch quotas.
* Add more test cases.
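A minimal sketch (illustrative, not from this commit's test suite) of the
behavior this wires up, using the `inline fn` syntax of this era:

    inline fn add(a: u32, b: u32) u32 {
        return a + b; // this `ret` is analyzed as a `break` at the callsite
    }

    export fn entry() void {
        var x: u32 = 1;
        // Inline call: the callee's ZIR is analyzed in a scope whose
        // label is `.inlining`; a block instruction captures the result.
        const y = add(x, 2);
        _ = y;
    }

    // New compile error: `export` and `inline` may not be combined, e.g.
    //     export inline fn bad() void {}
    // fails with "export of inline function".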
@@ -220,7 +220,6 @@ pub fn build(b: *Builder) !void {
     }

     const log_scopes = b.option([]const []const u8, "log", "Which log scopes to enable") orelse &[0][]const u8{};
-    const zir_dumps = b.option([]const []const u8, "dump-zir", "Which functions to dump ZIR for before codegen") orelse &[0][]const u8{};

     const opt_version_string = b.option([]const u8, "version-string", "Override Zig version string. Default is to find out with git.");
     const version = if (opt_version_string) |version| version else v: {
@@ -277,7 +276,6 @@ pub fn build(b: *Builder) !void {
     exe.addBuildOption(std.SemanticVersion, "semver", semver);

     exe.addBuildOption([]const []const u8, "log_scopes", log_scopes);
-    exe.addBuildOption([]const []const u8, "zir_dumps", zir_dumps);
     exe.addBuildOption(bool, "enable_tracy", tracy != null);
     exe.addBuildOption(bool, "is_stage1", is_stage1);
     if (tracy) |tracy_path| {
@@ -1459,10 +1459,10 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
                 const module = self.bin_file.options.module.?;
                 if (decl.typed_value.most_recent.typed_value.val.castTag(.function)) |payload| {
                     const func = payload.data;
-                    switch (func.analysis) {
+                    switch (func.bits.state) {
                         .queued => module.analyzeFnBody(decl, func) catch |err| switch (err) {
                             error.AnalysisFail => {
-                                assert(func.analysis != .in_progress);
+                                assert(func.bits.state != .in_progress);
                                 continue;
                             },
                             error.OutOfMemory => return error.OutOfMemory,
@@ -1471,12 +1471,16 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
                         .sema_failure, .dependency_failure => continue,
                         .success => {},
                     }
-                    // Here we tack on additional allocations to the Decl's arena. The allocations are
-                    // lifetime annotations in the ZIR.
+                    // Here we tack on additional allocations to the Decl's arena. The allocations
+                    // are lifetime annotations in the ZIR.
                     var decl_arena = decl.typed_value.most_recent.arena.?.promote(module.gpa);
                     defer decl.typed_value.most_recent.arena.?.* = decl_arena.state;
                     log.debug("analyze liveness of {s}\n", .{decl.name});
-                    try liveness.analyze(module.gpa, &decl_arena.allocator, func.analysis.success);
+                    try liveness.analyze(module.gpa, &decl_arena.allocator, func.data.body);
+
+                    if (self.verbose_ir) {
+                        func.dump(module.*);
+                    }
                 }

                 assert(decl.typed_value.most_recent.typed_value.ty.hasCodeGenBits());
src/Module.zig
@@ -286,23 +286,40 @@ pub const Decl = struct {
 /// Extern functions do not have this data structure; they are represented by
 /// the `Decl` only, with a `Value` tag of `extern_fn`.
 pub const Fn = struct {
-    /// This memory owned by the Decl's TypedValue.Managed arena allocator.
-    analysis: union(enum) {
-        queued: *ZIR,
-        in_progress,
-        /// There will be a corresponding ErrorMsg in Module.failed_decls
-        sema_failure,
-        /// This Fn might be OK but it depends on another Decl which did not successfully complete
-        /// semantic analysis.
-        dependency_failure,
-        success: Body,
+    bits: packed struct {
+        /// Get and set this field via `analysis` and `setAnalysis`.
+        state: Analysis.Tag,
+        /// We carry this state into `Fn` instead of leaving it in the AST so that
+        /// analysis of function calls can happen even on functions whose AST has
+        /// been unloaded from memory.
+        is_inline: bool,
+        unused_bits: u4 = 0,
+    },
+    /// Get and set this data via `analysis` and `setAnalysis`.
+    data: union {
+        none: void,
+        zir: *ZIR,
+        body: Body,
     },
     owner_decl: *Decl,

-    /// This memory is temporary and points to stack memory for the duration
-    /// of Fn analysis.
-    pub const Analysis = struct {
-        inner_block: Scope.Block,
+    pub const Analysis = union(Tag) {
+        queued: *ZIR,
+        in_progress,
+        sema_failure,
+        dependency_failure,
+        success: Body,
+
+        pub const Tag = enum(u3) {
+            queued,
+            in_progress,
+            /// There will be a corresponding ErrorMsg in Module.failed_decls
+            sema_failure,
+            /// This Fn might be OK but it depends on another Decl which did not
+            /// successfully complete semantic analysis.
+            dependency_failure,
+            success,
+        };
     };

     /// Contains un-analyzed ZIR instructions generated from Zig source AST.
@@ -311,22 +328,37 @@ pub const Fn = struct {
         arena: std.heap.ArenaAllocator.State,
     };

-    /// For debugging purposes.
-    pub fn dump(self: *Fn, mod: Module) void {
-        std.debug.print("Module.Function(name={s}) ", .{self.owner_decl.name});
-        switch (self.analysis) {
-            .queued => {
-                std.debug.print("queued\n", .{});
+    pub fn analysis(self: Fn) Analysis {
+        return switch (self.bits.state) {
+            .queued => .{ .queued = self.data.zir },
+            .success => .{ .success = self.data.body },
+            .in_progress => .in_progress,
+            .sema_failure => .sema_failure,
+            .dependency_failure => .dependency_failure,
+        };
+    }
+
+    pub fn setAnalysis(self: *Fn, anal: Analysis) void {
+        switch (anal) {
+            .queued => |zir_ptr| {
+                self.bits.state = .queued;
+                self.data = .{ .zir = zir_ptr };
             },
-            .in_progress => {
-                std.debug.print("in_progress\n", .{});
+            .success => |body| {
+                self.bits.state = .success;
+                self.data = .{ .body = body };
             },
-            else => {
-                std.debug.print("\n", .{});
-                zir.dumpFn(mod, self);
+            .in_progress, .sema_failure, .dependency_failure => {
+                self.bits.state = anal;
+                self.data = .{ .none = {} };
             },
         }
    }
+
+    /// For debugging purposes.
+    pub fn dump(self: *Fn, mod: Module) void {
+        zir.dumpFn(mod, self);
+    }
 };

 pub const Var = struct {
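Aside: the `Fn` rework above is the usual trick of splitting a tagged union
into a packed tag plus a bare (untagged) payload union, so `is_inline` fits
into spare bits instead of costing another word per function. A standalone
sketch of the pattern with hypothetical names (not code from this commit):

    const State = enum(u1) { queued, success };

    const Thing = struct {
        bits: packed struct {
            state: State,
            is_inline: bool,
            unused_bits: u6 = 0,
        },
        // Bare union: no tag is stored; `bits.state` serves as the tag.
        data: union { count: u32, ratio: f32 },

        const Full = union(State) { queued: u32, success: f32 };

        // Reconstitute the type-safe tagged view on demand.
        fn full(self: Thing) Full {
            return switch (self.bits.state) {
                .queued => .{ .queued = self.data.count },
                .success => .{ .success = self.data.ratio },
            };
        }
    };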
@@ -773,13 +805,33 @@ pub const Scope = struct {
         instructions: ArrayListUnmanaged(*Inst),
         /// Points to the arena allocator of DeclAnalysis
         arena: *Allocator,
-        label: ?Label = null,
+        label: Label = Label.none,
         is_comptime: bool,

-        pub const Label = struct {
-            zir_block: *zir.Inst.Block,
-            results: ArrayListUnmanaged(*Inst),
-            block_inst: *Inst.Block,
+        pub const Label = union(enum) {
+            none,
+            /// This `Block` maps a block ZIR instruction to the corresponding
+            /// TZIR instruction for break instruction analysis.
+            breaking: struct {
+                zir_block: *zir.Inst.Block,
+                merges: Merges,
+            },
+            /// This `Block` indicates that an inline function call is happening
+            /// and return instructions should be analyzed as a break instruction
+            /// to this TZIR block instruction.
+            inlining: struct {
+                /// We use this to count from 0 so that arg instructions know
+                /// which parameter index they are, without having to store
+                /// a parameter index with each arg instruction.
+                param_index: usize,
+                casted_args: []*Inst,
+                merges: Merges,
+            },
+
+            pub const Merges = struct {
+                results: ArrayListUnmanaged(*Inst),
+                block_inst: *Inst.Block,
+            };
         };

         /// For debugging purposes.
@@ -1189,8 +1241,21 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
                 break :blk fn_zir;
             };

+            const is_inline = blk: {
+                if (fn_proto.getExternExportInlineToken()) |maybe_inline_token| {
+                    if (tree.token_ids[maybe_inline_token] == .Keyword_inline) {
+                        break :blk true;
+                    }
+                }
+                break :blk false;
+            };
+
             new_func.* = .{
-                .analysis = .{ .queued = fn_zir },
+                .bits = .{
+                    .state = .queued,
+                    .is_inline = is_inline,
+                },
+                .data = .{ .zir = fn_zir },
                 .owner_decl = decl,
             };
             fn_payload.* = .{
@@ -1199,11 +1264,16 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
             };

             var prev_type_has_bits = false;
+            var prev_is_inline = false;
             var type_changed = true;

             if (decl.typedValueManaged()) |tvm| {
                 prev_type_has_bits = tvm.typed_value.ty.hasCodeGenBits();
                 type_changed = !tvm.typed_value.ty.eql(fn_type);
+                if (tvm.typed_value.val.castTag(.function)) |payload| {
+                    const prev_func = payload.data;
+                    prev_is_inline = prev_func.bits.is_inline;
+                }

                 tvm.deinit(self.gpa);
             }
@@ -1221,18 +1291,26 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
             decl.analysis = .complete;
             decl.generation = self.generation;

-            if (fn_type.hasCodeGenBits()) {
+            if (!is_inline and fn_type.hasCodeGenBits()) {
                 // We don't fully codegen the decl until later, but we do need to reserve a global
                 // offset table index for it. This allows us to codegen decls out of dependency order,
                 // increasing how many computations can be done in parallel.
                 try self.comp.bin_file.allocateDeclIndexes(decl);
                 try self.comp.work_queue.writeItem(.{ .codegen_decl = decl });
-            } else if (prev_type_has_bits) {
+            } else if (!prev_is_inline and prev_type_has_bits) {
                 self.comp.bin_file.freeDecl(decl);
             }

             if (fn_proto.getExternExportInlineToken()) |maybe_export_token| {
                 if (tree.token_ids[maybe_export_token] == .Keyword_export) {
+                    if (is_inline) {
+                        return self.failTok(
+                            &block_scope.base,
+                            maybe_export_token,
+                            "export of inline function",
+                            .{},
+                        );
+                    }
                     const export_src = tree.token_locs[maybe_export_token].start;
                     const name_loc = tree.token_locs[fn_proto.getNameToken().?];
                     const name = tree.tokenSliceLoc(name_loc);
@@ -1240,7 +1318,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
                     try self.analyzeExport(&block_scope.base, export_src, name, decl);
                 }
             }
-            return type_changed;
+            return type_changed or is_inline != prev_is_inline;
         },
         .VarDecl => {
             const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", ast_node);
@@ -1824,15 +1902,15 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
     };
     defer inner_block.instructions.deinit(self.gpa);

-    const fn_zir = func.analysis.queued;
+    const fn_zir = func.data.zir;
     defer fn_zir.arena.promote(self.gpa).deinit();
-    func.analysis = .{ .in_progress = {} };
+    func.setAnalysis(.in_progress);
     log.debug("set {s} to in_progress\n", .{decl.name});

     try zir_sema.analyzeBody(self, &inner_block.base, fn_zir.body);

     const instructions = try arena.allocator.dupe(*Inst, inner_block.instructions.items);
-    func.analysis = .{ .success = .{ .instructions = instructions } };
+    func.setAnalysis(.{ .success = .{ .instructions = instructions } });
     log.debug("set {s} to success\n", .{decl.name});
 }
@@ -2329,7 +2407,7 @@ pub fn analyzeDeclRef(self: *Module, scope: *Scope, src: usize, decl: *Decl) Inn
     self.ensureDeclAnalyzed(decl) catch |err| {
         if (scope.cast(Scope.Block)) |block| {
             if (block.func) |func| {
-                func.analysis = .dependency_failure;
+                func.setAnalysis(.dependency_failure);
             } else {
                 block.decl.analysis = .dependency_failure;
             }
@@ -3029,7 +3107,7 @@ fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, src: usize, err_msg: *Com
         .block => {
             const block = scope.cast(Scope.Block).?;
             if (block.func) |func| {
-                func.analysis = .sema_failure;
+                func.setAnalysis(.sema_failure);
             } else {
                 block.decl.analysis = .sema_failure;
                 block.decl.generation = self.generation;
@@ -532,7 +532,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
                     self.code.items.len += 4;

                     try self.dbgSetPrologueEnd();
-                    try self.genBody(self.mod_fn.analysis.success);
+                    try self.genBody(self.mod_fn.data.body);

                     const stack_end = self.max_end_stack;
                     if (stack_end > math.maxInt(i32))
@@ -576,7 +576,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
                     });
                 } else {
                     try self.dbgSetPrologueEnd();
-                    try self.genBody(self.mod_fn.analysis.success);
+                    try self.genBody(self.mod_fn.data.body);
                     try self.dbgSetEpilogueBegin();
                 }
             },
@@ -593,7 +593,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {

                     try self.dbgSetPrologueEnd();

-                    try self.genBody(self.mod_fn.analysis.success);
+                    try self.genBody(self.mod_fn.data.body);

                     // Backpatch stack offset
                     const stack_end = self.max_end_stack;
@@ -638,13 +638,13 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
                     writeInt(u32, try self.code.addManyAsArray(4), Instruction.pop(.al, .{ .fp, .pc }).toU32());
                 } else {
                     try self.dbgSetPrologueEnd();
-                    try self.genBody(self.mod_fn.analysis.success);
+                    try self.genBody(self.mod_fn.data.body);
                     try self.dbgSetEpilogueBegin();
                 }
             },
             else => {
                 try self.dbgSetPrologueEnd();
-                try self.genBody(self.mod_fn.analysis.success);
+                try self.genBody(self.mod_fn.data.body);
                 try self.dbgSetEpilogueBegin();
             },
         }
@@ -275,7 +275,7 @@ pub fn generate(file: *C, module: *Module, decl: *Decl) !void {
     try writer.writeAll(" {");

     const func: *Module.Fn = func_payload.data;
-    const instructions = func.analysis.success.instructions;
+    const instructions = func.data.body.instructions;
     if (instructions.len > 0) {
         try writer.writeAll("\n");
         for (instructions) |inst| {
@@ -63,7 +63,7 @@ pub fn genCode(buf: *ArrayList(u8), decl: *Decl) !void {
     // TODO: check for and handle death of instructions
     const tv = decl.typed_value.most_recent.typed_value;
     const mod_fn = tv.val.castTag(.function).?.data;
-    for (mod_fn.analysis.success.instructions) |inst| try genInst(buf, decl, inst);
+    for (mod_fn.data.body.instructions) |inst| try genInst(buf, decl, inst);

     // Write 'end' opcode
     try writer.writeByte(0x0B);
@@ -2,7 +2,6 @@ pub const have_llvm = true;
 pub const version: [:0]const u8 = "@ZIG_VERSION@";
 pub const semver = try @import("std").SemanticVersion.parse(version);
 pub const log_scopes: []const []const u8 = &[_][]const u8{};
-pub const zir_dumps: []const []const u8 = &[_][]const u8{};
 pub const enable_tracy = false;
 pub const is_stage1 = true;
 pub const skip_non_native = false;
@@ -2178,16 +2178,6 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
         else => false,
     };
     if (is_fn) {
-        const zir_dumps = if (std.builtin.is_test) &[0][]const u8{} else build_options.zir_dumps;
-        if (zir_dumps.len != 0) {
-            for (zir_dumps) |fn_name| {
-                if (mem.eql(u8, mem.spanZ(decl.name), fn_name)) {
-                    std.debug.print("\n{s}\n", .{decl.name});
-                    typed_value.val.castTag(.function).?.data.dump(module.*);
-                }
-            }
-        }
-
         // For functions we need to add a prologue to the debug line program.
         try dbg_line_buffer.ensureCapacity(26);
@@ -936,16 +936,6 @@ pub fn initDeclDebugBuffers(
     const typed_value = decl.typed_value.most_recent.typed_value;
     switch (typed_value.ty.zigTypeTag()) {
         .Fn => {
-            const zir_dumps = if (std.builtin.is_test) &[0][]const u8{} else build_options.zir_dumps;
-            if (zir_dumps.len != 0) {
-                for (zir_dumps) |fn_name| {
-                    if (mem.eql(u8, mem.spanZ(decl.name), fn_name)) {
-                        std.debug.print("\n{}\n", .{decl.name});
-                        typed_value.val.cast(Value.Payload.Function).?.func.dump(module.*);
-                    }
-                }
-            }
-
             // For functions we need to add a prologue to the debug line program.
             try dbg_line_buffer.ensureCapacity(26);
@@ -294,7 +294,7 @@ pub const LLVMIRModule = struct {
     const entry_block = llvm_func.appendBasicBlock("Entry");
     self.builder.positionBuilderAtEnd(entry_block);

-    const instructions = func.analysis.success.instructions;
+    const instructions = func.data.body.instructions;
     for (instructions) |inst| {
         switch (inst.tag) {
             .breakpoint => try self.genBreakpoint(inst.castTag(.breakpoint).?),
src/zir.zig
@@ -793,7 +793,9 @@ pub const Inst = struct {
             fn_type: *Inst,
             body: Module.Body,
         },
-        kw_args: struct {},
+        kw_args: struct {
+            is_inline: bool = false,
+        },
     };

     pub const FnType = struct {
@@ -1847,83 +1849,258 @@ pub fn emit(allocator: *Allocator, old_module: *IrModule) !Module {
 /// For debugging purposes, prints a function representation to stderr.
 pub fn dumpFn(old_module: IrModule, module_fn: *IrModule.Fn) void {
     const allocator = old_module.gpa;
-    var ctx: EmitZIR = .{
+    var ctx: DumpTzir = .{
         .allocator = allocator,
-        .decls = .{},
         .arena = std.heap.ArenaAllocator.init(allocator),
         .old_module = &old_module,
-        .next_auto_name = 0,
-        .names = std.StringArrayHashMap(void).init(allocator),
-        .primitive_table = std.AutoHashMap(Inst.Primitive.Builtin, *Decl).init(allocator),
-        .indent = 0,
-        .block_table = std.AutoHashMap(*ir.Inst.Block, *Inst.Block).init(allocator),
-        .loop_table = std.AutoHashMap(*ir.Inst.Loop, *Inst.Loop).init(allocator),
-        .metadata = std.AutoHashMap(*Inst, Module.MetaData).init(allocator),
-        .body_metadata = std.AutoHashMap(*Module.Body, Module.BodyMetaData).init(allocator),
+        .module_fn = module_fn,
+        .indent = 2,
+        .inst_table = DumpTzir.InstTable.init(allocator),
+        .partial_inst_table = DumpTzir.InstTable.init(allocator),
+        .const_table = DumpTzir.InstTable.init(allocator),
     };
-    defer ctx.metadata.deinit();
-    defer ctx.body_metadata.deinit();
-    defer ctx.block_table.deinit();
-    defer ctx.loop_table.deinit();
-    defer ctx.decls.deinit(allocator);
-    defer ctx.names.deinit();
-    defer ctx.primitive_table.deinit();
+    defer ctx.inst_table.deinit();
+    defer ctx.partial_inst_table.deinit();
+    defer ctx.const_table.deinit();
     defer ctx.arena.deinit();

-    const fn_ty = module_fn.owner_decl.typed_value.most_recent.typed_value.ty;
-    _ = ctx.emitFn(module_fn, 0, fn_ty) catch |err| {
-        std.debug.print("unable to dump function: {s}\n", .{@errorName(err)});
-        return;
-    };
-    var module = Module{
-        .decls = ctx.decls.items,
-        .arena = ctx.arena,
-        .metadata = ctx.metadata,
-        .body_metadata = ctx.body_metadata,
-    };
-
-    module.dump();
+    switch (module_fn.analysis()) {
+        .queued => std.debug.print("(queued)", .{}),
+        .in_progress => std.debug.print("(in_progress)", .{}),
+        .sema_failure => std.debug.print("(sema_failure)", .{}),
+        .dependency_failure => std.debug.print("(dependency_failure)", .{}),
+        .success => |body| {
+            ctx.dump(body, std.io.getStdErr().writer()) catch @panic("failed to dump TZIR");
+        },
+    }
 }

-/// For debugging purposes, prints a function representation to stderr.
-pub fn dumpBlock(old_module: IrModule, module_block: *IrModule.Scope.Block) void {
-    const allocator = old_module.gpa;
-    var ctx: EmitZIR = .{
-        .allocator = allocator,
-        .decls = .{},
-        .arena = std.heap.ArenaAllocator.init(allocator),
-        .old_module = &old_module,
-        .next_auto_name = 0,
-        .names = std.StringArrayHashMap(void).init(allocator),
-        .primitive_table = std.AutoHashMap(Inst.Primitive.Builtin, *Decl).init(allocator),
-        .indent = 0,
-        .block_table = std.AutoHashMap(*ir.Inst.Block, *Inst.Block).init(allocator),
-        .loop_table = std.AutoHashMap(*ir.Inst.Loop, *Inst.Loop).init(allocator),
-        .metadata = std.AutoHashMap(*Inst, Module.MetaData).init(allocator),
-        .body_metadata = std.AutoHashMap(*Module.Body, Module.BodyMetaData).init(allocator),
-    };
-    defer ctx.metadata.deinit();
-    defer ctx.body_metadata.deinit();
-    defer ctx.block_table.deinit();
-    defer ctx.loop_table.deinit();
-    defer ctx.decls.deinit(allocator);
-    defer ctx.names.deinit();
-    defer ctx.primitive_table.deinit();
-    defer ctx.arena.deinit();
+const DumpTzir = struct {
+    allocator: *Allocator,
+    arena: std.heap.ArenaAllocator,
+    old_module: *const IrModule,
+    module_fn: *IrModule.Fn,
+    indent: usize,
+    inst_table: InstTable,
+    partial_inst_table: InstTable,
+    const_table: InstTable,
+    next_index: usize = 0,
+    next_partial_index: usize = 0,
+    next_const_index: usize = 0,

-    _ = ctx.emitBlock(module_block, 0) catch |err| {
-        std.debug.print("unable to dump function: {}\n", .{err});
-        return;
-    };
-    var module = Module{
-        .decls = ctx.decls.items,
-        .arena = ctx.arena,
-        .metadata = ctx.metadata,
-        .body_metadata = ctx.body_metadata,
-    };
+    const InstTable = std.AutoArrayHashMap(*ir.Inst, usize);

-    module.dump();
-}
+    fn dump(dtz: *DumpTzir, body: ir.Body, writer: std.fs.File.Writer) !void {
+        // First pass to pre-populate the table so that we can show even invalid references.
+        // Must iterate the same order we iterate the second time.
+        // We also look for constants and put them in the const_table.
+        for (body.instructions) |inst| {
+            try dtz.inst_table.put(inst, dtz.next_index);
+            dtz.next_index += 1;
+            switch (inst.tag) {
+                .alloc,
+                .retvoid,
+                .unreach,
+                .breakpoint,
+                .dbg_stmt,
+                => {},
+
+                .ref,
+                .ret,
+                .bitcast,
+                .not,
+                .isnonnull,
+                .isnull,
+                .iserr,
+                .ptrtoint,
+                .floatcast,
+                .intcast,
+                .load,
+                .unwrap_optional,
+                .wrap_optional,
+                => {
+                    const un_op = inst.cast(ir.Inst.UnOp).?;
+                    try dtz.findConst(un_op.operand);
+                },
+
+                .add,
+                .sub,
+                .cmp_lt,
+                .cmp_lte,
+                .cmp_eq,
+                .cmp_gte,
+                .cmp_gt,
+                .cmp_neq,
+                .store,
+                .booland,
+                .boolor,
+                .bitand,
+                .bitor,
+                .xor,
+                => {
+                    const bin_op = inst.cast(ir.Inst.BinOp).?;
+                    try dtz.findConst(bin_op.lhs);
+                    try dtz.findConst(bin_op.rhs);
+                },
+
+                .arg => {},
+
+                // TODO fill out this debug printing
+                .assembly,
+                .block,
+                .br,
+                .brvoid,
+                .call,
+                .condbr,
+                .constant,
+                .loop,
+                .varptr,
+                .switchbr,
+                => {},
+            }
+        }
+
+        std.debug.print("Module.Function(name={s}):\n", .{dtz.module_fn.owner_decl.name});
+
+        for (dtz.const_table.items()) |entry| {
+            const constant = entry.key.castTag(.constant).?;
+            try writer.print(" @{d}: {} = {};\n", .{
+                entry.value, constant.base.ty, constant.val,
+            });
+        }
+
+        return dtz.dumpBody(body, writer);
+    }
+
+    fn dumpBody(dtz: *DumpTzir, body: ir.Body, writer: std.fs.File.Writer) !void {
+        for (body.instructions) |inst| {
+            const my_index = dtz.next_partial_index;
+            try dtz.partial_inst_table.put(inst, my_index);
+            dtz.next_partial_index += 1;
+
+            try writer.writeByteNTimes(' ', dtz.indent);
+            try writer.print("%{d}: {} = {s}(", .{
+                my_index, inst.ty, @tagName(inst.tag),
+            });
+            switch (inst.tag) {
+                .alloc,
+                .retvoid,
+                .unreach,
+                .breakpoint,
+                .dbg_stmt,
+                => try writer.writeAll(")\n"),
+
+                .ref,
+                .ret,
+                .bitcast,
+                .not,
+                .isnonnull,
+                .isnull,
+                .iserr,
+                .ptrtoint,
+                .floatcast,
+                .intcast,
+                .load,
+                .unwrap_optional,
+                .wrap_optional,
+                => {
+                    const un_op = inst.cast(ir.Inst.UnOp).?;
+                    if (dtz.partial_inst_table.get(un_op.operand)) |operand_index| {
+                        try writer.print("%{d})\n", .{operand_index});
+                    } else if (dtz.const_table.get(un_op.operand)) |operand_index| {
+                        try writer.print("@{d})\n", .{operand_index});
+                    } else if (dtz.inst_table.get(un_op.operand)) |operand_index| {
+                        try writer.print("%{d}) // Instruction does not dominate all uses!\n", .{
+                            operand_index,
+                        });
+                    } else {
+                        try writer.writeAll("!BADREF!)\n");
+                    }
+                },
+
+                .add,
+                .sub,
+                .cmp_lt,
+                .cmp_lte,
+                .cmp_eq,
+                .cmp_gte,
+                .cmp_gt,
+                .cmp_neq,
+                .store,
+                .booland,
+                .boolor,
+                .bitand,
+                .bitor,
+                .xor,
+                => {
+                    var lhs_kinky: ?usize = null;
+                    var rhs_kinky: ?usize = null;
+
+                    const bin_op = inst.cast(ir.Inst.BinOp).?;
+                    if (dtz.partial_inst_table.get(bin_op.lhs)) |operand_index| {
+                        try writer.print("%{d}, ", .{operand_index});
+                    } else if (dtz.const_table.get(bin_op.lhs)) |operand_index| {
+                        try writer.print("@{d}, ", .{operand_index});
+                    } else if (dtz.inst_table.get(bin_op.lhs)) |operand_index| {
+                        lhs_kinky = operand_index;
+                        try writer.print("%{d}, ", .{operand_index});
+                    } else {
+                        try writer.writeAll("!BADREF!, ");
+                    }
+                    if (dtz.partial_inst_table.get(bin_op.rhs)) |operand_index| {
+                        try writer.print("%{d}", .{operand_index});
+                    } else if (dtz.const_table.get(bin_op.rhs)) |operand_index| {
+                        try writer.print("@{d}", .{operand_index});
+                    } else if (dtz.inst_table.get(bin_op.rhs)) |operand_index| {
+                        rhs_kinky = operand_index;
+                        try writer.print("%{d}", .{operand_index});
+                    } else {
+                        try writer.writeAll("!BADREF!");
+                    }
+                    if (lhs_kinky != null or rhs_kinky != null) {
+                        try writer.writeAll(") // Instruction does not dominate all uses!");
+                        if (lhs_kinky) |lhs| {
+                            try writer.print(" %{d}", .{lhs});
+                        }
+                        if (rhs_kinky) |rhs| {
+                            try writer.print(" %{d}", .{rhs});
+                        }
+                        try writer.writeAll("\n");
+                    } else {
+                        try writer.writeAll(")\n");
+                    }
+                },
+
+                .arg => {
+                    const arg = inst.castTag(.arg).?;
+                    try writer.print("{s})\n", .{arg.name});
+                },
+
+                // TODO fill out this debug printing
+                .assembly,
+                .block,
+                .br,
+                .brvoid,
+                .call,
+                .condbr,
+                .constant,
+                .loop,
+                .varptr,
+                .switchbr,
+                => {
+                    try writer.writeAll("!TODO!)\n");
+                },
+            }
+        }
+    }
+
+    fn findConst(dtz: *DumpTzir, operand: *ir.Inst) !void {
+        if (operand.tag == .constant) {
+            try dtz.const_table.put(operand, dtz.next_const_index);
+            dtz.next_const_index += 1;
+        }
+    }
+};

 const EmitZIR = struct {
     allocator: *Allocator,
@@ -2105,36 +2282,6 @@ const EmitZIR = struct {
         return &declref_inst.base;
     }

-    fn emitBlock(self: *EmitZIR, module_block: *IrModule.Scope.Block, src: usize) Allocator.Error!*Decl {
-        var inst_table = std.AutoHashMap(*ir.Inst, *Inst).init(self.allocator);
-        defer inst_table.deinit();
-
-        var instructions = std.ArrayList(*Inst).init(self.allocator);
-        defer instructions.deinit();
-
-        const body: ir.Body = .{ .instructions = module_block.instructions.items };
-        try self.emitBody(body, &inst_table, &instructions);
-
-        const fn_type = try self.emitType(src, Type.initTag(.void));
-
-        const arena_instrs = try self.arena.allocator.alloc(*Inst, instructions.items.len);
-        mem.copy(*Inst, arena_instrs, instructions.items);
-
-        const fn_inst = try self.arena.allocator.create(Inst.Fn);
-        fn_inst.* = .{
-            .base = .{
-                .src = src,
-                .tag = Inst.Fn.base_tag,
-            },
-            .positionals = .{
-                .fn_type = fn_type.inst,
-                .body = .{ .instructions = arena_instrs },
-            },
-            .kw_args = .{},
-        };
-        return self.emitUnnamedDecl(&fn_inst.base);
-    }
-
     fn emitFn(self: *EmitZIR, module_fn: *IrModule.Fn, src: usize, ty: Type) Allocator.Error!*Decl {
         var inst_table = std.AutoHashMap(*ir.Inst, *Inst).init(self.allocator);
         defer inst_table.deinit();
@@ -2142,7 +2289,7 @@ const EmitZIR = struct {
         var instructions = std.ArrayList(*Inst).init(self.allocator);
         defer instructions.deinit();

-        switch (module_fn.analysis) {
+        switch (module_fn.analysis()) {
            .queued => unreachable,
            .in_progress => unreachable,
            .success => |body| {
@@ -2224,7 +2371,9 @@ const EmitZIR = struct {
                .fn_type = fn_type.inst,
                .body = .{ .instructions = arena_instrs },
            },
-            .kw_args = .{},
+            .kw_args = .{
+                .is_inline = module_fn.bits.is_inline,
+            },
        };
        return self.emitUnnamedDecl(&fn_inst.base);
    }
src/zir_sema.zig
@@ -577,7 +577,15 @@ fn analyzeInstCompileError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) In
 }

 fn analyzeInstArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
-    const b = try mod.requireRuntimeBlock(scope, inst.base.src);
+    const b = try mod.requireFunctionBlock(scope, inst.base.src);
+    switch (b.label) {
+        .none, .breaking => {},
+        .inlining => |*inlining| {
+            const param_index = inlining.param_index;
+            inlining.param_index += 1;
+            return inlining.casted_args[param_index];
+        },
+    }
     const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
     const param_index = b.instructions.items.len;
     const param_count = fn_ty.fnParamLen();
@@ -636,7 +644,7 @@ fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_c
         .decl = parent_block.decl,
         .instructions = .{},
         .arena = parent_block.arena,
-        .label = null,
+        .label = .none,
         .is_comptime = parent_block.is_comptime or is_comptime,
     };
     defer child_block.instructions.deinit(mod.gpa);
@@ -674,41 +682,56 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
         .decl = parent_block.decl,
         .instructions = .{},
         .arena = parent_block.arena,
-        // TODO @as here is working around a stage1 miscompilation bug :(
-        .label = @as(?Scope.Block.Label, Scope.Block.Label{
-            .zir_block = inst,
-            .results = .{},
-            .block_inst = block_inst,
-        }),
+        .label = Scope.Block.Label{
+            .breaking = .{
+                .zir_block = inst,
+                .merges = .{
+                    .results = .{},
+                    .block_inst = block_inst,
+                },
+            },
+        },
         .is_comptime = is_comptime or parent_block.is_comptime,
     };
-    const label = &child_block.label.?;
+    const merges = &child_block.label.breaking.merges;

     defer child_block.instructions.deinit(mod.gpa);
-    defer label.results.deinit(mod.gpa);
+    defer merges.results.deinit(mod.gpa);

     try analyzeBody(mod, &child_block.base, inst.positionals.body);

+    return analyzeBlockBody(mod, scope, &child_block, merges);
+}
+
+fn analyzeBlockBody(
+    mod: *Module,
+    scope: *Scope,
+    child_block: *Scope.Block,
+    merges: *Scope.Block.Label.Merges,
+) InnerError!*Inst {
+    const parent_block = scope.cast(Scope.Block).?;
+
     // Blocks must terminate with noreturn instruction.
     assert(child_block.instructions.items.len != 0);
     assert(child_block.instructions.items[child_block.instructions.items.len - 1].ty.isNoReturn());

-    if (label.results.items.len == 0) {
-        // No need for a block instruction. We can put the new instructions directly into the parent block.
+    if (merges.results.items.len == 0) {
+        // No need for a block instruction. We can put the new instructions
+        // directly into the parent block.
         const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items);
         try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
         return copied_instructions[copied_instructions.len - 1];
     }
-    if (label.results.items.len == 1) {
+    if (merges.results.items.len == 1) {
         const last_inst_index = child_block.instructions.items.len - 1;
         const last_inst = child_block.instructions.items[last_inst_index];
         if (last_inst.breakBlock()) |br_block| {
-            if (br_block == block_inst) {
+            if (br_block == merges.block_inst) {
                 // No need for a block instruction. We can put the new instructions directly into the parent block.
                 // Here we omit the break instruction.
                 const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items[0..last_inst_index]);
                 try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
-                return label.results.items[0];
+                return merges.results.items[0];
             }
         }
     }
@@ -717,10 +740,10 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt

     // Need to set the type and emit the Block instruction. This allows machine code generation
     // to emit a jump instruction to after the block when it encounters the break.
-    try parent_block.instructions.append(mod.gpa, &block_inst.base);
-    block_inst.base.ty = try mod.resolvePeerTypes(scope, label.results.items);
-    block_inst.body = .{ .instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items) };
-    return &block_inst.base;
+    try parent_block.instructions.append(mod.gpa, &merges.block_inst.base);
+    merges.block_inst.base.ty = try mod.resolvePeerTypes(scope, merges.results.items);
+    merges.block_inst.body = .{ .instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items) };
+    return &merges.block_inst.base;
 }

 fn analyzeInstBreakpoint(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
@@ -829,14 +852,32 @@ fn analyzeInstCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError
     const ret_type = func.ty.fnReturnType();

     const b = try mod.requireFunctionBlock(scope, inst.base.src);
-    if (b.is_comptime) {
-        const fn_val = try mod.resolveConstValue(scope, func);
-        const module_fn = switch (fn_val.tag()) {
-            .function => fn_val.castTag(.function).?.data,
-            .extern_fn => return mod.fail(scope, inst.base.src, "comptime call of extern function", .{}),
+    const is_comptime_call = b.is_comptime or inst.kw_args.modifier == .compile_time;
+    const is_inline_call = is_comptime_call or inst.kw_args.modifier == .always_inline or blk: {
+        // This logic will get simplified by
+        // https://github.com/ziglang/zig/issues/6429
+        if (try mod.resolveDefinedValue(scope, func)) |func_val| {
+            const module_fn = switch (func_val.tag()) {
+                .function => func_val.castTag(.function).?.data,
+                else => break :blk false,
+            };
+            break :blk module_fn.bits.is_inline;
+        }
+        break :blk false;
+    };
+    if (is_inline_call) {
+        const func_val = try mod.resolveConstValue(scope, func);
+        const module_fn = switch (func_val.tag()) {
+            .function => func_val.castTag(.function).?.data,
+            .extern_fn => return mod.fail(scope, inst.base.src, "{s} call of extern function", .{
+                @as([]const u8, if (is_comptime_call) "comptime" else "inline"),
+            }),
             else => unreachable,
         };
         const callee_decl = module_fn.owner_decl;
+        // TODO: De-duplicate this with the code in Module.zig that generates
+        // ZIR for the same function and re-use the same ZIR for runtime function
+        // generation and for inline/comptime calls.
         const callee_file_scope = callee_decl.getFileScope();
         const tree = mod.getAstTree(callee_file_scope) catch |err| switch (err) {
             error.OutOfMemory => return error.OutOfMemory,
@@ -859,23 +900,31 @@ fn analyzeInstCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError
         };
         defer gen_scope.instructions.deinit(mod.gpa);

-        // Add a const instruction for each parameter.
+        // We need an instruction for each parameter, and they must be first in the body.
+        try gen_scope.instructions.resize(mod.gpa, fn_proto.params_len);
         var params_scope = &gen_scope.base;
         for (fn_proto.params()) |param, i| {
             const name_token = param.name_token.?;
             const src = tree.token_locs[name_token].start;
             const param_name = try mod.identifierTokenString(scope, name_token);
-            const arg_val = try mod.resolveConstValue(scope, casted_args[i]);
-            const arg = try astgen.addZIRInstConst(mod, params_scope, src, .{
-                .ty = casted_args[i].ty,
-                .val = arg_val,
-            });
+            const arg = try call_arena.allocator.create(zir.Inst.Arg);
+            arg.* = .{
+                .base = .{
+                    .tag = .arg,
+                    .src = src,
+                },
+                .positionals = .{
+                    .name = param_name,
+                },
+                .kw_args = .{},
+            };
+            gen_scope.instructions.items[i] = &arg.base;
             const sub_scope = try call_arena.allocator.create(Scope.LocalVal);
             sub_scope.* = .{
                 .parent = params_scope,
                 .gen_zir = &gen_scope,
                 .name = param_name,
-                .inst = arg,
+                .inst = &arg.base,
             };
             params_scope = &sub_scope.base;
         }
@@ -896,42 +945,52 @@ fn analyzeInstCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError
             zir.dumpZir(mod.gpa, "fn_body_callee", callee_decl.name, gen_scope.instructions.items) catch {};
         }

-        // Analyze the ZIR.
-        var inner_block: Scope.Block = .{
+        // Analyze the ZIR. The same ZIR gets analyzed into a runtime function
+        // or an inlined call depending on what union tag the `label` field is
+        // set to in the `Scope.Block`.
+        // This block instruction will be used to capture the return value from the
+        // inlined function.
+        const block_inst = try scope.arena().create(Inst.Block);
+        block_inst.* = .{
+            .base = .{
+                .tag = Inst.Block.base_tag,
+                .ty = ret_type,
+                .src = inst.base.src,
+            },
+            .body = undefined,
+        };
+        var child_block: Scope.Block = .{
             .parent = null,
             .func = module_fn,
-            .decl = callee_decl,
+            // Note that we pass the caller's Decl, not the callee. This causes
+            // compile errors to be attached (correctly) to the caller's Decl.
+            .decl = scope.decl().?,
             .instructions = .{},
-            .arena = &call_arena.allocator,
-            .is_comptime = true,
+            .arena = scope.arena(),
+            .label = Scope.Block.Label{
+                .inlining = .{
+                    .param_index = 0,
+                    .casted_args = casted_args,
+                    .merges = .{
+                        .results = .{},
+                        .block_inst = block_inst,
+                    },
+                },
+            },
+            .is_comptime = is_comptime_call,
         };
-        defer inner_block.instructions.deinit(mod.gpa);
+        const merges = &child_block.label.inlining.merges;

-        // TODO make sure compile errors that happen from this analyzeBody are reported correctly
-        // and attach to the caller Decl not the callee.
-        try analyzeBody(mod, &inner_block.base, .{
+        defer child_block.instructions.deinit(mod.gpa);
+        defer merges.results.deinit(mod.gpa);
+
+        // This will have return instructions analyzed as break instructions to
+        // the block_inst above.
+        try analyzeBody(mod, &child_block.base, .{
             .instructions = gen_scope.instructions.items,
         });

-        if (mod.comp.verbose_ir) {
-            inner_block.dump(mod.*);
-        }
-
-        assert(inner_block.instructions.items.len == 1);
-        const only_inst = inner_block.instructions.items[0];
-        switch (only_inst.tag) {
-            .ret => {
-                const ret_inst = only_inst.castTag(.ret).?;
-                const operand = ret_inst.operand;
-                const callee_arena = scope.arena();
-                return mod.constInst(scope, inst.base.src, .{
-                    .ty = try operand.ty.copy(callee_arena),
-                    .val = try operand.value().?.copy(callee_arena),
-                });
-            },
-            .retvoid => return mod.constVoid(scope, inst.base.src),
-            else => unreachable,
-        }
+        return analyzeBlockBody(mod, scope, &child_block, merges);
     }

     return mod.addCall(b, inst.base.src, ret_type, func, casted_args);
@@ -954,7 +1013,11 @@ fn analyzeInstFn(mod: *Module, scope: *Scope, fn_inst: *zir.Inst.Fn) InnerError!
     };
     const new_func = try scope.arena().create(Module.Fn);
     new_func.* = .{
-        .analysis = .{ .queued = fn_zir },
+        .bits = .{
+            .state = .queued,
+            .is_inline = fn_inst.kw_args.is_inline,
+        },
+        .data = .{ .zir = fn_zir },
         .owner_decl = scope.decl().?,
     };
     return mod.constInst(scope, fn_inst.base.src, .{
@@ -2020,21 +2083,41 @@ fn analyzeInstUnreachable(
 fn analyzeInstRet(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
     const operand = try resolveInst(mod, scope, inst.positionals.operand);
     const b = try mod.requireFunctionBlock(scope, inst.base.src);
-    return mod.addUnOp(b, inst.base.src, Type.initTag(.noreturn), .ret, operand);
+
+    switch (b.label) {
+        .inlining => |*inlining| {
+            // We are inlining a function call; rewrite the `ret` as a `break`.
+            try inlining.merges.results.append(mod.gpa, operand);
+            return mod.addBr(b, inst.base.src, inlining.merges.block_inst, operand);
+        },
+        .none, .breaking => {
+            return mod.addUnOp(b, inst.base.src, Type.initTag(.noreturn), .ret, operand);
+        },
+    }
 }

 fn analyzeInstRetVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
     const b = try mod.requireFunctionBlock(scope, inst.base.src);
-    if (b.func) |func| {
-        // Need to emit a compile error if returning void is not allowed.
-        const void_inst = try mod.constVoid(scope, inst.base.src);
-        const fn_ty = func.owner_decl.typed_value.most_recent.typed_value.ty;
-        const casted_void = try mod.coerce(scope, fn_ty.fnReturnType(), void_inst);
-        if (casted_void.ty.zigTypeTag() != .Void) {
-            return mod.addUnOp(b, inst.base.src, Type.initTag(.noreturn), .ret, casted_void);
-        }
+    switch (b.label) {
+        .inlining => |*inlining| {
+            // We are inlining a function call; rewrite the `retvoid` as a `breakvoid`.
+            const void_inst = try mod.constVoid(scope, inst.base.src);
+            try inlining.merges.results.append(mod.gpa, void_inst);
+            return mod.addBr(b, inst.base.src, inlining.merges.block_inst, void_inst);
+        },
+        .none, .breaking => {
+            if (b.func) |func| {
+                // Need to emit a compile error if returning void is not allowed.
+                const void_inst = try mod.constVoid(scope, inst.base.src);
+                const fn_ty = func.owner_decl.typed_value.most_recent.typed_value.ty;
+                const casted_void = try mod.coerce(scope, fn_ty.fnReturnType(), void_inst);
+                if (casted_void.ty.zigTypeTag() != .Void) {
+                    return mod.addUnOp(b, inst.base.src, Type.initTag(.noreturn), .ret, casted_void);
+                }
+            }
+            return mod.addNoOp(b, inst.base.src, Type.initTag(.noreturn), .retvoid);
+        },
     }
-    return mod.addNoOp(b, inst.base.src, Type.initTag(.noreturn), .retvoid);
 }

 fn floatOpAllowed(tag: zir.Inst.Tag) bool {
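Conceptually, the rewrite above means an inlined call is analyzed as if the
callsite were a labeled block expression. A rough user-level picture
(illustrative only, not actual compiler output):

    // const r = add(x, 2);    // where add is `inline fn add(a: u32, b: u32) u32`
    // behaves like:
    // const r = blk: {
    //     const a = x;         // parameters bound via the synthetic `arg` instructions
    //     const b = 2;
    //     break :blk a + b;    // the callee's `ret`, analyzed as `break` to this block
    // };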
@@ -2054,12 +2137,16 @@ fn analyzeBreak(
 ) InnerError!*Inst {
     var opt_block = scope.cast(Scope.Block);
     while (opt_block) |block| {
-        if (block.label) |*label| {
-            if (label.zir_block == zir_block) {
-                try label.results.append(mod.gpa, operand);
-                const b = try mod.requireRuntimeBlock(scope, src);
-                return mod.addBr(b, src, label.block_inst, operand);
-            }
+        switch (block.label) {
+            .none => {},
+            .breaking => |*label| {
+                if (label.zir_block == zir_block) {
+                    try label.merges.results.append(mod.gpa, operand);
+                    const b = try mod.requireFunctionBlock(scope, src);
+                    return mod.addBr(b, src, label.merges.block_inst, operand);
+                }
+            },
+            .inlining => unreachable, // Invalid `break` ZIR inside inline function call.
        }
         opt_block = block.parent;
     } else unreachable;
@@ -30,7 +30,7 @@ pub fn addCases(ctx: *TestContext) !void {
         \\@unnamed$7 = fntype([], @void, cc=C)
         \\@entry = fn(@unnamed$7, {
         \\  %0 = returnvoid() ; deaths=0b1000000000000000
-        \\})
+        \\}, is_inline=0)
         \\
     );
     ctx.transformZIR("elemptr, add, cmp, condbr, return, breakpoint", linux_x64,
@@ -78,7 +78,7 @@ pub fn addCases(ctx: *TestContext) !void {
         \\@unnamed$6 = fntype([], @void, cc=C)
         \\@entry = fn(@unnamed$6, {
         \\  %0 = returnvoid() ; deaths=0b1000000000000000
-        \\})
+        \\}, is_inline=0)
         \\@entry__anon_1 = str("2\x08\x01\n")
         \\@9 = declref("9__anon_0")
         \\@9__anon_0 = str("entry")
@@ -123,17 +123,17 @@ pub fn addCases(ctx: *TestContext) !void {
         \\@entry = fn(@unnamed$7, {
         \\  %0 = call(@a, [], modifier=auto) ; deaths=0b1000000000000001
         \\  %1 = returnvoid() ; deaths=0b1000000000000000
-        \\})
+        \\}, is_inline=0)
         \\@unnamed$9 = fntype([], @void, cc=C)
         \\@a = fn(@unnamed$9, {
         \\  %0 = call(@b, [], modifier=auto) ; deaths=0b1000000000000001
         \\  %1 = returnvoid() ; deaths=0b1000000000000000
-        \\})
+        \\}, is_inline=0)
         \\@unnamed$11 = fntype([], @void, cc=C)
         \\@b = fn(@unnamed$11, {
         \\  %0 = call(@a, [], modifier=auto) ; deaths=0b1000000000000001
         \\  %1 = returnvoid() ; deaths=0b1000000000000000
-        \\})
+        \\}, is_inline=0)
         \\
     );
     // Now we introduce a compile error
@@ -203,7 +203,7 @@ pub fn addCases(ctx: *TestContext) !void {
         \\@unnamed$7 = fntype([], @void, cc=C)
         \\@entry = fn(@unnamed$7, {
         \\  %0 = returnvoid() ; deaths=0b1000000000000000
-        \\})
+        \\}, is_inline=0)
         \\
     );
 }