stage2: Module and Sema are compiling again

Next up is reworking the seam between the LazySrcLoc emitted by Sema
and the byte offsets currently expected by codegen.
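Concretely, the goal is for codegen to carry the LazySrcLoc around and only
resolve it at the point where a byte offset is actually needed. A minimal
sketch of that seam, assuming the SrcLoc byteOffset signature implied by the
TODO list below (inst.src and emitDebugLineInfo are stand-ins, not names
from this commit):

    // Upgrade the lazy location using the Decl that owns the function,
    // then do the AST walk only when a byte offset is required.
    const src_loc = inst.src.toSrcLocWithDecl(self.mod_fn.owner_decl);
    const byte_off = try src_loc.byteOffset(mod);
    try self.emitDebugLineInfo(byte_off);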

And then the big one: updating astgen.zig to use the new memory layout.
Andrew Kelley
2021-03-17 22:54:56 -07:00
parent 38b3d4b00a
commit 66245ac834
12 changed files with 1121 additions and 967 deletions

@@ -13,6 +13,9 @@ Merge TODO list:
* finish implementing SrcLoc byteOffset function
* audit Module.zig for use of token_starts - it should only be when
resolving LazySrcLoc
* audit all the .unneeded src locations
* audit the calls to toSrcLocWithDecl in codegen, specifically when there are
inlined function calls from other files.
Performance optimizations to look into:
@@ -30,71 +33,6 @@ Random snippets of code that I deleted and need to make sure get
re-integrated appropriately:
fn zirArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const param_index = b.instructions.items.len;
const param_count = fn_ty.fnParamLen();
if (param_index >= param_count) {
return mod.fail(scope, inst.base.src, "parameter index {d} outside list of length {d}", .{
param_index,
param_count,
});
}
const param_type = fn_ty.fnParamType(param_index);
const name = try scope.arena().dupeZ(u8, inst.positionals.name);
return mod.addArg(b, inst.base.src, param_type, name);
}
fn zirReturnVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
if (b.inlining) |inlining| {
// We are inlining a function call; rewrite the `retvoid` as a `breakvoid`.
const void_inst = try mod.constVoid(scope, inst.base.src);
try inlining.merges.results.append(mod.gpa, void_inst);
const br = try mod.addBr(b, inst.base.src, inlining.merges.block_inst, void_inst);
return &br.base;
}
if (b.func) |func| {
// Need to emit a compile error if returning void is not allowed.
const void_inst = try mod.constVoid(scope, inst.base.src);
const fn_ty = func.owner_decl.typed_value.most_recent.typed_value.ty;
const casted_void = try mod.coerce(scope, fn_ty.fnReturnType(), void_inst);
if (casted_void.ty.zigTypeTag() != .Void) {
return mod.addUnOp(b, inst.base.src, Type.initTag(.noreturn), .ret, casted_void);
}
}
return mod.addNoOp(b, inst.base.src, Type.initTag(.noreturn), .retvoid);
}
fn zirReturn(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
const b = try mod.requireFunctionBlock(scope, inst.base.src);
if (b.inlining) |inlining| {
// We are inlining a function call; rewrite the `ret` as a `break`.
try inlining.merges.results.append(mod.gpa, operand);
const br = try mod.addBr(b, inst.base.src, inlining.merges.block_inst, operand);
return &br.base;
}
return mod.addUnOp(b, inst.base.src, Type.initTag(.noreturn), .ret, operand);
}
fn zirPrimitive(mod: *Module, scope: *Scope, primitive: *zir.Inst.Primitive) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.constInst(scope, primitive.base.src, primitive.positionals.tag.toTypedValue());
}
/// Each Decl gets its own string interning, in order to avoid contention when
/// using multiple threads to analyze Decls in parallel. Any particular Decl will only
@@ -106,23 +44,3 @@ fn zirPrimitive(mod: *Module, scope: *Scope, primitive: *zir.Inst.Primitive) Inn
pub fn errSrcLoc(mod: *Module, scope: *Scope, src: LazySrcLoc) SrcLoc {
const file_scope = scope.getFileScope();
switch (src) {
.byte_offset => |off| return .{
.file_scope = file_scope,
.byte_offset = off,
},
.token_offset => |off| {
@panic("TODO errSrcLoc for token_offset");
},
.node_offset => |off| {
@panic("TODO errSrcLoc for node_offset");
},
.node_offset_var_decl_ty => |off| {
@panic("TODO errSrcLoc for node_offset_var_decl_ty");
},
}
}

@@ -22,7 +22,7 @@ pub const Result = union(enum) {
/// Invalid unicode escape at this index.
invalid_unicode_escape: usize,
/// The left brace at this index is missing a matching right brace.
missing_matching_brace: usize,
missing_matching_rbrace: usize,
/// Expected unicode digits at this index.
expected_unicode_digits: usize,
};

@@ -237,10 +237,10 @@ pub const Decl = struct {
}
}
pub fn srcLoc(decl: *const Decl) SrcLoc {
pub fn srcLoc(decl: *Decl) SrcLoc {
return .{
.decl = decl,
.byte_offset = 0,
.container = .{ .decl = decl },
.lazy = .{ .node_offset = 0 },
};
}
@@ -352,7 +352,7 @@ pub const Fn = struct {
/// For debugging purposes.
pub fn dump(func: *Fn, mod: Module) void {
zir.dumpFn(mod, func);
ir.dumpFn(mod, func);
}
};
@@ -381,12 +381,12 @@ pub const Scope = struct {
/// Returns the arena Allocator associated with the Decl of the Scope.
pub fn arena(scope: *Scope) *Allocator {
switch (scope.tag) {
.block => return scope.cast(Block).?.arena,
.gen_zir => return scope.cast(GenZir).?.arena,
.local_val => return scope.cast(LocalVal).?.gen_zir.arena,
.local_ptr => return scope.cast(LocalPtr).?.gen_zir.arena,
.gen_suspend => return scope.cast(GenZir).?.arena,
.gen_nosuspend => return scope.cast(Nosuspend).?.gen_zir.arena,
.block => return scope.cast(Block).?.sema.arena,
.gen_zir => return scope.cast(GenZir).?.zir_code.arena,
.local_val => return scope.cast(LocalVal).?.gen_zir.zir_code.arena,
.local_ptr => return scope.cast(LocalPtr).?.gen_zir.zir_code.arena,
.gen_suspend => return scope.cast(GenZir).?.zir_code.arena,
.gen_nosuspend => return scope.cast(Nosuspend).?.gen_zir.zir_code.arena,
.file => unreachable,
.container => unreachable,
.decl_ref => unreachable,
@@ -399,12 +399,12 @@ pub const Scope = struct {
pub fn ownerDecl(scope: *Scope) ?*Decl {
return switch (scope.tag) {
.block => scope.cast(Block).?.owner_decl,
.block => scope.cast(Block).?.sema.owner_decl,
.gen_zir => scope.cast(GenZir).?.zir_code.decl,
.local_val => scope.cast(LocalVal).?.gen_zir.decl,
.local_ptr => scope.cast(LocalPtr).?.gen_zir.decl,
.gen_suspend => return scope.cast(GenZir).?.decl,
.gen_nosuspend => return scope.cast(Nosuspend).?.gen_zir.decl,
.local_val => scope.cast(LocalVal).?.gen_zir.zir_code.decl,
.local_ptr => scope.cast(LocalPtr).?.gen_zir.zir_code.decl,
.gen_suspend => return scope.cast(GenZir).?.zir_code.decl,
.gen_nosuspend => return scope.cast(Nosuspend).?.gen_zir.zir_code.decl,
.file => null,
.container => null,
.decl_ref => scope.cast(DeclRef).?.decl,
@@ -415,10 +415,10 @@ pub const Scope = struct {
return switch (scope.tag) {
.block => scope.cast(Block).?.src_decl,
.gen_zir => scope.cast(GenZir).?.zir_code.decl,
.local_val => scope.cast(LocalVal).?.gen_zir.decl,
.local_ptr => scope.cast(LocalPtr).?.gen_zir.decl,
.gen_suspend => return scope.cast(GenZir).?.decl,
.gen_nosuspend => return scope.cast(Nosuspend).?.gen_zir.decl,
.local_val => scope.cast(LocalVal).?.gen_zir.zir_code.decl,
.local_ptr => scope.cast(LocalPtr).?.gen_zir.zir_code.decl,
.gen_suspend => return scope.cast(GenZir).?.zir_code.decl,
.gen_nosuspend => return scope.cast(Nosuspend).?.gen_zir.zir_code.decl,
.file => null,
.container => null,
.decl_ref => scope.cast(DeclRef).?.decl,
@@ -463,11 +463,11 @@ pub const Scope = struct {
.file => return &scope.cast(File).?.tree,
.block => return &scope.cast(Block).?.src_decl.container.file_scope.tree,
.gen_zir => return &scope.cast(GenZir).?.decl.container.file_scope.tree,
.local_val => return &scope.cast(LocalVal).?.gen_zir.decl.container.file_scope.tree,
.local_ptr => return &scope.cast(LocalPtr).?.gen_zir.decl.container.file_scope.tree,
.local_val => return &scope.cast(LocalVal).?.gen_zir.zir_code.decl.container.file_scope.tree,
.local_ptr => return &scope.cast(LocalPtr).?.gen_zir.zir_code.decl.container.file_scope.tree,
.container => return &scope.cast(Container).?.file_scope.tree,
.gen_suspend => return &scope.cast(GenZir).?.decl.container.file_scope.tree,
.gen_nosuspend => return &scope.cast(Nosuspend).?.gen_zir.decl.container.file_scope.tree,
.gen_nosuspend => return &scope.cast(Nosuspend).?.gen_zir.zir_code.decl.container.file_scope.tree,
.decl_ref => return &scope.cast(DeclRef).?.decl.container.file_scope.tree,
}
}
@@ -529,7 +529,7 @@ pub const Scope = struct {
.block => return @fieldParentPtr(Block, "base", cur).src_decl.container.file_scope,
.gen_suspend => @fieldParentPtr(GenZir, "base", cur).parent,
.gen_nosuspend => @fieldParentPtr(Nosuspend, "base", cur).parent,
.decl_ref => @fieldParentPtr(DeclRef, "base", cur).decl.container.file_scope,
.decl_ref => return @fieldParentPtr(DeclRef, "base", cur).decl.container.file_scope,
};
}
}
@@ -730,11 +730,6 @@ pub const Scope = struct {
pub const Inlining = struct {
/// Shared state among the entire inline/comptime call stack.
shared: *Shared,
/// We use this to count from 0 so that arg instructions know
/// which parameter index they are, without having to store
/// a parameter index with each arg instruction.
param_index: usize,
casted_args: []*ir.Inst,
merges: Merges,
pub const Shared = struct {
@@ -762,16 +757,12 @@ pub const Scope = struct {
pub fn makeSubBlock(parent: *Block) Block {
return .{
.parent = parent,
.inst_map = parent.inst_map,
.func = parent.func,
.owner_decl = parent.owner_decl,
.sema = parent.sema,
.src_decl = parent.src_decl,
.instructions = .{},
.arena = parent.arena,
.label = null,
.inlining = parent.inlining,
.is_comptime = parent.is_comptime,
.branch_quota = parent.branch_quota,
};
}
@@ -795,7 +786,7 @@ pub const Scope = struct {
ty: Type,
comptime tag: ir.Inst.Tag,
) !*ir.Inst {
const inst = try block.arena.create(tag.Type());
const inst = try block.sema.arena.create(tag.Type());
inst.* = .{
.base = .{
.tag = tag,
@@ -814,7 +805,7 @@ pub const Scope = struct {
tag: ir.Inst.Tag,
operand: *ir.Inst,
) !*ir.Inst {
const inst = try block.arena.create(ir.Inst.UnOp);
const inst = try block.sema.arena.create(ir.Inst.UnOp);
inst.* = .{
.base = .{
.tag = tag,
@@ -835,7 +826,7 @@ pub const Scope = struct {
lhs: *ir.Inst,
rhs: *ir.Inst,
) !*ir.Inst {
const inst = try block.arena.create(ir.Inst.BinOp);
const inst = try block.sema.arena.create(ir.Inst.BinOp);
inst.* = .{
.base = .{
.tag = tag,
@@ -854,7 +845,7 @@ pub const Scope = struct {
target_block: *ir.Inst.Block,
operand: *ir.Inst,
) !*ir.Inst.Br {
const inst = try scope_block.arena.create(ir.Inst.Br);
const inst = try scope_block.sema.arena.create(ir.Inst.Br);
inst.* = .{
.base = .{
.tag = .br,
@@ -875,7 +866,7 @@ pub const Scope = struct {
then_body: ir.Body,
else_body: ir.Body,
) !*ir.Inst {
const inst = try block.arena.create(ir.Inst.CondBr);
const inst = try block.sema.arena.create(ir.Inst.CondBr);
inst.* = .{
.base = .{
.tag = .condbr,
@@ -897,7 +888,7 @@ pub const Scope = struct {
func: *ir.Inst,
args: []const *ir.Inst,
) !*ir.Inst {
const inst = try block.arena.create(ir.Inst.Call);
const inst = try block.sema.arena.create(ir.Inst.Call);
inst.* = .{
.base = .{
.tag = .call,
@@ -918,7 +909,7 @@ pub const Scope = struct {
cases: []ir.Inst.SwitchBr.Case,
else_body: ir.Body,
) !*ir.Inst {
const inst = try block.arena.create(ir.Inst.SwitchBr);
const inst = try block.sema.arena.create(ir.Inst.SwitchBr);
inst.* = .{
.base = .{
.tag = .switchbr,
@@ -946,7 +937,7 @@ pub const Scope = struct {
zir_code: *WipZirCode,
/// Keeps track of the list of instructions in this scope only. References
/// to instructions in `zir_code`.
instructions: std.ArrayListUnmanaged(zir.Inst.Index) = .{},
instructions: std.ArrayListUnmanaged(zir.Inst.Ref) = .{},
label: ?Label = null,
break_block: zir.Inst.Index = 0,
continue_block: zir.Inst.Index = 0,
@@ -978,12 +969,12 @@ pub const Scope = struct {
};
pub fn addFnTypeCc(gz: *GenZir, args: struct {
param_types: []const zir.Inst.Index,
ret_ty: zir.Inst.Index,
cc: zir.Inst.Index,
param_types: []const zir.Inst.Ref,
ret_ty: zir.Inst.Ref,
cc: zir.Inst.Ref,
}) !zir.Inst.Index {
const gpa = gz.zir_code.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items + 1);
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
try gz.zir_code.extra.ensureCapacity(gpa, gz.zir_code.extra.len +
@typeInfo(zir.Inst.FnTypeCc).Struct.fields.len + args.param_types.len);
@@ -994,7 +985,7 @@ pub const Scope = struct {
}) catch unreachable; // Capacity is ensured above.
gz.zir_code.extra.appendSliceAssumeCapacity(args.param_types);
const new_index = @intCast(zir.Inst.Index, gz.zir_code.instructions.len);
const new_index = gz.zir_code.instructions.len;
gz.zir_code.instructions.appendAssumeCapacity(.{
.tag = .fn_type_cc,
.data = .{ .fn_type = .{
@@ -1002,17 +993,18 @@ pub const Scope = struct {
.payload_index = payload_index,
} },
});
gz.instructions.appendAssumeCapacity(new_index);
return new_index;
const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
gz.instructions.appendAssumeCapacity(result);
return result;
}
pub fn addFnType(
gz: *GenZir,
ret_ty: zir.Inst.Index,
param_types: []const zir.Inst.Index,
ret_ty: zir.Inst.Ref,
param_types: []const zir.Inst.Ref,
) !zir.Inst.Index {
const gpa = gz.zir_code.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items + 1);
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
try gz.zir_code.extra.ensureCapacity(gpa, gz.zir_code.extra.len +
@typeInfo(zir.Inst.FnType).Struct.fields.len + param_types.len);
@@ -1022,7 +1014,7 @@ pub const Scope = struct {
}) catch unreachable; // Capacity is ensured above.
gz.zir_code.extra.appendSliceAssumeCapacity(param_types);
const new_index = @intCast(zir.Inst.Index, gz.zir_code.instructions.len);
const new_index = gz.zir_code.instructions.len;
gz.zir_code.instructions.appendAssumeCapacity(.{
.tag = .fn_type_cc,
.data = .{ .fn_type = .{
@@ -1030,29 +1022,118 @@ pub const Scope = struct {
.payload_index = payload_index,
} },
});
gz.instructions.appendAssumeCapacity(new_index);
return new_index;
const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
gz.instructions.appendAssumeCapacity(result);
return result;
}
pub fn addRetTok(
gz: *GenZir,
operand: zir.Inst.Index,
src_tok: ast.TokenIndex,
operand: zir.Inst.Ref,
/// Absolute token index. This function does the conversion to Decl offset.
abs_tok_index: ast.TokenIndex,
) !zir.Inst.Index {
const gpa = gz.zir_code.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items + 1);
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
const new_index = @intCast(zir.Inst.Index, gz.zir_code.instructions.len);
const new_index = gz.zir_code.instructions.len;
gz.zir_code.instructions.appendAssumeCapacity(.{
.tag = .ret_tok,
.data = .{ .fn_type = .{
.operand = operand,
.src_tok = src_tok,
.src_tok = abs_tok_index - gz.zir_code.decl.srcToken(),
} },
});
gz.instructions.appendAssumeCapacity(new_index);
return new_index;
const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
gz.instructions.appendAssumeCapacity(result);
return result;
}
pub fn addInt(gz: *GenZir, integer: u64) !zir.Inst.Index {
const gpa = gz.zir_code.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
const new_index = gz.zir_code.instructions.len;
gz.zir_code.instructions.appendAssumeCapacity(.{
.tag = .int,
.data = .{ .int = integer },
});
const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
gz.instructions.appendAssumeCapacity(result);
return result;
}
pub fn addUnNode(
gz: *GenZir,
tag: zir.Inst.Tag,
operand: zir.Inst.Ref,
/// Absolute node index. This function does the conversion to offset from Decl.
abs_node_index: ast.Node.Index,
) !zir.Inst.Ref {
const gpa = gz.zir_code.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
const new_index = gz.zir_code.instructions.len;
gz.zir_code.instructions.appendAssumeCapacity(.{
.tag = tag,
.data = .{ .un_node = .{
.operand = operand,
.src_node = abs_node_index - gz.zir_code.decl.srcNode(),
} },
});
const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
gz.instructions.appendAssumeCapacity(result);
return result;
}
pub fn addUnTok(
gz: *GenZir,
tag: zir.Inst.Tag,
operand: zir.Inst.Ref,
/// Absolute token index. This function does the conversion to Decl offset.
abs_tok_index: ast.TokenIndex,
) !zir.Inst.Ref {
const gpa = gz.zir_code.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
const new_index = gz.zir_code.instructions.len;
gz.zir_code.instructions.appendAssumeCapacity(.{
.tag = tag,
.data = .{ .un_tok = .{
.operand = operand,
.src_tok = abs_tok_index - gz.zir_code.decl.srcToken(),
} },
});
const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
gz.instructions.appendAssumeCapacity(result);
return result;
}
pub fn addBin(
gz: *GenZir,
tag: zir.Inst.Tag,
lhs: zir.Inst.Ref,
rhs: zir.Inst.Ref,
) !zir.Inst.Ref {
const gpa = gz.zir_code.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
const new_index = gz.zir_code.instructions.len;
gz.zir_code.instructions.appendAssumeCapacity(.{
.tag = tag,
.data = .{ .bin = .{
.lhs = lhs,
.rhs = rhs,
} },
});
const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
gz.instructions.appendAssumeCapacity(result);
return result;
}
};
@@ -1106,7 +1187,9 @@ pub const WipZirCode = struct {
instructions: std.MultiArrayList(zir.Inst) = .{},
string_bytes: std.ArrayListUnmanaged(u8) = .{},
extra: std.ArrayListUnmanaged(u32) = .{},
arg_count: usize = 0,
/// The end of special indexes. `zir.Inst.Ref` subtracts against this number to convert
/// to `zir.Inst.Index`. The default here is correct if there are 0 parameters.
ref_start_index: usize = zir.const_inst_list.len,
decl: *Decl,
gpa: *Allocator,
arena: *Allocator,
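For reference, the Ref <-> Index arithmetic that the GenZir helpers above
rely on, spelled out as a sketch (these two helper functions are
illustrative and not part of this commit):

    // Refs below ref_start_index name entries in zir.const_inst_list (and,
    // once parameters are counted, the function's parameters); Refs at or
    // above it name real entries in `instructions`.
    fn indexToRef(wzc: *WipZirCode, index: zir.Inst.Index) zir.Inst.Ref {
        return @intCast(zir.Inst.Ref, index + wzc.ref_start_index);
    }

    fn refToIndex(wzc: *WipZirCode, ref: zir.Inst.Ref) ?zir.Inst.Index {
        if (ref < wzc.ref_start_index) return null; // constant or parameter
        return @intCast(zir.Inst.Index, ref - wzc.ref_start_index);
    }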
@@ -1189,6 +1272,7 @@ pub const SrcLoc = struct {
.byte_abs,
.token_abs,
.node_abs,
=> src_loc.container.file_scope,
.byte_offset,
@@ -1201,6 +1285,13 @@ pub const SrcLoc = struct {
.node_offset_builtin_call_argn,
.node_offset_array_access_index,
.node_offset_slice_sentinel,
.node_offset_call_func,
.node_offset_field_name,
.node_offset_deref_ptr,
.node_offset_asm_source,
.node_offset_asm_ret_ty,
.node_offset_if_cond,
.node_offset_anyframe_type,
=> src_loc.container.decl.container.file_scope,
};
}
@@ -1218,6 +1309,13 @@ pub const SrcLoc = struct {
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_abs => |node_index| {
const file_scope = src_loc.container.file_scope;
const tree = try mod.getAstTree(file_scope);
const token_starts = tree.tokens.items(.start);
const tok_index = tree.firstToken(node_index);
return token_starts[tok_index];
},
.byte_offset => |byte_off| {
const decl = src_loc.container.decl;
return decl.srcByteOffset() + byte_off;
@@ -1244,6 +1342,13 @@ pub const SrcLoc = struct {
.node_offset_builtin_call_argn => unreachable, // Handled specially in `Sema`.
.node_offset_array_access_index => @panic("TODO"),
.node_offset_slice_sentinel => @panic("TODO"),
.node_offset_call_func => @panic("TODO"),
.node_offset_field_name => @panic("TODO"),
.node_offset_deref_ptr => @panic("TODO"),
.node_offset_asm_source => @panic("TODO"),
.node_offset_asm_ret_ty => @panic("TODO"),
.node_offset_if_cond => @panic("TODO"),
.node_offset_anyframe_type => @panic("TODO"),
}
}
};
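Each node_offset_* case should decode the same way: add the stored offset to
the Decl's own AST node, then navigate within the resulting node. A sketch of
how the node_offset_if_cond case might be filled in (tree.ifFull is an
assumption about the ast API, not code from this commit):

    .node_offset_if_cond => |node_off| {
        const decl = src_loc.container.decl;
        const tree = try mod.getAstTree(decl.container.file_scope);
        // Offsets are stored relative to the Decl's own AST node.
        const node = decl.srcNode() + node_off;
        const if_full = tree.ifFull(node);
        const tok_index = tree.firstToken(if_full.ast.cond_expr);
        const token_starts = tree.tokens.items(.start);
        return token_starts[tok_index];
    },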
@@ -1276,6 +1381,10 @@ pub const LazySrcLoc = union(enum) {
/// offset from 0. The source file is determined contextually.
/// Inside a `SrcLoc`, the `file_scope` union field will be active.
token_abs: u32,
/// The source location points to an AST node within a source file,
/// offset from 0. The source file is determined contextually.
/// Inside a `SrcLoc`, the `file_scope` union field will be active.
node_abs: u32,
/// The source location points to a byte offset within a source file,
/// offset from the byte offset of the Decl within the file.
/// The Decl is determined contextually.
@@ -1322,6 +1431,48 @@ pub const LazySrcLoc = union(enum) {
/// to the sentinel expression.
/// The Decl is determined contextually.
node_offset_slice_sentinel: u32,
/// The source location points to the callee expression of a function
/// call expression, found by taking this AST node index offset from the containing
/// Decl AST node, which points to a function call AST node. Next, navigate
/// to the callee expression.
/// The Decl is determined contextually.
node_offset_call_func: u32,
/// The source location points to the field name of a field access expression,
/// found by taking this AST node index offset from the containing
/// Decl AST node, which points to a field access AST node. Next, navigate
/// to the field name token.
/// The Decl is determined contextually.
node_offset_field_name: u32,
/// The source location points to the pointer of a pointer deref expression,
/// found by taking this AST node index offset from the containing
/// Decl AST node, which points to a pointer deref AST node. Next, navigate
/// to the pointer expression.
/// The Decl is determined contextually.
node_offset_deref_ptr: u32,
/// The source location points to the assembly source code of an inline assembly
/// expression, found by taking this AST node index offset from the containing
/// Decl AST node, which points to an inline assembly AST node. Next, navigate
/// to the asm template source code.
/// The Decl is determined contextually.
node_offset_asm_source: u32,
/// The source location points to the return type of an inline assembly
/// expression, found by taking this AST node index offset from the containing
/// Decl AST node, which points to an inline assembly AST node. Next, navigate
/// to the return type expression.
/// The Decl is determined contextually.
node_offset_asm_ret_ty: u32,
/// The source location points to the condition expression of an if
/// expression, found by taking this AST node index offset from the containing
/// Decl AST node, which points to an if expression AST node. Next, navigate
/// to the condition expression.
/// The Decl is determined contextually.
node_offset_if_cond: u32,
/// The source location points to the type expression of an `anyframe->T`
/// expression, found by taking this AST node index offset from the containing
/// Decl AST node, which points to an `anyframe->T` expression AST node. Next, navigate
/// to the type expression.
/// The Decl is determined contextually.
node_offset_anyframe_type: u32,
/// Upgrade to a `SrcLoc` based on the `Decl` or file in the provided scope.
pub fn toSrcLoc(lazy: LazySrcLoc, scope: *Scope) SrcLoc {
@@ -1330,6 +1481,7 @@ pub const LazySrcLoc = union(enum) {
.todo,
.byte_abs,
.token_abs,
.node_abs,
=> .{
.container = .{ .file_scope = scope.getFileScope() },
.lazy = lazy,
@@ -1345,12 +1497,56 @@ pub const LazySrcLoc = union(enum) {
.node_offset_builtin_call_argn,
.node_offset_array_access_index,
.node_offset_slice_sentinel,
.node_offset_call_func,
.node_offset_field_name,
.node_offset_deref_ptr,
.node_offset_asm_source,
.node_offset_asm_ret_ty,
.node_offset_if_cond,
.node_offset_anyframe_type,
=> .{
.container = .{ .decl = scope.srcDecl().? },
.lazy = lazy,
},
};
}
/// Upgrade to a `SrcLoc` based on the `Decl` provided.
pub fn toSrcLocWithDecl(lazy: LazySrcLoc, decl: *Decl) SrcLoc {
return switch (lazy) {
.unneeded,
.todo,
.byte_abs,
.token_abs,
.node_abs,
=> .{
.container = .{ .file_scope = decl.getFileScope() },
.lazy = lazy,
},
.byte_offset,
.token_offset,
.node_offset,
.node_offset_var_decl_ty,
.node_offset_for_cond,
.node_offset_builtin_call_arg0,
.node_offset_builtin_call_arg1,
.node_offset_builtin_call_argn,
.node_offset_array_access_index,
.node_offset_slice_sentinel,
.node_offset_call_func,
.node_offset_field_name,
.node_offset_deref_ptr,
.node_offset_asm_source,
.node_offset_asm_ret_ty,
.node_offset_if_cond,
.node_offset_anyframe_type,
=> .{
.container = .{ .decl = decl },
.lazy = lazy,
},
};
}
};
pub const InnerError = error{ OutOfMemory, AnalysisFail };
@@ -2255,7 +2451,7 @@ fn astgenAndSemaVarDecl(
return type_changed;
}
fn declareDeclDependency(mod: *Module, depender: *Decl, dependee: *Decl) !void {
pub fn declareDeclDependency(mod: *Module, depender: *Decl, dependee: *Decl) !void {
try depender.dependencies.ensureCapacity(mod.gpa, depender.dependencies.items().len + 1);
try dependee.dependants.ensureCapacity(mod.gpa, dependee.dependants.items().len + 1);
@@ -3144,8 +3340,8 @@ pub fn lookupDeclName(mod: *Module, scope: *Scope, ident_name: []const u8) ?*Dec
return mod.decl_table.get(name_hash);
}
fn makeIntType(mod: *Module, scope: *Scope, signed: bool, bits: u16) !Type {
const int_payload = try scope.arena().create(Type.Payload.Bits);
pub fn makeIntType(arena: *Allocator, signed: bool, bits: u16) !Type {
const int_payload = try arena.create(Type.Payload.Bits);
int_payload.* = .{
.base = .{
.tag = if (signed) .int_signed else .int_unsigned,
@@ -3252,45 +3448,51 @@ pub fn failWithOwnedErrorMsg(mod: *Module, scope: *Scope, err_msg: *ErrorMsg) In
if (inlining.shared.caller) |func| {
func.state = .sema_failure;
} else {
block.owner_decl.analysis = .sema_failure;
block.owner_decl.generation = mod.generation;
block.sema.owner_decl.analysis = .sema_failure;
block.sema.owner_decl.generation = mod.generation;
}
} else {
if (block.func) |func| {
if (block.sema.func) |func| {
func.state = .sema_failure;
} else {
block.owner_decl.analysis = .sema_failure;
block.owner_decl.generation = mod.generation;
block.sema.owner_decl.analysis = .sema_failure;
block.sema.owner_decl.generation = mod.generation;
}
}
mod.failed_decls.putAssumeCapacityNoClobber(block.owner_decl, err_msg);
mod.failed_decls.putAssumeCapacityNoClobber(block.sema.owner_decl, err_msg);
},
.gen_zir, .gen_suspend => {
const gen_zir = scope.cast(Scope.GenZir).?;
gen_zir.decl.analysis = .sema_failure;
gen_zir.decl.generation = mod.generation;
mod.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
gen_zir.zir_code.decl.analysis = .sema_failure;
gen_zir.zir_code.decl.generation = mod.generation;
mod.failed_decls.putAssumeCapacityNoClobber(gen_zir.zir_code.decl, err_msg);
},
.local_val => {
const gen_zir = scope.cast(Scope.LocalVal).?.gen_zir;
gen_zir.decl.analysis = .sema_failure;
gen_zir.decl.generation = mod.generation;
mod.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
gen_zir.zir_code.decl.analysis = .sema_failure;
gen_zir.zir_code.decl.generation = mod.generation;
mod.failed_decls.putAssumeCapacityNoClobber(gen_zir.zir_code.decl, err_msg);
},
.local_ptr => {
const gen_zir = scope.cast(Scope.LocalPtr).?.gen_zir;
gen_zir.decl.analysis = .sema_failure;
gen_zir.decl.generation = mod.generation;
mod.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
gen_zir.zir_code.decl.analysis = .sema_failure;
gen_zir.zir_code.decl.generation = mod.generation;
mod.failed_decls.putAssumeCapacityNoClobber(gen_zir.zir_code.decl, err_msg);
},
.gen_nosuspend => {
const gen_zir = scope.cast(Scope.Nosuspend).?.gen_zir;
gen_zir.decl.analysis = .sema_failure;
gen_zir.decl.generation = mod.generation;
mod.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
gen_zir.zir_code.decl.analysis = .sema_failure;
gen_zir.zir_code.decl.generation = mod.generation;
mod.failed_decls.putAssumeCapacityNoClobber(gen_zir.zir_code.decl, err_msg);
},
.file => unreachable,
.container => unreachable,
.decl_ref => {
const decl_ref = scope.cast(Scope.DeclRef).?;
decl_ref.decl.analysis = .sema_failure;
decl_ref.decl.generation = mod.generation;
mod.failed_decls.putAssumeCapacityNoClobber(decl_ref.decl, err_msg);
},
}
return error.AnalysisFail;
}
@@ -3344,14 +3546,12 @@ pub fn intSub(allocator: *Allocator, lhs: Value, rhs: Value) !Value {
}
pub fn floatAdd(
mod: *Module,
scope: *Scope,
arena: *Allocator,
float_type: Type,
src: LazySrcLoc,
lhs: Value,
rhs: Value,
) !Value {
const arena = scope.arena();
switch (float_type.tag()) {
.f16 => {
@panic("TODO add __trunctfhf2 to compiler-rt");
@@ -3379,14 +3579,12 @@ pub fn floatAdd(
}
pub fn floatSub(
mod: *Module,
scope: *Scope,
arena: *Allocator,
float_type: Type,
src: LazySrcLoc,
lhs: Value,
rhs: Value,
) !Value {
const arena = scope.arena();
switch (float_type.tag()) {
.f16 => {
@panic("TODO add __trunctfhf2 to compiler-rt");
@@ -3584,7 +3782,6 @@ pub fn optimizeMode(mod: Module) std.builtin.Mode {
pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 {
const tree = scope.tree();
const token_tags = tree.tokens.items(.tag);
const token_starts = tree.tokens.items(.start);
assert(token_tags[token] == .identifier);
const ident_name = tree.tokenSlice(token);
if (!mem.startsWith(u8, ident_name, "@")) {
@@ -3592,7 +3789,7 @@ pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex)
}
var buf = std.ArrayList(u8).init(mod.gpa);
defer buf.deinit();
try parseStrLit(mod, scope, buf, ident_name, 1);
try parseStrLit(mod, scope, token, &buf, ident_name, 1);
return buf.toOwnedSlice();
}
@@ -3607,13 +3804,12 @@ pub fn appendIdentStr(
) InnerError!void {
const tree = scope.tree();
const token_tags = tree.tokens.items(.tag);
const token_starts = tree.tokens.items(.start);
assert(token_tags[token] == .identifier);
const ident_name = tree.tokenSlice(token);
if (!mem.startsWith(u8, ident_name, "@")) {
return buf.appendSlice(ident_name);
} else {
return parseStrLit(scope, buf, ident_name, 1);
return parseStrLit(scope, token, buf, ident_name, 1);
}
}
@@ -3621,57 +3817,60 @@ pub fn appendIdentStr(
pub fn parseStrLit(
mod: *Module,
scope: *Scope,
buf: *ArrayList(u8),
token: ast.TokenIndex,
buf: *std.ArrayList(u8),
bytes: []const u8,
offset: usize,
offset: u32,
) InnerError!void {
const tree = scope.tree();
const token_starts = tree.tokens.items(.start);
const raw_string = bytes[offset..];
switch (try std.zig.string_literal.parseAppend(buf, raw_string)) {
.success => return,
.invalid_character => |bad_index| {
return mod.fail(
return mod.failOff(
scope,
token_starts[token] + offset + bad_index,
token_starts[token] + offset + @intCast(u32, bad_index),
"invalid string literal character: '{c}'",
.{raw_string[bad_index]},
);
},
.expected_hex_digits => |bad_index| {
return mod.fail(
return mod.failOff(
scope,
token_starts[token] + offset + bad_index,
token_starts[token] + offset + @intCast(u32, bad_index),
"expected hex digits after '\\x'",
.{},
);
},
.invalid_hex_escape => |bad_index| {
return mod.fail(
return mod.failOff(
scope,
token_starts[token] + offset + bad_index,
token_starts[token] + offset + @intCast(u32, bad_index),
"invalid hex digit: '{c}'",
.{raw_string[bad_index]},
);
},
.invalid_unicode_escape => |bad_index| {
return mod.fail(
return mod.failOff(
scope,
token_starts[token] + offset + bad_index,
token_starts[token] + offset + @intCast(u32, bad_index),
"invalid unicode digit: '{c}'",
.{raw_string[bad_index]},
);
},
.missing_matching_brace => |bad_index| {
return mod.fail(
.missing_matching_rbrace => |bad_index| {
return mod.failOff(
scope,
token_starts[token] + offset + bad_index,
token_starts[token] + offset + @intCast(u32, bad_index),
"missing matching '}}' character",
.{},
);
},
.expected_unicode_digits => |bad_index| {
return mod.fail(
return mod.failOff(
scope,
token_starts[token] + offset + bad_index,
token_starts[token] + offset + @intCast(u32, bad_index),
"expected unicode digits after '\\u'",
.{},
);

File diff suppressed because it is too large.

@@ -25,18 +25,18 @@ pub const ResultLoc = union(enum) {
/// of an assignment uses this kind of result location.
ref,
/// The expression will be coerced into this type, but it will be evaluated as an rvalue.
ty: zir.Inst.Index,
ty: zir.Inst.Ref,
/// The expression must store its result into this typed pointer. The result instruction
/// from the expression must be ignored.
ptr: zir.Inst.Index,
ptr: zir.Inst.Ref,
/// The expression must store its result into this allocation, which has an inferred type.
/// The result instruction from the expression must be ignored.
/// Always an instruction with tag `alloc_inferred`.
inferred_ptr: zir.Inst.Index,
inferred_ptr: zir.Inst.Ref,
/// The expression must store its result into this pointer, which is a typed pointer that
/// has been bitcasted to whatever the expression's type is.
/// The result instruction from the expression must be ignored.
bitcasted_ptr: zir.Inst.Index,
bitcasted_ptr: zir.Inst.Ref,
/// There is a pointer for the expression to store its result into, however, its type
/// is inferred based on peer type resolution for a `zir.Inst.Block`.
/// The result instruction from the expression must be ignored.
@@ -1133,10 +1133,9 @@ fn varDecl(
// or an rvalue as a result location. If it is an rvalue, we can use the instruction as
// the variable, no memory location needed.
if (!nodeMayNeedMemoryLocation(scope, var_decl.ast.init_node)) {
const result_loc: ResultLoc = if (var_decl.ast.type_node != 0)
.{ .ty = try typeExpr(mod, scope, var_decl.ast.type_node) }
else
.none;
const result_loc: ResultLoc = if (var_decl.ast.type_node != 0) .{
.ty = try typeExpr(mod, scope, var_decl.ast.type_node),
} else .none;
const init_inst = try expr(mod, scope, result_loc, var_decl.ast.init_node);
const sub_scope = try block_arena.create(Scope.LocalVal);
sub_scope.* = .{
@@ -2539,16 +2538,13 @@ fn switchExpr(
if (underscore_src != null) special_prong = .underscore;
var cases = try block_scope.arena.alloc(zir.Inst.SwitchBr.Case, simple_case_count);
const rl_and_tag: struct { rl: ResultLoc, tag: zir.Inst.Tag } = if (any_payload_is_ref)
.{
.rl = .ref,
.tag = .switchbr_ref,
}
else
.{
.rl = .none,
.tag = .switchbr,
};
const rl_and_tag: struct { rl: ResultLoc, tag: zir.Inst.Tag } = if (any_payload_is_ref) .{
.rl = .ref,
.tag = .switchbr_ref,
} else .{
.rl = .none,
.tag = .switchbr,
};
const target = try expr(mod, &block_scope.base, rl_and_tag.rl, target_node);
const switch_inst = try addZirInstT(mod, &block_scope.base, switch_src, zir.Inst.SwitchBr, rl_and_tag.tag, .{
.target = target,
@@ -2980,11 +2976,12 @@ fn integerLiteral(
const main_tokens = tree.nodes.items(.main_token);
const int_token = main_tokens[int_lit];
const prefixed_bytes = tree.tokenSlice(int_token);
const gz = scope.getGenZir();
if (std.fmt.parseInt(u64, prefixed_bytes, 0)) |small_int| {
const result: zir.Inst.Index = switch (small_int) {
0 => @enumToInt(zir.Const.zero),
1 => @enumToInt(zir.Const.one),
else => try addZirInt(small_int),
else => try gz.addInt(small_int),
};
return rvalue(mod, scope, rl, result);
} else |err| {
@@ -3418,6 +3415,10 @@ fn callExpr(
node: ast.Node.Index,
call: ast.full.Call,
) InnerError!*zir.Inst {
if (true) {
@panic("TODO update for zir-memory-layout branch");
}
if (call.async_token) |async_token| {
return mod.failTok(scope, async_token, "TODO implement async fn call", .{});
}
@@ -3512,7 +3513,7 @@ fn nosuspendExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Inde
const tree = scope.tree();
var child_scope = Scope.Nosuspend{
.parent = scope,
.gen_zir = scope.getGenZIR(),
.gen_zir = scope.getGenZir(),
.src = tree.tokens.items(.start)[tree.nodes.items(.main_token)[node]],
};
@@ -3808,33 +3809,42 @@ fn nodeMayNeedMemoryLocation(scope: *Scope, start_node: ast.Node.Index) bool {
/// result locations must call this function on their result.
/// As an example, if the `ResultLoc` is `ptr`, it will write the result to the pointer.
/// If the `ResultLoc` is `ty`, it will coerce the result to the type.
fn rvalue(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerError!*zir.Inst {
fn rvalue(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
result: zir.Inst.Ref,
src_node: ast.Node.Index,
) InnerError!zir.Inst.Ref {
const gz = scope.getGenZir();
switch (rl) {
.none => return result,
.discard => {
// Emit a compile error for discarding error values.
_ = try addZIRUnOp(mod, scope, result.src, .ensure_result_non_error, result);
_ = try gz.addUnNode(.ensure_result_non_error, result, src_node);
return result;
},
.ref => {
// We need a pointer but we have a value.
return addZIRUnOp(mod, scope, result.src, .ref, result);
const tree = scope.tree();
const src_token = tree.firstToken(src_node);
return gz.addUnTok(.ref, result, src_token);
},
.ty => |ty_inst| return addZIRBinOp(mod, scope, result.src, .as, ty_inst, result),
.ty => |ty_inst| return gz.addBin(.as, ty_inst, result),
.ptr => |ptr_inst| {
_ = try addZIRBinOp(mod, scope, result.src, .store, ptr_inst, result);
_ = try gz.addBin(.store, ptr_inst, result);
return result;
},
.bitcasted_ptr => |bitcasted_ptr| {
return mod.fail(scope, result.src, "TODO implement rvalue .bitcasted_ptr", .{});
return mod.failNode(scope, src_node, "TODO implement rvalue .bitcasted_ptr", .{});
},
.inferred_ptr => |alloc| {
_ = try addZIRBinOp(mod, scope, result.src, .store_to_inferred_ptr, &alloc.base, result);
_ = try gz.addBin(.store_to_inferred_ptr, alloc, result);
return result;
},
.block_ptr => |block_scope| {
block_scope.rvalue_rl_count += 1;
_ = try addZIRBinOp(mod, scope, result.src, .store_to_block_ptr, block_scope.rl_ptr.?, result);
_ = try gz.addBin(.store_to_block_ptr, block_scope.rl_ptr.?, result);
return result;
},
}

@@ -17,6 +17,7 @@ const DW = std.dwarf;
const leb128 = std.leb;
const log = std.log.scoped(.codegen);
const build_options = @import("build_options");
const LazySrcLoc = Module.LazySrcLoc;
/// The codegen-related data that is stored in `ir.Inst.Block` instructions.
pub const BlockData = struct {
@@ -978,7 +979,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
/// Copies a value to a register without tracking the register. The register is not considered
/// allocated. A second call to `copyToTmpRegister` may return the same register.
/// This can have a side effect of spilling instructions to the stack to free up a register.
fn copyToTmpRegister(self: *Self, src: usize, ty: Type, mcv: MCValue) !Register {
fn copyToTmpRegister(self: *Self, src: LazySrcLoc, ty: Type, mcv: MCValue) !Register {
const reg = self.findUnusedReg() orelse b: {
// We'll take over the first register. Move the instruction that was previously
// there to a stack allocation.
@@ -1457,7 +1458,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
fn genArmBinOpCode(
self: *Self,
src: usize,
src: LazySrcLoc,
dst_reg: Register,
lhs_mcv: MCValue,
rhs_mcv: MCValue,
@@ -1620,7 +1621,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
fn genX8664BinMathCode(
self: *Self,
src: usize,
src: LazySrcLoc,
dst_ty: Type,
dst_mcv: MCValue,
src_mcv: MCValue,
@@ -1706,7 +1707,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}
fn genX8664ModRMRegToStack(self: *Self, src: usize, ty: Type, off: u32, reg: Register, opcode: u8) !void {
fn genX8664ModRMRegToStack(self: *Self, src: LazySrcLoc, ty: Type, off: u32, reg: Register, opcode: u8) !void {
const abi_size = ty.abiSize(self.target.*);
const adj_off = off + abi_size;
try self.code.ensureCapacity(self.code.items.len + 7);
@@ -1807,7 +1808,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
return result;
}
fn genBreakpoint(self: *Self, src: usize) !MCValue {
fn genBreakpoint(self: *Self, src: LazySrcLoc) !MCValue {
switch (arch) {
.i386, .x86_64 => {
try self.code.append(0xcc); // int3
@@ -2221,7 +2222,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}
fn ret(self: *Self, src: usize, mcv: MCValue) !MCValue {
fn ret(self: *Self, src: LazySrcLoc, mcv: MCValue) !MCValue {
const ret_ty = self.fn_type.fnReturnType();
try self.setRegOrMem(src, ret_ty, self.ret_mcv, mcv);
switch (arch) {
@@ -2558,7 +2559,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
/// Send control flow to the `index` of `self.code`.
fn jump(self: *Self, src: usize, index: usize) !void {
fn jump(self: *Self, src: LazySrcLoc, index: usize) !void {
switch (arch) {
.i386, .x86_64 => {
try self.code.ensureCapacity(self.code.items.len + 5);
@@ -2615,7 +2616,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}
fn performReloc(self: *Self, src: usize, reloc: Reloc) !void {
fn performReloc(self: *Self, src: LazySrcLoc, reloc: Reloc) !void {
switch (reloc) {
.rel32 => |pos| {
const amt = self.code.items.len - (pos + 4);
@@ -2679,7 +2680,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}
fn br(self: *Self, src: usize, block: *ir.Inst.Block, operand: *ir.Inst) !MCValue {
fn br(self: *Self, src: LazySrcLoc, block: *ir.Inst.Block, operand: *ir.Inst) !MCValue {
if (operand.ty.hasCodeGenBits()) {
const operand_mcv = try self.resolveInst(operand);
const block_mcv = @bitCast(MCValue, block.codegen.mcv);
@@ -2692,7 +2693,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
return self.brVoid(src, block);
}
fn brVoid(self: *Self, src: usize, block: *ir.Inst.Block) !MCValue {
fn brVoid(self: *Self, src: LazySrcLoc, block: *ir.Inst.Block) !MCValue {
// Emit a jump with a relocation. It will be patched up after the block ends.
try block.codegen.relocs.ensureCapacity(self.gpa, block.codegen.relocs.items.len + 1);
@@ -2896,7 +2897,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
/// Sets the value without any modifications to register allocation metadata or stack allocation metadata.
fn setRegOrMem(self: *Self, src: usize, ty: Type, loc: MCValue, val: MCValue) !void {
fn setRegOrMem(self: *Self, src: LazySrcLoc, ty: Type, loc: MCValue, val: MCValue) !void {
switch (loc) {
.none => return,
.register => |reg| return self.genSetReg(src, ty, reg, val),
@@ -2908,7 +2909,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}
fn genSetStack(self: *Self, src: usize, ty: Type, stack_offset: u32, mcv: MCValue) InnerError!void {
fn genSetStack(self: *Self, src: LazySrcLoc, ty: Type, stack_offset: u32, mcv: MCValue) InnerError!void {
switch (arch) {
.arm, .armeb => switch (mcv) {
.dead => unreachable,
@@ -3111,7 +3112,8 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
4, 8 => {
const offset = if (math.cast(i9, adj_off)) |imm|
Instruction.LoadStoreOffset.imm_post_index(-imm)
else |_| Instruction.LoadStoreOffset.reg(try self.copyToTmpRegister(src, Type.initTag(.u64), MCValue{ .immediate = adj_off }));
else |_|
Instruction.LoadStoreOffset.reg(try self.copyToTmpRegister(src, Type.initTag(.u64), MCValue{ .immediate = adj_off }));
const rn: Register = switch (arch) {
.aarch64, .aarch64_be => .x29,
.aarch64_32 => .w29,
@@ -3140,7 +3142,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}
fn genSetReg(self: *Self, src: usize, ty: Type, reg: Register, mcv: MCValue) InnerError!void {
fn genSetReg(self: *Self, src: LazySrcLoc, ty: Type, reg: Register, mcv: MCValue) InnerError!void {
switch (arch) {
.arm, .armeb => switch (mcv) {
.dead => unreachable,
@@ -3762,7 +3764,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
return mcv;
}
fn genTypedValue(self: *Self, src: usize, typed_value: TypedValue) InnerError!MCValue {
fn genTypedValue(self: *Self, src: LazySrcLoc, typed_value: TypedValue) InnerError!MCValue {
if (typed_value.val.isUndef())
return MCValue{ .undef = {} };
const ptr_bits = self.target.cpu.arch.ptrBitWidth();
@@ -3835,7 +3837,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
};
/// Caller must call `CallMCValues.deinit`.
fn resolveCallingConventionValues(self: *Self, src: usize, fn_ty: Type) !CallMCValues {
fn resolveCallingConventionValues(self: *Self, src: LazySrcLoc, fn_ty: Type) !CallMCValues {
const cc = fn_ty.fnCallingConvention();
const param_types = try self.gpa.alloc(Type, fn_ty.fnParamLen());
defer self.gpa.free(param_types);
@@ -4049,13 +4051,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
};
}
fn fail(self: *Self, src: usize, comptime format: []const u8, args: anytype) InnerError {
fn fail(self: *Self, src: LazySrcLoc, comptime format: []const u8, args: anytype) InnerError {
@setCold(true);
assert(self.err_msg == null);
self.err_msg = try ErrorMsg.create(self.bin_file.allocator, .{
.file_scope = self.src_loc.file_scope,
.byte_offset = src,
}, format, args);
const src_loc = src.toSrcLocWithDecl(self.mod_fn.owner_decl);
self.err_msg = try ErrorMsg.create(self.bin_file.allocator, src_loc, format, args);
return error.CodegenFail;
}

@@ -591,7 +591,7 @@ pub const Body = struct {
};
/// For debugging purposes, prints a function representation to stderr.
pub fn dumpFn(old_module: IrModule, module_fn: *IrModule.Fn) void {
pub fn dumpFn(old_module: Module, module_fn: *Module.Fn) void {
const allocator = old_module.gpa;
var ctx: DumpTzir = .{
.allocator = allocator,
@@ -622,10 +622,10 @@ pub fn dumpFn(old_module: IrModule, module_fn: *IrModule.Fn) void {
}
const DumpTzir = struct {
allocator: *Allocator,
allocator: *std.mem.Allocator,
arena: std.heap.ArenaAllocator,
old_module: *const IrModule,
module_fn: *IrModule.Fn,
old_module: *const Module,
module_fn: *Module.Fn,
indent: usize,
inst_table: InstTable,
partial_inst_table: InstTable,
@@ -634,12 +634,12 @@ const DumpTzir = struct {
next_partial_index: usize = 0,
next_const_index: usize = 0,
const InstTable = std.AutoArrayHashMap(*ir.Inst, usize);
const InstTable = std.AutoArrayHashMap(*Inst, usize);
/// TODO: Improve this code to include a stack of ir.Body and store the instructions
/// TODO: Improve this code to include a stack of Body and store the instructions
/// in there. Now we are putting all the instructions in a function local table,
/// however instructions that are in a Body can be thrown away when the Body ends.
fn dump(dtz: *DumpTzir, body: ir.Body, writer: std.fs.File.Writer) !void {
fn dump(dtz: *DumpTzir, body: Body, writer: std.fs.File.Writer) !void {
// First pass to pre-populate the table so that we can show even invalid references.
// Must iterate the same order we iterate the second time.
// We also look for constants and put them in the const_table.
@@ -657,7 +657,7 @@ const DumpTzir = struct {
return dtz.dumpBody(body, writer);
}
fn fetchInstsAndResolveConsts(dtz: *DumpTzir, body: ir.Body) error{OutOfMemory}!void {
fn fetchInstsAndResolveConsts(dtz: *DumpTzir, body: Body) error{OutOfMemory}!void {
for (body.instructions) |inst| {
try dtz.inst_table.put(inst, dtz.next_index);
dtz.next_index += 1;
@@ -694,13 +694,16 @@ const DumpTzir = struct {
.unwrap_errunion_payload_ptr,
.unwrap_errunion_err_ptr,
=> {
const un_op = inst.cast(ir.Inst.UnOp).?;
const un_op = inst.cast(Inst.UnOp).?;
try dtz.findConst(un_op.operand);
},
.add,
.addwrap,
.sub,
.subwrap,
.mul,
.mulwrap,
.cmp_lt,
.cmp_lte,
.cmp_eq,
@@ -714,7 +717,7 @@ const DumpTzir = struct {
.bit_or,
.xor,
=> {
const bin_op = inst.cast(ir.Inst.BinOp).?;
const bin_op = inst.cast(Inst.BinOp).?;
try dtz.findConst(bin_op.lhs);
try dtz.findConst(bin_op.rhs);
},
@@ -770,7 +773,7 @@ const DumpTzir = struct {
}
}
fn dumpBody(dtz: *DumpTzir, body: ir.Body, writer: std.fs.File.Writer) (std.fs.File.WriteError || error{OutOfMemory})!void {
fn dumpBody(dtz: *DumpTzir, body: Body, writer: std.fs.File.Writer) (std.fs.File.WriteError || error{OutOfMemory})!void {
for (body.instructions) |inst| {
const my_index = dtz.next_partial_index;
try dtz.partial_inst_table.put(inst, my_index);
@@ -812,7 +815,7 @@ const DumpTzir = struct {
.unwrap_errunion_payload_ptr,
.unwrap_errunion_err_ptr,
=> {
const un_op = inst.cast(ir.Inst.UnOp).?;
const un_op = inst.cast(Inst.UnOp).?;
const kinky = try dtz.writeInst(writer, un_op.operand);
if (kinky != null) {
try writer.writeAll(") // Instruction does not dominate all uses!\n");
@@ -822,8 +825,11 @@ const DumpTzir = struct {
},
.add,
.addwrap,
.sub,
.subwrap,
.mul,
.mulwrap,
.cmp_lt,
.cmp_lte,
.cmp_eq,
@@ -837,7 +843,7 @@ const DumpTzir = struct {
.bit_or,
.xor,
=> {
const bin_op = inst.cast(ir.Inst.BinOp).?;
const bin_op = inst.cast(Inst.BinOp).?;
const lhs_kinky = try dtz.writeInst(writer, bin_op.lhs);
try writer.writeAll(", ");
@@ -1008,7 +1014,7 @@ const DumpTzir = struct {
}
}
fn writeInst(dtz: *DumpTzir, writer: std.fs.File.Writer, inst: *ir.Inst) !?usize {
fn writeInst(dtz: *DumpTzir, writer: std.fs.File.Writer, inst: *Inst) !?usize {
if (dtz.partial_inst_table.get(inst)) |operand_index| {
try writer.print("%{d}", .{operand_index});
return null;
@@ -1024,7 +1030,7 @@ const DumpTzir = struct {
}
}
fn findConst(dtz: *DumpTzir, operand: *ir.Inst) !void {
fn findConst(dtz: *DumpTzir, operand: *Inst) !void {
if (operand.tag == .constant) {
try dtz.const_table.put(operand, dtz.next_const_index);
dtz.next_const_index += 1;

@@ -727,7 +727,7 @@ pub fn freeDecl(self: *Coff, decl: *Module.Decl) void {
self.offset_table_free_list.append(self.base.allocator, decl.link.coff.offset_table_index) catch {};
}
pub fn updateDeclExports(self: *Coff, module: *Module, decl: *const Module.Decl, exports: []const *Module.Export) !void {
pub fn updateDeclExports(self: *Coff, module: *Module, decl: *Module.Decl, exports: []const *Module.Export) !void {
if (self.llvm_ir_module) |_| return;
for (exports) |exp| {

@@ -2670,7 +2670,7 @@ fn writeDeclDebugInfo(self: *Elf, text_block: *TextBlock, dbg_info_buf: []const
pub fn updateDeclExports(
self: *Elf,
module: *Module,
decl: *const Module.Decl,
decl: *Module.Decl,
exports: []const *Module.Export,
) !void {
if (self.llvm_ir_module) |_| return;

@@ -834,7 +834,7 @@ fn linkWithLLD(self: *MachO, comp: *Compilation) !void {
}
},
else => {
log.err("{s} terminated", .{ argv.items[0] });
log.err("{s} terminated", .{argv.items[0]});
return error.LLDCrashed;
},
}
@@ -1323,7 +1323,7 @@ pub fn updateDeclLineNumber(self: *MachO, module: *Module, decl: *const Module.D
pub fn updateDeclExports(
self: *MachO,
module: *Module,
decl: *const Module.Decl,
decl: *Module.Decl,
exports: []const *Module.Export,
) !void {
const tracy = trace(@src());

@@ -94,9 +94,7 @@ pub const Type = extern union {
.anyframe_T, .@"anyframe" => return .AnyFrame,
.@"struct", .empty_struct => return .Struct,
.@"enum" => return .Enum,
.@"union" => return .Union,
.empty_struct => return .Struct,
.var_args_param => unreachable, // can be any type
}
@@ -484,9 +482,6 @@ pub const Type = extern union {
.error_set_single => return self.copyPayloadShallow(allocator, Payload.Name),
.empty_struct => return self.copyPayloadShallow(allocator, Payload.ContainerScope),
.@"enum" => return self.copyPayloadShallow(allocator, Payload.Enum),
.@"struct" => return self.copyPayloadShallow(allocator, Payload.Struct),
.@"union" => return self.copyPayloadShallow(allocator, Payload.Union),
.@"opaque" => return self.copyPayloadShallow(allocator, Payload.Opaque),
}
}
@@ -725,9 +720,6 @@ pub const Type = extern union {
.inferred_alloc_const => return out_stream.writeAll("(inferred_alloc_const)"),
.inferred_alloc_mut => return out_stream.writeAll("(inferred_alloc_mut)"),
// TODO use declaration name
.@"enum" => return out_stream.writeAll("enum {}"),
.@"struct" => return out_stream.writeAll("struct {}"),
.@"union" => return out_stream.writeAll("union {}"),
.@"opaque" => return out_stream.writeAll("opaque {}"),
}
unreachable;
@@ -839,10 +831,6 @@ pub const Type = extern union {
return payload.error_set.hasCodeGenBits() or payload.payload.hasCodeGenBits();
},
.@"enum" => @panic("TODO"),
.@"struct" => @panic("TODO"),
.@"union" => @panic("TODO"),
.c_void,
.void,
.type,
@@ -864,7 +852,7 @@ pub const Type = extern union {
pub fn isNoReturn(self: Type) bool {
const definitely_correct_result = self.zigTypeTag() == .NoReturn;
const fast_result = self.tag_if_small_enough == Tag.noreturn;
const fast_result = self.tag_if_small_enough == @enumToInt(Tag.noreturn);
assert(fast_result == definitely_correct_result);
return fast_result;
}
@@ -970,10 +958,6 @@ pub const Type = extern union {
@panic("TODO abiAlignment error union");
},
.@"enum" => self.cast(Payload.Enum).?.abiAlignment(target),
.@"struct" => @panic("TODO"),
.@"union" => @panic("TODO"),
.c_void,
.void,
.type,
@@ -1122,10 +1106,6 @@ pub const Type = extern union {
}
@panic("TODO abiSize error union");
},
.@"enum" => @panic("TODO"),
.@"struct" => @panic("TODO"),
.@"union" => @panic("TODO"),
};
}
@@ -1195,9 +1175,6 @@ pub const Type = extern union {
.error_set,
.error_set_single,
.empty_struct,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> false,
@@ -1273,9 +1250,6 @@ pub const Type = extern union {
.error_set,
.error_set_single,
.empty_struct,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> unreachable,
@@ -1372,9 +1346,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> false,
@@ -1453,9 +1424,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> false,
@@ -1543,9 +1511,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> false,
@@ -1628,9 +1593,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> false,
@@ -1755,9 +1717,6 @@ pub const Type = extern union {
.empty_struct => unreachable,
.inferred_alloc_const => unreachable,
.inferred_alloc_mut => unreachable,
.@"enum" => unreachable,
.@"struct" => unreachable,
.@"union" => unreachable,
.@"opaque" => unreachable,
.var_args_param => unreachable,
@@ -1908,9 +1867,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> unreachable,
@@ -1983,9 +1939,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> unreachable,
@@ -2073,9 +2026,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> false,
@@ -2159,9 +2109,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> false,
@@ -2231,9 +2178,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> unreachable,
@@ -2331,9 +2275,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> false,
@@ -2452,9 +2393,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> unreachable,
@@ -2539,9 +2477,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> unreachable,
@@ -2625,9 +2560,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> unreachable,
@@ -2711,9 +2643,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> unreachable,
@@ -2794,9 +2723,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> unreachable,
@@ -2877,9 +2803,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> unreachable,
@@ -2960,9 +2883,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> false,
@@ -3028,10 +2948,6 @@ pub const Type = extern union {
.var_args_param,
=> return null,
.@"enum" => @panic("TODO onePossibleValue enum"),
.@"struct" => @panic("TODO onePossibleValue struct"),
.@"union" => @panic("TODO onePossibleValue union"),
.empty_struct => return Value.initTag(.empty_struct_value),
.void => return Value.initTag(.void_value),
.noreturn => return Value.initTag(.unreachable_value),
@@ -3139,9 +3055,6 @@ pub const Type = extern union {
.empty_struct,
.inferred_alloc_const,
.inferred_alloc_mut,
.@"enum",
.@"struct",
.@"union",
.@"opaque",
.var_args_param,
=> return false,
@@ -3237,9 +3150,6 @@ pub const Type = extern union {
=> unreachable,
.empty_struct => self.castTag(.empty_struct).?.data,
.@"enum" => &self.castTag(.@"enum").?.scope,
.@"struct" => &self.castTag(.@"struct").?.scope,
.@"union" => &self.castTag(.@"union").?.scope,
.@"opaque" => &self.castTag(.@"opaque").?.scope,
};
}
@@ -3386,9 +3296,6 @@ pub const Type = extern union {
error_set,
error_set_single,
empty_struct,
@"enum",
@"struct",
@"union",
@"opaque",
pub const last_no_payload_tag = Tag.inferred_alloc_const;
@@ -3467,11 +3374,7 @@ pub const Type = extern union {
.int_unsigned,
=> Payload.Bits,
.error_set,
.@"enum",
.@"struct",
.@"union",
=> Payload.Decl,
.error_set => Payload.Decl,
.array => Payload.Array,
.array_sentinel => Payload.ArraySentinel,


@@ -34,6 +34,7 @@ pub const Code = struct {
/// The meaning of this data is determined by `Inst.Tag` value.
extra: []u32,
/// First ZIR instruction in this `Code`.
/// `extra` at this index contains a `Ref` for every root member.
root_start: Inst.Index,
/// Number of ZIR instructions in the implicit root block of the `Code`.
root_len: u32,
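(For reference while reworking astgen: a sketch of walking the implicit root
block described above. It assumes `Ref` is a u32-backed enum and that
`root_start`/`root_len` index directly into `extra`; this is not the
compiler's actual helper.)

fn forEachRootMember(code: Code) void {
    // One `Ref` per root member lives in `extra`, starting at `root_start`.
    var i: u32 = 0;
    while (i < code.root_len) : (i += 1) {
        const ref = @intToEnum(Inst.Ref, code.extra[code.root_start + i]);
        _ = ref; // a real consumer would resolve and analyze `ref` here
    }
}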
@@ -358,10 +359,9 @@ pub const Inst = struct {
/// Same as `alloc` except mutable.
alloc_mut,
/// Same as `alloc` except the type is inferred.
/// lhs and rhs unused.
/// The operand is unused.
alloc_inferred,
/// Same as `alloc_inferred` except mutable.
/// lhs and rhs unused.
alloc_inferred_mut,
/// Create an `anyframe->T`.
/// Uses the `un_node` field. AST node is the `anyframe->T` syntax. Operand is the type.
@@ -370,9 +370,11 @@ pub const Inst = struct {
array_cat,
/// Array multiplication `a ** b`
array_mul,
/// lhs is length, rhs is element type.
/// `[N]T` syntax. No source location provided.
/// Uses the `bin` union field. lhs is length, rhs is element type.
array_type,
/// lhs is length, ArrayTypeSentinel[rhs]
/// `[N:S]T` syntax. No source location provided.
/// Uses the `array_type_sentinel` field.
array_type_sentinel,
/// Given a pointer to an indexable object, returns the len property. This is
/// used by for loops. This instruction also emits a for-loop specific compile
@@ -407,10 +409,11 @@ pub const Inst = struct {
/// Bitwise OR. `|`
bit_or,
/// A labeled block of code, which can return a value.
/// Uses the `pl_node` union field.
/// Uses the `pl_node` union field. Payload is `MultiOp`.
block,
/// A block of code, which can return a value. There are no instructions that break out of
/// this block; it is implied that the final instruction is the result.
/// Uses the `pl_node` union field. Payload is `MultiOp`.
block_flat,
/// Same as `block` but additionally makes the inner instructions execute at comptime.
block_comptime,
@@ -433,7 +436,7 @@ pub const Inst = struct {
/// the operand is assumed to be the void value.
/// Uses the `un_tok` union field.
break_void_tok,
/// lhs and rhs unused.
/// Uses the `node` union field.
breakpoint,
/// Function call with modifier `.auto`.
/// Uses `pl_node`. AST node is the function call. Payload is `Call`.
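A sketch of the `pl_node` pattern described here, under these assumptions:
`datas` is the slice of `Inst.Data`, `pl_node` carries `src_node` and
`payload_index` fields, and `extraData` is a helper that decodes a struct
from `extra` and also returns the index just past it. None of these names
are confirmed by this diff.

// The AST node offset feeds source locations; the payload index points
// into `extra`, where the `Call` payload begins.
const data = datas[call_index].pl_node;
const src: LazySrcLoc = .{ .node_offset = data.src_node };
const call = code.extraData(Inst.Call, data.payload_index).data;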
@@ -471,8 +474,11 @@ pub const Inst = struct {
/// The payload is `MultiOp`.
compile_log,
/// Conditional branch. Splits control flow based on a boolean condition value.
/// Uses the `pl_node` union field. AST node is an if, while, for, etc.
/// Payload is `CondBr`.
condbr,
/// Special case, has no textual representation.
/// Uses the `const` union field.
@"const",
/// Declares the beginning of a statement. Used for debug info.
/// Uses the `node` union field.
@@ -512,7 +518,7 @@ pub const Inst = struct {
error_union_type,
/// Create an error set. extra[lhs..rhs]. The values are token index offsets.
error_set,
/// `error.Foo` syntax. uses the `tok` field of the Data union.
/// `error.Foo` syntax. Uses the `str_tok` field of the Data union.
error_value,
/// Given a pointer to a struct or object that contains virtual fields, returns a pointer
/// to the named field. The field name is stored in string_bytes. Used by a.b syntax.
@@ -532,6 +538,8 @@ pub const Inst = struct {
field_val_named,
/// Convert a larger float type to any other float type, possibly causing
/// a loss of precision.
/// Uses the `pl_node` field. AST is the `@floatCast` syntax.
/// Payload is `Bin` with lhs as the dest type, rhs the operand.
floatcast,
/// Returns a function type, assuming unspecified calling convention.
/// Uses the `fn_type` union field. `payload_index` points to a `FnType`.
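Both `floatcast` and `intcast` share this `Bin`-in-extra shape, so one
decoder sketch covers them (same assumed `extraData` helper and `datas`
slice as above):

const bin = code.extraData(Inst.Bin, datas[cast_index].pl_node.payload_index).data;
const dest_type_ref = bin.lhs; // destination type, per the comments above
const operand_ref = bin.rhs; // value being cast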
@@ -550,6 +558,8 @@ pub const Inst = struct {
int,
/// Convert an integer value to another integer type, asserting that the destination type
/// can hold the same mathematical value.
/// Uses the `pl_node` field. AST is the `@intCast` syntax.
/// Payload is `Bin` with lhs as the dest type, rhs the operand.
intcast,
/// Make an integer type out of signedness and bit count.
/// lhs is signedness, rhs is bit count.
@@ -574,7 +584,8 @@ pub const Inst = struct {
is_err_ptr,
/// A labeled block of code that loops forever. At the end of the body it is implied
/// to repeat; no explicit "repeat" instruction terminates loop bodies.
/// SubRange[lhs..rhs]
/// Uses the `pl_node` field. The AST node is either a for loop or while loop.
/// The payload is `MultiOp`.
loop,
/// Merge two error sets into one, `E1 || E2`.
merge_error_sets,
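Assuming `MultiOp` is a length header with `operands_len` trailing `Ref`s in
`extra` (a guess; the layout is not spelled out in this hunk), slicing out a
loop body would look like:

const extra = code.extraData(Inst.MultiOp, datas[loop_index].pl_node.payload_index);
// `extra.end` is assumed to be the first extra index past the header.
const body = code.extra[extra.end..][0..extra.data.operands_len];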
@@ -677,12 +688,12 @@ pub const Inst = struct {
typeof_peer,
/// Asserts control-flow will not reach this instruction. Not safety checked - the compiler
/// will assume the correctness of this instruction.
/// lhs and rhs unused.
/// Uses the `node` union field.
unreachable_unsafe,
/// Asserts control-flow will not reach this instruction. In safety-checked modes,
/// this will generate a call to the panic function unless it can be proven unreachable
/// by the compiler.
/// lhs and rhs unused.
/// Uses the `node` union field.
unreachable_safe,
/// Bitwise XOR. `^`
xor,
@@ -742,7 +753,7 @@ pub const Inst = struct {
/// Takes a *E!T and raises a compiler error if T != void
/// Uses the `un_tok` field.
ensure_err_payload_void,
/// An enum literal. Uses the `str` union field.
/// An enum literal. Uses the `str_tok` union field.
enum_literal,
/// Suspend an async function. The suspend block has 0 or 1 statements in it.
/// Uses the `un_node` union field.
@@ -995,6 +1006,7 @@ pub const Inst = struct {
bin: Bin,
decl: *Module.Decl,
@"const": *TypedValue,
/// For strings which may contain null bytes.
str: struct {
/// Offset into `string_bytes`.
start: u32,
@@ -1005,14 +1017,28 @@ pub const Inst = struct {
return code.string_bytes[self.start..][0..self.len];
}
},
str_tok: struct {
/// Offset into `string_bytes`. Null-terminated.
start: u32,
/// Offset from Decl AST token index.
src_tok: u32,
pub fn get(self: @This(), code: Code) [:0]const u8 {
return code.nullTerminatedString(self.start);
}
pub fn src(self: @This()) LazySrcLoc {
return .{ .token_offset = self.src_tok };
}
},
/// Offset from Decl AST token index.
tok: ast.TokenIndex,
/// Offset from Decl AST node index.
node: ast.Node.Index,
int: u64,
condbr: struct {
condition: Ref,
/// index into extra.
array_type_sentinel: struct {
len: Ref,
/// Index into extra; points to an `ArrayTypeSentinel`.
payload_index: u32,
},
ptr_type_simple: struct {
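Hypothetical usage of the two `str_tok` methods shown above (only the `datas`
slice is assumed; `get` and `src` are verbatim from this hunk):

const data = datas[inst_index].str_tok;
const name = data.get(code); // null-terminated slice of string_bytes
const src = data.src(); // LazySrcLoc{ .token_offset = data.src_tok }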
@@ -1100,10 +1126,11 @@ pub const Inst = struct {
args_len: u32,
};
/// This data is stored inside extra, with two sets of trailing indexes:
/// This data is stored inside extra, with two sets of trailing `Ref`s:
/// * 0. the then body, according to `then_body_len`.
/// * 1. the else body, according to `else_body_len`.
pub const CondBr = struct {
condition: Ref,
then_body_len: u32,
else_body_len: u32,
};
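Decoding follows directly from that comment: read the `CondBr` header out of
`extra`, then slice the two trailing bodies, then-body first. A sketch with
the same assumed `extraData` helper as above, not the real implementation:

const extra = code.extraData(Inst.CondBr, datas[condbr_index].pl_node.payload_index);
const condition = extra.data.condition;
const then_body = code.extra[extra.end..][0..extra.data.then_body_len];
const else_body = code.extra[extra.end + extra.data.then_body_len ..][0..extra.data.else_body_len];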