Zcu: introduce PerThread and pass to all the functions

Jacob Young
2024-06-15 16:10:53 -04:00
parent 8f20e81b88
commit 525f341f33
56 changed files with 11199 additions and 9894 deletions
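
The change is mechanical but wide: every codegen entry point gains a `pt: Zcu.PerThread` parameter, `lf.comp.module.?` lookups become `pt.zcu`, and helpers that mutate shared state (`intern`, `intValue`, `getCoerced`, `errorIntType`) move from the `Zcu`/`Module` onto the handle. Below is a minimal, self-contained sketch of that pattern; the toy `Zcu` struct and the `tid` field are illustrative assumptions, since only the `zcu` field and the moved methods are visible in the diff itself.

const std = @import("std");

// Illustrative stand-in: a toy version of the compiler's shared compilation
// state. The real Zcu holds the InternPool, error sets, declarations, etc.
const Zcu = struct {
    gpa: std.mem.Allocator,
    interned: std.ArrayListUnmanaged(u64) = .{},
};

const PerThread = struct {
    zcu: *Zcu,
    tid: usize, // assumed field, not shown in the diff; identifies the worker thread

    // Mutating operations that previously hung off `*Zcu` (e.g. `mod.intern`)
    // become methods on the per-thread handle.
    fn intern(pt: PerThread, value: u64) !usize {
        try pt.zcu.interned.append(pt.zcu.gpa, value);
        return pt.zcu.interned.items.len - 1;
    }
};

// Callees take `pt` up front instead of digging the Zcu out of `lf.comp.module.?`.
fn generateSymbol(pt: PerThread, value: u64) !usize {
    return pt.intern(value);
}

pub fn main() !void {
    var gpa_state = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa_state.deinit();
    var zcu: Zcu = .{ .gpa = gpa_state.allocator() };
    defer zcu.interned.deinit(zcu.gpa);

    const pt: PerThread = .{ .zcu = &zcu, .tid = 0 };
    _ = try generateSymbol(pt, 42);
}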


@@ -13,12 +13,10 @@ const trace = @import("tracy.zig").trace;
const Air = @import("Air.zig");
const Allocator = mem.Allocator;
const Compilation = @import("Compilation.zig");
const ErrorMsg = Module.ErrorMsg;
const ErrorMsg = Zcu.ErrorMsg;
const InternPool = @import("InternPool.zig");
const Liveness = @import("Liveness.zig");
const Zcu = @import("Zcu.zig");
/// Deprecated.
const Module = Zcu;
const Target = std.Target;
const Type = @import("Type.zig");
const Value = @import("Value.zig");
@@ -47,14 +45,15 @@ pub const DebugInfoOutput = union(enum) {
pub fn generateFunction(
lf: *link.File,
src_loc: Module.LazySrcLoc,
pt: Zcu.PerThread,
src_loc: Zcu.LazySrcLoc,
func_index: InternPool.Index,
air: Air,
liveness: Liveness,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
) CodeGenError!Result {
const zcu = lf.comp.module.?;
const zcu = pt.zcu;
const func = zcu.funcInfo(func_index);
const decl = zcu.declPtr(func.owner_decl);
const namespace = zcu.namespacePtr(decl.src_namespace);
@@ -62,35 +61,36 @@ pub fn generateFunction(
switch (target.cpu.arch) {
.arm,
.armeb,
=> return @import("arch/arm/CodeGen.zig").generate(lf, src_loc, func_index, air, liveness, code, debug_output),
=> return @import("arch/arm/CodeGen.zig").generate(lf, pt, src_loc, func_index, air, liveness, code, debug_output),
.aarch64,
.aarch64_be,
.aarch64_32,
=> return @import("arch/aarch64/CodeGen.zig").generate(lf, src_loc, func_index, air, liveness, code, debug_output),
.riscv64 => return @import("arch/riscv64/CodeGen.zig").generate(lf, src_loc, func_index, air, liveness, code, debug_output),
.sparc64 => return @import("arch/sparc64/CodeGen.zig").generate(lf, src_loc, func_index, air, liveness, code, debug_output),
.x86_64 => return @import("arch/x86_64/CodeGen.zig").generate(lf, src_loc, func_index, air, liveness, code, debug_output),
=> return @import("arch/aarch64/CodeGen.zig").generate(lf, pt, src_loc, func_index, air, liveness, code, debug_output),
.riscv64 => return @import("arch/riscv64/CodeGen.zig").generate(lf, pt, src_loc, func_index, air, liveness, code, debug_output),
.sparc64 => return @import("arch/sparc64/CodeGen.zig").generate(lf, pt, src_loc, func_index, air, liveness, code, debug_output),
.x86_64 => return @import("arch/x86_64/CodeGen.zig").generate(lf, pt, src_loc, func_index, air, liveness, code, debug_output),
.wasm32,
.wasm64,
=> return @import("arch/wasm/CodeGen.zig").generate(lf, src_loc, func_index, air, liveness, code, debug_output),
=> return @import("arch/wasm/CodeGen.zig").generate(lf, pt, src_loc, func_index, air, liveness, code, debug_output),
else => unreachable,
}
}
pub fn generateLazyFunction(
lf: *link.File,
src_loc: Module.LazySrcLoc,
pt: Zcu.PerThread,
src_loc: Zcu.LazySrcLoc,
lazy_sym: link.File.LazySymbol,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
) CodeGenError!Result {
const zcu = lf.comp.module.?;
const zcu = pt.zcu;
const decl_index = lazy_sym.ty.getOwnerDecl(zcu);
const decl = zcu.declPtr(decl_index);
const namespace = zcu.namespacePtr(decl.src_namespace);
const target = namespace.fileScope(zcu).mod.resolved_target.result;
switch (target.cpu.arch) {
.x86_64 => return @import("arch/x86_64/CodeGen.zig").generateLazy(lf, src_loc, lazy_sym, code, debug_output),
.x86_64 => return @import("arch/x86_64/CodeGen.zig").generateLazy(lf, pt, src_loc, lazy_sym, code, debug_output),
else => unreachable,
}
}
@@ -105,7 +105,8 @@ fn writeFloat(comptime F: type, f: F, target: Target, endian: std.builtin.Endian
pub fn generateLazySymbol(
bin_file: *link.File,
src_loc: Module.LazySrcLoc,
pt: Zcu.PerThread,
src_loc: Zcu.LazySrcLoc,
lazy_sym: link.File.LazySymbol,
// TODO don't use an "out" parameter like this; put it in the result instead
alignment: *Alignment,
@@ -119,25 +120,24 @@ pub fn generateLazySymbol(
defer tracy.end();
const comp = bin_file.comp;
const zcu = comp.module.?;
const ip = &zcu.intern_pool;
const ip = &pt.zcu.intern_pool;
const target = comp.root_mod.resolved_target.result;
const endian = target.cpu.arch.endian();
const gpa = comp.gpa;
log.debug("generateLazySymbol: kind = {s}, ty = {}", .{
@tagName(lazy_sym.kind),
lazy_sym.ty.fmt(zcu),
lazy_sym.ty.fmt(pt),
});
if (lazy_sym.kind == .code) {
alignment.* = target_util.defaultFunctionAlignment(target);
return generateLazyFunction(bin_file, src_loc, lazy_sym, code, debug_output);
return generateLazyFunction(bin_file, pt, src_loc, lazy_sym, code, debug_output);
}
if (lazy_sym.ty.isAnyError(zcu)) {
if (lazy_sym.ty.isAnyError(pt.zcu)) {
alignment.* = .@"4";
const err_names = zcu.global_error_set.keys();
const err_names = pt.zcu.global_error_set.keys();
mem.writeInt(u32, try code.addManyAsArray(4), @intCast(err_names.len), endian);
var offset = code.items.len;
try code.resize((1 + err_names.len + 1) * 4);
@@ -151,9 +151,9 @@ pub fn generateLazySymbol(
}
mem.writeInt(u32, code.items[offset..][0..4], @intCast(code.items.len), endian);
return Result.ok;
} else if (lazy_sym.ty.zigTypeTag(zcu) == .Enum) {
} else if (lazy_sym.ty.zigTypeTag(pt.zcu) == .Enum) {
alignment.* = .@"1";
const tag_names = lazy_sym.ty.enumFields(zcu);
const tag_names = lazy_sym.ty.enumFields(pt.zcu);
for (0..tag_names.len) |tag_index| {
const tag_name = tag_names.get(ip)[tag_index].toSlice(ip);
try code.ensureUnusedCapacity(tag_name.len + 1);
@@ -165,13 +165,14 @@ pub fn generateLazySymbol(
gpa,
src_loc,
"TODO implement generateLazySymbol for {s} {}",
.{ @tagName(lazy_sym.kind), lazy_sym.ty.fmt(zcu) },
.{ @tagName(lazy_sym.kind), lazy_sym.ty.fmt(pt) },
) };
}
pub fn generateSymbol(
bin_file: *link.File,
src_loc: Module.LazySrcLoc,
pt: Zcu.PerThread,
src_loc: Zcu.LazySrcLoc,
val: Value,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
@@ -180,17 +181,17 @@ pub fn generateSymbol(
const tracy = trace(@src());
defer tracy.end();
const mod = bin_file.comp.module.?;
const mod = pt.zcu;
const ip = &mod.intern_pool;
const ty = val.typeOf(mod);
const target = mod.getTarget();
const endian = target.cpu.arch.endian();
log.debug("generateSymbol: val = {}", .{val.fmtValue(mod, null)});
log.debug("generateSymbol: val = {}", .{val.fmtValue(pt, null)});
if (val.isUndefDeep(mod)) {
const abi_size = math.cast(usize, ty.abiSize(mod)) orelse return error.Overflow;
const abi_size = math.cast(usize, ty.abiSize(pt)) orelse return error.Overflow;
try code.appendNTimes(0xaa, abi_size);
return .ok;
}
@@ -236,9 +237,9 @@ pub fn generateSymbol(
.empty_enum_value,
=> unreachable, // non-runtime values
.int => {
const abi_size = math.cast(usize, ty.abiSize(mod)) orelse return error.Overflow;
const abi_size = math.cast(usize, ty.abiSize(pt)) orelse return error.Overflow;
var space: Value.BigIntSpace = undefined;
const int_val = val.toBigInt(&space, mod);
const int_val = val.toBigInt(&space, pt);
int_val.writeTwosComplement(try code.addManyAsSlice(abi_size), endian);
},
.err => |err| {
@@ -252,14 +253,14 @@ pub fn generateSymbol(
.payload => 0,
};
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
if (!payload_ty.hasRuntimeBitsIgnoreComptime(pt)) {
try code.writer().writeInt(u16, err_val, endian);
return .ok;
}
const payload_align = payload_ty.abiAlignment(mod);
const error_align = Type.anyerror.abiAlignment(mod);
const abi_align = ty.abiAlignment(mod);
const payload_align = payload_ty.abiAlignment(pt);
const error_align = Type.anyerror.abiAlignment(pt);
const abi_align = ty.abiAlignment(pt);
// error value first when its type is larger than the error union's payload
if (error_align.order(payload_align) == .gt) {
@@ -269,8 +270,8 @@ pub fn generateSymbol(
// emit payload part of the error union
{
const begin = code.items.len;
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(switch (error_union.val) {
.err_name => try mod.intern(.{ .undef = payload_ty.toIntern() }),
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(switch (error_union.val) {
.err_name => try pt.intern(.{ .undef = payload_ty.toIntern() }),
.payload => |payload| payload,
}), code, debug_output, reloc_info)) {
.ok => {},
@@ -300,7 +301,7 @@ pub fn generateSymbol(
},
.enum_tag => |enum_tag| {
const int_tag_ty = ty.intTagType(mod);
switch (try generateSymbol(bin_file, src_loc, try mod.getCoerced(Value.fromInterned(enum_tag.int), int_tag_ty), code, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, try pt.getCoerced(Value.fromInterned(enum_tag.int), int_tag_ty), code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return .{ .fail = em },
}
@@ -311,21 +312,21 @@ pub fn generateSymbol(
.f64 => |f64_val| writeFloat(f64, f64_val, target, endian, try code.addManyAsArray(8)),
.f80 => |f80_val| {
writeFloat(f80, f80_val, target, endian, try code.addManyAsArray(10));
const abi_size = math.cast(usize, ty.abiSize(mod)) orelse return error.Overflow;
const abi_size = math.cast(usize, ty.abiSize(pt)) orelse return error.Overflow;
try code.appendNTimes(0, abi_size - 10);
},
.f128 => |f128_val| writeFloat(f128, f128_val, target, endian, try code.addManyAsArray(16)),
},
.ptr => switch (try lowerPtr(bin_file, src_loc, val.toIntern(), code, debug_output, reloc_info, 0)) {
.ptr => switch (try lowerPtr(bin_file, pt, src_loc, val.toIntern(), code, debug_output, reloc_info, 0)) {
.ok => {},
.fail => |em| return .{ .fail = em },
},
.slice => |slice| {
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(slice.ptr), code, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(slice.ptr), code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return .{ .fail = em },
}
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(slice.len), code, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(slice.len), code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return .{ .fail = em },
}
@@ -333,11 +334,11 @@ pub fn generateSymbol(
.opt => {
const payload_type = ty.optionalChild(mod);
const payload_val = val.optionalValue(mod);
const abi_size = math.cast(usize, ty.abiSize(mod)) orelse return error.Overflow;
const abi_size = math.cast(usize, ty.abiSize(pt)) orelse return error.Overflow;
if (ty.optionalReprIsPayload(mod)) {
if (payload_val) |value| {
switch (try generateSymbol(bin_file, src_loc, value, code, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, value, code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return Result{ .fail = em },
}
@@ -345,10 +346,12 @@ pub fn generateSymbol(
try code.appendNTimes(0, abi_size);
}
} else {
const padding = abi_size - (math.cast(usize, payload_type.abiSize(mod)) orelse return error.Overflow) - 1;
if (payload_type.hasRuntimeBits(mod)) {
const value = payload_val orelse Value.fromInterned((try mod.intern(.{ .undef = payload_type.toIntern() })));
switch (try generateSymbol(bin_file, src_loc, value, code, debug_output, reloc_info)) {
const padding = abi_size - (math.cast(usize, payload_type.abiSize(pt)) orelse return error.Overflow) - 1;
if (payload_type.hasRuntimeBits(pt)) {
const value = payload_val orelse Value.fromInterned(try pt.intern(.{
.undef = payload_type.toIntern(),
}));
switch (try generateSymbol(bin_file, pt, src_loc, value, code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return Result{ .fail = em },
}
@@ -363,7 +366,7 @@ pub fn generateSymbol(
.elems, .repeated_elem => {
var index: u64 = 0;
while (index < array_type.lenIncludingSentinel()) : (index += 1) {
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(switch (aggregate.storage) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(switch (aggregate.storage) {
.bytes => unreachable,
.elems => |elems| elems[@intCast(index)],
.repeated_elem => |elem| if (index < array_type.len)
@@ -378,8 +381,7 @@ pub fn generateSymbol(
},
},
.vector_type => |vector_type| {
const abi_size = math.cast(usize, ty.abiSize(mod)) orelse
return error.Overflow;
const abi_size = math.cast(usize, ty.abiSize(pt)) orelse return error.Overflow;
if (vector_type.child == .bool_type) {
const bytes = try code.addManyAsSlice(abi_size);
@memset(bytes, 0xaa);
@@ -424,7 +426,7 @@ pub fn generateSymbol(
.elems, .repeated_elem => {
var index: u64 = 0;
while (index < vector_type.len) : (index += 1) {
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(switch (aggregate.storage) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(switch (aggregate.storage) {
.bytes => unreachable,
.elems => |elems| elems[
math.cast(usize, index) orelse return error.Overflow
@@ -439,7 +441,7 @@ pub fn generateSymbol(
}
const padding = abi_size -
(math.cast(usize, Type.fromInterned(vector_type.child).abiSize(mod) * vector_type.len) orelse
(math.cast(usize, Type.fromInterned(vector_type.child).abiSize(pt) * vector_type.len) orelse
return error.Overflow);
if (padding > 0) try code.appendNTimes(0, padding);
}
@@ -452,10 +454,10 @@ pub fn generateSymbol(
0..,
) |field_ty, comptime_val, index| {
if (comptime_val != .none) continue;
if (!Type.fromInterned(field_ty).hasRuntimeBits(mod)) continue;
if (!Type.fromInterned(field_ty).hasRuntimeBits(pt)) continue;
const field_val = switch (aggregate.storage) {
.bytes => |bytes| try ip.get(mod.gpa, .{ .int = .{
.bytes => |bytes| try pt.intern(.{ .int = .{
.ty = field_ty,
.storage = .{ .u64 = bytes.at(index, ip) },
} }),
@@ -463,14 +465,14 @@ pub fn generateSymbol(
.repeated_elem => |elem| elem,
};
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(field_val), code, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(field_val), code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return Result{ .fail = em },
}
const unpadded_field_end = code.items.len - struct_begin;
// Pad struct members if required
const padded_field_end = ty.structFieldOffset(index + 1, mod);
const padded_field_end = ty.structFieldOffset(index + 1, pt);
const padding = math.cast(usize, padded_field_end - unpadded_field_end) orelse
return error.Overflow;
@@ -483,15 +485,14 @@ pub fn generateSymbol(
const struct_type = ip.loadStructType(ty.toIntern());
switch (struct_type.layout) {
.@"packed" => {
const abi_size = math.cast(usize, ty.abiSize(mod)) orelse
return error.Overflow;
const abi_size = math.cast(usize, ty.abiSize(pt)) orelse return error.Overflow;
const current_pos = code.items.len;
try code.appendNTimes(0, abi_size);
var bits: u16 = 0;
for (struct_type.field_types.get(ip), 0..) |field_ty, index| {
const field_val = switch (aggregate.storage) {
.bytes => |bytes| try ip.get(mod.gpa, .{ .int = .{
.bytes => |bytes| try pt.intern(.{ .int = .{
.ty = field_ty,
.storage = .{ .u64 = bytes.at(index, ip) },
} }),
@@ -502,18 +503,18 @@ pub fn generateSymbol(
// pointer may point to a decl which must be marked used
// but can also result in a relocation. Therefore we handle those separately.
if (Type.fromInterned(field_ty).zigTypeTag(mod) == .Pointer) {
const field_size = math.cast(usize, Type.fromInterned(field_ty).abiSize(mod)) orelse
const field_size = math.cast(usize, Type.fromInterned(field_ty).abiSize(pt)) orelse
return error.Overflow;
var tmp_list = try std.ArrayList(u8).initCapacity(code.allocator, field_size);
defer tmp_list.deinit();
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(field_val), &tmp_list, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(field_val), &tmp_list, debug_output, reloc_info)) {
.ok => @memcpy(code.items[current_pos..][0..tmp_list.items.len], tmp_list.items),
.fail => |em| return Result{ .fail = em },
}
} else {
Value.fromInterned(field_val).writeToPackedMemory(Type.fromInterned(field_ty), mod, code.items[current_pos..], bits) catch unreachable;
Value.fromInterned(field_val).writeToPackedMemory(Type.fromInterned(field_ty), pt, code.items[current_pos..], bits) catch unreachable;
}
bits += @intCast(Type.fromInterned(field_ty).bitSize(mod));
bits += @intCast(Type.fromInterned(field_ty).bitSize(pt));
}
},
.auto, .@"extern" => {
@@ -524,10 +525,10 @@ pub fn generateSymbol(
var it = struct_type.iterateRuntimeOrder(ip);
while (it.next()) |field_index| {
const field_ty = field_types[field_index];
if (!Type.fromInterned(field_ty).hasRuntimeBits(mod)) continue;
if (!Type.fromInterned(field_ty).hasRuntimeBits(pt)) continue;
const field_val = switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
.bytes => |bytes| try ip.get(mod.gpa, .{ .int = .{
.bytes => |bytes| try pt.intern(.{ .int = .{
.ty = field_ty,
.storage = .{ .u64 = bytes.at(field_index, ip) },
} }),
@@ -541,7 +542,7 @@ pub fn generateSymbol(
) orelse return error.Overflow;
if (padding > 0) try code.appendNTimes(0, padding);
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(field_val), code, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(field_val), code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return Result{ .fail = em },
}
@@ -562,15 +563,15 @@ pub fn generateSymbol(
else => unreachable,
},
.un => |un| {
const layout = ty.unionGetLayout(mod);
const layout = ty.unionGetLayout(pt);
if (layout.payload_size == 0) {
return generateSymbol(bin_file, src_loc, Value.fromInterned(un.tag), code, debug_output, reloc_info);
return generateSymbol(bin_file, pt, src_loc, Value.fromInterned(un.tag), code, debug_output, reloc_info);
}
// Check if we should store the tag first.
if (layout.tag_size > 0 and layout.tag_align.compare(.gte, layout.payload_align)) {
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(un.tag), code, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(un.tag), code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return Result{ .fail = em },
}
@@ -580,28 +581,28 @@ pub fn generateSymbol(
if (un.tag != .none) {
const field_index = ty.unionTagFieldIndex(Value.fromInterned(un.tag), mod).?;
const field_ty = Type.fromInterned(union_obj.field_types.get(ip)[field_index]);
if (!field_ty.hasRuntimeBits(mod)) {
if (!field_ty.hasRuntimeBits(pt)) {
try code.appendNTimes(0xaa, math.cast(usize, layout.payload_size) orelse return error.Overflow);
} else {
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(un.val), code, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(un.val), code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return Result{ .fail = em },
}
const padding = math.cast(usize, layout.payload_size - field_ty.abiSize(mod)) orelse return error.Overflow;
const padding = math.cast(usize, layout.payload_size - field_ty.abiSize(pt)) orelse return error.Overflow;
if (padding > 0) {
try code.appendNTimes(0, padding);
}
}
} else {
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(un.val), code, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(un.val), code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return Result{ .fail = em },
}
}
if (layout.tag_size > 0 and layout.tag_align.compare(.lt, layout.payload_align)) {
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(un.tag), code, debug_output, reloc_info)) {
switch (try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(un.tag), code, debug_output, reloc_info)) {
.ok => {},
.fail => |em| return Result{ .fail = em },
}
@@ -618,22 +619,24 @@ pub fn generateSymbol(
fn lowerPtr(
bin_file: *link.File,
src_loc: Module.LazySrcLoc,
pt: Zcu.PerThread,
src_loc: Zcu.LazySrcLoc,
ptr_val: InternPool.Index,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
reloc_info: RelocInfo,
prev_offset: u64,
) CodeGenError!Result {
const zcu = bin_file.comp.module.?;
const zcu = pt.zcu;
const ptr = zcu.intern_pool.indexToKey(ptr_val).ptr;
const offset: u64 = prev_offset + ptr.byte_offset;
return switch (ptr.base_addr) {
.decl => |decl| try lowerDeclRef(bin_file, src_loc, decl, code, debug_output, reloc_info, offset),
.anon_decl => |ad| try lowerAnonDeclRef(bin_file, src_loc, ad, code, debug_output, reloc_info, offset),
.int => try generateSymbol(bin_file, src_loc, try zcu.intValue(Type.usize, offset), code, debug_output, reloc_info),
.decl => |decl| try lowerDeclRef(bin_file, pt, src_loc, decl, code, debug_output, reloc_info, offset),
.anon_decl => |ad| try lowerAnonDeclRef(bin_file, pt, src_loc, ad, code, debug_output, reloc_info, offset),
.int => try generateSymbol(bin_file, pt, src_loc, try pt.intValue(Type.usize, offset), code, debug_output, reloc_info),
.eu_payload => |eu_ptr| try lowerPtr(
bin_file,
pt,
src_loc,
eu_ptr,
code,
@@ -641,11 +644,12 @@ fn lowerPtr(
reloc_info,
offset + errUnionPayloadOffset(
Value.fromInterned(eu_ptr).typeOf(zcu).childType(zcu).errorUnionPayload(zcu),
zcu,
pt,
),
),
.opt_payload => |opt_ptr| try lowerPtr(
bin_file,
pt,
src_loc,
opt_ptr,
code,
@@ -666,12 +670,12 @@ fn lowerPtr(
};
},
.Struct, .Union => switch (base_ty.containerLayout(zcu)) {
.auto => base_ty.structFieldOffset(@intCast(field.index), zcu),
.auto => base_ty.structFieldOffset(@intCast(field.index), pt),
.@"extern", .@"packed" => unreachable,
},
else => unreachable,
};
return lowerPtr(bin_file, src_loc, field.base, code, debug_output, reloc_info, offset + field_off);
return lowerPtr(bin_file, pt, src_loc, field.base, code, debug_output, reloc_info, offset + field_off);
},
.arr_elem, .comptime_field, .comptime_alloc => unreachable,
};
@@ -683,7 +687,8 @@ const RelocInfo = struct {
fn lowerAnonDeclRef(
lf: *link.File,
src_loc: Module.LazySrcLoc,
pt: Zcu.PerThread,
src_loc: Zcu.LazySrcLoc,
anon_decl: InternPool.Key.Ptr.BaseAddr.AnonDecl,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
@@ -691,22 +696,21 @@ fn lowerAnonDeclRef(
offset: u64,
) CodeGenError!Result {
_ = debug_output;
const zcu = lf.comp.module.?;
const ip = &zcu.intern_pool;
const ip = &pt.zcu.intern_pool;
const target = lf.comp.root_mod.resolved_target.result;
const ptr_width_bytes = @divExact(target.ptrBitWidth(), 8);
const decl_val = anon_decl.val;
const decl_ty = Type.fromInterned(ip.typeOf(decl_val));
log.debug("lowerAnonDecl: ty = {}", .{decl_ty.fmt(zcu)});
const is_fn_body = decl_ty.zigTypeTag(zcu) == .Fn;
if (!is_fn_body and !decl_ty.hasRuntimeBits(zcu)) {
log.debug("lowerAnonDecl: ty = {}", .{decl_ty.fmt(pt)});
const is_fn_body = decl_ty.zigTypeTag(pt.zcu) == .Fn;
if (!is_fn_body and !decl_ty.hasRuntimeBits(pt)) {
try code.appendNTimes(0xaa, ptr_width_bytes);
return Result.ok;
}
const decl_align = ip.indexToKey(anon_decl.orig_ty).ptr_type.flags.alignment;
const res = try lf.lowerAnonDecl(decl_val, decl_align, src_loc);
const res = try lf.lowerAnonDecl(pt, decl_val, decl_align, src_loc);
switch (res) {
.ok => {},
.fail => |em| return .{ .fail = em },
@@ -730,7 +734,8 @@ fn lowerAnonDeclRef(
fn lowerDeclRef(
lf: *link.File,
src_loc: Module.LazySrcLoc,
pt: Zcu.PerThread,
src_loc: Zcu.LazySrcLoc,
decl_index: InternPool.DeclIndex,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
@@ -739,14 +744,14 @@ fn lowerDeclRef(
) CodeGenError!Result {
_ = src_loc;
_ = debug_output;
const zcu = lf.comp.module.?;
const zcu = pt.zcu;
const decl = zcu.declPtr(decl_index);
const namespace = zcu.namespacePtr(decl.src_namespace);
const target = namespace.fileScope(zcu).mod.resolved_target.result;
const ptr_width = target.ptrBitWidth();
const is_fn_body = decl.typeOf(zcu).zigTypeTag(zcu) == .Fn;
if (!is_fn_body and !decl.typeOf(zcu).hasRuntimeBits(zcu)) {
if (!is_fn_body and !decl.typeOf(zcu).hasRuntimeBits(pt)) {
try code.appendNTimes(0xaa, @divExact(ptr_width, 8));
return Result.ok;
}
@@ -814,7 +819,7 @@ pub const GenResult = union(enum) {
fn fail(
gpa: Allocator,
src_loc: Module.LazySrcLoc,
src_loc: Zcu.LazySrcLoc,
comptime format: []const u8,
args: anytype,
) Allocator.Error!GenResult {
@@ -825,14 +830,15 @@ pub const GenResult = union(enum) {
fn genDeclRef(
lf: *link.File,
src_loc: Module.LazySrcLoc,
pt: Zcu.PerThread,
src_loc: Zcu.LazySrcLoc,
val: Value,
ptr_decl_index: InternPool.DeclIndex,
) CodeGenError!GenResult {
const zcu = lf.comp.module.?;
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
const ty = val.typeOf(zcu);
log.debug("genDeclRef: val = {}", .{val.fmtValue(zcu, null)});
log.debug("genDeclRef: val = {}", .{val.fmtValue(pt, null)});
const ptr_decl = zcu.declPtr(ptr_decl_index);
const namespace = zcu.namespacePtr(ptr_decl.src_namespace);
@@ -848,7 +854,7 @@ fn genDeclRef(
};
const decl = zcu.declPtr(decl_index);
if (!decl.typeOf(zcu).isFnOrHasRuntimeBitsIgnoreComptime(zcu)) {
if (!decl.typeOf(zcu).isFnOrHasRuntimeBitsIgnoreComptime(pt)) {
const imm: u64 = switch (ptr_bytes) {
1 => 0xaa,
2 => 0xaaaa,
@@ -865,12 +871,12 @@ fn genDeclRef(
// TODO this feels clunky. Perhaps we should check for it in `genTypedValue`?
if (ty.castPtrToFn(zcu)) |fn_ty| {
if (zcu.typeToFunc(fn_ty).?.is_generic) {
return GenResult.mcv(.{ .immediate = fn_ty.abiAlignment(zcu).toByteUnits().? });
return GenResult.mcv(.{ .immediate = fn_ty.abiAlignment(pt).toByteUnits().? });
}
} else if (ty.zigTypeTag(zcu) == .Pointer) {
const elem_ty = ty.elemType2(zcu);
if (!elem_ty.hasRuntimeBits(zcu)) {
return GenResult.mcv(.{ .immediate = elem_ty.abiAlignment(zcu).toByteUnits().? });
if (!elem_ty.hasRuntimeBits(pt)) {
return GenResult.mcv(.{ .immediate = elem_ty.abiAlignment(pt).toByteUnits().? });
}
}
@@ -931,15 +937,15 @@ fn genDeclRef(
fn genUnnamedConst(
lf: *link.File,
src_loc: Module.LazySrcLoc,
pt: Zcu.PerThread,
src_loc: Zcu.LazySrcLoc,
val: Value,
owner_decl_index: InternPool.DeclIndex,
) CodeGenError!GenResult {
const zcu = lf.comp.module.?;
const gpa = lf.comp.gpa;
log.debug("genUnnamedConst: val = {}", .{val.fmtValue(zcu, null)});
log.debug("genUnnamedConst: val = {}", .{val.fmtValue(pt, null)});
const local_sym_index = lf.lowerUnnamedConst(val, owner_decl_index) catch |err| {
const local_sym_index = lf.lowerUnnamedConst(pt, val, owner_decl_index) catch |err| {
return GenResult.fail(gpa, src_loc, "lowering unnamed constant failed: {s}", .{@errorName(err)});
};
switch (lf.tag) {
@@ -970,15 +976,16 @@ fn genUnnamedConst(
pub fn genTypedValue(
lf: *link.File,
src_loc: Module.LazySrcLoc,
pt: Zcu.PerThread,
src_loc: Zcu.LazySrcLoc,
val: Value,
owner_decl_index: InternPool.DeclIndex,
) CodeGenError!GenResult {
const zcu = lf.comp.module.?;
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
const ty = val.typeOf(zcu);
log.debug("genTypedValue: val = {}", .{val.fmtValue(zcu, null)});
log.debug("genTypedValue: val = {}", .{val.fmtValue(pt, null)});
if (val.isUndef(zcu))
return GenResult.mcv(.undef);
@@ -990,7 +997,7 @@ pub fn genTypedValue(
if (!ty.isSlice(zcu)) switch (ip.indexToKey(val.toIntern())) {
.ptr => |ptr| if (ptr.byte_offset == 0) switch (ptr.base_addr) {
.decl => |decl| return genDeclRef(lf, src_loc, val, decl),
.decl => |decl| return genDeclRef(lf, pt, src_loc, val, decl),
else => {},
},
else => {},
@@ -1007,7 +1014,7 @@ pub fn genTypedValue(
.none => {},
else => switch (ip.indexToKey(val.toIntern())) {
.int => {
return GenResult.mcv(.{ .immediate = val.toUnsignedInt(zcu) });
return GenResult.mcv(.{ .immediate = val.toUnsignedInt(pt) });
},
else => {},
},
@@ -1017,8 +1024,8 @@ pub fn genTypedValue(
const info = ty.intInfo(zcu);
if (info.bits <= ptr_bits) {
const unsigned: u64 = switch (info.signedness) {
.signed => @bitCast(val.toSignedInt(zcu)),
.unsigned => val.toUnsignedInt(zcu),
.signed => @bitCast(val.toSignedInt(pt)),
.unsigned => val.toUnsignedInt(pt),
};
return GenResult.mcv(.{ .immediate = unsigned });
}
@@ -1030,11 +1037,12 @@ pub fn genTypedValue(
if (ty.isPtrLikeOptional(zcu)) {
return genTypedValue(
lf,
pt,
src_loc,
val.optionalValue(zcu) orelse return GenResult.mcv(.{ .immediate = 0 }),
owner_decl_index,
);
} else if (ty.abiSize(zcu) == 1) {
} else if (ty.abiSize(pt) == 1) {
return GenResult.mcv(.{ .immediate = @intFromBool(!val.isNull(zcu)) });
}
},
@@ -1042,6 +1050,7 @@ pub fn genTypedValue(
const enum_tag = ip.indexToKey(val.toIntern()).enum_tag;
return genTypedValue(
lf,
pt,
src_loc,
Value.fromInterned(enum_tag.int),
owner_decl_index,
@@ -1055,14 +1064,15 @@ pub fn genTypedValue(
.ErrorUnion => {
const err_type = ty.errorUnionSet(zcu);
const payload_type = ty.errorUnionPayload(zcu);
if (!payload_type.hasRuntimeBitsIgnoreComptime(zcu)) {
if (!payload_type.hasRuntimeBitsIgnoreComptime(pt)) {
// We use the error type directly as the type.
const err_int_ty = try zcu.errorIntType();
const err_int_ty = try pt.errorIntType();
switch (ip.indexToKey(val.toIntern()).error_union.val) {
.err_name => |err_name| return genTypedValue(
lf,
pt,
src_loc,
Value.fromInterned(try zcu.intern(.{ .err = .{
Value.fromInterned(try pt.intern(.{ .err = .{
.ty = err_type.toIntern(),
.name = err_name,
} })),
@@ -1070,8 +1080,9 @@ pub fn genTypedValue(
),
.payload => return genTypedValue(
lf,
pt,
src_loc,
try zcu.intValue(err_int_ty, 0),
try pt.intValue(err_int_ty, 0),
owner_decl_index,
),
}
@@ -1090,26 +1101,26 @@ pub fn genTypedValue(
else => {},
}
return genUnnamedConst(lf, src_loc, val, owner_decl_index);
return genUnnamedConst(lf, pt, src_loc, val, owner_decl_index);
}
pub fn errUnionPayloadOffset(payload_ty: Type, mod: *Module) u64 {
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) return 0;
const payload_align = payload_ty.abiAlignment(mod);
const error_align = Type.anyerror.abiAlignment(mod);
if (payload_align.compare(.gte, error_align) or !payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
pub fn errUnionPayloadOffset(payload_ty: Type, pt: Zcu.PerThread) u64 {
if (!payload_ty.hasRuntimeBitsIgnoreComptime(pt)) return 0;
const payload_align = payload_ty.abiAlignment(pt);
const error_align = Type.anyerror.abiAlignment(pt);
if (payload_align.compare(.gte, error_align) or !payload_ty.hasRuntimeBitsIgnoreComptime(pt)) {
return 0;
} else {
return payload_align.forward(Type.anyerror.abiSize(mod));
return payload_align.forward(Type.anyerror.abiSize(pt));
}
}
pub fn errUnionErrorOffset(payload_ty: Type, mod: *Module) u64 {
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) return 0;
const payload_align = payload_ty.abiAlignment(mod);
const error_align = Type.anyerror.abiAlignment(mod);
if (payload_align.compare(.gte, error_align) and payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
return error_align.forward(payload_ty.abiSize(mod));
pub fn errUnionErrorOffset(payload_ty: Type, pt: Zcu.PerThread) u64 {
if (!payload_ty.hasRuntimeBitsIgnoreComptime(pt)) return 0;
const payload_align = payload_ty.abiAlignment(pt);
const error_align = Type.anyerror.abiAlignment(pt);
if (payload_align.compare(.gte, error_align) and payload_ty.hasRuntimeBitsIgnoreComptime(pt)) {
return error_align.forward(payload_ty.abiSize(pt));
} else {
return 0;
}