compiler: eliminate Decl.value_arena and Sema.perm_arena

The main motivation for this commit is eliminating Decl.value_arena.
Everything else is dominoes.

Decl.name used to be allocated with the GPA; now it is interned in
InternPool. It ended up being simpler to migrate other strings as well,
such as struct field names, union field names, and a few others. This
required a big diff, sorry about that. But the changes are pretty nice:
we finally start to take advantage of InternPool's existence.
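
A minimal sketch of the new pattern, assuming mod: *Module, gpa:
std.mem.Allocator, decl and other_decl (*Module.Decl), and the usual std
import are in scope; the literal name is illustrative, not taken from
the diff:

    const ip = &mod.intern_pool;

    // Names are handles into InternPool's string table instead of GPA-owned
    // slices, so there is nothing to free per Decl.
    decl.name = try ip.getOrPutString(gpa, "my_decl");

    // Comparing two interned names is a cheap integer compare, and the bytes
    // are only materialized when actually needed (logs, symbols, etc.).
    if (decl.name == other_decl.name) {
        std.debug.print("duplicate name: {s}\n", .{ip.stringToSlice(decl.name)});
    }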

global_error_set and error_name_list are merged and simplified into a
single ArrayHashMap(NullTerminatedString, void); an error's tag value is
its index in that map.
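
Roughly, assuming mod: *Module and gpa are in scope (the error name is
just an example), both directions of the mapping become index
operations:

    // Name -> tag value: getOrPut into the array hash map; the entry index
    // is the error tag value (index 0 is reserved for the empty string).
    const name = try mod.intern_pool.getOrPutString(gpa, "OutOfMemory");
    const tag: Module.ErrorInt = try mod.getErrorValue(name);

    // Tag value -> name: keys() is ordered by tag value, so @intToError-style
    // lookups are a plain index into global_error_set.
    const round_trip = mod.global_error_set.keys()[tag];
    std.debug.assert(round_trip == name);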

Module.tmp_hack_arena is re-introduced (it was removed in
eeff407941560ce8eb5b737b2436dfa93cfd3a0c) in order to deal with
comptime_args, optimized_order, and struct and union fields. After
structs and unions get moved into InternPool properly, tmp_hack_arena
can be deleted again.
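
As a rough illustration of the intended lifetime (the dupe call and the
slice are hypothetical; only the tmp_hack_arena field itself comes from
this commit): data that used to live in a per-Decl value_arena now goes
through the module-wide arena and is only freed when the Module is
destroyed.

    // Hypothetical call site parking long-lived data, e.g. optimized_order.
    const long_lived = mod.tmp_hack_arena.allocator();
    const stable_copy = try long_lived.dupe(u32, some_optimized_order);

    // No per-Decl cleanup; Module.deinit() tears the whole arena down with
    // mod.tmp_hack_arena.deinit().
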
Andrew Kelley
2023-06-01 23:46:04 -07:00
parent 34dae73005
commit 69b7b91092
26 changed files with 1159 additions and 1131 deletions

View File

@@ -1317,7 +1317,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.global_zir_cache = global_zir_cache,
.local_zir_cache = local_zir_cache,
.emit_h = emit_h,
.error_name_list = .{},
.tmp_hack_arena = std.heap.ArenaAllocator.init(gpa),
};
try module.init();
@@ -2627,7 +2627,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !ErrorBundle {
var it = module.failed_files.iterator();
while (it.next()) |entry| {
if (entry.value_ptr.*) |msg| {
try addModuleErrorMsg(&bundle, msg.*);
try addModuleErrorMsg(module, &bundle, msg.*);
} else {
// Must be ZIR errors. Note that this may include AST errors.
// addZirErrorMessages asserts that the tree is loaded.
@@ -2640,7 +2640,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !ErrorBundle {
var it = module.failed_embed_files.iterator();
while (it.next()) |entry| {
const msg = entry.value_ptr.*;
try addModuleErrorMsg(&bundle, msg.*);
try addModuleErrorMsg(module, &bundle, msg.*);
}
}
{
@@ -2650,7 +2650,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !ErrorBundle {
// Skip errors for Decls within files that had a parse failure.
// We'll try again once parsing succeeds.
if (module.declFileScope(decl_index).okToReportErrors()) {
try addModuleErrorMsg(&bundle, entry.value_ptr.*.*);
try addModuleErrorMsg(module, &bundle, entry.value_ptr.*.*);
if (module.cimport_errors.get(entry.key_ptr.*)) |cimport_errors| for (cimport_errors) |c_error| {
try bundle.addRootErrorMessage(.{
.msg = try bundle.addString(std.mem.span(c_error.msg)),
@@ -2675,12 +2675,12 @@ pub fn getAllErrorsAlloc(self: *Compilation) !ErrorBundle {
// Skip errors for Decls within files that had a parse failure.
// We'll try again once parsing succeeds.
if (module.declFileScope(decl_index).okToReportErrors()) {
try addModuleErrorMsg(&bundle, entry.value_ptr.*.*);
try addModuleErrorMsg(module, &bundle, entry.value_ptr.*.*);
}
}
}
for (module.failed_exports.values()) |value| {
try addModuleErrorMsg(&bundle, value.*);
try addModuleErrorMsg(module, &bundle, value.*);
}
}
@@ -2728,7 +2728,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !ErrorBundle {
};
}
try addModuleErrorMsg(&bundle, err_msg);
try addModuleErrorMsg(module, &bundle, err_msg);
}
}
@@ -2784,8 +2784,9 @@ pub const ErrorNoteHashContext = struct {
}
};
pub fn addModuleErrorMsg(eb: *ErrorBundle.Wip, module_err_msg: Module.ErrorMsg) !void {
pub fn addModuleErrorMsg(mod: *Module, eb: *ErrorBundle.Wip, module_err_msg: Module.ErrorMsg) !void {
const gpa = eb.gpa;
const ip = &mod.intern_pool;
const err_source = module_err_msg.src_loc.file_scope.getSource(gpa) catch |err| {
const file_path = try module_err_msg.src_loc.file_scope.fullPath(gpa);
defer gpa.free(file_path);
@@ -2811,7 +2812,7 @@ pub fn addModuleErrorMsg(eb: *ErrorBundle.Wip, module_err_msg: Module.ErrorMsg)
.src_loc = .none,
});
break;
} else if (module_reference.decl == null) {
} else if (module_reference.decl == .none) {
try ref_traces.append(gpa, .{
.decl_name = 0,
.src_loc = .none,
@@ -2824,7 +2825,7 @@ pub fn addModuleErrorMsg(eb: *ErrorBundle.Wip, module_err_msg: Module.ErrorMsg)
const rt_file_path = try module_reference.src_loc.file_scope.fullPath(gpa);
defer gpa.free(rt_file_path);
try ref_traces.append(gpa, .{
.decl_name = try eb.addString(std.mem.sliceTo(module_reference.decl.?, 0)),
.decl_name = try eb.addString(ip.stringToSliceUnwrap(module_reference.decl).?),
.src_loc = try eb.addSourceLocation(.{
.src_path = try eb.addString(rt_file_path),
.span_start = span.start,

View File

@@ -124,6 +124,8 @@ pub const String = enum(u32) {
/// An index into `string_bytes`.
pub const NullTerminatedString = enum(u32) {
/// This is distinct from `none` - it is a valid index that represents empty string.
empty = 0,
_,
pub fn toString(self: NullTerminatedString) String {
@@ -157,6 +159,8 @@ pub const NullTerminatedString = enum(u32) {
/// An index into `string_bytes` which might be `none`.
pub const OptionalNullTerminatedString = enum(u32) {
/// This is distinct from `none` - it is a valid index that represents empty string.
empty = 0,
none = std.math.maxInt(u32),
_,
@@ -2447,6 +2451,9 @@ pub const MemoizedCall = struct {
pub fn init(ip: *InternPool, gpa: Allocator) !void {
assert(ip.items.len == 0);
// Reserve string index 0 for an empty string.
assert((try ip.getOrPutString(gpa, "")) == .empty);
// So that we can use `catch unreachable` below.
try ip.items.ensureUnusedCapacity(gpa, static_keys.len);
try ip.map.ensureUnusedCapacity(gpa, static_keys.len);
@@ -5222,6 +5229,28 @@ pub fn getOrPutString(
return ip.getOrPutTrailingString(gpa, s.len + 1);
}
pub fn getOrPutStringFmt(
ip: *InternPool,
gpa: Allocator,
comptime format: []const u8,
args: anytype,
) Allocator.Error!NullTerminatedString {
const start = ip.string_bytes.items.len;
try ip.string_bytes.writer(gpa).print(format, args);
try ip.string_bytes.append(gpa, 0);
return ip.getOrPutTrailingString(gpa, ip.string_bytes.items.len - start);
}
pub fn getOrPutStringOpt(
ip: *InternPool,
gpa: Allocator,
optional_string: ?[]const u8,
) Allocator.Error!OptionalNullTerminatedString {
const s = optional_string orelse return .none;
const interned = try getOrPutString(ip, gpa, s);
return interned.toOptional();
}
/// Uses the last len bytes of ip.string_bytes as the key.
pub fn getOrPutTrailingString(
ip: *InternPool,
@@ -5273,6 +5302,10 @@ pub fn stringToSliceUnwrap(ip: *const InternPool, s: OptionalNullTerminatedStrin
return ip.stringToSlice(s.unwrap() orelse return null);
}
pub fn stringEqlSlice(ip: *const InternPool, a: NullTerminatedString, b: []const u8) bool {
return std.mem.eql(u8, stringToSlice(ip, a), b);
}
pub fn typeOf(ip: *const InternPool, index: Index) Index {
// This optimization of static keys is required so that typeOf can be called
// on static keys that haven't been added yet during static key initialization.

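A usage sketch for the string helpers added above, assuming ip: *InternPool
and gpa: std.mem.Allocator are in scope (the literals are illustrative):

    // Formatted interning, e.g. for generated names like "test_42" or "foo__anon_7".
    const anon_name = try ip.getOrPutStringFmt(gpa, "{s}__anon_{d}", .{ "foo", 7 });
    std.debug.assert(ip.stringEqlSlice(anon_name, "foo__anon_7"));

    // Optional interning: null maps to .none without touching string_bytes.
    const maybe_section = try ip.getOrPutStringOpt(gpa, null);
    std.debug.assert(maybe_section == .none);

    // Incremental building: write straight into string_bytes, then intern the tail.
    const start = ip.string_bytes.items.len;
    try ip.string_bytes.writer(gpa).print("decltest.{s}", .{"my_test"});
    const interned = try ip.getOrPutTrailingString(gpa, ip.string_bytes.items.len - start);
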
View File

@@ -88,6 +88,14 @@ embed_table: std.StringHashMapUnmanaged(*EmbedFile) = .{},
/// Stores all Type and Value objects; periodically garbage collected.
intern_pool: InternPool = .{},
/// To be eliminated in a future commit by moving more data into InternPool.
/// Current uses that must be eliminated:
/// * Struct comptime_args
/// * Struct optimized_order
/// * Union fields
/// This memory lives until the Module is destroyed.
tmp_hack_arena: std.heap.ArenaAllocator,
/// This is currently only used for string literals.
memoized_decls: std.AutoHashMapUnmanaged(InternPool.Index, Decl.Index) = .{},
@@ -125,13 +133,8 @@ cimport_errors: std.AutoArrayHashMapUnmanaged(Decl.Index, []CImportError) = .{},
/// contains Decls that need to be deleted if they end up having no references to them.
deletion_set: std.AutoArrayHashMapUnmanaged(Decl.Index, void) = .{},
/// Error tags and their values, tag names are duped with mod.gpa.
/// Corresponds with `error_name_list`.
global_error_set: std.StringHashMapUnmanaged(ErrorInt) = .{},
/// ErrorInt -> []const u8 for fast lookups for @intToError at comptime
/// Corresponds with `global_error_set`.
error_name_list: ArrayListUnmanaged([]const u8),
/// Key is the error name, index is the error tag value. Index 0 has a length-0 string.
global_error_set: GlobalErrorSet = .{},
/// Incrementing integer used to compare against the corresponding Decl
/// field to determine whether a Decl's status applies to an ongoing update, or a
@@ -182,6 +185,8 @@ reference_table: std.AutoHashMapUnmanaged(Decl.Index, struct {
src: LazySrcLoc,
}) = .{},
pub const GlobalErrorSet = std.AutoArrayHashMapUnmanaged(InternPool.NullTerminatedString, void);
pub const CImportError = struct {
offset: u32,
line: u32,
@@ -248,7 +253,11 @@ pub const GlobalEmitH = struct {
pub const ErrorInt = u32;
pub const Export = struct {
options: std.builtin.ExportOptions,
name: InternPool.NullTerminatedString,
linkage: std.builtin.GlobalLinkage,
section: InternPool.OptionalNullTerminatedString,
visibility: std.builtin.SymbolVisibility,
src: LazySrcLoc,
/// The Decl that performs the export. Note that this is *not* the Decl being exported.
owner_decl: Decl.Index,
@@ -392,8 +401,7 @@ const ValueArena = struct {
};
pub const Decl = struct {
/// Allocated with Module's allocator; outlives the ZIR code.
name: [*:0]const u8,
name: InternPool.NullTerminatedString,
/// The most recent Type of the Decl after a successful semantic analysis.
/// Populated when `has_tv`.
ty: Type,
@@ -401,15 +409,11 @@ pub const Decl = struct {
/// Populated when `has_tv`.
val: Value,
/// Populated when `has_tv`.
/// Points to memory inside value_arena.
@"linksection": ?[*:0]const u8,
@"linksection": InternPool.OptionalNullTerminatedString,
/// Populated when `has_tv`.
@"align": u32,
/// Populated when `has_tv`.
@"addrspace": std.builtin.AddressSpace,
/// The memory for ty, val, align, linksection, and captures.
/// If this is `null` then there is no memory management needed.
value_arena: ?*ValueArena = null,
/// The direct parent namespace of the Decl.
/// Reference to externally owned memory.
/// In the case of the Decl corresponding to a file, this is
@@ -564,13 +568,7 @@ pub const Decl = struct {
function_body,
};
pub fn clearName(decl: *Decl, gpa: Allocator) void {
gpa.free(mem.sliceTo(decl.name, 0));
decl.name = undefined;
}
pub fn clearValues(decl: *Decl, mod: *Module) void {
const gpa = mod.gpa;
if (decl.getOwnedFunctionIndex(mod).unwrap()) |func| {
_ = mod.align_stack_fns.remove(func);
if (mod.funcPtr(func).comptime_args != null) {
@@ -579,19 +577,6 @@ pub const Decl = struct {
mod.destroyFunc(func);
}
_ = mod.memoized_decls.remove(decl.val.ip_index);
if (decl.value_arena) |value_arena| {
value_arena.deinit(gpa);
decl.value_arena = null;
decl.has_tv = false;
decl.owns_tv = false;
}
}
pub fn finalizeNewArena(decl: *Decl, arena: *std.heap.ArenaAllocator) !void {
assert(decl.value_arena == null);
const value_arena = try arena.allocator().create(ValueArena);
value_arena.* = .{ .state = arena.state };
decl.value_arena = value_arena;
}
/// This name is relative to the containing namespace of the decl.
@@ -692,7 +677,7 @@ pub const Decl = struct {
}
pub fn renderFullyQualifiedName(decl: Decl, mod: *Module, writer: anytype) !void {
const unqualified_name = mem.sliceTo(decl.name, 0);
const unqualified_name = mod.intern_pool.stringToSlice(decl.name);
if (decl.name_fully_qualified) {
return writer.writeAll(unqualified_name);
}
@@ -700,24 +685,27 @@ pub const Decl = struct {
}
pub fn renderFullyQualifiedDebugName(decl: Decl, mod: *Module, writer: anytype) !void {
const unqualified_name = mem.sliceTo(decl.name, 0);
const unqualified_name = mod.intern_pool.stringToSlice(decl.name);
return mod.namespacePtr(decl.src_namespace).renderFullyQualifiedDebugName(mod, unqualified_name, writer);
}
pub fn getFullyQualifiedName(decl: Decl, mod: *Module) ![:0]u8 {
var buffer = std.ArrayList(u8).init(mod.gpa);
defer buffer.deinit();
try decl.renderFullyQualifiedName(mod, buffer.writer());
pub fn getFullyQualifiedName(decl: Decl, mod: *Module) !InternPool.NullTerminatedString {
const gpa = mod.gpa;
const ip = &mod.intern_pool;
const start = ip.string_bytes.items.len;
try decl.renderFullyQualifiedName(mod, ip.string_bytes.writer(gpa));
// Sanitize the name for nvptx which is more restrictive.
// TODO This should be handled by the backend, not the frontend. Have a
// look at how the C backend does it for inspiration.
if (mod.comp.bin_file.options.target.cpu.arch.isNvptx()) {
for (buffer.items) |*byte| switch (byte.*) {
for (ip.string_bytes.items[start..]) |*byte| switch (byte.*) {
'{', '}', '*', '[', ']', '(', ')', ',', ' ', '\'' => byte.* = '_',
else => {},
};
}
return buffer.toOwnedSliceSentinel(0);
return ip.getOrPutTrailingString(gpa, ip.string_bytes.items.len - start);
}
pub fn typedValue(decl: Decl) error{AnalysisFail}!TypedValue {
@@ -804,11 +792,11 @@ pub const Decl = struct {
pub fn dump(decl: *Decl) void {
const loc = std.zig.findLineColumn(decl.scope.source.bytes, decl.src);
std.debug.print("{s}:{d}:{d} name={s} status={s}", .{
std.debug.print("{s}:{d}:{d} name={d} status={s}", .{
decl.scope.sub_file_path,
loc.line + 1,
loc.column + 1,
mem.sliceTo(decl.name, 0),
@enumToInt(decl.name),
@tagName(decl.analysis),
});
if (decl.has_tv) {
@@ -922,15 +910,15 @@ pub const Struct = struct {
}
};
pub const Fields = std.StringArrayHashMapUnmanaged(Field);
pub const Fields = std.AutoArrayHashMapUnmanaged(InternPool.NullTerminatedString, Field);
/// The `Type` and `Value` memory is owned by the arena of the Struct's owner_decl.
pub const Field = struct {
/// Uses `noreturn` to indicate `anytype`.
/// undefined until `status` is >= `have_field_types`.
ty: Type,
/// Uses `unreachable_value` to indicate no default.
default_val: Value,
/// Uses `none` to indicate no default.
default_val: InternPool.Index,
/// Zero means to use the ABI alignment of the type.
abi_align: u32,
/// undefined until `status` is `have_layout`.
@@ -982,7 +970,7 @@ pub const Struct = struct {
/// runtime version of the struct.
pub const omitted_field = std.math.maxInt(u32);
pub fn getFullyQualifiedName(s: *Struct, mod: *Module) ![:0]u8 {
pub fn getFullyQualifiedName(s: *Struct, mod: *Module) !InternPool.NullTerminatedString {
return mod.declPtr(s.owner_decl).getFullyQualifiedName(mod);
}
@@ -1141,9 +1129,9 @@ pub const Union = struct {
}
};
pub const Fields = std.StringArrayHashMapUnmanaged(Field);
pub const Fields = std.AutoArrayHashMapUnmanaged(InternPool.NullTerminatedString, Field);
pub fn getFullyQualifiedName(s: *Union, mod: *Module) ![:0]u8 {
pub fn getFullyQualifiedName(s: *Union, mod: *Module) !InternPool.NullTerminatedString {
return mod.declPtr(s.owner_decl).getFullyQualifiedName(mod);
}
@@ -1569,15 +1557,15 @@ pub const Fn = struct {
pub const DeclAdapter = struct {
mod: *Module,
pub fn hash(self: @This(), s: []const u8) u32 {
pub fn hash(self: @This(), s: InternPool.NullTerminatedString) u32 {
_ = self;
return @truncate(u32, std.hash.Wyhash.hash(0, s));
return std.hash.uint32(@enumToInt(s));
}
pub fn eql(self: @This(), a: []const u8, b_decl_index: Decl.Index, b_index: usize) bool {
pub fn eql(self: @This(), a: InternPool.NullTerminatedString, b_decl_index: Decl.Index, b_index: usize) bool {
_ = b_index;
const b_decl = self.mod.declPtr(b_decl_index);
return mem.eql(u8, a, mem.sliceTo(b_decl.name, 0));
return a == b_decl.name;
}
};
@@ -1628,16 +1616,14 @@ pub const Namespace = struct {
pub fn hash(ctx: @This(), decl_index: Decl.Index) u32 {
const decl = ctx.module.declPtr(decl_index);
return @truncate(u32, std.hash.Wyhash.hash(0, mem.sliceTo(decl.name, 0)));
return std.hash.uint32(@enumToInt(decl.name));
}
pub fn eql(ctx: @This(), a_decl_index: Decl.Index, b_decl_index: Decl.Index, b_index: usize) bool {
_ = b_index;
const a_decl = ctx.module.declPtr(a_decl_index);
const b_decl = ctx.module.declPtr(b_decl_index);
const a_name = mem.sliceTo(a_decl.name, 0);
const b_name = mem.sliceTo(b_decl.name, 0);
return mem.eql(u8, a_name, b_name);
return a_decl.name == b_decl.name;
}
};
@@ -1649,8 +1635,6 @@ pub const Namespace = struct {
pub fn destroyDecls(ns: *Namespace, mod: *Module) void {
const gpa = mod.gpa;
log.debug("destroyDecls {*}", .{ns});
var decls = ns.decls;
ns.decls = .{};
@@ -1676,8 +1660,6 @@ pub const Namespace = struct {
) !void {
const gpa = mod.gpa;
log.debug("deleteAllDecls {*}", .{ns});
var decls = ns.decls;
ns.decls = .{};
@@ -1712,7 +1694,8 @@ pub const Namespace = struct {
if (ns.parent.unwrap()) |parent| {
const decl_index = ns.getDeclIndex(mod);
const decl = mod.declPtr(decl_index);
try mod.namespacePtr(parent).renderFullyQualifiedName(mod, mem.sliceTo(decl.name, 0), writer);
const decl_name = mod.intern_pool.stringToSlice(decl.name);
try mod.namespacePtr(parent).renderFullyQualifiedName(mod, decl_name, writer);
} else {
try ns.file_scope.renderFullyQualifiedName(writer);
}
@@ -1733,7 +1716,8 @@ pub const Namespace = struct {
if (ns.parent.unwrap()) |parent| {
const decl_index = ns.getDeclIndex(mod);
const decl = mod.declPtr(decl_index);
try mod.namespacePtr(parent).renderFullyQualifiedDebugName(mod, mem.sliceTo(decl.name, 0), writer);
const decl_name = mod.intern_pool.stringToSlice(decl.name);
try mod.namespacePtr(parent).renderFullyQualifiedDebugName(mod, decl_name, writer);
} else {
try ns.file_scope.renderFullyQualifiedDebugName(writer);
separator_char = ':';
@@ -1927,11 +1911,11 @@ pub const File = struct {
};
}
pub fn fullyQualifiedNameZ(file: File, gpa: Allocator) ![:0]u8 {
var buf = std.ArrayList(u8).init(gpa);
defer buf.deinit();
try file.renderFullyQualifiedName(buf.writer());
return buf.toOwnedSliceSentinel(0);
pub fn fullyQualifiedName(file: File, mod: *Module) !InternPool.NullTerminatedString {
const ip = &mod.intern_pool;
const start = ip.string_bytes.items.len;
try file.renderFullyQualifiedName(ip.string_bytes.writer(mod.gpa));
return ip.getOrPutTrailingString(mod.gpa, ip.string_bytes.items.len - start);
}
/// Returns the full path to this file relative to its package.
@@ -2055,7 +2039,7 @@ pub const ErrorMsg = struct {
reference_trace: []Trace = &.{},
pub const Trace = struct {
decl: ?[*:0]const u8,
decl: InternPool.OptionalNullTerminatedString,
src_loc: SrcLoc,
hidden: u32 = 0,
};
@@ -3180,8 +3164,8 @@ pub const CompileError = error{
pub fn init(mod: *Module) !void {
const gpa = mod.gpa;
try mod.error_name_list.append(gpa, "(no error)");
try mod.intern_pool.init(gpa);
try mod.global_error_set.put(gpa, .empty, {});
}
pub fn deinit(mod: *Module) void {
@@ -3282,15 +3266,8 @@ pub fn deinit(mod: *Module) void {
}
mod.export_owners.deinit(gpa);
{
var it = mod.global_error_set.keyIterator();
while (it.next()) |key| {
gpa.free(key.*);
}
mod.global_error_set.deinit(gpa);
}
mod.global_error_set.deinit(gpa);
mod.error_name_list.deinit(gpa);
mod.test_functions.deinit(gpa);
mod.align_stack_fns.deinit(gpa);
mod.monomorphed_funcs.deinit(gpa);
@@ -3305,13 +3282,13 @@ pub fn deinit(mod: *Module) void {
mod.memoized_decls.deinit(gpa);
mod.intern_pool.deinit(gpa);
mod.tmp_hack_arena.deinit();
}
pub fn destroyDecl(mod: *Module, decl_index: Decl.Index) void {
const gpa = mod.gpa;
{
const decl = mod.declPtr(decl_index);
log.debug("destroy {*} ({s})", .{ decl, decl.name });
_ = mod.test_functions.swapRemove(decl_index);
if (decl.deletion_flag) {
assert(mod.deletion_set.swapRemove(decl_index));
@@ -3329,7 +3306,6 @@ pub fn destroyDecl(mod: *Module, decl_index: Decl.Index) void {
decl.clearValues(mod);
decl.dependants.deinit(gpa);
decl.dependencies.deinit(gpa);
decl.clearName(gpa);
decl.* = undefined;
}
mod.decls_free_list.append(gpa, decl_index) catch {
@@ -3391,11 +3367,7 @@ pub fn declIsRoot(mod: *Module, decl_index: Decl.Index) bool {
}
fn freeExportList(gpa: Allocator, export_list: *ArrayListUnmanaged(*Export)) void {
for (export_list.items) |exp| {
gpa.free(exp.options.name);
if (exp.options.section) |s| gpa.free(s);
gpa.destroy(exp);
}
for (export_list.items) |exp| gpa.destroy(exp);
export_list.deinit(gpa);
}
@@ -3814,9 +3786,6 @@ fn updateZirRefs(mod: *Module, file: *File, old_zir: Zir) !void {
if (decl.zir_decl_index != 0) {
const old_zir_decl_index = decl.zir_decl_index;
const new_zir_decl_index = extra_map.get(old_zir_decl_index) orelse {
log.debug("updateZirRefs {s}: delete {*} ({s})", .{
file.sub_file_path, decl, decl.name,
});
try file.deleted_decls.append(gpa, decl_index);
continue;
};
@@ -3824,14 +3793,7 @@ fn updateZirRefs(mod: *Module, file: *File, old_zir: Zir) !void {
decl.zir_decl_index = new_zir_decl_index;
const new_hash = decl.contentsHashZir(new_zir);
if (!std.zig.srcHashEql(old_hash, new_hash)) {
log.debug("updateZirRefs {s}: outdated {*} ({s}) {d} => {d}", .{
file.sub_file_path, decl, decl.name, old_zir_decl_index, new_zir_decl_index,
});
try file.outdated_decls.append(gpa, decl_index);
} else {
log.debug("updateZirRefs {s}: unchanged {*} ({s}) {d} => {d}", .{
file.sub_file_path, decl, decl.name, old_zir_decl_index, new_zir_decl_index,
});
}
}
@@ -4031,8 +3993,6 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl_index: Decl.Index) SemaError!void {
.complete => return,
.outdated => blk: {
log.debug("re-analyzing {*} ({s})", .{ decl, decl.name });
// The exports this Decl performs will be re-discovered, so we remove them here
// prior to re-analysis.
try mod.deleteDeclExports(decl_index);
@@ -4047,9 +4007,6 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl_index: Decl.Index) SemaError!void {
const dep = mod.declPtr(dep_index);
dep.removeDependant(decl_index);
if (dep.dependants.count() == 0 and !dep.deletion_flag) {
log.debug("insert {*} ({s}) dependant {*} ({s}) into deletion set", .{
decl, decl.name, dep, dep.name,
});
try mod.markDeclForDeletion(dep_index);
}
}
@@ -4061,7 +4018,7 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl_index: Decl.Index) SemaError!void {
.unreferenced => false,
};
var decl_prog_node = mod.sema_prog_node.start(mem.sliceTo(decl.name, 0), 0);
var decl_prog_node = mod.sema_prog_node.start(mod.intern_pool.stringToSlice(decl.name), 0);
decl_prog_node.activate();
defer decl_prog_node.end();
@@ -4190,14 +4147,11 @@ pub fn ensureFuncBodyAnalyzed(mod: *Module, func_index: Fn.Index) SemaError!void
if (no_bin_file and !dump_air and !dump_llvm_ir) return;
log.debug("analyze liveness of {s}", .{decl.name});
var liveness = try Liveness.analyze(gpa, air, &mod.intern_pool);
defer liveness.deinit(gpa);
if (dump_air) {
const fqn = try decl.getFullyQualifiedName(mod);
defer mod.gpa.free(fqn);
const fqn = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
std.debug.print("# Begin Function AIR: {s}:\n", .{fqn});
@import("print_air.zig").dump(mod, air, liveness);
std.debug.print("# End Function AIR: {s}\n\n", .{fqn});
@@ -4354,9 +4308,6 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
if (file.root_decl != .none) return;
const gpa = mod.gpa;
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
const new_decl_arena_allocator = new_decl_arena.allocator();
// Because these three things each reference each other, `undefined`
// placeholders are used before being set after the struct type gains an
@@ -4394,7 +4345,7 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
new_namespace.ty = struct_ty.toType();
file.root_decl = new_decl_index.toOptional();
new_decl.name = try file.fullyQualifiedNameZ(gpa);
new_decl.name = try file.fullyQualifiedName(mod);
new_decl.src_line = 0;
new_decl.is_pub = true;
new_decl.is_exported = false;
@@ -4403,7 +4354,7 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
new_decl.ty = Type.type;
new_decl.val = struct_ty.toValue();
new_decl.@"align" = 0;
new_decl.@"linksection" = null;
new_decl.@"linksection" = .none;
new_decl.has_tv = true;
new_decl.owns_tv = true;
new_decl.alive = true; // This Decl corresponds to a File and is therefore always alive.
@@ -4431,7 +4382,6 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
.mod = mod,
.gpa = gpa,
.arena = sema_arena_allocator,
.perm_arena = new_decl_arena_allocator,
.code = file.zir,
.owner_decl = new_decl,
.owner_decl_index = new_decl_index,
@@ -4484,8 +4434,6 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
} else {
new_decl.analysis = .file_failure;
}
try new_decl.finalizeNewArena(&new_decl_arena);
}
/// Returns `true` if the Decl type changed.
@@ -4507,28 +4455,8 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
decl.analysis = .in_progress;
// We need the memory for the Type to go into the arena for the Decl
var decl_arena = std.heap.ArenaAllocator.init(gpa);
const decl_arena_allocator = decl_arena.allocator();
const decl_value_arena = blk: {
errdefer decl_arena.deinit();
const s = try decl_arena_allocator.create(ValueArena);
s.* = .{ .state = undefined };
break :blk s;
};
defer {
if (decl.value_arena) |value_arena| {
assert(value_arena.state_acquired == null);
decl_value_arena.prev = value_arena;
}
decl_value_arena.state = decl_arena.state;
decl.value_arena = decl_value_arena;
}
var analysis_arena = std.heap.ArenaAllocator.init(gpa);
defer analysis_arena.deinit();
const analysis_arena_allocator = analysis_arena.allocator();
var comptime_mutable_decls = std.ArrayList(Decl.Index).init(gpa);
defer comptime_mutable_decls.deinit();
@@ -4536,8 +4464,7 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
var sema: Sema = .{
.mod = mod,
.gpa = gpa,
.arena = analysis_arena_allocator,
.perm_arena = decl_arena_allocator,
.arena = analysis_arena.allocator(),
.code = zir,
.owner_decl = decl,
.owner_decl_index = decl_index,
@@ -4551,7 +4478,6 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
defer sema.deinit();
if (mod.declIsRoot(decl_index)) {
log.debug("semaDecl root {*} ({s})", .{ decl, decl.name });
const main_struct_inst = Zir.main_struct_inst;
const struct_index = decl.getOwnedStructIndex(mod).unwrap().?;
const struct_obj = mod.structPtr(struct_index);
@@ -4563,7 +4489,6 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
decl.generation = mod.generation;
return false;
}
log.debug("semaDecl {*} ({s})", .{ decl, decl.name });
var wip_captures = try WipCaptureScope.init(gpa, decl.src_scope);
defer wip_captures.deinit();
@@ -4619,7 +4544,7 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
decl.ty = InternPool.Index.type_type.toType();
decl.val = ty.toValue();
decl.@"align" = 0;
decl.@"linksection" = null;
decl.@"linksection" = .none;
decl.has_tv = true;
decl.owns_tv = false;
decl.analysis = .complete;
@@ -4646,7 +4571,7 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
decl.clearValues(mod);
decl.ty = decl_tv.ty;
decl.val = try decl_tv.val.copy(decl_arena_allocator);
decl.val = (try decl_tv.val.intern(decl_tv.ty, mod)).toValue();
// linksection, align, and addrspace were already set by Sema
decl.has_tv = true;
decl.owns_tv = owns_tv;
@@ -4660,7 +4585,9 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
return sema.fail(&block_scope, export_src, "export of inline function", .{});
}
// The scope needs to have the decl in it.
const options: std.builtin.ExportOptions = .{ .name = mem.sliceTo(decl.name, 0) };
const options: std.builtin.ExportOptions = .{
.name = mod.intern_pool.stringToSlice(decl.name),
};
try sema.analyzeExport(&block_scope, export_src, options, decl_index);
}
return type_changed or is_inline != prev_is_inline;
@@ -4693,14 +4620,13 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
.func => {},
else => {
log.debug("send global const to linker: {*} ({s})", .{ decl, decl.name });
queue_linker_work = true;
},
},
}
decl.ty = decl_tv.ty;
decl.val = try decl_tv.val.copy(decl_arena_allocator);
decl.val = (try decl_tv.val.intern(decl_tv.ty, mod)).toValue();
decl.@"align" = blk: {
const align_ref = decl.zirAlignRef(mod);
if (align_ref == .none) break :blk 0;
@@ -4708,14 +4634,15 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
};
decl.@"linksection" = blk: {
const linksection_ref = decl.zirLinksectionRef(mod);
if (linksection_ref == .none) break :blk null;
if (linksection_ref == .none) break :blk .none;
const bytes = try sema.resolveConstString(&block_scope, section_src, linksection_ref, "linksection must be comptime-known");
if (mem.indexOfScalar(u8, bytes, 0) != null) {
return sema.fail(&block_scope, section_src, "linksection cannot contain null bytes", .{});
} else if (bytes.len == 0) {
return sema.fail(&block_scope, section_src, "linksection cannot be empty", .{});
}
break :blk (try decl_arena_allocator.dupeZ(u8, bytes)).ptr;
const section = try mod.intern_pool.getOrPutString(gpa, bytes);
break :blk section.toOptional();
};
decl.@"addrspace" = blk: {
const addrspace_ctx: Sema.AddressSpaceContext = switch (mod.intern_pool.indexToKey(decl_tv.val.toIntern())) {
@@ -4743,7 +4670,6 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
(queue_linker_work and try sema.typeHasRuntimeBits(decl.ty));
if (has_runtime_bits) {
log.debug("queue linker work for {*} ({s})", .{ decl, decl.name });
// Needed for codegen_decl which will call updateDecl and then the
// codegen backend wants full access to the Decl Type.
@@ -4759,7 +4685,9 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
if (decl.is_exported) {
const export_src: LazySrcLoc = .{ .token_offset = @boolToInt(decl.is_pub) };
// The scope needs to have the decl in it.
const options: std.builtin.ExportOptions = .{ .name = mem.sliceTo(decl.name, 0) };
const options: std.builtin.ExportOptions = .{
.name = mod.intern_pool.stringToSlice(decl.name),
};
try sema.analyzeExport(&block_scope, export_src, options, decl_index);
}
@@ -4785,10 +4713,6 @@ pub fn declareDeclDependencyType(mod: *Module, depender_index: Decl.Index, depen
}
}
log.debug("{*} ({s}) depends on {*} ({s})", .{
depender, depender.name, dependee, dependee.name,
});
if (dependee.deletion_flag) {
dependee.deletion_flag = false;
assert(mod.deletion_set.swapRemove(dependee_index));
@@ -5138,6 +5062,7 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
const namespace = mod.namespacePtr(namespace_index);
const gpa = mod.gpa;
const zir = namespace.file_scope.zir;
const ip = &mod.intern_pool;
// zig fmt: off
const is_pub = (flags & 0b0001) != 0;
@@ -5157,31 +5082,31 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
// Every Decl needs a name.
var is_named_test = false;
var kind: Decl.Kind = .named;
const decl_name: [:0]const u8 = switch (decl_name_index) {
const decl_name: InternPool.NullTerminatedString = switch (decl_name_index) {
0 => name: {
if (export_bit) {
const i = iter.usingnamespace_index;
iter.usingnamespace_index += 1;
kind = .@"usingnamespace";
break :name try std.fmt.allocPrintZ(gpa, "usingnamespace_{d}", .{i});
break :name try ip.getOrPutStringFmt(gpa, "usingnamespace_{d}", .{i});
} else {
const i = iter.comptime_index;
iter.comptime_index += 1;
kind = .@"comptime";
break :name try std.fmt.allocPrintZ(gpa, "comptime_{d}", .{i});
break :name try ip.getOrPutStringFmt(gpa, "comptime_{d}", .{i});
}
},
1 => name: {
const i = iter.unnamed_test_index;
iter.unnamed_test_index += 1;
kind = .@"test";
break :name try std.fmt.allocPrintZ(gpa, "test_{d}", .{i});
break :name try ip.getOrPutStringFmt(gpa, "test_{d}", .{i});
},
2 => name: {
is_named_test = true;
const test_name = zir.nullTerminatedString(decl_doccomment_index);
kind = .@"test";
break :name try std.fmt.allocPrintZ(gpa, "decltest.{s}", .{test_name});
break :name try ip.getOrPutStringFmt(gpa, "decltest.{s}", .{test_name});
},
else => name: {
const raw_name = zir.nullTerminatedString(decl_name_index);
@@ -5189,14 +5114,12 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
is_named_test = true;
const test_name = zir.nullTerminatedString(decl_name_index + 1);
kind = .@"test";
break :name try std.fmt.allocPrintZ(gpa, "test.{s}", .{test_name});
break :name try ip.getOrPutStringFmt(gpa, "test.{s}", .{test_name});
} else {
break :name try gpa.dupeZ(u8, raw_name);
break :name try ip.getOrPutString(gpa, raw_name);
}
},
};
var must_free_decl_name = true;
defer if (must_free_decl_name) gpa.free(decl_name);
const is_exported = export_bit and decl_name_index != 0;
if (kind == .@"usingnamespace") try namespace.usingnamespace_set.ensureUnusedCapacity(gpa, 1);
@@ -5204,7 +5127,7 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
// We create a Decl for it regardless of analysis status.
const gop = try namespace.decls.getOrPutContextAdapted(
gpa,
@as([]const u8, mem.sliceTo(decl_name, 0)),
decl_name,
DeclAdapter{ .mod = mod },
Namespace.DeclContext{ .module = mod },
);
@@ -5214,11 +5137,9 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
const new_decl = mod.declPtr(new_decl_index);
new_decl.kind = kind;
new_decl.name = decl_name;
must_free_decl_name = false;
if (kind == .@"usingnamespace") {
namespace.usingnamespace_set.putAssumeCapacity(new_decl_index, is_pub);
}
log.debug("scan new {*} ({s}) into {*}", .{ new_decl, decl_name, namespace });
new_decl.src_line = line;
gop.key_ptr.* = new_decl_index;
// Exported decls, comptime decls, usingnamespace decls, and
@@ -5239,7 +5160,7 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
if (!comp.bin_file.options.is_test) break :blk false;
if (decl_pkg != mod.main_pkg) break :blk false;
if (comp.test_filter) |test_filter| {
if (mem.indexOf(u8, decl_name, test_filter) == null) {
if (mem.indexOf(u8, ip.stringToSlice(decl_name), test_filter) == null) {
break :blk false;
}
}
@@ -5270,7 +5191,7 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
gpa,
src_loc,
"duplicate test name: {s}",
.{decl_name},
.{ip.stringToSlice(decl_name)},
);
errdefer msg.destroy(gpa);
try mod.failed_decls.putNoClobber(gpa, decl_index, msg);
@@ -5281,7 +5202,6 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
};
try mod.errNoteNonLazy(other_src_loc, msg, "other test here", .{});
}
log.debug("scan existing {*} ({s}) of {*}", .{ decl, decl.name, namespace });
// Update the AST node of the decl; even if its contents are unchanged, it may
// have been re-ordered.
decl.src_node = decl_node;
@@ -5315,7 +5235,6 @@ pub fn clearDecl(
defer tracy.end();
const decl = mod.declPtr(decl_index);
log.debug("clearing {*} ({s})", .{ decl, decl.name });
const gpa = mod.gpa;
try mod.deletion_set.ensureUnusedCapacity(gpa, decl.dependencies.count());
@@ -5330,9 +5249,6 @@ pub fn clearDecl(
const dep = mod.declPtr(dep_index);
dep.removeDependant(decl_index);
if (dep.dependants.count() == 0 and !dep.deletion_flag) {
log.debug("insert {*} ({s}) dependant {*} ({s}) into deletion set", .{
decl, decl.name, dep, dep.name,
});
// We don't recursively perform a deletion here, because during the update,
// another reference to it may turn up.
dep.deletion_flag = true;
@@ -5387,7 +5303,6 @@ pub fn clearDecl(
/// This function is exclusively called for anonymous decls.
pub fn deleteUnusedDecl(mod: *Module, decl_index: Decl.Index) void {
const decl = mod.declPtr(decl_index);
log.debug("deleteUnusedDecl {d} ({s})", .{ decl_index, decl.name });
assert(!mod.declIsRoot(decl_index));
assert(mod.namespacePtr(decl.src_namespace).anon_decls.swapRemove(decl_index));
@@ -5415,7 +5330,6 @@ fn markDeclForDeletion(mod: *Module, decl_index: Decl.Index) !void {
/// If other decls depend on this decl, they must be aborted first.
pub fn abortAnonDecl(mod: *Module, decl_index: Decl.Index) void {
const decl = mod.declPtr(decl_index);
log.debug("abortAnonDecl {*} ({s})", .{ decl, decl.name });
assert(!mod.declIsRoot(decl_index));
assert(mod.namespacePtr(decl.src_namespace).anon_decls.swapRemove(decl_index));
@@ -5468,21 +5382,20 @@ fn deleteDeclExports(mod: *Module, decl_index: Decl.Index) Allocator.Error!void
}
}
if (mod.comp.bin_file.cast(link.File.Elf)) |elf| {
elf.deleteDeclExport(decl_index, exp.options.name);
elf.deleteDeclExport(decl_index, exp.name);
}
if (mod.comp.bin_file.cast(link.File.MachO)) |macho| {
try macho.deleteDeclExport(decl_index, exp.options.name);
try macho.deleteDeclExport(decl_index, exp.name);
}
if (mod.comp.bin_file.cast(link.File.Wasm)) |wasm| {
wasm.deleteDeclExport(decl_index);
}
if (mod.comp.bin_file.cast(link.File.Coff)) |coff| {
coff.deleteDeclExport(decl_index, exp.options.name);
coff.deleteDeclExport(decl_index, exp.name);
}
if (mod.failed_exports.fetchSwapRemove(exp)) |failed_kv| {
failed_kv.value.destroy(mod.gpa);
}
mod.gpa.free(exp.options.name);
mod.gpa.destroy(exp);
}
export_owners.deinit(mod.gpa);
@@ -5497,11 +5410,6 @@ pub fn analyzeFnBody(mod: *Module, func_index: Fn.Index, arena: Allocator) SemaE
const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index);
// Use the Decl's arena for captured values.
var decl_arena: std.heap.ArenaAllocator = undefined;
const decl_arena_allocator = decl.value_arena.?.acquire(gpa, &decl_arena);
defer decl.value_arena.?.release(&decl_arena);
var comptime_mutable_decls = std.ArrayList(Decl.Index).init(gpa);
defer comptime_mutable_decls.deinit();
@@ -5512,7 +5420,6 @@ pub fn analyzeFnBody(mod: *Module, func_index: Fn.Index, arena: Allocator) SemaE
.mod = mod,
.gpa = gpa,
.arena = arena,
.perm_arena = decl_arena_allocator,
.code = decl.getFileScope(mod).zir,
.owner_decl = decl,
.owner_decl_index = decl_index,
@@ -5616,7 +5523,6 @@ pub fn analyzeFnBody(mod: *Module, func_index: Fn.Index, arena: Allocator) SemaE
}
func.state = .in_progress;
log.debug("set {s} to in_progress", .{decl.name});
const last_arg_index = inner_block.instructions.items.len;
@@ -5677,7 +5583,6 @@ pub fn analyzeFnBody(mod: *Module, func_index: Fn.Index, arena: Allocator) SemaE
sema.air_extra.items[@enumToInt(Air.ExtraIndex.main_block)] = main_block_index;
func.state = .success;
log.debug("set {s} to success", .{decl.name});
// Finally we must resolve the return type and parameter types so that backends
// have full access to type information.
@@ -5724,7 +5629,6 @@ pub fn analyzeFnBody(mod: *Module, func_index: Fn.Index, arena: Allocator) SemaE
fn markOutdatedDecl(mod: *Module, decl_index: Decl.Index) !void {
const decl = mod.declPtr(decl_index);
log.debug("mark outdated {*} ({s})", .{ decl, decl.name });
try mod.comp.work_queue.writeItem(.{ .analyze_decl = decl_index });
if (mod.failed_decls.fetchSwapRemove(decl_index)) |kv| {
kv.value.destroy(mod.gpa);
@@ -5821,7 +5725,7 @@ pub fn allocateNewDecl(
.ty = undefined,
.val = undefined,
.@"align" = undefined,
.@"linksection" = undefined,
.@"linksection" = .none,
.@"addrspace" = .generic,
.analysis = .unreferenced,
.deletion_flag = false,
@@ -5839,25 +5743,20 @@ pub fn allocateNewDecl(
return decl_and_index.decl_index;
}
/// Get error value for error tag `name`.
pub fn getErrorValue(mod: *Module, name: []const u8) !std.StringHashMapUnmanaged(ErrorInt).KV {
pub fn getErrorValue(
mod: *Module,
name: InternPool.NullTerminatedString,
) Allocator.Error!ErrorInt {
const gop = try mod.global_error_set.getOrPut(mod.gpa, name);
if (gop.found_existing) {
return std.StringHashMapUnmanaged(ErrorInt).KV{
.key = gop.key_ptr.*,
.value = gop.value_ptr.*,
};
}
return @intCast(ErrorInt, gop.index);
}
errdefer assert(mod.global_error_set.remove(name));
try mod.error_name_list.ensureUnusedCapacity(mod.gpa, 1);
gop.key_ptr.* = try mod.gpa.dupe(u8, name);
gop.value_ptr.* = @intCast(ErrorInt, mod.error_name_list.items.len);
mod.error_name_list.appendAssumeCapacity(gop.key_ptr.*);
return std.StringHashMapUnmanaged(ErrorInt).KV{
.key = gop.key_ptr.*,
.value = gop.value_ptr.*,
};
pub fn getErrorValueFromSlice(
mod: *Module,
name: []const u8,
) Allocator.Error!ErrorInt {
const interned_name = try mod.intern_pool.getOrPutString(mod.gpa, name);
return getErrorValue(mod, interned_name);
}
pub fn createAnonymousDecl(mod: *Module, block: *Sema.Block, typed_value: TypedValue) !Decl.Index {
@@ -5874,24 +5773,23 @@ pub fn createAnonymousDeclFromDecl(
) !Decl.Index {
const new_decl_index = try mod.allocateNewDecl(namespace, src_decl.src_node, src_scope);
errdefer mod.destroyDecl(new_decl_index);
const name = try std.fmt.allocPrintZ(mod.gpa, "{s}__anon_{d}", .{
src_decl.name, @enumToInt(new_decl_index),
const ip = &mod.intern_pool;
const name = try ip.getOrPutStringFmt(mod.gpa, "{s}__anon_{d}", .{
ip.stringToSlice(src_decl.name), @enumToInt(new_decl_index),
});
try mod.initNewAnonDecl(new_decl_index, src_decl.src_line, namespace, tv, name);
return new_decl_index;
}
/// Takes ownership of `name` even if it returns an error.
pub fn initNewAnonDecl(
mod: *Module,
new_decl_index: Decl.Index,
src_line: u32,
namespace: Namespace.Index,
typed_value: TypedValue,
name: [:0]u8,
name: InternPool.NullTerminatedString,
) Allocator.Error!void {
assert(typed_value.ty.toIntern() == mod.intern_pool.typeOf(typed_value.val.toIntern()));
errdefer mod.gpa.free(name);
const new_decl = mod.declPtr(new_decl_index);
@@ -5900,7 +5798,7 @@ pub fn initNewAnonDecl(
new_decl.ty = typed_value.ty;
new_decl.val = typed_value.val;
new_decl.@"align" = 0;
new_decl.@"linksection" = null;
new_decl.@"linksection" = .none;
new_decl.has_tv = true;
new_decl.analysis = .complete;
new_decl.generation = mod.generation;
@@ -6330,12 +6228,11 @@ pub fn processOutdatedAndDeletedDecls(mod: *Module) !void {
// deletion set at this time.
for (file.deleted_decls.items) |decl_index| {
const decl = mod.declPtr(decl_index);
log.debug("deleted from source: {*} ({s})", .{ decl, decl.name });
// Remove from the namespace it resides in, preserving declaration order.
assert(decl.zir_decl_index != 0);
_ = mod.namespacePtr(decl.src_namespace).decls.orderedRemoveAdapted(
@as([]const u8, mem.sliceTo(decl.name, 0)),
decl.name,
DeclAdapter{ .mod = mod },
);
@@ -6357,7 +6254,7 @@ pub fn processOutdatedAndDeletedDecls(mod: *Module) !void {
pub fn processExports(mod: *Module) !void {
const gpa = mod.gpa;
// Map symbol names to `Export` for name collision detection.
var symbol_exports: std.StringArrayHashMapUnmanaged(*Export) = .{};
var symbol_exports: std.AutoArrayHashMapUnmanaged(InternPool.NullTerminatedString, *Export) = .{};
defer symbol_exports.deinit(gpa);
var it = mod.decl_exports.iterator();
@@ -6365,13 +6262,13 @@ pub fn processExports(mod: *Module) !void {
const exported_decl = entry.key_ptr.*;
const exports = entry.value_ptr.items;
for (exports) |new_export| {
const gop = try symbol_exports.getOrPut(gpa, new_export.options.name);
const gop = try symbol_exports.getOrPut(gpa, new_export.name);
if (gop.found_existing) {
new_export.status = .failed_retryable;
try mod.failed_exports.ensureUnusedCapacity(gpa, 1);
const src_loc = new_export.getSrcLoc(mod);
const msg = try ErrorMsg.create(gpa, src_loc, "exported symbol collision: {s}", .{
new_export.options.name,
mod.intern_pool.stringToSlice(new_export.name),
});
errdefer msg.destroy(gpa);
const other_export = gop.value_ptr.*;
@@ -6408,8 +6305,9 @@ pub fn populateTestFunctions(
const builtin_file = (mod.importPkg(builtin_pkg) catch unreachable).file;
const root_decl = mod.declPtr(builtin_file.root_decl.unwrap().?);
const builtin_namespace = mod.namespacePtr(root_decl.src_namespace);
const test_functions_str = try mod.intern_pool.getOrPutString(gpa, "test_functions");
const decl_index = builtin_namespace.decls.getKeyAdapted(
@as([]const u8, "test_functions"),
test_functions_str,
DeclAdapter{ .mod = mod },
).?;
{
@@ -6443,7 +6341,7 @@ pub fn populateTestFunctions(
for (test_fn_vals, mod.test_functions.keys()) |*test_fn_val, test_decl_index| {
const test_decl = mod.declPtr(test_decl_index);
const test_decl_name = mem.span(test_decl.name);
const test_decl_name = mod.intern_pool.stringToSlice(test_decl.name);
const test_name_decl_index = n: {
const test_name_decl_ty = try mod.arrayType(.{
.len = test_decl_name.len,
@@ -7156,7 +7054,7 @@ pub fn opaqueSrcLoc(mod: *Module, opaque_type: InternPool.Key.OpaqueType) SrcLoc
return mod.declPtr(opaque_type.decl).srcLoc(mod);
}
pub fn opaqueFullyQualifiedName(mod: *Module, opaque_type: InternPool.Key.OpaqueType) ![:0]u8 {
pub fn opaqueFullyQualifiedName(mod: *Module, opaque_type: InternPool.Key.OpaqueType) !InternPool.NullTerminatedString {
return mod.declPtr(opaque_type.decl).getFullyQualifiedName(mod);
}

File diff suppressed because it is too large

View File

@@ -201,10 +201,10 @@ pub fn print(
},
.variable => return writer.writeAll("(variable)"),
.extern_func => |extern_func| return writer.print("(extern function '{s}')", .{
mod.declPtr(extern_func.decl).name,
mod.intern_pool.stringToSlice(mod.declPtr(extern_func.decl).name),
}),
.func => |func| return writer.print("(function '{s}')", .{
mod.declPtr(mod.funcPtr(func.index).owner_decl).name,
.func => |func| return writer.print("(function '{d}')", .{
mod.intern_pool.stringToSlice(mod.declPtr(mod.funcPtr(func.index).owner_decl).name),
}),
.int => |int| switch (int.storage) {
inline .u64, .i64, .big_int => |x| return writer.print("{}", .{x}),
@@ -296,19 +296,20 @@ fn printAggregate(
}
if (ty.zigTypeTag(mod) == .Struct) {
try writer.writeAll(".{");
const max_len = std.math.min(ty.structFieldCount(mod), max_aggregate_items);
const max_len = @min(ty.structFieldCount(mod), max_aggregate_items);
var i: u32 = 0;
while (i < max_len) : (i += 1) {
for (0..max_len) |i| {
if (i != 0) try writer.writeAll(", ");
if (switch (mod.intern_pool.indexToKey(ty.toIntern())) {
.struct_type => |struct_type| mod.structPtrUnwrap(struct_type.index).?.fields.keys()[i],
.anon_struct_type => |anon_struct_type| if (anon_struct_type.isTuple())
null
else
mod.intern_pool.stringToSlice(anon_struct_type.names[i]),
const field_name = switch (mod.intern_pool.indexToKey(ty.toIntern())) {
.struct_type => |x| mod.structPtrUnwrap(x.index).?.fields.keys()[i].toOptional(),
.anon_struct_type => |x| if (x.isTuple()) .none else x.names[i].toOptional(),
else => unreachable,
}) |field_name| try writer.print(".{s} = ", .{field_name});
};
if (field_name.unwrap()) |name_ip| try writer.print(".{s} = ", .{
mod.intern_pool.stringToSlice(name_ip),
});
try print(.{
.ty = ty.structFieldType(i, mod),
.val = try val.fieldValue(mod, i),

View File

@@ -4350,7 +4350,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
.data = .{ .reg = .x30 },
});
} else if (func_value.getExternFunc(mod)) |extern_func| {
const decl_name = mem.sliceTo(mod.declPtr(extern_func.decl).name, 0);
const decl_name = mod.intern_pool.stringToSlice(mod.declPtr(extern_func.decl).name);
const lib_name = mod.intern_pool.stringToSliceUnwrap(extern_func.lib_name);
if (self.bin_file.cast(link.File.MachO)) |macho_file| {
const sym_index = try macho_file.getGlobalSymbol(decl_name, lib_name);

View File

@@ -276,8 +276,6 @@ pub fn generate(
assert(fn_owner_decl.has_tv);
const fn_type = fn_owner_decl.ty;
log.debug("fn {s}", .{fn_owner_decl.name});
var branch_stack = std.ArrayList(Branch).init(bin_file.allocator);
defer {
assert(branch_stack.items.len == 1);

View File

@@ -2208,7 +2208,7 @@ fn airCall(func: *CodeGen, inst: Air.Inst.Index, modifier: std.builtin.CallModif
const atom = func.bin_file.getAtomPtr(atom_index);
const type_index = try func.bin_file.storeDeclType(extern_func.decl, func_type);
try func.bin_file.addOrUpdateImport(
mem.sliceTo(ext_decl.name, 0),
mod.intern_pool.stringToSlice(ext_decl.name),
atom.getSymbolIndex().?,
mod.intern_pool.stringToSliceUnwrap(ext_decl.getOwnedExternFunc(mod).?.lib_name),
type_index,
@@ -3180,9 +3180,8 @@ fn lowerConstant(func: *CodeGen, arg_val: Value, ty: Type) InnerError!WValue {
}
},
.err => |err| {
const name = mod.intern_pool.stringToSlice(err.name);
const kv = try mod.getErrorValue(name);
return WValue{ .imm32 = kv.value };
const int = try mod.getErrorValue(err.name);
return WValue{ .imm32 = int };
},
.error_union => |error_union| {
const err_tv: TypedValue = switch (error_union.val) {
@@ -3320,18 +3319,15 @@ fn valueAsI32(func: *const CodeGen, val: Value, ty: Type) i32 {
.enum_tag => |enum_tag| intIndexAsI32(&mod.intern_pool, enum_tag.int, mod),
.int => |int| intStorageAsI32(int.storage, mod),
.ptr => |ptr| intIndexAsI32(&mod.intern_pool, ptr.addr.int, mod),
.err => |err| @bitCast(i32, mod.global_error_set.get(mod.intern_pool.stringToSlice(err.name)).?),
.err => |err| @bitCast(i32, @intCast(Module.ErrorInt, mod.global_error_set.getIndex(err.name).?)),
else => unreachable,
},
}
switch (ty.zigTypeTag(mod)) {
.ErrorSet => {
const kv = func.bin_file.base.options.module.?.getErrorValue(val.getError(mod).?) catch unreachable; // passed invalid `Value` to function
return @bitCast(i32, kv.value);
},
return switch (ty.zigTypeTag(mod)) {
.ErrorSet => @bitCast(i32, val.getErrorInt(mod)),
else => unreachable, // Programmer called this function for an illegal type
}
};
}
fn intIndexAsI32(ip: *const InternPool, int: InternPool.Index, mod: *Module) i32 {
@@ -6874,8 +6870,7 @@ fn getTagNameFunction(func: *CodeGen, enum_ty: Type) InnerError!u32 {
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
const fqn = try mod.declPtr(enum_decl_index).getFullyQualifiedName(mod);
defer mod.gpa.free(fqn);
const fqn = mod.intern_pool.stringToSlice(try mod.declPtr(enum_decl_index).getFullyQualifiedName(mod));
const func_name = try std.fmt.allocPrintZ(arena, "__zig_tag_name_{s}", .{fqn});
// check if we already generated code for this.
@@ -7037,9 +7032,8 @@ fn airErrorSetHasValue(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
var lowest: ?u32 = null;
var highest: ?u32 = null;
for (names) |name_ip| {
const name = mod.intern_pool.stringToSlice(name_ip);
const err_int = mod.global_error_set.get(name).?;
for (names) |name| {
const err_int = @intCast(Module.ErrorInt, mod.global_error_set.getIndex(name).?);
if (lowest) |*l| {
if (err_int < l.*) {
l.* = err_int;

View File

@@ -8132,7 +8132,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
}));
} else unreachable;
} else if (func_value.getExternFunc(mod)) |extern_func| {
const decl_name = mem.sliceTo(mod.declPtr(extern_func.decl).name, 0);
const decl_name = mod.intern_pool.stringToSlice(mod.declPtr(extern_func.decl).name);
const lib_name = mod.intern_pool.stringToSliceUnwrap(extern_func.lib_name);
if (self.bin_file.cast(link.File.Coff)) |coff_file| {
const atom_index = try self.owner.getSymbolIndex(self);

View File

@@ -142,11 +142,12 @@ pub fn generateLazySymbol(
if (lazy_sym.ty.isAnyError(mod)) {
alignment.* = 4;
const err_names = mod.error_name_list.items;
const err_names = mod.global_error_set.keys();
mem.writeInt(u32, try code.addManyAsArray(4), @intCast(u32, err_names.len), endian);
var offset = code.items.len;
try code.resize((1 + err_names.len + 1) * 4);
for (err_names) |err_name| {
for (err_names) |err_name_nts| {
const err_name = mod.intern_pool.stringToSlice(err_name_nts);
mem.writeInt(u32, code.items[offset..][0..4], @intCast(u32, code.items.len), endian);
offset += 4;
try code.ensureUnusedCapacity(err_name.len + 1);
@@ -251,15 +252,13 @@ pub fn generateSymbol(
val.writeTwosComplement(try code.addManyAsSlice(abi_size), endian);
},
.err => |err| {
const name = mod.intern_pool.stringToSlice(err.name);
const kv = try mod.getErrorValue(name);
try code.writer().writeInt(u16, @intCast(u16, kv.value), endian);
const int = try mod.getErrorValue(err.name);
try code.writer().writeInt(u16, @intCast(u16, int), endian);
},
.error_union => |error_union| {
const payload_ty = typed_value.ty.errorUnionPayload(mod);
const err_val = switch (error_union.val) {
.err_name => |err_name| @intCast(u16, (try mod.getErrorValue(mod.intern_pool.stringToSlice(err_name))).value),
.err_name => |err_name| @intCast(u16, try mod.getErrorValue(err_name)),
.payload => @as(u16, 0),
};
@@ -974,11 +973,8 @@ pub fn genTypedValue(
}, owner_decl_index);
},
.ErrorSet => {
const err_name = mod.intern_pool.stringToSlice(
mod.intern_pool.indexToKey(typed_value.val.toIntern()).err.name,
);
const global_error_set = mod.global_error_set;
const error_index = global_error_set.get(err_name).?;
const err_name = mod.intern_pool.indexToKey(typed_value.val.toIntern()).err.name;
const error_index = mod.global_error_set.getIndex(err_name).?;
return GenResult.mcv(.{ .immediate = error_index });
},
.ErrorUnion => {

View File

@@ -452,6 +452,7 @@ pub const Function = struct {
var promoted = f.object.dg.ctypes.promote(gpa);
defer f.object.dg.ctypes.demote(promoted);
const arena = promoted.arena.allocator();
const mod = f.object.dg.module;
gop.value_ptr.* = .{
.fn_name = switch (key) {
@@ -460,7 +461,7 @@ pub const Function = struct {
.never_inline,
=> |owner_decl| try std.fmt.allocPrint(arena, "zig_{s}_{}__{d}", .{
@tagName(key),
fmtIdent(mem.span(f.object.dg.module.declPtr(owner_decl).name)),
fmtIdent(mod.intern_pool.stringToSlice(mod.declPtr(owner_decl).name)),
@enumToInt(owner_decl),
}),
},
@@ -1465,7 +1466,7 @@ pub const DeclGen = struct {
try writer.writeAll(" .payload = {");
}
if (field_ty.hasRuntimeBits(mod)) {
try writer.print(" .{ } = ", .{fmtIdent(field_name)});
try writer.print(" .{ } = ", .{fmtIdent(mod.intern_pool.stringToSlice(field_name))});
try dg.renderValue(writer, field_ty, un.val.toValue(), initializer_type);
try writer.writeByte(' ');
} else for (ty.unionFields(mod).values()) |field| {
@@ -1849,9 +1850,9 @@ pub const DeclGen = struct {
try mod.markDeclAlive(decl);
if (mod.decl_exports.get(decl_index)) |exports| {
try writer.writeAll(exports.items[export_index].options.name);
try writer.writeAll(mod.intern_pool.stringToSlice(exports.items[export_index].name));
} else if (decl.isExtern(mod)) {
try writer.writeAll(mem.span(decl.name));
try writer.writeAll(mod.intern_pool.stringToSlice(decl.name));
} else {
// MSVC has a limit of 4095 character token length limit, and fmtIdent can (worst case),
// expand to 3x the length of its input, but let's cut it off at a much shorter limit.
@@ -1987,7 +1988,7 @@ fn renderTypeName(
try w.print("{s} {s}{}__{d}", .{
@tagName(tag)["fwd_".len..],
attributes,
fmtIdent(mem.span(mod.declPtr(owner_decl).name)),
fmtIdent(mod.intern_pool.stringToSlice(mod.declPtr(owner_decl).name)),
@enumToInt(owner_decl),
});
},
@@ -2406,11 +2407,12 @@ pub fn genErrDecls(o: *Object) !void {
try writer.writeAll("enum {\n");
o.indent_writer.pushIndent();
var max_name_len: usize = 0;
for (mod.error_name_list.items[1..], 1..) |name, value| {
max_name_len = std.math.max(name.len, max_name_len);
for (mod.global_error_set.keys()[1..], 1..) |name_nts, value| {
const name = mod.intern_pool.stringToSlice(name_nts);
max_name_len = @max(name.len, max_name_len);
const err_val = try mod.intern(.{ .err = .{
.ty = .anyerror_type,
.name = mod.intern_pool.getString(name).unwrap().?,
.name = name_nts,
} });
try o.dg.renderValue(writer, Type.anyerror, err_val.toValue(), .Other);
try writer.print(" = {d}u,\n", .{value});
@@ -2424,7 +2426,8 @@ pub fn genErrDecls(o: *Object) !void {
defer o.dg.gpa.free(name_buf);
@memcpy(name_buf[0..name_prefix.len], name_prefix);
for (mod.error_name_list.items) |name| {
for (mod.global_error_set.keys()) |name_nts| {
const name = mod.intern_pool.stringToSlice(name_nts);
@memcpy(name_buf[name_prefix.len..][0..name.len], name);
const identifier = name_buf[0 .. name_prefix.len + name.len];
@@ -2446,14 +2449,15 @@ pub fn genErrDecls(o: *Object) !void {
}
const name_array_ty = try mod.arrayType(.{
.len = mod.error_name_list.items.len,
.len = mod.global_error_set.count(),
.child = .slice_const_u8_sentinel_0_type,
});
try writer.writeAll("static ");
try o.dg.renderTypeAndName(writer, name_array_ty, .{ .identifier = array_identifier }, Const, 0, .complete);
try writer.writeAll(" = {");
for (mod.error_name_list.items, 0..) |name, value| {
for (mod.global_error_set.keys(), 0..) |name_nts, value| {
const name = mod.intern_pool.stringToSlice(name_nts);
if (value != 0) try writer.writeByte(',');
const len_val = try mod.intValue(Type.usize, name.len);
@@ -2469,14 +2473,16 @@ fn genExports(o: *Object) !void {
const tracy = trace(@src());
defer tracy.end();
const mod = o.dg.module;
const ip = &mod.intern_pool;
const fwd_decl_writer = o.dg.fwd_decl.writer();
if (o.dg.module.decl_exports.get(o.dg.decl_index.unwrap().?)) |exports| {
if (mod.decl_exports.get(o.dg.decl_index.unwrap().?)) |exports| {
for (exports.items[1..], 1..) |@"export", i| {
try fwd_decl_writer.writeAll("zig_export(");
try o.dg.renderFunctionSignature(fwd_decl_writer, o.dg.decl_index.unwrap().?, .forward, .{ .export_index = @intCast(u32, i) });
try fwd_decl_writer.print(", {s}, {s});\n", .{
fmtStringLiteral(exports.items[0].options.name, null),
fmtStringLiteral(@"export".options.name, null),
fmtStringLiteral(ip.stringToSlice(exports.items[0].name), null),
fmtStringLiteral(ip.stringToSlice(@"export".name), null),
});
}
}
@@ -2680,9 +2686,10 @@ pub fn genDecl(o: *Object) !void {
if (!is_global) try w.writeAll("static ");
if (variable.is_threadlocal) try w.writeAll("zig_threadlocal ");
if (variable.is_weak_linkage) try w.writeAll("zig_weak_linkage ");
if (decl.@"linksection") |section| try w.print("zig_linksection(\"{s}\", ", .{section});
if (mod.intern_pool.stringToSliceUnwrap(decl.@"linksection")) |s|
try w.print("zig_linksection(\"{s}\", ", .{s});
try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, .{}, decl.@"align", .complete);
if (decl.@"linksection" != null) try w.writeAll(", read, write)");
if (decl.@"linksection" != .none) try w.writeAll(", read, write)");
try w.writeAll(" = ");
try o.dg.renderValue(w, tv.ty, variable.init.toValue(), .StaticInitializer);
try w.writeByte(';');
@@ -2697,9 +2704,10 @@ pub fn genDecl(o: *Object) !void {
const w = o.writer();
if (!is_global) try w.writeAll("static ");
if (decl.@"linksection") |section| try w.print("zig_linksection(\"{s}\", ", .{section});
if (mod.intern_pool.stringToSliceUnwrap(decl.@"linksection")) |s|
try w.print("zig_linksection(\"{s}\", ", .{s});
try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, Const, decl.@"align", .complete);
if (decl.@"linksection" != null) try w.writeAll(", read)");
if (decl.@"linksection" != .none) try w.writeAll(", read)");
try w.writeAll(" = ");
try o.dg.renderValue(w, tv.ty, tv.val, .StaticInitializer);
try w.writeAll(";\n");
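The two genDecl hunks above replace a nullable section slice with an interned handle: the slice is recovered through `stringToSliceUnwrap` only when printing, and the presence check becomes a comparison against `.none`. A minimal sketch of that optional-handle pattern, using a hypothetical `OptionalNameHandle` type that this diff does not define:

const std = @import("std");

// Hypothetical analogue of an optional interned string: .none is a sentinel,
// any other backing value is a real handle.
const OptionalNameHandle = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn unwrap(self: OptionalNameHandle) ?u32 {
        return if (self == .none) null else @enumToInt(self);
    }
};

pub fn main() void {
    const section = @intToEnum(OptionalNameHandle, 3); // e.g. an interned ".data"

    // The slice form is only materialized when something is printed...
    if (section.unwrap()) |handle| {
        std.debug.print("zig_linksection handle {d}\n", .{handle});
    }
    // ...while the presence check is a plain comparison against the sentinel.
    if (section != .none) {
        std.debug.print("has a linksection\n", .{});
    }
}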
@@ -4229,7 +4237,9 @@ fn airDbgInline(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const writer = f.object.writer();
const function = mod.funcPtr(ty_fn.func);
try writer.print("/* dbg func:{s} */\n", .{mod.declPtr(function.owner_decl).name});
try writer.print("/* dbg func:{s} */\n", .{
mod.intern_pool.stringToSlice(mod.declPtr(function.owner_decl).name),
});
return .none;
}
@@ -5176,6 +5186,7 @@ fn fieldLocation(
byte_offset: u32,
end: void,
} {
const ip = &mod.intern_pool;
return switch (container_ty.zigTypeTag(mod)) {
.Struct => switch (container_ty.containerLayout(mod)) {
.Auto, .Extern => for (field_index..container_ty.structFieldCount(mod)) |next_field_index| {
@@ -5186,7 +5197,7 @@ fn fieldLocation(
break .{ .field = if (container_ty.isSimpleTuple(mod))
.{ .field = next_field_index }
else
.{ .identifier = container_ty.structFieldName(next_field_index, mod) } };
.{ .identifier = ip.stringToSlice(container_ty.structFieldName(next_field_index, mod)) } };
} else if (container_ty.hasRuntimeBitsIgnoreComptime(mod)) .end else .begin,
.Packed => if (field_ptr_ty.ptrInfo(mod).host_size == 0)
.{ .byte_offset = container_ty.packedStructFieldByteOffset(field_index, mod) }
@@ -5204,9 +5215,9 @@ fn fieldLocation(
.begin;
const field_name = container_ty.unionFields(mod).keys()[field_index];
return .{ .field = if (container_ty.unionTagTypeSafety(mod)) |_|
.{ .payload_identifier = field_name }
.{ .payload_identifier = ip.stringToSlice(field_name) }
else
.{ .identifier = field_name } };
.{ .identifier = ip.stringToSlice(field_name) } };
},
.Packed => .begin,
},
@@ -5347,6 +5358,7 @@ fn fieldPtr(
fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ip = &mod.intern_pool;
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const extra = f.air.extraData(Air.StructField, ty_pl.payload).data;
@@ -5369,7 +5381,7 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
.Auto, .Extern => if (struct_ty.isSimpleTuple(mod))
.{ .field = extra.field_index }
else
.{ .identifier = struct_ty.structFieldName(extra.field_index, mod) },
.{ .identifier = ip.stringToSlice(struct_ty.structFieldName(extra.field_index, mod)) },
.Packed => {
const struct_obj = mod.typeToStruct(struct_ty).?;
const int_info = struct_ty.intInfo(mod);
@@ -5431,7 +5443,7 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
.anon_struct_type => |anon_struct_type| if (anon_struct_type.names.len == 0)
.{ .field = extra.field_index }
else
.{ .identifier = struct_ty.structFieldName(extra.field_index, mod) },
.{ .identifier = ip.stringToSlice(struct_ty.structFieldName(extra.field_index, mod)) },
.union_type => |union_type| field_name: {
const union_obj = mod.unionPtr(union_type.index);
@@ -5462,9 +5474,9 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
} else {
const name = union_obj.fields.keys()[extra.field_index];
break :field_name if (union_type.hasTag()) .{
.payload_identifier = name,
.payload_identifier = ip.stringToSlice(name),
} else .{
.identifier = name,
.identifier = ip.stringToSlice(name),
};
}
},
@@ -6723,6 +6735,7 @@ fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue {
fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ip = &mod.intern_pool;
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const inst_ty = f.typeOfIndex(inst);
const len = @intCast(usize, inst_ty.arrayLen(mod));
@@ -6773,7 +6786,7 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValueMember(writer, local, if (inst_ty.isSimpleTuple(mod))
.{ .field = field_i }
else
.{ .identifier = inst_ty.structFieldName(field_i, mod) });
.{ .identifier = ip.stringToSlice(inst_ty.structFieldName(field_i, mod)) });
try a.assign(f, writer);
try f.writeCValue(writer, element, .Other);
try a.end(f, writer);
@@ -6851,6 +6864,7 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ip = &mod.intern_pool;
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const extra = f.air.extraData(Air.UnionInit, ty_pl.payload).data;
@@ -6886,8 +6900,8 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.print("{}", .{try f.fmtIntLiteral(tag_ty, int_val)});
try a.end(f, writer);
}
break :field .{ .payload_identifier = field_name };
} else .{ .identifier = field_name };
break :field .{ .payload_identifier = ip.stringToSlice(field_name) };
} else .{ .identifier = ip.stringToSlice(field_name) };
const a = try Assignment.start(f, writer, payload_ty);
try f.writeCValueMember(writer, local, field);

View File

@@ -1953,11 +1953,11 @@ pub const CType = extern union {
.name = try if (ty.isSimpleTuple(mod))
std.fmt.allocPrintZ(arena, "f{}", .{field_i})
else
arena.dupeZ(u8, switch (zig_ty_tag) {
arena.dupeZ(u8, mod.intern_pool.stringToSlice(switch (zig_ty_tag) {
.Struct => ty.structFieldName(field_i, mod),
.Union => ty.unionFields(mod).keys()[field_i],
else => unreachable,
}),
})),
.type = store.set.typeToIndex(field_ty, mod, switch (kind) {
.forward, .forward_parameter => .forward,
.complete, .parameter, .payload => .complete,
@@ -2102,12 +2102,13 @@ pub const CType = extern union {
}) or !mem.eql(
u8,
if (ty.isSimpleTuple(mod))
std.fmt.bufPrint(&name_buf, "f{}", .{field_i}) catch unreachable
else switch (zig_ty_tag) {
.Struct => ty.structFieldName(field_i, mod),
.Union => ty.unionFields(mod).keys()[field_i],
else => unreachable,
},
std.fmt.bufPrintZ(&name_buf, "f{}", .{field_i}) catch unreachable
else
mod.intern_pool.stringToSlice(switch (zig_ty_tag) {
.Struct => ty.structFieldName(field_i, mod),
.Union => ty.unionFields(mod).keys()[field_i],
else => unreachable,
}),
mem.span(c_field.name),
) or AlignAs.fieldAlign(ty, field_i, mod).@"align" !=
c_field.alignas.@"align") return false;
@@ -2225,11 +2226,12 @@ pub const CType = extern union {
});
hasher.update(if (ty.isSimpleTuple(mod))
std.fmt.bufPrint(&name_buf, "f{}", .{field_i}) catch unreachable
else switch (zig_ty_tag) {
.Struct => ty.structFieldName(field_i, mod),
.Union => ty.unionFields(mod).keys()[field_i],
else => unreachable,
});
else
mod.intern_pool.stringToSlice(switch (zig_ty_tag) {
.Struct => ty.structFieldName(field_i, mod),
.Union => ty.unionFields(mod).keys()[field_i],
else => unreachable,
}));
autoHash(hasher, AlignAs.fieldAlign(ty, field_i, mod).@"align");
}
},

View File

@@ -585,13 +585,13 @@ pub const Object = struct {
const slice_ty = Type.slice_const_u8_sentinel_0;
const slice_alignment = slice_ty.abiAlignment(mod);
const error_name_list = mod.error_name_list.items;
const error_name_list = mod.global_error_set.keys();
const llvm_errors = try mod.gpa.alloc(*llvm.Value, error_name_list.len);
defer mod.gpa.free(llvm_errors);
llvm_errors[0] = llvm_slice_ty.getUndef();
for (llvm_errors[1..], 0..) |*llvm_error, i| {
const name = error_name_list[1..][i];
for (llvm_errors[1..], error_name_list[1..]) |*llvm_error, name_nts| {
const name = mod.intern_pool.stringToSlice(name_nts);
const str_init = self.context.constString(name.ptr, @intCast(c_uint, name.len), .False);
const str_global = self.llvm_module.addGlobal(str_init.typeOf(), "");
str_global.setInitializer(str_init);
@@ -671,7 +671,7 @@ pub const Object = struct {
const llvm_global = entry.value_ptr.*;
// Same logic as below but for externs instead of exports.
const decl = mod.declPtr(decl_index);
const other_global = object.getLlvmGlobal(decl.name) orelse continue;
const other_global = object.getLlvmGlobal(mod.intern_pool.stringToSlice(decl.name)) orelse continue;
if (other_global == llvm_global) continue;
llvm_global.replaceAllUsesWith(other_global);
@@ -689,8 +689,7 @@ pub const Object = struct {
// case, we need to replace all uses of it with this exported global.
// TODO update std.builtin.ExportOptions to have the name be a
// null-terminated slice.
const exp_name_z = try mod.gpa.dupeZ(u8, exp.options.name);
defer mod.gpa.free(exp_name_z);
const exp_name_z = mod.intern_pool.stringToSlice(exp.name);
const other_global = object.getLlvmGlobal(exp_name_z.ptr) orelse continue;
if (other_global == llvm_global) continue;
@@ -923,9 +922,8 @@ pub const Object = struct {
dg.addFnAttrString(llvm_func, "no-stack-arg-probe", "");
}
if (decl.@"linksection") |section| {
if (mod.intern_pool.stringToSliceUnwrap(decl.@"linksection")) |section|
llvm_func.setSection(section);
}
// Remove all the basic blocks of a function in order to start over, generating
// LLVM IR from an empty function body.
@@ -1173,7 +1171,7 @@ pub const Object = struct {
0;
const subprogram = dib.createFunction(
di_file.?.toScope(),
decl.name,
mod.intern_pool.stringToSlice(decl.name),
llvm_func.getValueName(),
di_file.?,
line_number,
@@ -1273,22 +1271,26 @@ pub const Object = struct {
if (decl.isExtern(mod)) {
var free_decl_name = false;
const decl_name = decl_name: {
const decl_name = mod.intern_pool.stringToSlice(decl.name);
if (mod.getTarget().isWasm() and try decl.isFunction(mod)) {
if (mod.intern_pool.stringToSliceUnwrap(decl.getOwnedExternFunc(mod).?.lib_name)) |lib_name| {
if (!std.mem.eql(u8, lib_name, "c")) {
free_decl_name = true;
break :decl_name try std.fmt.allocPrintZ(gpa, "{s}|{s}", .{ decl.name, lib_name });
break :decl_name try std.fmt.allocPrintZ(gpa, "{s}|{s}", .{
decl_name, lib_name,
});
}
}
}
break :decl_name std.mem.span(decl.name);
break :decl_name decl_name;
};
defer if (free_decl_name) gpa.free(decl_name);
llvm_global.setValueName(decl_name);
if (self.getLlvmGlobal(decl_name)) |other_global| {
if (other_global != llvm_global) {
log.debug("updateDeclExports isExtern()=true setValueName({s}) conflict", .{decl.name});
try self.extern_collisions.put(gpa, decl_index, {});
}
}
@@ -1298,11 +1300,11 @@ pub const Object = struct {
if (self.di_map.get(decl)) |di_node| {
if (try decl.isFunction(mod)) {
const di_func = @ptrCast(*llvm.DISubprogram, di_node);
const linkage_name = llvm.MDString.get(self.context, decl.name, std.mem.len(decl.name));
const linkage_name = llvm.MDString.get(self.context, decl_name.ptr, decl_name.len);
di_func.replaceLinkageName(linkage_name);
} else {
const di_global = @ptrCast(*llvm.DIGlobalVariable, di_node);
const linkage_name = llvm.MDString.get(self.context, decl.name, std.mem.len(decl.name));
const linkage_name = llvm.MDString.get(self.context, decl_name.ptr, decl_name.len);
di_global.replaceLinkageName(linkage_name);
}
}
@@ -1317,7 +1319,7 @@ pub const Object = struct {
}
}
} else if (exports.len != 0) {
const exp_name = exports[0].options.name;
const exp_name = mod.intern_pool.stringToSlice(exports[0].name);
llvm_global.setValueName2(exp_name.ptr, exp_name.len);
llvm_global.setUnnamedAddr(.False);
if (mod.wantDllExports()) llvm_global.setDLLStorageClass(.DLLExport);
@@ -1332,21 +1334,19 @@ pub const Object = struct {
di_global.replaceLinkageName(linkage_name);
}
}
switch (exports[0].options.linkage) {
switch (exports[0].linkage) {
.Internal => unreachable,
.Strong => llvm_global.setLinkage(.External),
.Weak => llvm_global.setLinkage(.WeakODR),
.LinkOnce => llvm_global.setLinkage(.LinkOnceODR),
}
switch (exports[0].options.visibility) {
switch (exports[0].visibility) {
.default => llvm_global.setVisibility(.Default),
.hidden => llvm_global.setVisibility(.Hidden),
.protected => llvm_global.setVisibility(.Protected),
}
if (exports[0].options.section) |section| {
const section_z = try gpa.dupeZ(u8, section);
defer gpa.free(section_z);
llvm_global.setSection(section_z);
if (mod.intern_pool.stringToSliceUnwrap(exports[0].section)) |section| {
llvm_global.setSection(section);
}
if (decl.val.getVariable(mod)) |variable| {
if (variable.is_threadlocal) {
@@ -1356,13 +1356,12 @@ pub const Object = struct {
// If a Decl is exported more than one time (which is rare),
// we add aliases for all but the first export.
// TODO LLVM C API does not support deleting aliases. We need to
// patch it to support this or figure out how to wrap the C++ API ourselves.
// TODO LLVM C API does not support deleting aliases.
// The planned solution to this is https://github.com/ziglang/zig/issues/13265
// Until then we iterate over existing aliases and make them point
// to the correct decl, or otherwise add a new alias. Old aliases are leaked.
for (exports[1..]) |exp| {
const exp_name_z = try gpa.dupeZ(u8, exp.options.name);
defer gpa.free(exp_name_z);
const exp_name_z = mod.intern_pool.stringToSlice(exp.name);
if (self.llvm_module.getNamedGlobalAlias(exp_name_z.ptr, exp_name_z.len)) |alias| {
alias.setAliasee(llvm_global);
@@ -1376,8 +1375,7 @@ pub const Object = struct {
}
}
} else {
const fqn = try decl.getFullyQualifiedName(mod);
defer gpa.free(fqn);
const fqn = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
llvm_global.setValueName2(fqn.ptr, fqn.len);
llvm_global.setLinkage(.Internal);
if (mod.wantDllExports()) llvm_global.setDLLStorageClass(.Default);
@@ -2092,8 +2090,7 @@ pub const Object = struct {
const field_offset = std.mem.alignForwardGeneric(u64, offset, field_align);
offset = field_offset + field_size;
const field_name = try gpa.dupeZ(u8, fields.keys()[field_and_index.index]);
defer gpa.free(field_name);
const field_name = mod.intern_pool.stringToSlice(fields.keys()[field_and_index.index]);
try di_fields.append(gpa, dib.createMemberType(
fwd_decl.toScope(),
@@ -2200,12 +2197,9 @@ pub const Object = struct {
const field_size = field.ty.abiSize(mod);
const field_align = field.normalAlignment(mod);
const field_name_copy = try gpa.dupeZ(u8, field_name);
defer gpa.free(field_name_copy);
di_fields.appendAssumeCapacity(dib.createMemberType(
fwd_decl.toScope(),
field_name_copy,
mod.intern_pool.stringToSlice(field_name),
null, // file
0, // line
field_size * 8, // size in bits
@@ -2327,7 +2321,7 @@ pub const Object = struct {
if (fn_info.return_type.toType().isError(mod) and
o.module.comp.bin_file.options.error_return_tracing)
{
const ptr_ty = try mod.singleMutPtrType(o.getStackTraceType());
const ptr_ty = try mod.singleMutPtrType(try o.getStackTraceType());
try param_di_types.append(try o.lowerDebugType(ptr_ty, .full));
}
@@ -2384,7 +2378,7 @@ pub const Object = struct {
const fields: [0]*llvm.DIType = .{};
return o.di_builder.?.createStructType(
try o.namespaceToDebugScope(decl.src_namespace),
decl.name, // TODO use fully qualified name
mod.intern_pool.stringToSlice(decl.name), // TODO use fully qualified name
try o.getDIFile(o.gpa, mod.namespacePtr(decl.src_namespace).file_scope),
decl.src_line + 1,
0, // size in bits
@@ -2399,18 +2393,18 @@ pub const Object = struct {
);
}
fn getStackTraceType(o: *Object) Type {
fn getStackTraceType(o: *Object) Allocator.Error!Type {
const mod = o.module;
const std_pkg = mod.main_pkg.table.get("std").?;
const std_file = (mod.importPkg(std_pkg) catch unreachable).file;
const builtin_str: []const u8 = "builtin";
const builtin_str = try mod.intern_pool.getOrPutString(mod.gpa, "builtin");
const std_namespace = mod.namespacePtr(mod.declPtr(std_file.root_decl.unwrap().?).src_namespace);
const builtin_decl = std_namespace.decls
.getKeyAdapted(builtin_str, Module.DeclAdapter{ .mod = mod }).?;
const stack_trace_str: []const u8 = "StackTrace";
const stack_trace_str = try mod.intern_pool.getOrPutString(mod.gpa, "StackTrace");
// buffer is only used for int_type, `builtin` is a struct.
const builtin_ty = mod.declPtr(builtin_decl).val.toType();
const builtin_namespace = builtin_ty.getNamespace(mod).?;
@@ -2452,16 +2446,13 @@ pub const DeclGen = struct {
const decl_index = dg.decl_index;
assert(decl.has_tv);
log.debug("gen: {s} type: {}, value: {}", .{
decl.name, decl.ty.fmtDebug(), decl.val.fmtDebug(),
});
if (decl.val.getExternFunc(mod)) |extern_func| {
_ = try dg.resolveLlvmFunction(extern_func.decl);
} else {
const target = mod.getTarget();
var global = try dg.resolveGlobalDecl(decl_index);
global.setAlignment(decl.getAlignment(mod));
if (decl.@"linksection") |section| global.setSection(section);
if (mod.intern_pool.stringToSliceUnwrap(decl.@"linksection")) |s| global.setSection(s);
assert(decl.has_tv);
const init_val = if (decl.val.getVariable(mod)) |variable| init_val: {
break :init_val variable.init;
@@ -2495,7 +2486,8 @@ pub const DeclGen = struct {
new_global.setLinkage(global.getLinkage());
new_global.setUnnamedAddr(global.getUnnamedAddress());
new_global.setAlignment(global.getAlignment());
if (decl.@"linksection") |section| new_global.setSection(section);
if (mod.intern_pool.stringToSliceUnwrap(decl.@"linksection")) |s|
new_global.setSection(s);
new_global.setInitializer(llvm_init);
// TODO: How should this work then the address space of a global changed?
global.replaceAllUsesWith(new_global);
@@ -2513,7 +2505,7 @@ pub const DeclGen = struct {
const is_internal_linkage = !dg.module.decl_exports.contains(decl_index);
const di_global = dib.createGlobalVariableExpression(
di_file.toScope(),
decl.name,
mod.intern_pool.stringToSlice(decl.name),
global.getValueName(),
di_file,
line_number,
@@ -2544,8 +2536,7 @@ pub const DeclGen = struct {
const fn_type = try dg.lowerType(zig_fn_type);
const fqn = try decl.getFullyQualifiedName(mod);
defer dg.gpa.free(fqn);
const fqn = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const llvm_addrspace = toLlvmAddressSpace(decl.@"addrspace", target);
const llvm_fn = dg.llvmModule().addFunctionInAddressSpace(fqn, fn_type, llvm_addrspace);
@@ -2557,7 +2548,7 @@ pub const DeclGen = struct {
llvm_fn.setUnnamedAddr(.True);
} else {
if (target.isWasm()) {
dg.addFnAttrString(llvm_fn, "wasm-import-name", std.mem.sliceTo(decl.name, 0));
dg.addFnAttrString(llvm_fn, "wasm-import-name", mod.intern_pool.stringToSlice(decl.name));
if (mod.intern_pool.stringToSliceUnwrap(decl.getOwnedExternFunc(mod).?.lib_name)) |lib_name| {
if (!std.mem.eql(u8, lib_name, "c")) {
dg.addFnAttrString(llvm_fn, "wasm-import-module", lib_name);
@@ -2699,8 +2690,7 @@ pub const DeclGen = struct {
const mod = dg.module;
const decl = mod.declPtr(decl_index);
const fqn = try decl.getFullyQualifiedName(mod);
defer dg.gpa.free(fqn);
const fqn = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const target = mod.getTarget();
@@ -2716,7 +2706,7 @@ pub const DeclGen = struct {
// This is needed for declarations created by `@extern`.
if (decl.isExtern(mod)) {
llvm_global.setValueName(decl.name);
llvm_global.setValueName(mod.intern_pool.stringToSlice(decl.name));
llvm_global.setUnnamedAddr(.False);
llvm_global.setLinkage(.External);
if (decl.val.getVariable(mod)) |variable| {
@@ -2811,8 +2801,7 @@ pub const DeclGen = struct {
if (gop.found_existing) return gop.value_ptr.*;
const opaque_type = mod.intern_pool.indexToKey(t.toIntern()).opaque_type;
const name = try mod.opaqueFullyQualifiedName(opaque_type);
defer gpa.free(name);
const name = mod.intern_pool.stringToSlice(try mod.opaqueFullyQualifiedName(opaque_type));
const llvm_struct_ty = dg.context.structCreateNamed(name);
gop.value_ptr.* = llvm_struct_ty; // must be done before any recursive calls
@@ -2963,8 +2952,7 @@ pub const DeclGen = struct {
return int_llvm_ty;
}
const name = try struct_obj.getFullyQualifiedName(mod);
defer gpa.free(name);
const name = mod.intern_pool.stringToSlice(try struct_obj.getFullyQualifiedName(mod));
const llvm_struct_ty = dg.context.structCreateNamed(name);
gop.value_ptr.* = llvm_struct_ty; // must be done before any recursive calls
@@ -3040,8 +3028,7 @@ pub const DeclGen = struct {
return enum_tag_llvm_ty;
}
const name = try union_obj.getFullyQualifiedName(mod);
defer gpa.free(name);
const name = mod.intern_pool.stringToSlice(try union_obj.getFullyQualifiedName(mod));
const llvm_union_ty = dg.context.structCreateNamed(name);
gop.value_ptr.* = llvm_union_ty; // must be done before any recursive calls
@@ -3119,7 +3106,7 @@ pub const DeclGen = struct {
if (fn_info.return_type.toType().isError(mod) and
mod.comp.bin_file.options.error_return_tracing)
{
const ptr_ty = try mod.singleMutPtrType(dg.object.getStackTraceType());
const ptr_ty = try mod.singleMutPtrType(try dg.object.getStackTraceType());
try llvm_params.append(try dg.lowerType(ptr_ty));
}
@@ -3266,9 +3253,8 @@ pub const DeclGen = struct {
},
.err => |err| {
const llvm_ty = try dg.lowerType(Type.anyerror);
const name = mod.intern_pool.stringToSlice(err.name);
const kv = try mod.getErrorValue(name);
return llvm_ty.constInt(kv.value, .False);
const int = try mod.getErrorValue(err.name);
return llvm_ty.constInt(int, .False);
},
.error_union => |error_union| {
const err_tv: TypedValue = switch (error_union.val) {
@@ -5960,8 +5946,7 @@ pub const FuncGen = struct {
.base_line = self.base_line,
});
const fqn = try decl.getFullyQualifiedName(mod);
defer self.gpa.free(fqn);
const fqn = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const is_internal_linkage = !mod.decl_exports.contains(decl_index);
const fn_ty = try mod.funcType(.{
@@ -5981,7 +5966,7 @@ pub const FuncGen = struct {
});
const subprogram = dib.createFunction(
di_file.toScope(),
decl.name,
mod.intern_pool.stringToSlice(decl.name),
fqn,
di_file,
line_number,
@@ -8629,9 +8614,8 @@ pub const FuncGen = struct {
const end_block = self.context.appendBasicBlock(self.llvm_func, "End");
const switch_instr = self.builder.buildSwitch(operand, invalid_block, @intCast(c_uint, names.len));
for (names) |name_ip| {
const name = mod.intern_pool.stringToSlice(name_ip);
const err_int = mod.global_error_set.get(name).?;
for (names) |name| {
const err_int = @intCast(Module.ErrorInt, mod.global_error_set.getIndex(name).?);
const this_tag_int_value = try self.dg.lowerValue(.{
.ty = Type.err_int,
.val = try mod.intValue(Type.err_int, err_int),
@@ -8681,8 +8665,7 @@ pub const FuncGen = struct {
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
const fqn = try mod.declPtr(enum_type.decl).getFullyQualifiedName(mod);
defer self.gpa.free(fqn);
const fqn = mod.intern_pool.stringToSlice(try mod.declPtr(enum_type.decl).getFullyQualifiedName(mod));
const llvm_fn_name = try std.fmt.allocPrintZ(arena, "__zig_is_named_enum_value_{s}", .{fqn});
const param_types = [_]*llvm.Type{try self.dg.lowerType(enum_type.tag_ty.toType())};
@@ -8754,8 +8737,7 @@ pub const FuncGen = struct {
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
const fqn = try mod.declPtr(enum_type.decl).getFullyQualifiedName(mod);
defer self.gpa.free(fqn);
const fqn = mod.intern_pool.stringToSlice(try mod.declPtr(enum_type.decl).getFullyQualifiedName(mod));
const llvm_fn_name = try std.fmt.allocPrintZ(arena, "__zig_tag_name_{s}", .{fqn});
const slice_ty = Type.slice_const_u8_sentinel_0;

View File

@@ -593,7 +593,6 @@ pub const DeclGen = struct {
.extern_func => unreachable, // TODO
else => {
const result_id = dg.spv.allocId();
log.debug("addDeclRef: id = {}, index = {}, name = {s}", .{ result_id.id, @enumToInt(spv_decl_index), decl.name });
try self.decl_deps.put(spv_decl_index, {});
@@ -664,9 +663,8 @@ pub const DeclGen = struct {
=> unreachable, // non-runtime values
.int => try self.addInt(ty, val),
.err => |err| {
const name = mod.intern_pool.stringToSlice(err.name);
const kv = try mod.getErrorValue(name);
try self.addConstInt(u16, @intCast(u16, kv.value));
const int = try mod.getErrorValue(err.name);
try self.addConstInt(u16, @intCast(u16, int));
},
.error_union => |error_union| {
const payload_ty = ty.errorUnionPayload(mod);
@@ -1288,8 +1286,7 @@ pub const DeclGen = struct {
member_index += 1;
}
const name = try struct_obj.getFullyQualifiedName(self.module);
defer self.module.gpa.free(name);
const name = mod.intern_pool.stringToSlice(try struct_obj.getFullyQualifiedName(self.module));
return try self.spv.resolve(.{ .struct_type = .{
.name = try self.spv.resolveString(name),
@@ -1500,7 +1497,6 @@ pub const DeclGen = struct {
const spv_decl_index = try self.resolveDecl(self.decl_index);
const decl_id = self.spv.declPtr(spv_decl_index).result_id;
log.debug("genDecl: id = {}, index = {}, name = {s}", .{ decl_id.id, @enumToInt(spv_decl_index), decl.name });
if (decl.val.getFunction(mod)) |_| {
assert(decl.ty.zigTypeTag(mod) == .Fn);
@@ -1542,8 +1538,7 @@ pub const DeclGen = struct {
try self.func.body.emit(self.spv.gpa, .OpFunctionEnd, {});
try self.spv.addFunction(spv_decl_index, self.func);
const fqn = try decl.getFullyQualifiedName(self.module);
defer self.module.gpa.free(fqn);
const fqn = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(self.module));
try self.spv.sections.debug_names.emit(self.gpa, .OpName, .{
.target = decl_id,

View File

@@ -502,8 +502,6 @@ pub const File = struct {
/// of the final binary.
pub fn lowerUnnamedConst(base: *File, tv: TypedValue, decl_index: Module.Decl.Index) UpdateDeclError!u32 {
if (build_options.only_c) @compileError("unreachable");
const decl = base.options.module.?.declPtr(decl_index);
log.debug("lowerUnnamedConst {*} ({s})", .{ decl, decl.name });
switch (base.tag) {
// zig fmt: off
.coff => return @fieldParentPtr(Coff, "base", base).lowerUnnamedConst(tv, decl_index),
@@ -543,7 +541,6 @@ pub const File = struct {
/// May be called before or after updateDeclExports for any given Decl.
pub fn updateDecl(base: *File, module: *Module, decl_index: Module.Decl.Index) UpdateDeclError!void {
const decl = module.declPtr(decl_index);
log.debug("updateDecl {*} ({s}), type={}", .{ decl, decl.name, decl.ty.fmt(module) });
assert(decl.has_tv);
if (build_options.only_c) {
assert(base.tag == .c);
@@ -566,10 +563,6 @@ pub const File = struct {
/// May be called before or after updateDeclExports for any given Decl.
pub fn updateFunc(base: *File, module: *Module, func_index: Module.Fn.Index, air: Air, liveness: Liveness) UpdateDeclError!void {
const func = module.funcPtr(func_index);
const owner_decl = module.declPtr(func.owner_decl);
log.debug("updateFunc {*} ({s}), type={}", .{
owner_decl, owner_decl.name, owner_decl.ty.fmt(module),
});
if (build_options.only_c) {
assert(base.tag == .c);
return @fieldParentPtr(C, "base", base).updateFunc(module, func, air, liveness);
@@ -590,9 +583,6 @@ pub const File = struct {
pub fn updateDeclLineNumber(base: *File, module: *Module, decl_index: Module.Decl.Index) UpdateDeclError!void {
const decl = module.declPtr(decl_index);
log.debug("updateDeclLineNumber {*} ({s}), line={}", .{
decl, decl.name, decl.src_line + 1,
});
assert(decl.has_tv);
if (build_options.only_c) {
assert(base.tag == .c);
@@ -868,7 +858,6 @@ pub const File = struct {
exports: []const *Module.Export,
) UpdateDeclExportsError!void {
const decl = module.declPtr(decl_index);
log.debug("updateDeclExports {*} ({s})", .{ decl, decl.name });
assert(decl.has_tv);
if (build_options.only_c) {
assert(base.tag == .c);

View File

@@ -6,6 +6,7 @@ const fs = std.fs;
const C = @This();
const Module = @import("../Module.zig");
const InternPool = @import("../InternPool.zig");
const Compilation = @import("../Compilation.zig");
const codegen = @import("../codegen/c.zig");
const link = @import("../link.zig");
@@ -289,11 +290,11 @@ pub fn flushModule(self: *C, _: *Compilation, prog_node: *std.Progress.Node) !vo
}
{
var export_names = std.StringHashMapUnmanaged(void){};
var export_names: std.AutoHashMapUnmanaged(InternPool.NullTerminatedString, void) = .{};
defer export_names.deinit(gpa);
try export_names.ensureTotalCapacity(gpa, @intCast(u32, module.decl_exports.entries.len));
for (module.decl_exports.values()) |exports| for (exports.items) |@"export"|
try export_names.put(gpa, @"export".options.name, {});
try export_names.put(gpa, @"export".name, {});
while (f.remaining_decls.popOrNull()) |kv| {
const decl_index = kv.key;
@@ -553,7 +554,7 @@ fn flushDecl(
self: *C,
f: *Flush,
decl_index: Module.Decl.Index,
export_names: std.StringHashMapUnmanaged(void),
export_names: std.AutoHashMapUnmanaged(InternPool.NullTerminatedString, void),
) FlushDeclError!void {
const gpa = self.base.allocator;
const mod = self.base.options.module.?;
@@ -571,7 +572,7 @@ fn flushDecl(
try self.flushLazyFns(f, decl_block.lazy_fns);
try f.all_buffers.ensureUnusedCapacity(gpa, 1);
if (!(decl.isExtern(mod) and export_names.contains(mem.span(decl.name))))
if (!(decl.isExtern(mod) and export_names.contains(decl.name)))
f.appendBufAssumeCapacity(decl_block.fwd_decl.items);
}
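In the flush hunks above, the set of export names changes from a StringHashMap over slices to an AutoHashMap keyed by the interned name, so flushDecl can test membership with `export_names.contains(decl.name)` directly. A small standalone sketch of that pattern, again with a hypothetical `NameHandle` in place of `InternPool.NullTerminatedString`:

const std = @import("std");

// Hypothetical stand-in for InternPool.NullTerminatedString: a handle produced
// by interning the name once, compared as a plain integer.
const NameHandle = enum(u32) { _ };

pub fn main() !void {
    var gpa_state = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa_state.deinit();
    const gpa = gpa_state.allocator();

    // Handles hash and compare as integers, so the export-name set can be an
    // AutoHashMap over handles instead of a StringHashMap over byte slices.
    var export_names: std.AutoHashMapUnmanaged(NameHandle, void) = .{};
    defer export_names.deinit(gpa);

    const foo = @intToEnum(NameHandle, 1); // e.g. an interned "foo"
    const bar = @intToEnum(NameHandle, 2); // e.g. an interned "bar"

    try export_names.put(gpa, foo, {});
    std.debug.print("foo exported: {}, bar exported: {}\n", .{
        export_names.contains(foo), export_names.contains(bar),
    });
}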

View File

@@ -1097,8 +1097,7 @@ pub fn lowerUnnamedConst(self: *Coff, tv: TypedValue, decl_index: Module.Decl.In
const atom_index = try self.createAtom();
const sym_name = blk: {
const decl_name = try decl.getFullyQualifiedName(mod);
defer gpa.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const index = unnamed_consts.items.len;
break :blk try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
@@ -1324,12 +1323,10 @@ fn getDeclOutputSection(self: *Coff, decl_index: Module.Decl.Index) u16 {
}
fn updateDeclCode(self: *Coff, decl_index: Module.Decl.Index, code: []u8, complex_type: coff.ComplexType) !void {
const gpa = self.base.allocator;
const mod = self.base.options.module.?;
const decl = mod.declPtr(decl_index);
const decl_name = try decl.getFullyQualifiedName(mod);
defer gpa.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
log.debug("updateDeclCode {s}{*}", .{ decl_name, decl });
const required_alignment = decl.getAlignment(mod);
@@ -1420,6 +1417,8 @@ pub fn updateDeclExports(
@panic("Attempted to compile for object format that was disabled by build configuration");
}
const ip = &mod.intern_pool;
if (build_options.have_llvm) {
// Even in the case of LLVM, we need to notice certain exported symbols in order to
// detect the default subsystem.
@@ -1431,20 +1430,20 @@ pub fn updateDeclExports(
else => std.builtin.CallingConvention.C,
};
const decl_cc = exported_decl.ty.fnCallingConvention(mod);
if (decl_cc == .C and mem.eql(u8, exp.options.name, "main") and
if (decl_cc == .C and ip.stringEqlSlice(exp.name, "main") and
self.base.options.link_libc)
{
mod.stage1_flags.have_c_main = true;
} else if (decl_cc == winapi_cc and self.base.options.target.os.tag == .windows) {
if (mem.eql(u8, exp.options.name, "WinMain")) {
if (ip.stringEqlSlice(exp.name, "WinMain")) {
mod.stage1_flags.have_winmain = true;
} else if (mem.eql(u8, exp.options.name, "wWinMain")) {
} else if (ip.stringEqlSlice(exp.name, "wWinMain")) {
mod.stage1_flags.have_wwinmain = true;
} else if (mem.eql(u8, exp.options.name, "WinMainCRTStartup")) {
} else if (ip.stringEqlSlice(exp.name, "WinMainCRTStartup")) {
mod.stage1_flags.have_winmain_crt_startup = true;
} else if (mem.eql(u8, exp.options.name, "wWinMainCRTStartup")) {
} else if (ip.stringEqlSlice(exp.name, "wWinMainCRTStartup")) {
mod.stage1_flags.have_wwinmain_crt_startup = true;
} else if (mem.eql(u8, exp.options.name, "DllMainCRTStartup")) {
} else if (ip.stringEqlSlice(exp.name, "DllMainCRTStartup")) {
mod.stage1_flags.have_dllmain_crt_startup = true;
}
}
@@ -1453,9 +1452,6 @@ pub fn updateDeclExports(
if (self.llvm_object) |llvm_object| return llvm_object.updateDeclExports(mod, decl_index, exports);
}
const tracy = trace(@src());
defer tracy.end();
const gpa = self.base.allocator;
const decl = mod.declPtr(decl_index);
@@ -1465,12 +1461,13 @@ pub fn updateDeclExports(
const decl_metadata = self.decls.getPtr(decl_index).?;
for (exports) |exp| {
log.debug("adding new export '{s}'", .{exp.options.name});
const exp_name = mod.intern_pool.stringToSlice(exp.name);
log.debug("adding new export '{s}'", .{exp_name});
if (exp.options.section) |section_name| {
if (mod.intern_pool.stringToSliceUnwrap(exp.section)) |section_name| {
if (!mem.eql(u8, section_name, ".text")) {
try mod.failed_exports.putNoClobber(
mod.gpa,
gpa,
exp,
try Module.ErrorMsg.create(
gpa,
@@ -1483,9 +1480,9 @@ pub fn updateDeclExports(
}
}
if (exp.options.linkage == .LinkOnce) {
if (exp.linkage == .LinkOnce) {
try mod.failed_exports.putNoClobber(
mod.gpa,
gpa,
exp,
try Module.ErrorMsg.create(
gpa,
@@ -1497,19 +1494,19 @@ pub fn updateDeclExports(
continue;
}
const sym_index = decl_metadata.getExport(self, exp.options.name) orelse blk: {
const sym_index = decl_metadata.getExport(self, exp_name) orelse blk: {
const sym_index = try self.allocateSymbol();
try decl_metadata.exports.append(gpa, sym_index);
break :blk sym_index;
};
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
const sym = self.getSymbolPtr(sym_loc);
try self.setSymbolName(sym, exp.options.name);
try self.setSymbolName(sym, exp_name);
sym.value = decl_sym.value;
sym.section_number = @intToEnum(coff.SectionNumber, self.text_section_index.? + 1);
sym.type = .{ .complex_type = .FUNCTION, .base_type = .NULL };
switch (exp.options.linkage) {
switch (exp.linkage) {
.Strong => {
sym.storage_class = .EXTERNAL;
},
@@ -1522,9 +1519,15 @@ pub fn updateDeclExports(
}
}
pub fn deleteDeclExport(self: *Coff, decl_index: Module.Decl.Index, name: []const u8) void {
pub fn deleteDeclExport(
self: *Coff,
decl_index: Module.Decl.Index,
name_ip: InternPool.NullTerminatedString,
) void {
if (self.llvm_object) |_| return;
const metadata = self.decls.getPtr(decl_index) orelse return;
const mod = self.base.options.module.?;
const name = mod.intern_pool.stringToSlice(name_ip);
const sym_index = metadata.getExportPtr(self, name) orelse return;
const gpa = self.base.allocator;
@@ -2540,6 +2543,7 @@ const ImportTable = @import("Coff/ImportTable.zig");
const Liveness = @import("../Liveness.zig");
const LlvmObject = @import("../codegen/llvm.zig").Object;
const Module = @import("../Module.zig");
const InternPool = @import("../InternPool.zig");
const Object = @import("Coff/Object.zig");
const Relocation = @import("Coff/Relocation.zig");
const TableSection = @import("table_section.zig").TableSection;

View File

@@ -358,8 +358,9 @@ pub const DeclState = struct {
struct_obj.fields.keys(),
struct_obj.fields.values(),
0..,
) |field_name, field, field_index| {
) |field_name_ip, field, field_index| {
if (!field.ty.hasRuntimeBits(mod)) continue;
const field_name = mod.intern_pool.stringToSlice(field_name_ip);
// DW.AT.member
try dbg_info_buffer.ensureUnusedCapacity(field_name.len + 2);
dbg_info_buffer.appendAssumeCapacity(@enumToInt(AbbrevKind.struct_member));
@@ -469,7 +470,8 @@ pub const DeclState = struct {
// DW.AT.member
try dbg_info_buffer.append(@enumToInt(AbbrevKind.struct_member));
// DW.AT.name, DW.FORM.string
try dbg_info_buffer.writer().print("{s}\x00", .{field_name});
try dbg_info_buffer.appendSlice(mod.intern_pool.stringToSlice(field_name));
try dbg_info_buffer.append(0);
// DW.AT.type, DW.FORM.ref4
const index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
@@ -949,8 +951,7 @@ pub fn initDeclState(self: *Dwarf, mod: *Module, decl_index: Module.Decl.Index)
defer tracy.end();
const decl = mod.declPtr(decl_index);
const decl_name = try decl.getFullyQualifiedName(mod);
defer self.allocator.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
log.debug("initDeclState {s}{*}", .{ decl_name, decl });
@@ -1273,7 +1274,6 @@ pub fn commitDeclState(
}
}
log.debug("updateDeclDebugInfoAllocation for '{s}'", .{decl.name});
try self.updateDeclDebugInfoAllocation(di_atom_index, @intCast(u32, dbg_info_buffer.items.len));
while (decl_state.abbrev_relocs.popOrNull()) |reloc| {
@@ -1345,7 +1345,6 @@ pub fn commitDeclState(
}
}
log.debug("writeDeclDebugInfo for '{s}", .{decl.name});
try self.writeDeclDebugInfo(di_atom_index, dbg_info_buffer.items);
}
@@ -2523,15 +2522,7 @@ pub fn flushModule(self: *Dwarf, module: *Module) !void {
// TODO: don't create a zig type for this, just make the dwarf info
// without touching the zig type system.
const names = try arena.alloc(InternPool.NullTerminatedString, module.global_error_set.count());
{
var it = module.global_error_set.keyIterator();
var i: usize = 0;
while (it.next()) |key| : (i += 1) {
names[i] = module.intern_pool.getString(key.*).unwrap().?;
}
}
const names = try arena.dupe(InternPool.NullTerminatedString, module.global_error_set.keys());
std.mem.sort(InternPool.NullTerminatedString, names, {}, InternPool.NullTerminatedString.indexLessThan);
const error_ty = try module.intern(.{ .error_set_type = .{ .names = names } });
@@ -2682,8 +2673,8 @@ fn addDbgInfoErrorSet(
const error_names = ty.errorSetNames(mod);
for (error_names) |error_name_ip| {
const int = try mod.getErrorValue(error_name_ip);
const error_name = mod.intern_pool.stringToSlice(error_name_ip);
const kv = mod.getErrorValue(error_name) catch unreachable;
// DW.AT.enumerator
try dbg_info_buffer.ensureUnusedCapacity(error_name.len + 2 + @sizeOf(u64));
dbg_info_buffer.appendAssumeCapacity(@enumToInt(AbbrevKind.enum_variant));
@@ -2691,7 +2682,7 @@ fn addDbgInfoErrorSet(
dbg_info_buffer.appendSliceAssumeCapacity(error_name);
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.const_value, DW.FORM.data8
mem.writeInt(u64, dbg_info_buffer.addManyAsArrayAssumeCapacity(8), kv.value, target_endian);
mem.writeInt(u64, dbg_info_buffer.addManyAsArrayAssumeCapacity(8), int, target_endian);
}
// DW.AT.enumeration_type delimit children

View File

@@ -28,6 +28,7 @@ const File = link.File;
const Liveness = @import("../Liveness.zig");
const LlvmObject = @import("../codegen/llvm.zig").Object;
const Module = @import("../Module.zig");
const InternPool = @import("../InternPool.zig");
const Package = @import("../Package.zig");
const StringTable = @import("strtab.zig").StringTable;
const TableSection = @import("table_section.zig").TableSection;
@@ -2480,8 +2481,7 @@ fn updateDeclCode(self: *Elf, decl_index: Module.Decl.Index, code: []const u8, s
const mod = self.base.options.module.?;
const decl = mod.declPtr(decl_index);
const decl_name = try decl.getFullyQualifiedName(mod);
defer self.base.allocator.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
log.debug("updateDeclCode {s}{*}", .{ decl_name, decl });
const required_alignment = decl.getAlignment(mod);
@@ -2802,8 +2802,7 @@ pub fn lowerUnnamedConst(self: *Elf, typed_value: TypedValue, decl_index: Module
const decl = mod.declPtr(decl_index);
const name_str_index = blk: {
const decl_name = try decl.getFullyQualifiedName(mod);
defer gpa.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const index = unnamed_consts.items.len;
const name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
defer gpa.free(name);
@@ -2880,7 +2879,8 @@ pub fn updateDeclExports(
try self.global_symbols.ensureUnusedCapacity(gpa, exports.len);
for (exports) |exp| {
if (exp.options.section) |section_name| {
const exp_name = mod.intern_pool.stringToSlice(exp.name);
if (mod.intern_pool.stringToSliceUnwrap(exp.section)) |section_name| {
if (!mem.eql(u8, section_name, ".text")) {
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
mod.failed_exports.putAssumeCapacityNoClobber(
@@ -2890,11 +2890,11 @@ pub fn updateDeclExports(
continue;
}
}
const stb_bits: u8 = switch (exp.options.linkage) {
const stb_bits: u8 = switch (exp.linkage) {
.Internal => elf.STB_LOCAL,
.Strong => blk: {
const entry_name = self.base.options.entry orelse "_start";
if (mem.eql(u8, exp.options.name, entry_name)) {
if (mem.eql(u8, exp_name, entry_name)) {
self.entry_addr = decl_sym.st_value;
}
break :blk elf.STB_GLOBAL;
@@ -2910,10 +2910,10 @@ pub fn updateDeclExports(
},
};
const stt_bits: u8 = @truncate(u4, decl_sym.st_info);
if (decl_metadata.getExport(self, exp.options.name)) |i| {
if (decl_metadata.getExport(self, exp_name)) |i| {
const sym = &self.global_symbols.items[i];
sym.* = .{
.st_name = try self.shstrtab.insert(gpa, exp.options.name),
.st_name = try self.shstrtab.insert(gpa, exp_name),
.st_info = (stb_bits << 4) | stt_bits,
.st_other = 0,
.st_shndx = shdr_index,
@@ -2927,7 +2927,7 @@ pub fn updateDeclExports(
};
try decl_metadata.exports.append(gpa, @intCast(u32, i));
self.global_symbols.items[i] = .{
.st_name = try self.shstrtab.insert(gpa, exp.options.name),
.st_name = try self.shstrtab.insert(gpa, exp_name),
.st_info = (stb_bits << 4) | stt_bits,
.st_other = 0,
.st_shndx = shdr_index,
@@ -2944,8 +2944,7 @@ pub fn updateDeclLineNumber(self: *Elf, mod: *Module, decl_index: Module.Decl.In
defer tracy.end();
const decl = mod.declPtr(decl_index);
const decl_name = try decl.getFullyQualifiedName(mod);
defer self.base.allocator.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
log.debug("updateDeclLineNumber {s}{*}", .{ decl_name, decl });
@@ -2955,11 +2954,15 @@ pub fn updateDeclLineNumber(self: *Elf, mod: *Module, decl_index: Module.Decl.In
}
}
pub fn deleteDeclExport(self: *Elf, decl_index: Module.Decl.Index, name: []const u8) void {
pub fn deleteDeclExport(
self: *Elf,
decl_index: Module.Decl.Index,
name: InternPool.NullTerminatedString,
) void {
if (self.llvm_object) |_| return;
const metadata = self.decls.getPtr(decl_index) orelse return;
const sym_index = metadata.getExportPtr(self, name) orelse return;
log.debug("deleting export '{s}'", .{name});
const mod = self.base.options.module.?;
const sym_index = metadata.getExportPtr(self, mod.intern_pool.stringToSlice(name)) orelse return;
self.global_symbol_free_list.append(self.base.allocator, sym_index.*) catch {};
self.global_symbols.items[sym_index.*].st_info = 0;
sym_index.* = 0;

View File

@@ -40,6 +40,7 @@ const Liveness = @import("../Liveness.zig");
const LlvmObject = @import("../codegen/llvm.zig").Object;
const Md5 = std.crypto.hash.Md5;
const Module = @import("../Module.zig");
const InternPool = @import("../InternPool.zig");
const Relocation = @import("MachO/Relocation.zig");
const StringTable = @import("strtab.zig").StringTable;
const TableSection = @import("table_section.zig").TableSection;
@@ -1921,8 +1922,7 @@ pub fn lowerUnnamedConst(self: *MachO, typed_value: TypedValue, decl_index: Modu
const unnamed_consts = gop.value_ptr;
const decl = mod.declPtr(decl_index);
const decl_name = try decl.getFullyQualifiedName(mod);
defer gpa.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const name_str_index = blk: {
const index = unnamed_consts.items.len;
@@ -2206,8 +2206,7 @@ fn updateThreadlocalVariable(self: *MachO, module: *Module, decl_index: Module.D
const required_alignment = decl.getAlignment(mod);
const decl_name = try decl.getFullyQualifiedName(module);
defer gpa.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(module));
const init_sym_name = try std.fmt.allocPrint(gpa, "{s}$tlv$init", .{decl_name});
defer gpa.free(init_sym_name);
@@ -2306,8 +2305,7 @@ fn updateDeclCode(self: *MachO, decl_index: Module.Decl.Index, code: []u8) !u64
const required_alignment = decl.getAlignment(mod);
const decl_name = try decl.getFullyQualifiedName(mod);
defer gpa.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const decl_metadata = self.decls.get(decl_index).?;
const atom_index = decl_metadata.atom;
@@ -2403,12 +2401,14 @@ pub fn updateDeclExports(
const decl_metadata = self.decls.getPtr(decl_index).?;
for (exports) |exp| {
const exp_name = try std.fmt.allocPrint(gpa, "_{s}", .{exp.options.name});
const exp_name = try std.fmt.allocPrint(gpa, "_{s}", .{
mod.intern_pool.stringToSlice(exp.name),
});
defer gpa.free(exp_name);
log.debug("adding new export '{s}'", .{exp_name});
if (exp.options.section) |section_name| {
if (mod.intern_pool.stringToSliceUnwrap(exp.section)) |section_name| {
if (!mem.eql(u8, section_name, "__text")) {
try mod.failed_exports.putNoClobber(
mod.gpa,
@@ -2424,7 +2424,7 @@ pub fn updateDeclExports(
}
}
if (exp.options.linkage == .LinkOnce) {
if (exp.linkage == .LinkOnce) {
try mod.failed_exports.putNoClobber(
mod.gpa,
exp,
@@ -2453,7 +2453,7 @@ pub fn updateDeclExports(
.n_value = decl_sym.n_value,
};
switch (exp.options.linkage) {
switch (exp.linkage) {
.Internal => {
// Symbol should be hidden, or in MachO lingo, private extern.
// We should also mark the symbol as Weak: n_desc == N_WEAK_DEF.
@@ -2488,12 +2488,17 @@ pub fn updateDeclExports(
}
}
pub fn deleteDeclExport(self: *MachO, decl_index: Module.Decl.Index, name: []const u8) Allocator.Error!void {
pub fn deleteDeclExport(
self: *MachO,
decl_index: Module.Decl.Index,
name: InternPool.NullTerminatedString,
) Allocator.Error!void {
if (self.llvm_object) |_| return;
const metadata = self.decls.getPtr(decl_index) orelse return;
const gpa = self.base.allocator;
const exp_name = try std.fmt.allocPrint(gpa, "_{s}", .{name});
const mod = self.base.options.module.?;
const exp_name = try std.fmt.allocPrint(gpa, "_{s}", .{mod.intern_pool.stringToSlice(name)});
defer gpa.free(exp_name);
const sym_index = metadata.getExportPtr(self, exp_name) orelse return;

View File

@@ -287,7 +287,6 @@ pub fn updateFunc(self: *Plan9, mod: *Module, func_index: Module.Fn.Index, air:
self.freeUnnamedConsts(decl_index);
_ = try self.seeDecl(decl_index);
log.debug("codegen decl {*} ({s})", .{ decl, decl.name });
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();
@@ -345,8 +344,7 @@ pub fn lowerUnnamedConst(self: *Plan9, tv: TypedValue, decl_index: Module.Decl.I
}
const unnamed_consts = gop.value_ptr;
const decl_name = try decl.getFullyQualifiedName(mod);
defer self.base.allocator.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const index = unnamed_consts.items.len;
// name is freed when the unnamed const is freed
@@ -403,8 +401,6 @@ pub fn updateDecl(self: *Plan9, mod: *Module, decl_index: Module.Decl.Index) !vo
_ = try self.seeDecl(decl_index);
log.debug("codegen decl {*} ({s}) ({d})", .{ decl, decl.name, decl_index });
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();
const decl_val = if (decl.val.getVariable(mod)) |variable| variable.init.toValue() else decl.val;
@@ -435,7 +431,6 @@ fn updateFinish(self: *Plan9, decl_index: Module.Decl.Index) !void {
const mod = self.base.options.module.?;
const decl = mod.declPtr(decl_index);
const is_fn = (decl.ty.zigTypeTag(mod) == .Fn);
log.debug("update the symbol table and got for decl {*} ({s})", .{ decl, decl.name });
const sym_t: aout.Sym.Type = if (is_fn) .t else .d;
const decl_block = self.getDeclBlockPtr(self.decls.get(decl_index).?.index);
@@ -446,7 +441,7 @@ fn updateFinish(self: *Plan9, decl_index: Module.Decl.Index) !void {
const sym: aout.Sym = .{
.value = undefined, // the value of stuff gets filled in in flushModule
.type = decl_block.type,
.name = mem.span(decl.name),
.name = mod.intern_pool.stringToSlice(decl.name),
};
if (decl_block.sym_index) |s| {
@@ -567,10 +562,8 @@ pub fn flushModule(self: *Plan9, comp: *Compilation, prog_node: *std.Progress.No
var it = fentry.value_ptr.functions.iterator();
while (it.next()) |entry| {
const decl_index = entry.key_ptr.*;
const decl = mod.declPtr(decl_index);
const decl_block = self.getDeclBlockPtr(self.decls.get(decl_index).?.index);
const out = entry.value_ptr.*;
log.debug("write text decl {*} ({s}), lines {d} to {d}", .{ decl, decl.name, out.start_line + 1, out.end_line });
{
// connect the previous decl to the next
const delta_line = @intCast(i32, out.start_line) - @intCast(i32, linecount);
@@ -616,10 +609,8 @@ pub fn flushModule(self: *Plan9, comp: *Compilation, prog_node: *std.Progress.No
var it = self.data_decl_table.iterator();
while (it.next()) |entry| {
const decl_index = entry.key_ptr.*;
const decl = mod.declPtr(decl_index);
const decl_block = self.getDeclBlockPtr(self.decls.get(decl_index).?.index);
const code = entry.value_ptr.*;
log.debug("write data decl {*} ({s})", .{ decl, decl.name });
foff += code.len;
iovecs[iovecs_i] = .{ .iov_base = code.ptr, .iov_len = code.len };
@@ -695,15 +686,12 @@ pub fn flushModule(self: *Plan9, comp: *Compilation, prog_node: *std.Progress.No
const source_decl = mod.declPtr(source_decl_index);
for (kv.value_ptr.items) |reloc| {
const target_decl_index = reloc.target;
const target_decl = mod.declPtr(target_decl_index);
const target_decl_block = self.getDeclBlock(self.decls.get(target_decl_index).?.index);
const target_decl_offset = target_decl_block.offset.?;
const offset = reloc.offset;
const addend = reloc.addend;
log.debug("relocating the address of '{s}' + {d} into '{s}' + {d}", .{ target_decl.name, addend, source_decl.name, offset });
const code = blk: {
const is_fn = source_decl.ty.zigTypeTag(mod) == .Fn;
if (is_fn) {
@@ -737,8 +725,9 @@ fn addDeclExports(
const decl_block = self.getDeclBlock(metadata.index);
for (exports) |exp| {
const exp_name = mod.intern_pool.stringToSlice(exp.name);
// plan9 does not support custom sections
if (exp.options.section) |section_name| {
if (mod.intern_pool.stringToSliceUnwrap(exp.section)) |section_name| {
if (!mem.eql(u8, section_name, ".text") or !mem.eql(u8, section_name, ".data")) {
try mod.failed_exports.put(mod.gpa, exp, try Module.ErrorMsg.create(
self.base.allocator,
@@ -752,10 +741,10 @@ fn addDeclExports(
const sym = .{
.value = decl_block.offset.?,
.type = decl_block.type.toGlobal(),
.name = exp.options.name,
.name = exp_name,
};
if (metadata.getExport(self, exp.options.name)) |i| {
if (metadata.getExport(self, exp_name)) |i| {
self.syms.items[i] = sym;
} else {
try self.syms.append(self.base.allocator, sym);
@@ -956,7 +945,10 @@ pub fn writeSym(self: *Plan9, w: anytype, sym: aout.Sym) !void {
try w.writeAll(sym.name);
try w.writeByte(0);
}
pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
const mod = self.base.options.module.?;
const ip = &mod.intern_pool;
const writer = buf.writer();
// write the f symbols
{
@@ -980,7 +972,7 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
const sym = self.syms.items[decl_block.sym_index.?];
try self.writeSym(writer, sym);
if (self.base.options.module.?.decl_exports.get(decl_index)) |exports| {
for (exports.items) |e| if (decl_metadata.getExport(self, e.options.name)) |exp_i| {
for (exports.items) |e| if (decl_metadata.getExport(self, ip.stringToSlice(e.name))) |exp_i| {
try self.writeSym(writer, self.syms.items[exp_i]);
};
}
@@ -1006,7 +998,7 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
const sym = self.syms.items[decl_block.sym_index.?];
try self.writeSym(writer, sym);
if (self.base.options.module.?.decl_exports.get(decl_index)) |exports| {
for (exports.items) |e| if (decl_metadata.getExport(self, e.options.name)) |exp_i| {
for (exports.items) |e| if (decl_metadata.getExport(self, ip.stringToSlice(e.name))) |exp_i| {
const s = self.syms.items[exp_i];
if (mem.eql(u8, s.name, "_start"))
self.entry_val = s.value;

View File

@@ -147,7 +147,7 @@ pub fn updateDeclExports(
const spv_decl_index = entry.value_ptr.*;
for (exports) |exp| {
try self.spv.declareEntryPoint(spv_decl_index, exp.options.name);
try self.spv.declareEntryPoint(spv_decl_index, mod.intern_pool.stringToSlice(exp.name));
}
}
@@ -190,7 +190,8 @@ pub fn flushModule(self: *SpirV, comp: *Compilation, prog_node: *std.Progress.No
var error_info = std.ArrayList(u8).init(self.spv.arena);
try error_info.appendSlice("zig_errors");
const module = self.base.options.module.?;
for (module.error_name_list.items) |name| {
for (module.global_error_set.keys()) |name_nts| {
const name = module.intern_pool.stringToSlice(name_nts);
// Errors can contain pretty much any character - to encode them in a string we must escape
// them somehow. Easiest here is to use an established scheme; one that also preserves the
// name when it contains no strange characters is nice for debugging. URI encoding fits the bill.

View File

@@ -1416,7 +1416,7 @@ pub fn updateDecl(wasm: *Wasm, mod: *Module, decl_index: Module.Decl.Index) !voi
if (decl.isExtern(mod)) {
const variable = decl.getOwnedVariable(mod).?;
const name = mem.sliceTo(decl.name, 0);
const name = mod.intern_pool.stringToSlice(decl.name);
const lib_name = mod.intern_pool.stringToSliceUnwrap(variable.lib_name);
return wasm.addOrUpdateImport(name, atom.sym_index, lib_name, null);
}
@@ -1453,8 +1453,7 @@ pub fn updateDeclLineNumber(wasm: *Wasm, mod: *Module, decl_index: Module.Decl.I
defer tracy.end();
const decl = mod.declPtr(decl_index);
const decl_name = try decl.getFullyQualifiedName(mod);
defer wasm.base.allocator.free(decl_name);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
log.debug("updateDeclLineNumber {s}{*}", .{ decl_name, decl });
try dw.updateDeclLineNumber(mod, decl_index);
@@ -1467,8 +1466,7 @@ fn finishUpdateDecl(wasm: *Wasm, decl_index: Module.Decl.Index, code: []const u8
const atom_index = wasm.decls.get(decl_index).?;
const atom = wasm.getAtomPtr(atom_index);
const symbol = &wasm.symbols.items[atom.sym_index];
const full_name = try decl.getFullyQualifiedName(mod);
defer wasm.base.allocator.free(full_name);
const full_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
symbol.name = try wasm.string_table.put(wasm.base.allocator, full_name);
try atom.code.appendSlice(wasm.base.allocator, code);
try wasm.resolved_symbols.put(wasm.base.allocator, atom.symbolLoc(), {});
@@ -1535,9 +1533,10 @@ pub fn lowerUnnamedConst(wasm: *Wasm, tv: TypedValue, decl_index: Module.Decl.In
const parent_atom = wasm.getAtomPtr(parent_atom_index);
const local_index = parent_atom.locals.items.len;
try parent_atom.locals.append(wasm.base.allocator, atom_index);
const fqdn = try decl.getFullyQualifiedName(mod);
defer wasm.base.allocator.free(fqdn);
const name = try std.fmt.allocPrintZ(wasm.base.allocator, "__unnamed_{s}_{d}", .{ fqdn, local_index });
const fqn = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const name = try std.fmt.allocPrintZ(wasm.base.allocator, "__unnamed_{s}_{d}", .{
fqn, local_index,
});
defer wasm.base.allocator.free(name);
var value_bytes = std.ArrayList(u8).init(wasm.base.allocator);
defer value_bytes.deinit();
@@ -1690,11 +1689,12 @@ pub fn updateDeclExports(
const decl = mod.declPtr(decl_index);
const atom_index = try wasm.getOrCreateAtomForDecl(decl_index);
const atom = wasm.getAtom(atom_index);
const gpa = mod.gpa;
for (exports) |exp| {
if (exp.options.section) |section| {
try mod.failed_exports.putNoClobber(mod.gpa, exp, try Module.ErrorMsg.create(
mod.gpa,
if (mod.intern_pool.stringToSliceUnwrap(exp.section)) |section| {
try mod.failed_exports.putNoClobber(gpa, exp, try Module.ErrorMsg.create(
gpa,
decl.srcLoc(mod),
"Unimplemented: ExportOptions.section '{s}'",
.{section},
@@ -1702,24 +1702,24 @@ pub fn updateDeclExports(
continue;
}
const export_name = try wasm.string_table.put(wasm.base.allocator, exp.options.name);
const export_name = try wasm.string_table.put(wasm.base.allocator, mod.intern_pool.stringToSlice(exp.name));
if (wasm.globals.getPtr(export_name)) |existing_loc| {
if (existing_loc.index == atom.sym_index) continue;
const existing_sym: Symbol = existing_loc.getSymbol(wasm).*;
const exp_is_weak = exp.options.linkage == .Internal or exp.options.linkage == .Weak;
const exp_is_weak = exp.linkage == .Internal or exp.linkage == .Weak;
// When both the to-be-exported symbol and the already existing symbol
// are strong symbols, we have a linker error.
// In the other case we replace one with the other.
if (!exp_is_weak and !existing_sym.isWeak()) {
try mod.failed_exports.put(mod.gpa, exp, try Module.ErrorMsg.create(
mod.gpa,
try mod.failed_exports.put(gpa, exp, try Module.ErrorMsg.create(
gpa,
decl.srcLoc(mod),
\\LinkError: symbol '{s}' defined multiple times
\\ first definition in '{s}'
\\ next definition in '{s}'
,
.{ exp.options.name, wasm.name, wasm.name },
.{ mod.intern_pool.stringToSlice(exp.name), wasm.name, wasm.name },
));
continue;
} else if (exp_is_weak) {
@@ -1736,7 +1736,7 @@ pub fn updateDeclExports(
const exported_atom = wasm.getAtom(exported_atom_index);
const sym_loc = exported_atom.symbolLoc();
const symbol = sym_loc.getSymbol(wasm);
switch (exp.options.linkage) {
switch (exp.linkage) {
.Internal => {
symbol.setFlag(.WASM_SYM_VISIBILITY_HIDDEN);
},
@@ -1745,8 +1745,8 @@ pub fn updateDeclExports(
},
.Strong => {}, // symbols are strong by default
.LinkOnce => {
try mod.failed_exports.putNoClobber(mod.gpa, exp, try Module.ErrorMsg.create(
mod.gpa,
try mod.failed_exports.putNoClobber(gpa, exp, try Module.ErrorMsg.create(
gpa,
decl.srcLoc(mod),
"Unimplemented: LinkOnce",
.{},
@@ -1755,7 +1755,7 @@ pub fn updateDeclExports(
},
}
// Ensure the symbol will be exported using the given name
if (!mem.eql(u8, exp.options.name, sym_loc.getName(wasm))) {
if (!mod.intern_pool.stringEqlSlice(exp.name, sym_loc.getName(wasm))) {
try wasm.export_names.put(wasm.base.allocator, sym_loc, export_name);
}
@@ -1769,7 +1769,7 @@ pub fn updateDeclExports(
// if the symbol was previously undefined, remove it as an import
_ = wasm.imports.remove(sym_loc);
_ = wasm.undefs.swapRemove(exp.options.name);
_ = wasm.undefs.swapRemove(mod.intern_pool.stringToSlice(exp.name));
}
}
@@ -2987,7 +2987,8 @@ fn populateErrorNameTable(wasm: *Wasm) !void {
// Addend for each relocation to the table
var addend: u32 = 0;
const mod = wasm.base.options.module.?;
for (mod.error_name_list.items) |error_name| {
for (mod.global_error_set.keys()) |error_name_nts| {
const error_name = mod.intern_pool.stringToSlice(error_name_nts);
const len = @intCast(u32, error_name.len + 1); // names are 0-terminated
const slice_ty = Type.slice_const_u8_sentinel_0;
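With error_name_list gone, this loop walks global_error_set.keys() directly: a name's index in that map is its error tag value, and the Value code later in this diff converts in both directions with getIndex() and keys(). A minimal standalone sketch of that index-as-tag idea, assuming a plain StringArrayHashMap in place of the interned-string map; the error names used here are purely illustrative:

const std = @import("std");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();

    // Stand-in for Module.global_error_set: insertion order doubles as the
    // error tag numbering, so no separate name list is needed.
    var global_error_set = std.StringArrayHashMap(void).init(gpa.allocator());
    defer global_error_set.deinit();

    try global_error_set.put("OutOfMemory", {});
    try global_error_set.put("FileNotFound", {});

    // name -> tag value is getIndex() ...
    const tag = global_error_set.getIndex("FileNotFound").?;
    // ... and tag value -> name is a lookup into keys().
    const name = global_error_set.keys()[tag];
    std.debug.print("{s} => {d}\n", .{ name, tag });
}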

View File

@@ -685,8 +685,9 @@ const Writer = struct {
fn writeDbgInline(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
const ty_fn = w.air.instructions.items(.data)[inst].ty_fn;
const func_index = ty_fn.func;
const ip = &w.module.intern_pool;
const owner_decl = w.module.declPtr(w.module.funcPtr(func_index).owner_decl);
try s.print("{s}", .{owner_decl.name});
try s.print("{s}", .{ip.stringToSlice(owner_decl.name)});
}
fn writeDbgVar(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {

View File

@@ -2546,7 +2546,7 @@ pub const Type = struct {
defer mod.gpa.free(field_vals);
for (field_vals, s.fields.values()) |*field_val, field| {
if (field.is_comptime) {
field_val.* = try field.default_val.intern(field.ty, mod);
field_val.* = field.default_val;
continue;
}
if (try field.ty.onePossibleValue(mod)) |field_opv| {
@@ -2977,18 +2977,14 @@ pub const Type = struct {
return mod.intern_pool.indexToKey(ty.toIntern()).enum_type.names.len;
}
pub fn enumFieldName(ty: Type, field_index: usize, mod: *Module) [:0]const u8 {
const ip = &mod.intern_pool;
const field_name = ip.indexToKey(ty.toIntern()).enum_type.names[field_index];
return ip.stringToSlice(field_name);
pub fn enumFieldName(ty: Type, field_index: usize, mod: *Module) InternPool.NullTerminatedString {
return mod.intern_pool.indexToKey(ty.toIntern()).enum_type.names[field_index];
}
pub fn enumFieldIndex(ty: Type, field_name: []const u8, mod: *Module) ?u32 {
pub fn enumFieldIndex(ty: Type, field_name: InternPool.NullTerminatedString, mod: *Module) ?u32 {
const ip = &mod.intern_pool;
const enum_type = ip.indexToKey(ty.toIntern()).enum_type;
// If the string is not interned, then the field certainly is not present.
const field_name_interned = ip.getString(field_name).unwrap() orelse return null;
return enum_type.nameIndex(ip, field_name_interned);
return enum_type.nameIndex(ip, field_name);
}
/// Asserts `ty` is an enum. `enum_tag` can either be `enum_field_index` or
@@ -3017,19 +3013,16 @@ pub const Type = struct {
}
}
pub fn structFieldName(ty: Type, field_index: usize, mod: *Module) []const u8 {
switch (mod.intern_pool.indexToKey(ty.toIntern())) {
pub fn structFieldName(ty: Type, field_index: usize, mod: *Module) InternPool.NullTerminatedString {
return switch (mod.intern_pool.indexToKey(ty.toIntern())) {
.struct_type => |struct_type| {
const struct_obj = mod.structPtrUnwrap(struct_type.index).?;
assert(struct_obj.haveFieldTypes());
return struct_obj.fields.keys()[field_index];
},
.anon_struct_type => |anon_struct| {
const name = anon_struct.names[field_index];
return mod.intern_pool.stringToSlice(name);
},
.anon_struct_type => |anon_struct| anon_struct.names[field_index],
else => unreachable,
}
};
}
pub fn structFieldCount(ty: Type, mod: *Module) usize {
@@ -3082,7 +3075,10 @@ pub const Type = struct {
switch (mod.intern_pool.indexToKey(ty.toIntern())) {
.struct_type => |struct_type| {
const struct_obj = mod.structPtrUnwrap(struct_type.index).?;
return struct_obj.fields.values()[index].default_val;
const val = struct_obj.fields.values()[index].default_val;
// TODO: avoid using `unreachable` to indicate this.
if (val == .none) return Value.@"unreachable";
return val.toValue();
},
.anon_struct_type => |anon_struct| {
const val = anon_struct.values[index];
@@ -3100,7 +3096,7 @@ pub const Type = struct {
const struct_obj = mod.structPtrUnwrap(struct_type.index).?;
const field = struct_obj.fields.values()[index];
if (field.is_comptime) {
return field.default_val;
return field.default_val.toValue();
} else {
return field.ty.onePossibleValue(mod);
}

View File

@@ -24,9 +24,6 @@ pub const Value = struct {
/// This union takes advantage of the fact that the first page of memory
/// is unmapped, giving us 4096 possible enum tags that have no payload.
legacy: extern union {
/// If the tag value is less than Tag.no_payload_count, then no pointer
/// dereference is needed.
tag_if_small_enough: Tag,
ptr_otherwise: *Payload,
},
@@ -64,8 +61,6 @@ pub const Value = struct {
/// An instance of a union.
@"union",
pub const no_payload_count = 0;
pub fn Type(comptime t: Tag) type {
return switch (t) {
.eu_payload,
@@ -96,16 +91,7 @@ pub const Value = struct {
}
};
pub fn initTag(small_tag: Tag) Value {
assert(@enumToInt(small_tag) < Tag.no_payload_count);
return Value{
.ip_index = .none,
.legacy = .{ .tag_if_small_enough = small_tag },
};
}
pub fn initPayload(payload: *Payload) Value {
assert(@enumToInt(payload.tag) >= Tag.no_payload_count);
return Value{
.ip_index = .none,
.legacy = .{ .ptr_otherwise = payload },
@@ -114,11 +100,7 @@ pub const Value = struct {
pub fn tag(self: Value) Tag {
assert(self.ip_index == .none);
if (@enumToInt(self.legacy.tag_if_small_enough) < Tag.no_payload_count) {
return self.legacy.tag_if_small_enough;
} else {
return self.legacy.ptr_otherwise.tag;
}
return self.legacy.ptr_otherwise.tag;
}
/// Prefer `castTag` to this.
@@ -129,12 +111,7 @@ pub const Value = struct {
if (@hasField(T, "base_tag")) {
return self.castTag(T.base_tag);
}
if (@enumToInt(self.legacy.tag_if_small_enough) < Tag.no_payload_count) {
return null;
}
inline for (@typeInfo(Tag).Enum.fields) |field| {
if (field.value < Tag.no_payload_count)
continue;
const t = @intToEnum(Tag, field.value);
if (self.legacy.ptr_otherwise.tag == t) {
if (T == t.Type()) {
@@ -149,9 +126,6 @@ pub const Value = struct {
pub fn castTag(self: Value, comptime t: Tag) ?*t.Type() {
if (self.ip_index != .none) return null;
if (@enumToInt(self.legacy.tag_if_small_enough) < Tag.no_payload_count)
return null;
if (self.legacy.ptr_otherwise.tag == t)
return @fieldParentPtr(t.Type(), "base", self.legacy.ptr_otherwise);
@@ -164,12 +138,7 @@ pub const Value = struct {
if (self.ip_index != .none) {
return Value{ .ip_index = self.ip_index, .legacy = undefined };
}
if (@enumToInt(self.legacy.tag_if_small_enough) < Tag.no_payload_count) {
return Value{
.ip_index = .none,
.legacy = .{ .tag_if_small_enough = self.legacy.tag_if_small_enough },
};
} else switch (self.legacy.ptr_otherwise.tag) {
switch (self.legacy.ptr_otherwise.tag) {
.bytes => {
const bytes = self.castTag(.bytes).?.data;
const new_payload = try arena.create(Payload.Bytes);
@@ -312,6 +281,30 @@ pub const Value = struct {
} };
}
/// Asserts that the value is representable as an array of bytes.
/// Returns the value as a null-terminated string stored in the InternPool.
pub fn toIpString(val: Value, ty: Type, mod: *Module) !InternPool.NullTerminatedString {
const ip = &mod.intern_pool;
return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.enum_literal => |enum_literal| enum_literal,
.ptr => |ptr| switch (ptr.len) {
.none => unreachable,
else => try arrayToIpString(val, ptr.len.toValue().toUnsignedInt(mod), mod),
},
.aggregate => |aggregate| switch (aggregate.storage) {
.bytes => |bytes| try ip.getOrPutString(mod.gpa, bytes),
.elems => try arrayToIpString(val, ty.arrayLen(mod), mod),
.repeated_elem => |elem| {
const byte = @intCast(u8, elem.toValue().toUnsignedInt(mod));
const len = @intCast(usize, ty.arrayLen(mod));
try ip.string_bytes.appendNTimes(mod.gpa, byte, len);
return ip.getOrPutTrailingString(mod.gpa, len);
},
},
else => unreachable,
};
}
/// Asserts that the value is representable as an array of bytes.
/// Copies the value into a freshly allocated slice of memory, which is owned by the caller.
pub fn toAllocatedBytes(val: Value, ty: Type, allocator: Allocator, mod: *Module) ![]u8 {
@@ -319,11 +312,11 @@ pub const Value = struct {
.enum_literal => |enum_literal| allocator.dupe(u8, mod.intern_pool.stringToSlice(enum_literal)),
.ptr => |ptr| switch (ptr.len) {
.none => unreachable,
else => arrayToAllocatedBytes(val, ptr.len.toValue().toUnsignedInt(mod), allocator, mod),
else => try arrayToAllocatedBytes(val, ptr.len.toValue().toUnsignedInt(mod), allocator, mod),
},
.aggregate => |aggregate| switch (aggregate.storage) {
.bytes => |bytes| try allocator.dupe(u8, bytes),
.elems => arrayToAllocatedBytes(val, ty.arrayLen(mod), allocator, mod),
.elems => try arrayToAllocatedBytes(val, ty.arrayLen(mod), allocator, mod),
.repeated_elem => |elem| {
const byte = @intCast(u8, elem.toValue().toUnsignedInt(mod));
const result = try allocator.alloc(u8, @intCast(usize, ty.arrayLen(mod)));
@@ -344,6 +337,23 @@ pub const Value = struct {
return result;
}
fn arrayToIpString(val: Value, len_u64: u64, mod: *Module) !InternPool.NullTerminatedString {
const gpa = mod.gpa;
const ip = &mod.intern_pool;
const len = @intCast(usize, len_u64);
try ip.string_bytes.ensureUnusedCapacity(gpa, len);
for (0..len) |i| {
// elemValue should not be able to affect ip.string_bytes, but
// assert just to be sure.
const prev = ip.string_bytes.items.len;
const elem_val = try val.elemValue(mod, i);
assert(ip.string_bytes.items.len == prev);
const byte = @intCast(u8, elem_val.toUnsignedInt(mod));
ip.string_bytes.appendAssumeCapacity(byte);
}
return ip.getOrPutTrailingString(gpa, len);
}
pub fn intern(val: Value, ty: Type, mod: *Module) Allocator.Error!InternPool.Index {
if (val.ip_index != .none) return (try mod.getCoerced(val, ty)).toIntern();
switch (val.tag()) {
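arrayToIpString above writes each byte directly into ip.string_bytes and then interns whatever was appended via getOrPutTrailingString, avoiding a temporary allocation. A toy sketch of that append-first, intern-afterwards pattern — MiniPool and getOrPutTrailing are hypothetical stand-ins for the real InternPool API, using a linear scan where the real pool uses a hash index:

const std = @import("std");

/// Toy stand-in for InternPool's trailing-string interning: callers append the
/// string's bytes straight into `string_bytes`, then ask the pool to either
/// keep them (new string) or roll them back (string was already interned).
const MiniPool = struct {
    gpa: std.mem.Allocator,
    string_bytes: std.ArrayListUnmanaged(u8) = .{},
    /// Start offsets of previously interned (null-terminated) strings.
    offsets: std.ArrayListUnmanaged(u32) = .{},

    fn deinit(pool: *MiniPool) void {
        pool.string_bytes.deinit(pool.gpa);
        pool.offsets.deinit(pool.gpa);
    }

    fn slice(pool: *const MiniPool, offset: u32) []const u8 {
        const bytes = pool.string_bytes.items[offset..];
        const end = std.mem.indexOfScalar(u8, bytes, 0).?;
        return bytes[0..end];
    }

    /// The candidate string is the last `len` bytes already sitting in
    /// `string_bytes` (mirrors the role of getOrPutTrailingString).
    fn getOrPutTrailing(pool: *MiniPool, len: usize) !u32 {
        const start = @intCast(u32, pool.string_bytes.items.len - len);
        try pool.string_bytes.append(pool.gpa, 0); // null terminator
        const candidate = pool.string_bytes.items[start .. start + len];
        for (pool.offsets.items) |offset| {
            if (std.mem.eql(u8, pool.slice(offset), candidate)) {
                // Duplicate: drop the trailing copy and reuse the old one.
                pool.string_bytes.shrinkRetainingCapacity(start);
                return offset;
            }
        }
        try pool.offsets.append(pool.gpa, start);
        return start;
    }
};

pub fn main() !void {
    var gpa_state = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa_state.deinit();
    var pool = MiniPool{ .gpa = gpa_state.allocator() };
    defer pool.deinit();

    try pool.string_bytes.appendSlice(pool.gpa, "error.Foo");
    const a = try pool.getOrPutTrailing("error.Foo".len);
    try pool.string_bytes.appendSlice(pool.gpa, "error.Foo");
    const b = try pool.getOrPutTrailing("error.Foo".len);
    std.debug.print("{d} {d} {s}\n", .{ a, b, pool.slice(a) }); // a == b
}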
@@ -498,7 +508,7 @@ pub const Value = struct {
// Assume it is already an integer and return it directly.
.simple_type, .int_type => val,
.enum_literal => |enum_literal| {
const field_index = ty.enumFieldIndex(ip.stringToSlice(enum_literal), mod).?;
const field_index = ty.enumFieldIndex(enum_literal, mod).?;
return switch (ip.indexToKey(ty.toIntern())) {
// Assume it is already an integer and return it directly.
.simple_type, .int_type => val,
@@ -776,7 +786,7 @@ pub const Value = struct {
.error_union => |error_union| error_union.val.err_name,
else => unreachable,
};
const int = mod.global_error_set.get(mod.intern_pool.stringToSlice(name)).?;
const int = @intCast(Module.ErrorInt, mod.global_error_set.getIndex(name).?);
std.mem.writeInt(Int, buffer[0..@sizeOf(Int)], @intCast(Int, int), endian);
},
.Union => switch (ty.containerLayout(mod)) {
@@ -1028,10 +1038,10 @@ pub const Value = struct {
// TODO revisit this when we have the concept of the error tag type
const Int = u16;
const int = std.mem.readInt(Int, buffer[0..@sizeOf(Int)], endian);
const name = mod.error_name_list.items[@intCast(usize, int)];
const name = mod.global_error_set.keys()[@intCast(usize, int)];
return (try mod.intern(.{ .err = .{
.ty = ty.toIntern(),
.name = mod.intern_pool.getString(name).unwrap().?,
.name = name,
} })).toValue();
},
.Pointer => {
@@ -2155,15 +2165,29 @@ pub const Value = struct {
/// unreachable. For error unions, prefer `errorUnionIsPayload` to find out whether
/// something is an error or not because it works without having to figure out the
/// string.
pub fn getError(self: Value, mod: *const Module) ?[]const u8 {
return mod.intern_pool.stringToSliceUnwrap(switch (mod.intern_pool.indexToKey(self.toIntern())) {
.err => |err| err.name.toOptional(),
pub fn getError(val: Value, mod: *const Module) ?[]const u8 {
return switch (getErrorName(val, mod)) {
.empty => null,
else => |s| mod.intern_pool.stringToSlice(s),
};
}
pub fn getErrorName(val: Value, mod: *const Module) InternPool.NullTerminatedString {
return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.err => |err| err.name,
.error_union => |error_union| switch (error_union.val) {
.err_name => |err_name| err_name.toOptional(),
.payload => .none,
.err_name => |err_name| err_name,
.payload => .empty,
},
else => unreachable,
});
};
}
pub fn getErrorInt(val: Value, mod: *const Module) Module.ErrorInt {
return switch (getErrorName(val, mod)) {
.empty => 0,
else => |s| @intCast(Module.ErrorInt, mod.global_error_set.getIndex(s).?),
};
}
/// Assumes the type is an error union. Returns true if and only if the value is
@@ -4225,7 +4249,7 @@ pub const Value = struct {
var fields: [tags.len]std.builtin.Type.StructField = undefined;
for (&fields, tags) |*field, t| field.* = .{
.name = t.name,
.type = *if (t.value < Tag.no_payload_count) void else @field(Tag, t.name).Type(),
.type = *@field(Tag, t.name).Type(),
.default_value = null,
.is_comptime = false,
.alignment = 0,