Fixed wrong "unable to load" error for non-existing import files

- Changed ZIR encoding of `import` metadata from having instruction
  indexes to storing token indexes.
This commit is contained in:
Loris Cro
2021-07-19 23:21:24 +02:00
committed by Andrew Kelley
parent 00e944f718
commit e807020679
3 changed files with 44 additions and 25 deletions

View File

@@ -36,7 +36,7 @@ compile_errors: ArrayListUnmanaged(Zir.Inst.CompileErrors.Item) = .{},
fn_block: ?*GenZir = null,
/// Maps string table indexes to the first `@import` ZIR instruction
/// that uses this string as the operand.
imports: std.AutoArrayHashMapUnmanaged(u32, Zir.Inst.Index) = .{},
imports: std.AutoArrayHashMapUnmanaged(u32, ast.TokenIndex) = .{},
const InnerError = error{ OutOfMemory, AnalysisFail };
@@ -132,8 +132,7 @@ pub fn generate(gpa: *Allocator, tree: ast.Tree) Allocator.Error!Zir {
if (astgen.compile_errors.items.len == 0) {
astgen.extra.items[err_index] = 0;
} else {
try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
1 + astgen.compile_errors.items.len *
try astgen.extra.ensureUnusedCapacity(gpa, 1 + astgen.compile_errors.items.len *
@typeInfo(Zir.Inst.CompileErrors.Item).Struct.fields.len);
astgen.extra.items[err_index] = astgen.addExtraAssumeCapacity(Zir.Inst.CompileErrors{
@@ -149,13 +148,20 @@ pub fn generate(gpa: *Allocator, tree: ast.Tree) Allocator.Error!Zir {
if (astgen.imports.count() == 0) {
astgen.extra.items[imports_index] = 0;
} else {
try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
@typeInfo(Zir.Inst.Imports).Struct.fields.len + astgen.imports.count());
try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Imports).Struct.fields.len +
astgen.imports.count() * @typeInfo(Zir.Inst.Imports.Item).Struct.fields.len);
astgen.extra.items[imports_index] = astgen.addExtraAssumeCapacity(Zir.Inst.Imports{
.imports_len = @intCast(u32, astgen.imports.count()),
});
astgen.extra.appendSliceAssumeCapacity(astgen.imports.values());
var it = astgen.imports.iterator();
while (it.next()) |entry| {
_ = astgen.addExtraAssumeCapacity(Zir.Inst.Imports.Item{
.name = entry.key_ptr.*,
.token = entry.value_ptr.*,
});
}
}
return Zir{
@@ -6986,12 +6992,10 @@ fn builtinCall(
const str_lit_token = main_tokens[operand_node];
const str = try astgen.strLitAsString(str_lit_token);
const result = try gz.addStrTok(.import, str.index, str_lit_token);
if (gz.refToIndex(result)) |import_inst_index| {
const gop = try astgen.imports.getOrPut(astgen.gpa, str.index);
if (!gop.found_existing) {
gop.value_ptr.* = import_inst_index;
gop.value_ptr.* = str_lit_token;
}
}
return rvalue(gz, rl, result, node);
},
.compile_log => {

View File

@@ -2315,7 +2315,7 @@ const AstGenSrc = union(enum) {
root,
import: struct {
importing_file: *Module.Scope.File,
import_inst: Zir.Inst.Index,
import_tok: std.zig.ast.TokenIndex,
},
};
@@ -2352,11 +2352,15 @@ fn workerAstGenFile(
assert(file.zir_loaded);
const imports_index = file.zir.extra[@enumToInt(Zir.ExtraIndex.imports)];
if (imports_index != 0) {
const imports_len = file.zir.extra[imports_index];
const extra = file.zir.extraData(Zir.Inst.Imports, imports_index);
var import_i: u32 = 0;
var extra_index = extra.end;
for (file.zir.extra[imports_index + 1 ..][0..imports_len]) |import_inst| {
const inst_data = file.zir.instructions.items(.data)[import_inst].str_tok;
const import_path = inst_data.get(file.zir);
while (import_i < extra.data.imports_len) : (import_i += 1) {
const item = file.zir.extraData(Zir.Inst.Imports.Item, extra_index);
extra_index = item.end;
const import_path = file.zir.nullTerminatedString(item.data.name);
const import_result = blk: {
const lock = comp.mutex.acquire();
@@ -2370,7 +2374,7 @@ fn workerAstGenFile(
});
const sub_src: AstGenSrc = .{ .import = .{
.importing_file = file,
.import_inst = import_inst,
.import_tok = item.data.token,
} };
wg.start();
comp.thread_pool.spawn(workerAstGenFile, .{
@@ -2602,12 +2606,11 @@ fn reportRetryableAstGenError(
},
.import => |info| blk: {
const importing_file = info.importing_file;
const import_inst = info.import_inst;
const inst_data = importing_file.zir.instructions.items(.data)[import_inst].str_tok;
break :blk .{
.file_scope = importing_file,
.parent_decl_node = 0,
.lazy = .{ .token_offset = inst_data.src_tok },
.lazy = .{ .token_abs = info.import_tok },
};
},
};

View File

@@ -138,11 +138,17 @@ pub fn renderAsTextToFile(
const imports_index = scope_file.zir.extra[@enumToInt(ExtraIndex.imports)];
if (imports_index != 0) {
try fs_file.writeAll("Imports:\n");
const imports_len = scope_file.zir.extra[imports_index];
for (scope_file.zir.extra[imports_index + 1 ..][0..imports_len]) |import_inst| {
const inst_data = writer.code.instructions.items(.data)[import_inst].str_tok;
const src = inst_data.src();
const import_path = inst_data.get(writer.code);
const extra = scope_file.zir.extraData(Inst.Imports, imports_index);
var import_i: u32 = 0;
var extra_index = extra.end;
while (import_i < extra.data.imports_len) : (import_i += 1) {
const item = scope_file.zir.extraData(Inst.Imports.Item, extra_index);
extra_index = item.end;
const src: LazySrcLoc = .{ .token_abs = item.data.token };
const import_path = scope_file.zir.nullTerminatedString(item.data.name);
try fs_file.writer().print(" @import(\"{}\") ", .{
std.zig.fmtEscapes(import_path),
});
@@ -2780,10 +2786,16 @@ pub const Inst = struct {
};
};
/// Trailing: for each `imports_len` there is an instruction index
/// to an import instruction.
/// Trailing: for each `imports_len` there is an Item
pub const Imports = struct {
imports_len: Zir.Inst.Index,
pub const Item = struct {
/// null-terminated string index
name: u32,
/// points to the import name
token: ast.TokenIndex,
};
};
};