Stage2: Implement comptime closures and the This builtin (#9823)
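Illustrative sketch before the diff (not taken from the commit; `List`, `first`, and `main` are invented for illustration): the kind of user code this change targets, where decls of a returned struct close over a comptime parameter from the enclosing function and `@This()` names the enclosing type.

const std = @import("std");

/// `List` returns a struct type whose decls and fields refer to the comptime
/// parameter `T` from the enclosing function: a comptime closure over `T`.
fn List(comptime T: type) type {
    return struct {
        /// `@This()` is the builtin this commit wires up for stage2.
        const Self = @This();

        items: []const T,

        fn first(self: Self) T {
            return self.items[0];
        }
    };
}

pub fn main() void {
    const l = List(u32){ .items = &[_]u32{ 1, 2, 3 } };
    std.debug.assert(l.first() == 1);
}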
src/AstGen.zig (300 changed lines)
@@ -124,7 +124,7 @@ pub fn generate(gpa: *Allocator, tree: Ast) Allocator.Error!Zir {
         container_decl,
         .Auto,
     )) |struct_decl_ref| {
-        astgen.extra.items[@enumToInt(Zir.ExtraIndex.main_struct)] = @enumToInt(struct_decl_ref);
+        assert(refToIndex(struct_decl_ref).? == 0);
     } else |err| switch (err) {
         error.OutOfMemory => return error.OutOfMemory,
         error.AnalysisFail => {}, // Handled via compile_errors below.
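A hedged note on the replacement above: the new assert relies on the file's root struct_decl always being emitted as ZIR instruction 0. The Module.zig hunks later in this commit refer to it as `Zir.main_struct_inst`; the companion Zir.zig change is not shown in this excerpt, but it presumably amounts to something like the following declaration.

// Assumed companion declaration in src/Zir.zig (not shown in this excerpt):
// the root struct_decl is always emitted first, so its index is a well-known
// constant instead of being stored in the extra array.
pub const main_struct_inst: Zir.Inst.Index = 0;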
@@ -2078,9 +2078,6 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: Ast.Node.Index) Inner
|
||||
.union_init_ptr,
|
||||
.field_type,
|
||||
.field_type_ref,
|
||||
.opaque_decl,
|
||||
.opaque_decl_anon,
|
||||
.opaque_decl_func,
|
||||
.error_set_decl,
|
||||
.error_set_decl_anon,
|
||||
.error_set_decl_func,
|
||||
@@ -2162,6 +2159,7 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: Ast.Node.Index) Inner
|
||||
.await_nosuspend,
|
||||
.ret_err_value_code,
|
||||
.extended,
|
||||
.closure_get,
|
||||
=> break :b false,
|
||||
|
||||
// ZIR instructions that are always `noreturn`.
|
||||
@@ -2205,6 +2203,7 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: Ast.Node.Index) Inner
|
||||
.set_cold,
|
||||
.set_float_mode,
|
||||
.set_runtime_safety,
|
||||
.closure_capture,
|
||||
=> break :b true,
|
||||
}
|
||||
} else switch (maybe_unused_result) {
|
||||
@@ -3534,8 +3533,9 @@ fn structDeclInner(
|
||||
container_decl: Ast.full.ContainerDecl,
|
||||
layout: std.builtin.TypeInfo.ContainerLayout,
|
||||
) InnerError!Zir.Inst.Ref {
|
||||
const decl_inst = try gz.reserveInstructionIndex();
|
||||
|
||||
if (container_decl.ast.members.len == 0) {
|
||||
const decl_inst = try gz.reserveInstructionIndex();
|
||||
try gz.setStruct(decl_inst, .{
|
||||
.src_node = node,
|
||||
.layout = layout,
|
||||
@@ -3553,11 +3553,19 @@ fn structDeclInner(
|
||||
const node_tags = tree.nodes.items(.tag);
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
|
||||
var namespace: Scope.Namespace = .{
|
||||
.parent = scope,
|
||||
.node = node,
|
||||
.inst = decl_inst,
|
||||
.declaring_gz = gz,
|
||||
};
|
||||
defer namespace.deinit(gpa);
|
||||
|
||||
// The struct_decl instruction introduces a scope in which the decls of the struct
|
||||
// are in scope, so that field types, alignments, and default value expressions
|
||||
// can refer to decls within the struct itself.
|
||||
var block_scope: GenZir = .{
|
||||
.parent = scope,
|
||||
.parent = &namespace.base,
|
||||
.decl_node_index = node,
|
||||
.decl_line = gz.calcLine(node),
|
||||
.astgen = astgen,
|
||||
@@ -3566,9 +3574,6 @@ fn structDeclInner(
|
||||
};
|
||||
defer block_scope.instructions.deinit(gpa);
|
||||
|
||||
var namespace: Scope.Namespace = .{ .parent = scope, .node = node };
|
||||
defer namespace.decls.deinit(gpa);
|
||||
|
||||
try astgen.scanDecls(&namespace, container_decl.ast.members);
|
||||
|
||||
var wip_decls: WipDecls = .{};
|
||||
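Illustrative sketch (not from the commit; `Packet` and its members are invented): the comment above is the user-visible reason `block_scope` now parents into the namespace scope, so that field types, lengths, and default values can name decls of the struct itself.

const Packet = struct {
    const header_len = 4;
    const Payload = [12]u8;

    // Field type and default value both refer to decls of this struct.
    header: [header_len]u8 = [_]u8{0} ** header_len,
    payload: Payload = [_]u8{0} ** 12,
};

test "decls are in scope for field types and defaults" {
    const p = Packet{};
    _ = p;
}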
@@ -3773,7 +3778,6 @@ fn structDeclInner(
|
||||
}
|
||||
}
|
||||
|
||||
const decl_inst = try gz.reserveInstructionIndex();
|
||||
if (block_scope.instructions.items.len != 0) {
|
||||
_ = try block_scope.addBreak(.break_inline, decl_inst, .void_value);
|
||||
}
|
||||
@@ -3787,11 +3791,18 @@ fn structDeclInner(
|
||||
.known_has_bits = known_has_bits,
|
||||
});
|
||||
|
||||
try astgen.extra.ensureUnusedCapacity(gpa, bit_bag.items.len +
|
||||
@boolToInt(field_index != 0) + fields_data.items.len +
|
||||
// zig fmt: off
|
||||
try astgen.extra.ensureUnusedCapacity(gpa,
|
||||
bit_bag.items.len +
|
||||
@boolToInt(wip_decls.decl_index != 0) +
|
||||
wip_decls.payload.items.len +
|
||||
block_scope.instructions.items.len +
|
||||
wip_decls.bit_bag.items.len + @boolToInt(wip_decls.decl_index != 0) +
|
||||
wip_decls.payload.items.len);
|
||||
wip_decls.bit_bag.items.len +
|
||||
@boolToInt(field_index != 0) +
|
||||
fields_data.items.len
|
||||
);
|
||||
// zig fmt: on
|
||||
|
||||
astgen.extra.appendSliceAssumeCapacity(wip_decls.bit_bag.items); // Likely empty.
|
||||
if (wip_decls.decl_index != 0) {
|
||||
astgen.extra.appendAssumeCapacity(wip_decls.cur_bit_bag);
|
||||
@@ -3818,17 +3829,27 @@ fn unionDeclInner(
|
||||
arg_node: Ast.Node.Index,
|
||||
have_auto_enum: bool,
|
||||
) InnerError!Zir.Inst.Ref {
|
||||
const decl_inst = try gz.reserveInstructionIndex();
|
||||
|
||||
const astgen = gz.astgen;
|
||||
const gpa = astgen.gpa;
|
||||
const tree = astgen.tree;
|
||||
const node_tags = tree.nodes.items(.tag);
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
|
||||
var namespace: Scope.Namespace = .{
|
||||
.parent = scope,
|
||||
.node = node,
|
||||
.inst = decl_inst,
|
||||
.declaring_gz = gz,
|
||||
};
|
||||
defer namespace.deinit(gpa);
|
||||
|
||||
// The union_decl instruction introduces a scope in which the decls of the union
|
||||
// are in scope, so that field types, alignments, and default value expressions
|
||||
// can refer to decls within the union itself.
|
||||
var block_scope: GenZir = .{
|
||||
.parent = scope,
|
||||
.parent = &namespace.base,
|
||||
.decl_node_index = node,
|
||||
.decl_line = gz.calcLine(node),
|
||||
.astgen = astgen,
|
||||
@@ -3837,13 +3858,10 @@ fn unionDeclInner(
|
||||
};
|
||||
defer block_scope.instructions.deinit(gpa);
|
||||
|
||||
var namespace: Scope.Namespace = .{ .parent = scope, .node = node };
|
||||
defer namespace.decls.deinit(gpa);
|
||||
|
||||
try astgen.scanDecls(&namespace, members);
|
||||
|
||||
const arg_inst: Zir.Inst.Ref = if (arg_node != 0)
|
||||
try typeExpr(gz, &namespace.base, arg_node)
|
||||
try typeExpr(&block_scope, &namespace.base, arg_node)
|
||||
else
|
||||
.none;
|
||||
|
||||
@@ -4056,7 +4074,6 @@ fn unionDeclInner(
|
||||
}
|
||||
}
|
||||
|
||||
const decl_inst = try gz.reserveInstructionIndex();
|
||||
if (block_scope.instructions.items.len != 0) {
|
||||
_ = try block_scope.addBreak(.break_inline, decl_inst, .void_value);
|
||||
}
|
||||
@@ -4071,11 +4088,18 @@ fn unionDeclInner(
|
||||
.auto_enum_tag = have_auto_enum,
|
||||
});
|
||||
|
||||
try astgen.extra.ensureUnusedCapacity(gpa, bit_bag.items.len +
|
||||
1 + fields_data.items.len +
|
||||
// zig fmt: off
|
||||
try astgen.extra.ensureUnusedCapacity(gpa,
|
||||
bit_bag.items.len +
|
||||
@boolToInt(wip_decls.decl_index != 0) +
|
||||
wip_decls.payload.items.len +
|
||||
block_scope.instructions.items.len +
|
||||
wip_decls.bit_bag.items.len + @boolToInt(wip_decls.decl_index != 0) +
|
||||
wip_decls.payload.items.len);
|
||||
wip_decls.bit_bag.items.len +
|
||||
1 + // cur_bit_bag
|
||||
fields_data.items.len
|
||||
);
|
||||
// zig fmt: on
|
||||
|
||||
astgen.extra.appendSliceAssumeCapacity(wip_decls.bit_bag.items); // Likely empty.
|
||||
if (wip_decls.decl_index != 0) {
|
||||
astgen.extra.appendAssumeCapacity(wip_decls.cur_bit_bag);
|
||||
@@ -4238,10 +4262,20 @@ fn containerDecl(
|
||||
// how structs are handled above.
|
||||
const nonexhaustive = counts.nonexhaustive_node != 0;
|
||||
|
||||
const decl_inst = try gz.reserveInstructionIndex();
|
||||
|
||||
var namespace: Scope.Namespace = .{
|
||||
.parent = scope,
|
||||
.node = node,
|
||||
.inst = decl_inst,
|
||||
.declaring_gz = gz,
|
||||
};
|
||||
defer namespace.deinit(gpa);
|
||||
|
||||
// The enum_decl instruction introduces a scope in which the decls of the enum
|
||||
// are in scope, so that tag values can refer to decls within the enum itself.
|
||||
var block_scope: GenZir = .{
|
||||
.parent = scope,
|
||||
.parent = &namespace.base,
|
||||
.decl_node_index = node,
|
||||
.decl_line = gz.calcLine(node),
|
||||
.astgen = astgen,
|
||||
@@ -4250,13 +4284,10 @@ fn containerDecl(
|
||||
};
|
||||
defer block_scope.instructions.deinit(gpa);
|
||||
|
||||
var namespace: Scope.Namespace = .{ .parent = scope, .node = node };
|
||||
defer namespace.decls.deinit(gpa);
|
||||
|
||||
try astgen.scanDecls(&namespace, container_decl.ast.members);
|
||||
|
||||
const arg_inst: Zir.Inst.Ref = if (container_decl.ast.arg != 0)
|
||||
try comptimeExpr(gz, &namespace.base, .{ .ty = .type_type }, container_decl.ast.arg)
|
||||
try comptimeExpr(&block_scope, &namespace.base, .{ .ty = .type_type }, container_decl.ast.arg)
|
||||
else
|
||||
.none;
|
||||
|
||||
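Illustrative sketch (invented example): with the arg expression now evaluated in `block_scope`, whose parent is the namespace, an enum's explicit tag values can refer to decls declared inside the enum itself, as the comment above describes.

const Opcode = enum(u8) {
    const base = 0x10;

    // Tag values referencing a decl of the enum's own namespace.
    load = base,
    store = base + 1,
    halt = 0xff,
};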
@@ -4451,7 +4482,6 @@ fn containerDecl(
|
||||
}
|
||||
}
|
||||
|
||||
const decl_inst = try gz.reserveInstructionIndex();
|
||||
if (block_scope.instructions.items.len != 0) {
|
||||
_ = try block_scope.addBreak(.break_inline, decl_inst, .void_value);
|
||||
}
|
||||
@@ -4465,11 +4495,18 @@ fn containerDecl(
|
||||
.decls_len = @intCast(u32, wip_decls.decl_index),
|
||||
});
|
||||
|
||||
try astgen.extra.ensureUnusedCapacity(gpa, bit_bag.items.len +
|
||||
1 + fields_data.items.len +
|
||||
// zig fmt: off
|
||||
try astgen.extra.ensureUnusedCapacity(gpa,
|
||||
bit_bag.items.len +
|
||||
@boolToInt(wip_decls.decl_index != 0) +
|
||||
wip_decls.payload.items.len +
|
||||
block_scope.instructions.items.len +
|
||||
wip_decls.bit_bag.items.len + @boolToInt(wip_decls.decl_index != 0) +
|
||||
wip_decls.payload.items.len);
|
||||
wip_decls.bit_bag.items.len +
|
||||
1 + // cur_bit_bag
|
||||
fields_data.items.len
|
||||
);
|
||||
// zig fmt: on
|
||||
|
||||
astgen.extra.appendSliceAssumeCapacity(wip_decls.bit_bag.items); // Likely empty.
|
||||
if (wip_decls.decl_index != 0) {
|
||||
astgen.extra.appendAssumeCapacity(wip_decls.cur_bit_bag);
|
||||
@@ -4486,8 +4523,15 @@ fn containerDecl(
|
||||
.keyword_opaque => {
|
||||
assert(container_decl.ast.arg == 0);
|
||||
|
||||
var namespace: Scope.Namespace = .{ .parent = scope, .node = node };
|
||||
defer namespace.decls.deinit(gpa);
|
||||
const decl_inst = try gz.reserveInstructionIndex();
|
||||
|
||||
var namespace: Scope.Namespace = .{
|
||||
.parent = scope,
|
||||
.node = node,
|
||||
.inst = decl_inst,
|
||||
.declaring_gz = gz,
|
||||
};
|
||||
defer namespace.deinit(gpa);
|
||||
|
||||
try astgen.scanDecls(&namespace, container_decl.ast.members);
|
||||
|
||||
@@ -4625,21 +4669,20 @@ fn containerDecl(
|
||||
wip_decls.cur_bit_bag >>= @intCast(u5, empty_slot_count * WipDecls.bits_per_field);
|
||||
}
|
||||
}
|
||||
const tag: Zir.Inst.Tag = switch (gz.anon_name_strategy) {
|
||||
.parent => .opaque_decl,
|
||||
.anon => .opaque_decl_anon,
|
||||
.func => .opaque_decl_func,
|
||||
};
|
||||
const decl_inst = try gz.addBlock(tag, node);
|
||||
try gz.instructions.append(gpa, decl_inst);
|
||||
|
||||
try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.OpaqueDecl).Struct.fields.len +
|
||||
wip_decls.bit_bag.items.len + @boolToInt(wip_decls.decl_index != 0) +
|
||||
wip_decls.payload.items.len);
|
||||
const zir_datas = astgen.instructions.items(.data);
|
||||
zir_datas[decl_inst].pl_node.payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.OpaqueDecl{
|
||||
try gz.setOpaque(decl_inst, .{
|
||||
.src_node = node,
|
||||
.decls_len = @intCast(u32, wip_decls.decl_index),
|
||||
});
|
||||
|
||||
// zig fmt: off
|
||||
try astgen.extra.ensureUnusedCapacity(gpa,
|
||||
wip_decls.bit_bag.items.len +
|
||||
@boolToInt(wip_decls.decl_index != 0) +
|
||||
wip_decls.payload.items.len
|
||||
);
|
||||
// zig fmt: on
|
||||
|
||||
astgen.extra.appendSliceAssumeCapacity(wip_decls.bit_bag.items); // Likely empty.
|
||||
if (wip_decls.decl_index != 0) {
|
||||
astgen.extra.appendAssumeCapacity(wip_decls.cur_bit_bag);
|
||||
@@ -6380,6 +6423,7 @@ fn identifier(
|
||||
|
||||
const astgen = gz.astgen;
|
||||
const tree = astgen.tree;
|
||||
const gpa = astgen.gpa;
|
||||
const main_tokens = tree.nodes.items(.main_token);
|
||||
|
||||
const ident_token = main_tokens[ident];
|
||||
@@ -6426,16 +6470,28 @@ fn identifier(
|
||||
const name_str_index = try astgen.identAsString(ident_token);
|
||||
var s = scope;
|
||||
var found_already: ?Ast.Node.Index = null; // we have found a decl with the same name already
|
||||
var hit_namespace: Ast.Node.Index = 0;
|
||||
var num_namespaces_out: u32 = 0;
|
||||
var capturing_namespace: ?*Scope.Namespace = null;
|
||||
while (true) switch (s.tag) {
|
||||
.local_val => {
|
||||
const local_val = s.cast(Scope.LocalVal).?;
|
||||
|
||||
if (local_val.name == name_str_index) {
|
||||
local_val.used = true;
|
||||
// Locals cannot shadow anything, so we do not need to look for ambiguous
|
||||
// references in this case.
|
||||
return rvalue(gz, rl, local_val.inst, ident);
|
||||
local_val.used = true;
|
||||
|
||||
const value_inst = try tunnelThroughClosure(
|
||||
gz,
|
||||
ident,
|
||||
num_namespaces_out,
|
||||
capturing_namespace,
|
||||
local_val.inst,
|
||||
local_val.token_src,
|
||||
gpa,
|
||||
);
|
||||
|
||||
return rvalue(gz, rl, value_inst, ident);
|
||||
}
|
||||
s = local_val.parent;
|
||||
},
|
||||
@@ -6443,16 +6499,29 @@ fn identifier(
|
||||
const local_ptr = s.cast(Scope.LocalPtr).?;
|
||||
if (local_ptr.name == name_str_index) {
|
||||
local_ptr.used = true;
|
||||
if (hit_namespace != 0 and !local_ptr.maybe_comptime) {
|
||||
|
||||
// Can't close over a runtime variable
|
||||
if (num_namespaces_out != 0 and !local_ptr.maybe_comptime) {
|
||||
return astgen.failNodeNotes(ident, "mutable '{s}' not accessible from here", .{ident_name}, &.{
|
||||
try astgen.errNoteTok(local_ptr.token_src, "declared mutable here", .{}),
|
||||
try astgen.errNoteNode(hit_namespace, "crosses namespace boundary here", .{}),
|
||||
try astgen.errNoteNode(capturing_namespace.?.node, "crosses namespace boundary here", .{}),
|
||||
});
|
||||
}
|
||||
|
||||
const ptr_inst = try tunnelThroughClosure(
|
||||
gz,
|
||||
ident,
|
||||
num_namespaces_out,
|
||||
capturing_namespace,
|
||||
local_ptr.ptr,
|
||||
local_ptr.token_src,
|
||||
gpa,
|
||||
);
|
||||
|
||||
switch (rl) {
|
||||
.ref, .none_or_ref => return local_ptr.ptr,
|
||||
.ref, .none_or_ref => return ptr_inst,
|
||||
else => {
|
||||
const loaded = try gz.addUnNode(.load, local_ptr.ptr, ident);
|
||||
const loaded = try gz.addUnNode(.load, ptr_inst, ident);
|
||||
return rvalue(gz, rl, loaded, ident);
|
||||
},
|
||||
}
|
||||
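Illustrative trigger for the diagnostic above (invented example; error text paraphrased from the format strings in the diff): only comptime-known locals may tunnel through a namespace boundary, so referencing a runtime-mutable local from a nested container decl is rejected.

fn Example() type {
    var counter: u32 = 0;
    counter += 1; // runtime-mutable, so not capturable
    return struct {
        // expected error: mutable 'counter' not accessible from here
        // note: crosses namespace boundary here
        const start = counter;
    };
}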
@@ -6473,7 +6542,8 @@ fn identifier(
|
||||
// We found a match but must continue looking for ambiguous references to decls.
|
||||
found_already = i;
|
||||
}
|
||||
hit_namespace = ns.node;
|
||||
num_namespaces_out += 1;
|
||||
capturing_namespace = ns;
|
||||
s = ns.parent;
|
||||
},
|
||||
.top => break,
|
||||
@@ -6493,6 +6563,37 @@ fn identifier(
|
||||
}
|
||||
}
|
||||

/// Adds a capture to a namespace, if needed.
/// Returns the index of the closure_capture instruction.
fn tunnelThroughClosure(
    gz: *GenZir,
    inner_ref_node: Ast.Node.Index,
    num_tunnels: u32,
    ns: ?*Scope.Namespace,
    value: Zir.Inst.Ref,
    token: Ast.TokenIndex,
    gpa: *Allocator,
) !Zir.Inst.Ref {
    // For trivial values, we don't need a tunnel.
    // Just return the ref.
    if (num_tunnels == 0 or refToIndex(value) == null) {
        return value;
    }

    // Otherwise we need a tunnel. Check if this namespace
    // already has one for this value.
    const gop = try ns.?.captures.getOrPut(gpa, refToIndex(value).?);
    if (!gop.found_existing) {
        // Make a new capture for this value
        const capture_ref = try ns.?.declaring_gz.?.addUnTok(.closure_capture, value, token);
        gop.value_ptr.* = refToIndex(capture_ref).?;
    }

    // Add an instruction to get the value from the closure into
    // our current context
    return try gz.addInstNode(.closure_get, gop.value_ptr.*, inner_ref_node);
}

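A hedged sketch of the ZIR shape `tunnelThroughClosure` aims for (instruction names come from the diff, the concrete numbering is invented): one `closure_capture` is added to the namespace's declaring GenZir per distinct captured value, and each use site gets a `closure_get`.

// Illustrative only.
fn Box(comptime T: type) type {
    return struct {
        // Referencing `T` here crosses one namespace boundary, so AstGen is
        // expected to emit roughly:
        //   in the declaring GenZir:  %cap = closure_capture(%T)
        //   at this use site:         %val = closure_get(%cap)
        value: T,
    };
}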
fn stringLiteral(
|
||||
gz: *GenZir,
|
||||
rl: ResultLoc,
|
||||
@@ -8961,6 +9062,17 @@ const Scope = struct {
|
||||
return @fieldParentPtr(T, "base", base);
|
||||
}
|
||||
|
||||
fn parent(base: *Scope) ?*Scope {
|
||||
return switch (base.tag) {
|
||||
.gen_zir => base.cast(GenZir).?.parent,
|
||||
.local_val => base.cast(LocalVal).?.parent,
|
||||
.local_ptr => base.cast(LocalPtr).?.parent,
|
||||
.defer_normal, .defer_error => base.cast(Defer).?.parent,
|
||||
.namespace => base.cast(Namespace).?.parent,
|
||||
.top => null,
|
||||
};
|
||||
}
|
||||
|
||||
const Tag = enum {
|
||||
gen_zir,
|
||||
local_val,
|
||||
@@ -8986,7 +9098,7 @@ const Scope = struct {
|
||||
const LocalVal = struct {
|
||||
const base_tag: Tag = .local_val;
|
||||
base: Scope = Scope{ .tag = base_tag },
|
||||
/// Parents can be: `LocalVal`, `LocalPtr`, `GenZir`, `Defer`.
|
||||
/// Parents can be: `LocalVal`, `LocalPtr`, `GenZir`, `Defer`, `Namespace`.
|
||||
parent: *Scope,
|
||||
gen_zir: *GenZir,
|
||||
inst: Zir.Inst.Ref,
|
||||
@@ -9005,7 +9117,7 @@ const Scope = struct {
|
||||
const LocalPtr = struct {
|
||||
const base_tag: Tag = .local_ptr;
|
||||
base: Scope = Scope{ .tag = base_tag },
|
||||
/// Parents can be: `LocalVal`, `LocalPtr`, `GenZir`, `Defer`.
|
||||
/// Parents can be: `LocalVal`, `LocalPtr`, `GenZir`, `Defer`, `Namespace`.
|
||||
parent: *Scope,
|
||||
gen_zir: *GenZir,
|
||||
ptr: Zir.Inst.Ref,
|
||||
@@ -9023,7 +9135,7 @@ const Scope = struct {
|
||||
|
||||
const Defer = struct {
|
||||
base: Scope,
|
||||
/// Parents can be: `LocalVal`, `LocalPtr`, `GenZir`, `Defer`.
|
||||
/// Parents can be: `LocalVal`, `LocalPtr`, `GenZir`, `Defer`, `Namespace`.
|
||||
parent: *Scope,
|
||||
defer_node: Ast.Node.Index,
|
||||
};
|
||||
@@ -9034,11 +9146,27 @@ const Scope = struct {
        const base_tag: Tag = .namespace;
        base: Scope = Scope{ .tag = base_tag },

        /// Parents can be: `LocalVal`, `LocalPtr`, `GenZir`, `Defer`, `Namespace`.
        parent: *Scope,
        /// Maps string table index to the source location of declaration,
        /// for the purposes of reporting name shadowing compile errors.
        decls: std.AutoHashMapUnmanaged(u32, Ast.Node.Index) = .{},
        node: Ast.Node.Index,
        inst: Zir.Inst.Index,

        /// The astgen scope containing this namespace.
        /// Only valid during astgen.
        declaring_gz: ?*GenZir,

        /// Map from the raw captured value to the instruction
        /// ref of the capture for decls in this namespace
        captures: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{},

        pub fn deinit(self: *Namespace, gpa: *Allocator) void {
            self.decls.deinit(gpa);
            self.captures.deinit(gpa);
            self.* = undefined;
        }
    };

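A hedged note on the `captures` map above (example invented): it is keyed by the raw ZIR index of the captured value, so several decls of one namespace that reference the same outer value are expected to share a single `closure_capture`.

fn Pair(comptime T: type) type {
    return struct {
        // Both decls reference the same captured `T`; the getOrPut in
        // tunnelThroughClosure should reuse one capture for both of them.
        const Elem = T;
        const Slice = []const T;
    };
}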
const Top = struct {
|
||||
@@ -9061,6 +9189,7 @@ const GenZir = struct {
|
||||
decl_node_index: Ast.Node.Index,
|
||||
/// The containing decl line index, absolute.
|
||||
decl_line: u32,
|
||||
/// Parents can be: `LocalVal`, `LocalPtr`, `GenZir`, `Defer`, `Namespace`.
|
||||
parent: *Scope,
|
||||
/// All `GenZir` scopes for the same ZIR share this.
|
||||
astgen: *AstGen,
|
||||
@@ -9096,6 +9225,12 @@ const GenZir = struct {
|
||||
suspend_node: Ast.Node.Index = 0,
|
||||
nosuspend_node: Ast.Node.Index = 0,
|
||||
|
||||
/// Namespace members are lazy. When executing a decl within a namespace,
|
||||
/// any references to external instructions need to be treated specially.
|
||||
/// This list tracks those references. See also .closure_capture and .closure_get.
|
||||
/// Keys are the raw instruction index, values are the closure_capture instruction.
|
||||
captures: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{},
|
||||
|
||||
fn makeSubBlock(gz: *GenZir, scope: *Scope) GenZir {
|
||||
return .{
|
||||
.force_comptime = gz.force_comptime,
|
||||
@@ -9810,6 +9945,22 @@ const GenZir = struct {
|
||||
});
|
||||
}
|
||||
|
||||
fn addInstNode(
|
||||
gz: *GenZir,
|
||||
tag: Zir.Inst.Tag,
|
||||
inst: Zir.Inst.Index,
|
||||
/// Absolute node index. This function does the conversion to offset from Decl.
|
||||
src_node: Ast.Node.Index,
|
||||
) !Zir.Inst.Ref {
|
||||
return gz.add(.{
|
||||
.tag = tag,
|
||||
.data = .{ .inst_node = .{
|
||||
.inst = inst,
|
||||
.src_node = gz.nodeIndexToRelative(src_node),
|
||||
} },
|
||||
});
|
||||
}
|
||||
|
||||
fn addNodeExtended(
|
||||
gz: *GenZir,
|
||||
opcode: Zir.Inst.Extended,
|
||||
@@ -10111,6 +10262,37 @@ const GenZir = struct {
|
||||
});
|
||||
}
|
||||
|
||||
fn setOpaque(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
|
||||
src_node: Ast.Node.Index,
|
||||
decls_len: u32,
|
||||
}) !void {
|
||||
const astgen = gz.astgen;
|
||||
const gpa = astgen.gpa;
|
||||
|
||||
try astgen.extra.ensureUnusedCapacity(gpa, 2);
|
||||
const payload_index = @intCast(u32, astgen.extra.items.len);
|
||||
|
||||
if (args.src_node != 0) {
|
||||
const node_offset = gz.nodeIndexToRelative(args.src_node);
|
||||
astgen.extra.appendAssumeCapacity(@bitCast(u32, node_offset));
|
||||
}
|
||||
if (args.decls_len != 0) {
|
||||
astgen.extra.appendAssumeCapacity(args.decls_len);
|
||||
}
|
||||
astgen.instructions.set(inst, .{
|
||||
.tag = .extended,
|
||||
.data = .{ .extended = .{
|
||||
.opcode = .opaque_decl,
|
||||
.small = @bitCast(u16, Zir.Inst.OpaqueDecl.Small{
|
||||
.has_src_node = args.src_node != 0,
|
||||
.has_decls_len = args.decls_len != 0,
|
||||
.name_strategy = gz.anon_name_strategy,
|
||||
}),
|
||||
.operand = payload_index,
|
||||
} },
|
||||
});
|
||||
}
|
||||
|
||||
fn add(gz: *GenZir, inst: Zir.Inst) !Zir.Inst.Ref {
|
||||
return indexToRef(try gz.addAsIndex(inst));
|
||||
}
|
||||
|
||||
src/Module.zig (141 changed lines)
@@ -275,6 +275,56 @@ pub const DeclPlusEmitH = struct {
|
||||
emit_h: EmitH,
|
||||
};
|
||||

pub const CaptureScope = struct {
    parent: ?*CaptureScope,

    /// Values from this decl's evaluation that will be closed over in
    /// child decls. Values stored in the value_arena of the linked decl.
    /// During sema, this map is backed by the gpa. Once sema completes,
    /// it is reallocated using the value_arena.
    captures: std.AutoHashMapUnmanaged(Zir.Inst.Index, TypedValue) = .{},
};

pub const WipCaptureScope = struct {
    scope: *CaptureScope,
    finalized: bool,
    gpa: *Allocator,
    perm_arena: *Allocator,

    pub fn init(gpa: *Allocator, perm_arena: *Allocator, parent: ?*CaptureScope) !@This() {
        const scope = try perm_arena.create(CaptureScope);
        scope.* = .{ .parent = parent };
        return @This(){
            .scope = scope,
            .finalized = false,
            .gpa = gpa,
            .perm_arena = perm_arena,
        };
    }

    pub fn finalize(noalias self: *@This()) !void {
        assert(!self.finalized);
        // use a temp to avoid unintentional aliasing due to RLS
        const tmp = try self.scope.captures.clone(self.perm_arena);
        self.scope.captures = tmp;
        self.finalized = true;
    }

    pub fn reset(noalias self: *@This(), parent: ?*CaptureScope) !void {
        if (!self.finalized) try self.finalize();
        self.scope = try self.perm_arena.create(CaptureScope);
        self.scope.* = .{ .parent = parent };
        self.finalized = false;
    }

    pub fn deinit(noalias self: *@This()) void {
        if (!self.finalized) {
            self.scope.captures.deinit(self.gpa);
        }
        self.* = undefined;
    }
};

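A hedged sketch of the intended `WipCaptureScope` lifecycle, mirroring how the hunks below use it in `semaDecl` and `analyzeFnBody` (the surrounding Sema and Decl plumbing is elided, so this fragment is not standalone):

var wip_captures = try Module.WipCaptureScope.init(gpa, &decl_arena.allocator, decl.src_scope);
defer wip_captures.deinit();

// ... semantic analysis fills wip_captures.scope.captures (gpa-backed) ...

// On success, clone the map into the decl's permanent arena so the captured
// values outlive this analysis pass; deinit then has nothing to free.
try wip_captures.finalize();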
pub const Decl = struct {
|
||||
/// Allocated with Module's allocator; outlives the ZIR code.
|
||||
name: [*:0]const u8,
|
||||
@@ -290,7 +340,7 @@ pub const Decl = struct {
|
||||
linksection_val: Value,
|
||||
/// Populated when `has_tv`.
|
||||
@"addrspace": std.builtin.AddressSpace,
|
||||
/// The memory for ty, val, align_val, linksection_val.
|
||||
/// The memory for ty, val, align_val, linksection_val, and captures.
|
||||
/// If this is `null` then there is no memory management needed.
|
||||
value_arena: ?*std.heap.ArenaAllocator.State = null,
|
||||
/// The direct parent namespace of the Decl.
|
||||
@@ -299,6 +349,11 @@ pub const Decl = struct {
|
||||
/// the namespace of the struct, since there is no parent.
|
||||
namespace: *Scope.Namespace,
|
||||
|
||||
/// The scope which lexically contains this decl. A decl must depend
|
||||
/// on its lexical parent, in order to ensure that this pointer is valid.
|
||||
/// This scope is allocated out of the arena of the parent decl.
|
||||
src_scope: ?*CaptureScope,
|
||||
|
||||
/// An integer that can be checked against the corresponding incrementing
|
||||
/// generation field of Module. This is used to determine whether `complete` status
|
||||
/// represents pre- or post- re-analysis.
|
||||
@@ -959,6 +1014,7 @@ pub const Scope = struct {
|
||||
return @fieldParentPtr(T, "base", base);
|
||||
}
|
||||
|
||||
/// Get the decl that is currently being analyzed
|
||||
pub fn ownerDecl(scope: *Scope) ?*Decl {
|
||||
return switch (scope.tag) {
|
||||
.block => scope.cast(Block).?.sema.owner_decl,
|
||||
@@ -967,6 +1023,7 @@ pub const Scope = struct {
|
||||
};
|
||||
}
|
||||
|
||||
/// Get the decl which contains this decl, for the purposes of source reporting
|
||||
pub fn srcDecl(scope: *Scope) ?*Decl {
|
||||
return switch (scope.tag) {
|
||||
.block => scope.cast(Block).?.src_decl,
|
||||
@@ -975,6 +1032,15 @@ pub const Scope = struct {
|
||||
};
|
||||
}
|
||||
|
||||
/// Get the scope which contains this decl, for resolving closure_get instructions.
|
||||
pub fn srcScope(scope: *Scope) ?*CaptureScope {
|
||||
return switch (scope.tag) {
|
||||
.block => scope.cast(Block).?.wip_capture_scope,
|
||||
.file => null,
|
||||
.namespace => scope.cast(Namespace).?.getDecl().src_scope,
|
||||
};
|
||||
}
|
||||
|
||||
/// Asserts the scope has a parent which is a Namespace and returns it.
|
||||
pub fn namespace(scope: *Scope) *Namespace {
|
||||
switch (scope.tag) {
|
||||
@@ -1311,6 +1377,9 @@ pub const Scope = struct {
|
||||
instructions: ArrayListUnmanaged(Air.Inst.Index),
|
||||
// `param` instructions are collected here to be used by the `func` instruction.
|
||||
params: std.ArrayListUnmanaged(Param) = .{},
|
||||
|
||||
wip_capture_scope: *CaptureScope,
|
||||
|
||||
label: ?*Label = null,
|
||||
inlining: ?*Inlining,
|
||||
/// If runtime_index is not 0 then one of these is guaranteed to be non null.
|
||||
@@ -1372,6 +1441,7 @@ pub const Scope = struct {
|
||||
.sema = parent.sema,
|
||||
.src_decl = parent.src_decl,
|
||||
.instructions = .{},
|
||||
.wip_capture_scope = parent.wip_capture_scope,
|
||||
.label = null,
|
||||
.inlining = parent.inlining,
|
||||
.is_comptime = parent.is_comptime,
|
||||
@@ -2901,12 +2971,10 @@ pub fn mapOldZirToNew(
|
||||
var match_stack: std.ArrayListUnmanaged(MatchedZirDecl) = .{};
|
||||
defer match_stack.deinit(gpa);
|
||||
|
||||
const old_main_struct_inst = old_zir.getMainStruct();
|
||||
const new_main_struct_inst = new_zir.getMainStruct();
|
||||
|
||||
// Main struct inst is always the same
|
||||
try match_stack.append(gpa, .{
|
||||
.old_inst = old_main_struct_inst,
|
||||
.new_inst = new_main_struct_inst,
|
||||
.old_inst = Zir.main_struct_inst,
|
||||
.new_inst = Zir.main_struct_inst,
|
||||
});
|
||||
|
||||
var old_decls = std.ArrayList(Zir.Inst.Index).init(gpa);
|
||||
@@ -3064,6 +3132,7 @@ pub fn semaFile(mod: *Module, file: *Scope.File) SemaError!void {
|
||||
const struct_obj = try new_decl_arena.allocator.create(Module.Struct);
|
||||
const struct_ty = try Type.Tag.@"struct".create(&new_decl_arena.allocator, struct_obj);
|
||||
const struct_val = try Value.Tag.ty.create(&new_decl_arena.allocator, struct_ty);
|
||||
const ty_ty = comptime Type.initTag(.type);
|
||||
struct_obj.* = .{
|
||||
.owner_decl = undefined, // set below
|
||||
.fields = .{},
|
||||
@@ -3078,7 +3147,7 @@ pub fn semaFile(mod: *Module, file: *Scope.File) SemaError!void {
|
||||
.file_scope = file,
|
||||
},
|
||||
};
|
||||
const new_decl = try mod.allocateNewDecl(&struct_obj.namespace, 0);
|
||||
const new_decl = try mod.allocateNewDecl(&struct_obj.namespace, 0, null);
|
||||
file.root_decl = new_decl;
|
||||
struct_obj.owner_decl = new_decl;
|
||||
new_decl.src_line = 0;
|
||||
@@ -3087,7 +3156,7 @@ pub fn semaFile(mod: *Module, file: *Scope.File) SemaError!void {
|
||||
new_decl.is_exported = false;
|
||||
new_decl.has_align = false;
|
||||
new_decl.has_linksection_or_addrspace = false;
|
||||
new_decl.ty = struct_ty;
|
||||
new_decl.ty = ty_ty;
|
||||
new_decl.val = struct_val;
|
||||
new_decl.has_tv = true;
|
||||
new_decl.owns_tv = true;
|
||||
@@ -3097,7 +3166,7 @@ pub fn semaFile(mod: *Module, file: *Scope.File) SemaError!void {
|
||||
|
||||
if (file.status == .success_zir) {
|
||||
assert(file.zir_loaded);
|
||||
const main_struct_inst = file.zir.getMainStruct();
|
||||
const main_struct_inst = Zir.main_struct_inst;
|
||||
struct_obj.zir_index = main_struct_inst;
|
||||
|
||||
var sema_arena = std.heap.ArenaAllocator.init(gpa);
|
||||
@@ -3107,6 +3176,7 @@ pub fn semaFile(mod: *Module, file: *Scope.File) SemaError!void {
|
||||
.mod = mod,
|
||||
.gpa = gpa,
|
||||
.arena = &sema_arena.allocator,
|
||||
.perm_arena = &new_decl_arena.allocator,
|
||||
.code = file.zir,
|
||||
.owner_decl = new_decl,
|
||||
.namespace = &struct_obj.namespace,
|
||||
@@ -3115,10 +3185,15 @@ pub fn semaFile(mod: *Module, file: *Scope.File) SemaError!void {
|
||||
.owner_func = null,
|
||||
};
|
||||
defer sema.deinit();
|
||||
|
||||
var wip_captures = try WipCaptureScope.init(gpa, &new_decl_arena.allocator, null);
|
||||
defer wip_captures.deinit();
|
||||
|
||||
var block_scope: Scope.Block = .{
|
||||
.parent = null,
|
||||
.sema = &sema,
|
||||
.src_decl = new_decl,
|
||||
.wip_capture_scope = wip_captures.scope,
|
||||
.instructions = .{},
|
||||
.inlining = null,
|
||||
.is_comptime = true,
|
||||
@@ -3126,6 +3201,7 @@ pub fn semaFile(mod: *Module, file: *Scope.File) SemaError!void {
|
||||
defer block_scope.instructions.deinit(gpa);
|
||||
|
||||
if (sema.analyzeStructDecl(new_decl, main_struct_inst, struct_obj)) |_| {
|
||||
try wip_captures.finalize();
|
||||
new_decl.analysis = .complete;
|
||||
} else |err| switch (err) {
|
||||
error.OutOfMemory => return error.OutOfMemory,
|
||||
@@ -3155,6 +3231,10 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
|
||||
|
||||
decl.analysis = .in_progress;
|
||||
|
||||
// We need the memory for the Type to go into the arena for the Decl
|
||||
var decl_arena = std.heap.ArenaAllocator.init(gpa);
|
||||
errdefer decl_arena.deinit();
|
||||
|
||||
var analysis_arena = std.heap.ArenaAllocator.init(gpa);
|
||||
defer analysis_arena.deinit();
|
||||
|
||||
@@ -3162,6 +3242,7 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
|
||||
.mod = mod,
|
||||
.gpa = gpa,
|
||||
.arena = &analysis_arena.allocator,
|
||||
.perm_arena = &decl_arena.allocator,
|
||||
.code = zir,
|
||||
.owner_decl = decl,
|
||||
.namespace = decl.namespace,
|
||||
@@ -3173,7 +3254,7 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
|
||||
|
||||
if (decl.isRoot()) {
|
||||
log.debug("semaDecl root {*} ({s})", .{ decl, decl.name });
|
||||
const main_struct_inst = zir.getMainStruct();
|
||||
const main_struct_inst = Zir.main_struct_inst;
|
||||
const struct_obj = decl.getStruct().?;
|
||||
// This might not have gotten set in `semaFile` if the first time had
|
||||
// a ZIR failure, so we set it here in case.
|
||||
@@ -3185,10 +3266,14 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
|
||||
}
|
||||
log.debug("semaDecl {*} ({s})", .{ decl, decl.name });
|
||||
|
||||
var wip_captures = try WipCaptureScope.init(gpa, &decl_arena.allocator, decl.src_scope);
|
||||
defer wip_captures.deinit();
|
||||
|
||||
var block_scope: Scope.Block = .{
|
||||
.parent = null,
|
||||
.sema = &sema,
|
||||
.src_decl = decl,
|
||||
.wip_capture_scope = wip_captures.scope,
|
||||
.instructions = .{},
|
||||
.inlining = null,
|
||||
.is_comptime = true,
|
||||
@@ -3203,6 +3288,7 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
|
||||
const extra = zir.extraData(Zir.Inst.Block, inst_data.payload_index);
|
||||
const body = zir.extra[extra.end..][0..extra.data.body_len];
|
||||
const break_index = try sema.analyzeBody(&block_scope, body);
|
||||
try wip_captures.finalize();
|
||||
const result_ref = zir_datas[break_index].@"break".operand;
|
||||
const src: LazySrcLoc = .{ .node_offset = 0 };
|
||||
const decl_tv = try sema.resolveInstValue(&block_scope, src, result_ref);
|
||||
@@ -3239,9 +3325,6 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
|
||||
// not the struct itself.
|
||||
try sema.resolveTypeLayout(&block_scope, src, decl_tv.ty);
|
||||
|
||||
// We need the memory for the Type to go into the arena for the Decl
|
||||
var decl_arena = std.heap.ArenaAllocator.init(gpa);
|
||||
errdefer decl_arena.deinit();
|
||||
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
|
||||
|
||||
if (decl.is_usingnamespace) {
|
||||
@@ -3638,7 +3721,7 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) SemaError!voi
|
||||
// We create a Decl for it regardless of analysis status.
|
||||
const gop = try namespace.decls.getOrPut(gpa, decl_name);
|
||||
if (!gop.found_existing) {
|
||||
const new_decl = try mod.allocateNewDecl(namespace, decl_node);
|
||||
const new_decl = try mod.allocateNewDecl(namespace, decl_node, iter.parent_decl.src_scope);
|
||||
if (is_usingnamespace) {
|
||||
namespace.usingnamespace_set.putAssumeCapacity(new_decl, is_pub);
|
||||
}
|
||||
@@ -3898,10 +3981,15 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn, arena: *Allocator) Se
|
||||
|
||||
const gpa = mod.gpa;
|
||||
|
||||
// Use the Decl's arena for captured values.
|
||||
var decl_arena = decl.value_arena.?.promote(gpa);
|
||||
defer decl.value_arena.?.* = decl_arena.state;
|
||||
|
||||
var sema: Sema = .{
|
||||
.mod = mod,
|
||||
.gpa = gpa,
|
||||
.arena = arena,
|
||||
.perm_arena = &decl_arena.allocator,
|
||||
.code = decl.namespace.file_scope.zir,
|
||||
.owner_decl = decl,
|
||||
.namespace = decl.namespace,
|
||||
@@ -3916,10 +4004,14 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn, arena: *Allocator) Se
|
||||
try sema.air_extra.ensureTotalCapacity(gpa, reserved_count);
|
||||
sema.air_extra.items.len += reserved_count;
|
||||
|
||||
var wip_captures = try WipCaptureScope.init(gpa, &decl_arena.allocator, decl.src_scope);
|
||||
defer wip_captures.deinit();
|
||||
|
||||
var inner_block: Scope.Block = .{
|
||||
.parent = null,
|
||||
.sema = &sema,
|
||||
.src_decl = decl,
|
||||
.wip_capture_scope = wip_captures.scope,
|
||||
.instructions = .{},
|
||||
.inlining = null,
|
||||
.is_comptime = false,
|
||||
@@ -3995,6 +4087,8 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn, arena: *Allocator) Se
|
||||
else => |e| return e,
|
||||
};
|
||||
|
||||
try wip_captures.finalize();
|
||||
|
||||
// Copy the block into place and mark that as the main block.
|
||||
try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Block).Struct.fields.len +
|
||||
inner_block.instructions.items.len);
|
||||
@@ -4035,7 +4129,7 @@ fn markOutdatedDecl(mod: *Module, decl: *Decl) !void {
|
||||
decl.analysis = .outdated;
|
||||
}
|
||||
|
||||
pub fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: Ast.Node.Index) !*Decl {
|
||||
pub fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: Ast.Node.Index, src_scope: ?*CaptureScope) !*Decl {
|
||||
// If we have emit-h then we must allocate a bigger structure to store the emit-h state.
|
||||
const new_decl: *Decl = if (mod.emit_h != null) blk: {
|
||||
const parent_struct = try mod.gpa.create(DeclPlusEmitH);
|
||||
@@ -4061,6 +4155,7 @@ pub fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: Ast.
|
||||
.analysis = .unreferenced,
|
||||
.deletion_flag = false,
|
||||
.zir_decl_index = 0,
|
||||
.src_scope = src_scope,
|
||||
.link = switch (mod.comp.bin_file.tag) {
|
||||
.coff => .{ .coff = link.File.Coff.TextBlock.empty },
|
||||
.elf => .{ .elf = link.File.Elf.TextBlock.empty },
|
||||
@@ -4087,6 +4182,7 @@ pub fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: Ast.
|
||||
.alive = false,
|
||||
.is_usingnamespace = false,
|
||||
};
|
||||
|
||||
return new_decl;
|
||||
}
|
||||
|
||||
@@ -4191,25 +4287,26 @@ pub fn createAnonymousDeclNamed(
|
||||
typed_value: TypedValue,
|
||||
name: [:0]u8,
|
||||
) !*Decl {
|
||||
return mod.createAnonymousDeclFromDeclNamed(scope.ownerDecl().?, typed_value, name);
|
||||
return mod.createAnonymousDeclFromDeclNamed(scope.ownerDecl().?, scope.srcScope(), typed_value, name);
|
||||
}
|
||||
|
||||
pub fn createAnonymousDecl(mod: *Module, scope: *Scope, typed_value: TypedValue) !*Decl {
|
||||
return mod.createAnonymousDeclFromDecl(scope.ownerDecl().?, typed_value);
|
||||
return mod.createAnonymousDeclFromDecl(scope.ownerDecl().?, scope.srcScope(), typed_value);
|
||||
}
|
||||
|
||||
pub fn createAnonymousDeclFromDecl(mod: *Module, owner_decl: *Decl, tv: TypedValue) !*Decl {
|
||||
pub fn createAnonymousDeclFromDecl(mod: *Module, owner_decl: *Decl, src_scope: ?*CaptureScope, tv: TypedValue) !*Decl {
|
||||
const name_index = mod.getNextAnonNameIndex();
|
||||
const name = try std.fmt.allocPrintZ(mod.gpa, "{s}__anon_{d}", .{
|
||||
owner_decl.name, name_index,
|
||||
});
|
||||
return mod.createAnonymousDeclFromDeclNamed(owner_decl, tv, name);
|
||||
return mod.createAnonymousDeclFromDeclNamed(owner_decl, src_scope, tv, name);
|
||||
}
|
||||
|
||||
/// Takes ownership of `name` even if it returns an error.
|
||||
pub fn createAnonymousDeclFromDeclNamed(
|
||||
mod: *Module,
|
||||
owner_decl: *Decl,
|
||||
src_scope: ?*CaptureScope,
|
||||
typed_value: TypedValue,
|
||||
name: [:0]u8,
|
||||
) !*Decl {
|
||||
@@ -4218,7 +4315,7 @@ pub fn createAnonymousDeclFromDeclNamed(
|
||||
const namespace = owner_decl.namespace;
|
||||
try namespace.anon_decls.ensureUnusedCapacity(mod.gpa, 1);
|
||||
|
||||
const new_decl = try mod.allocateNewDecl(namespace, owner_decl.src_node);
|
||||
const new_decl = try mod.allocateNewDecl(namespace, owner_decl.src_node, src_scope);
|
||||
|
||||
new_decl.name = name;
|
||||
new_decl.src_line = owner_decl.src_line;
|
||||
@@ -4783,7 +4880,7 @@ pub fn populateTestFunctions(mod: *Module) !void {
|
||||
const arena = &new_decl_arena.allocator;
|
||||
|
||||
const test_fn_vals = try arena.alloc(Value, mod.test_functions.count());
|
||||
const array_decl = try mod.createAnonymousDeclFromDecl(decl, .{
|
||||
const array_decl = try mod.createAnonymousDeclFromDecl(decl, null, .{
|
||||
.ty = try Type.Tag.array.create(arena, .{
|
||||
.len = test_fn_vals.len,
|
||||
.elem_type = try tmp_test_fn_ty.copy(arena),
|
||||
@@ -4796,7 +4893,7 @@ pub fn populateTestFunctions(mod: *Module) !void {
|
||||
var name_decl_arena = std.heap.ArenaAllocator.init(gpa);
|
||||
errdefer name_decl_arena.deinit();
|
||||
const bytes = try name_decl_arena.allocator.dupe(u8, test_name_slice);
|
||||
const test_name_decl = try mod.createAnonymousDeclFromDecl(array_decl, .{
|
||||
const test_name_decl = try mod.createAnonymousDeclFromDecl(array_decl, null, .{
|
||||
.ty = try Type.Tag.array_u8.create(&name_decl_arena.allocator, bytes.len),
|
||||
.val = try Value.Tag.bytes.create(&name_decl_arena.allocator, bytes),
|
||||
});
|
||||
|
||||
src/Sema.zig (263 changed lines)
@@ -8,8 +8,12 @@
|
||||
mod: *Module,
|
||||
/// Alias to `mod.gpa`.
|
||||
gpa: *Allocator,
|
||||
/// Points to the arena allocator of the Decl.
|
||||
/// Points to the temporary arena allocator of the Sema.
|
||||
/// This arena will be cleared when the sema is destroyed.
|
||||
arena: *Allocator,
|
||||
/// Points to the arena allocator for the owner_decl.
|
||||
/// This arena will persist until the decl is invalidated.
|
||||
perm_arena: *Allocator,
|
||||
code: Zir,
|
||||
air_instructions: std.MultiArrayList(Air.Inst) = .{},
|
||||
air_extra: std.ArrayListUnmanaged(u32) = .{},
|
||||
@@ -80,6 +84,8 @@ const Scope = Module.Scope;
|
||||
const CompileError = Module.CompileError;
|
||||
const SemaError = Module.SemaError;
|
||||
const Decl = Module.Decl;
|
||||
const CaptureScope = Module.CaptureScope;
|
||||
const WipCaptureScope = Module.WipCaptureScope;
|
||||
const LazySrcLoc = Module.LazySrcLoc;
|
||||
const RangeSet = @import("RangeSet.zig");
|
||||
const target_util = @import("target.zig");
|
||||
@@ -129,15 +135,29 @@ pub fn analyzeBody(
) CompileError!Zir.Inst.Index {
    // No tracy calls here, to avoid interfering with the tail call mechanism.

    const parent_capture_scope = block.wip_capture_scope;

    var wip_captures = WipCaptureScope{
        .finalized = true,
        .scope = parent_capture_scope,
        .perm_arena = sema.perm_arena,
        .gpa = sema.gpa,
    };
    defer if (wip_captures.scope != parent_capture_scope) {
        wip_captures.deinit();
    };

    const map = &block.sema.inst_map;
    const tags = block.sema.code.instructions.items(.tag);
    const datas = block.sema.code.instructions.items(.data);

    var orig_captures: usize = parent_capture_scope.captures.count();

// We use a while(true) loop here to avoid a redundant way of breaking out of
|
||||
// the loop. The only way to break out of the loop is with a `noreturn`
|
||||
// instruction.
|
||||
var i: usize = 0;
|
||||
while (true) {
|
||||
const result = while (true) {
|
||||
const inst = body[i];
|
||||
const air_inst: Air.Inst.Ref = switch (tags[inst]) {
|
||||
// zig fmt: off
|
||||
@@ -170,6 +190,7 @@ pub fn analyzeBody(
|
||||
.call_compile_time => try sema.zirCall(block, inst, .compile_time, false),
|
||||
.call_nosuspend => try sema.zirCall(block, inst, .no_async, false),
|
||||
.call_async => try sema.zirCall(block, inst, .async_kw, false),
|
||||
.closure_get => try sema.zirClosureGet(block, inst),
|
||||
.cmp_lt => try sema.zirCmp(block, inst, .lt),
|
||||
.cmp_lte => try sema.zirCmp(block, inst, .lte),
|
||||
.cmp_eq => try sema.zirCmpEq(block, inst, .eq, .cmp_eq),
|
||||
@@ -343,9 +364,6 @@ pub fn analyzeBody(
|
||||
.trunc => try sema.zirUnaryMath(block, inst),
|
||||
.round => try sema.zirUnaryMath(block, inst),
|
||||
|
||||
.opaque_decl => try sema.zirOpaqueDecl(block, inst, .parent),
|
||||
.opaque_decl_anon => try sema.zirOpaqueDecl(block, inst, .anon),
|
||||
.opaque_decl_func => try sema.zirOpaqueDecl(block, inst, .func),
|
||||
.error_set_decl => try sema.zirErrorSetDecl(block, inst, .parent),
|
||||
.error_set_decl_anon => try sema.zirErrorSetDecl(block, inst, .anon),
|
||||
.error_set_decl_func => try sema.zirErrorSetDecl(block, inst, .func),
|
||||
@@ -362,13 +380,13 @@ pub fn analyzeBody(
|
||||
// Instructions that we know to *always* be noreturn based solely on their tag.
|
||||
// These functions match the return type of analyzeBody so that we can
|
||||
// tail call them here.
|
||||
.compile_error => return sema.zirCompileError(block, inst),
|
||||
.ret_coerce => return sema.zirRetCoerce(block, inst),
|
||||
.ret_node => return sema.zirRetNode(block, inst),
|
||||
.ret_load => return sema.zirRetLoad(block, inst),
|
||||
.ret_err_value => return sema.zirRetErrValue(block, inst),
|
||||
.@"unreachable" => return sema.zirUnreachable(block, inst),
|
||||
.panic => return sema.zirPanic(block, inst),
|
||||
.compile_error => break sema.zirCompileError(block, inst),
|
||||
.ret_coerce => break sema.zirRetCoerce(block, inst),
|
||||
.ret_node => break sema.zirRetNode(block, inst),
|
||||
.ret_load => break sema.zirRetLoad(block, inst),
|
||||
.ret_err_value => break sema.zirRetErrValue(block, inst),
|
||||
.@"unreachable" => break sema.zirUnreachable(block, inst),
|
||||
.panic => break sema.zirPanic(block, inst),
|
||||
// zig fmt: on
|
||||
|
||||
// Instructions that we know can *never* be noreturn based solely on
|
||||
@@ -503,34 +521,49 @@ pub fn analyzeBody(
|
||||
i += 1;
|
||||
continue;
|
||||
},
|
||||
.closure_capture => {
|
||||
try sema.zirClosureCapture(block, inst);
|
||||
i += 1;
|
||||
continue;
|
||||
},
|
||||
|
||||
// Special case instructions to handle comptime control flow.
|
||||
.@"break" => {
|
||||
if (block.is_comptime) {
|
||||
return inst; // same as break_inline
|
||||
break inst; // same as break_inline
|
||||
} else {
|
||||
return sema.zirBreak(block, inst);
|
||||
break sema.zirBreak(block, inst);
|
||||
}
|
||||
},
|
||||
.break_inline => return inst,
|
||||
.break_inline => break inst,
|
||||
.repeat => {
|
||||
if (block.is_comptime) {
|
||||
// Send comptime control flow back to the beginning of this block.
|
||||
const src: LazySrcLoc = .{ .node_offset = datas[inst].node };
|
||||
try sema.emitBackwardBranch(block, src);
|
||||
if (wip_captures.scope.captures.count() != orig_captures) {
|
||||
try wip_captures.reset(parent_capture_scope);
|
||||
block.wip_capture_scope = wip_captures.scope;
|
||||
orig_captures = 0;
|
||||
}
|
||||
i = 0;
|
||||
continue;
|
||||
} else {
|
||||
const src_node = sema.code.instructions.items(.data)[inst].node;
|
||||
const src: LazySrcLoc = .{ .node_offset = src_node };
|
||||
try sema.requireRuntimeBlock(block, src);
|
||||
return always_noreturn;
|
||||
break always_noreturn;
|
||||
}
|
||||
},
|
||||
.repeat_inline => {
|
||||
// Send comptime control flow back to the beginning of this block.
|
||||
const src: LazySrcLoc = .{ .node_offset = datas[inst].node };
|
||||
try sema.emitBackwardBranch(block, src);
|
||||
if (wip_captures.scope.captures.count() != orig_captures) {
|
||||
try wip_captures.reset(parent_capture_scope);
|
||||
block.wip_capture_scope = wip_captures.scope;
|
||||
orig_captures = 0;
|
||||
}
|
||||
i = 0;
|
||||
continue;
|
||||
},
|
||||
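Illustrative sketch of why `repeat` and `repeat_inline` reset the capture scope above (invented example): each comptime iteration may instantiate a container whose decls close over different values, so captures recorded during an abandoned iteration must not leak into the next one.

const std = @import("std");

fn Tagged(comptime n: usize) type {
    return struct {
        const tag = n; // closes over a different `n` each iteration
    };
}

pub fn main() void {
    comptime var i: usize = 0;
    inline while (i < 3) : (i += 1) {
        const T = Tagged(i);
        std.debug.assert(T.tag == i);
    }
}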
@@ -545,7 +578,7 @@ pub fn analyzeBody(
|
||||
if (inst == break_data.block_inst) {
|
||||
break :blk sema.resolveInst(break_data.operand);
|
||||
} else {
|
||||
return break_inst;
|
||||
break break_inst;
|
||||
}
|
||||
},
|
||||
.block => blk: {
|
||||
@@ -559,7 +592,7 @@ pub fn analyzeBody(
|
||||
if (inst == break_data.block_inst) {
|
||||
break :blk sema.resolveInst(break_data.operand);
|
||||
} else {
|
||||
return break_inst;
|
||||
break break_inst;
|
||||
}
|
||||
},
|
||||
.block_inline => blk: {
|
||||
@@ -572,11 +605,11 @@ pub fn analyzeBody(
|
||||
if (inst == break_data.block_inst) {
|
||||
break :blk sema.resolveInst(break_data.operand);
|
||||
} else {
|
||||
return break_inst;
|
||||
break break_inst;
|
||||
}
|
||||
},
|
||||
.condbr => blk: {
|
||||
if (!block.is_comptime) return sema.zirCondbr(block, inst);
|
||||
if (!block.is_comptime) break sema.zirCondbr(block, inst);
|
||||
// Same as condbr_inline. TODO https://github.com/ziglang/zig/issues/8220
|
||||
const inst_data = datas[inst].pl_node;
|
||||
const cond_src: LazySrcLoc = .{ .node_offset_if_cond = inst_data.src_node };
|
||||
@@ -590,7 +623,7 @@ pub fn analyzeBody(
|
||||
if (inst == break_data.block_inst) {
|
||||
break :blk sema.resolveInst(break_data.operand);
|
||||
} else {
|
||||
return break_inst;
|
||||
break break_inst;
|
||||
}
|
||||
},
|
||||
.condbr_inline => blk: {
|
||||
@@ -606,15 +639,22 @@ pub fn analyzeBody(
|
||||
if (inst == break_data.block_inst) {
|
||||
break :blk sema.resolveInst(break_data.operand);
|
||||
} else {
|
||||
return break_inst;
|
||||
break break_inst;
|
||||
}
|
||||
},
|
||||
};
|
||||
if (sema.typeOf(air_inst).isNoReturn())
|
||||
return always_noreturn;
|
||||
break always_noreturn;
|
||||
try map.put(sema.gpa, inst, air_inst);
|
||||
i += 1;
|
||||
} else unreachable;
|
||||
|
||||
if (!wip_captures.finalized) {
|
||||
try wip_captures.finalize();
|
||||
block.wip_capture_scope = parent_capture_scope;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
fn zirExtended(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
|
||||
@@ -626,6 +666,7 @@ fn zirExtended(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileEr
|
||||
.struct_decl => return sema.zirStructDecl( block, extended, inst),
|
||||
.enum_decl => return sema.zirEnumDecl( block, extended),
|
||||
.union_decl => return sema.zirUnionDecl( block, extended, inst),
|
||||
.opaque_decl => return sema.zirOpaqueDecl( block, extended, inst),
|
||||
.ret_ptr => return sema.zirRetPtr( block, extended),
|
||||
.ret_type => return sema.zirRetType( block, extended),
|
||||
.this => return sema.zirThis( block, extended),
|
||||
@@ -1011,7 +1052,6 @@ fn zirStructDecl(
|
||||
}
|
||||
|
||||
fn createTypeName(sema: *Sema, block: *Scope.Block, name_strategy: Zir.Inst.NameStrategy) ![:0]u8 {
|
||||
_ = block;
|
||||
switch (name_strategy) {
|
||||
.anon => {
|
||||
// It would be neat to have "struct:line:column" but this name has
|
||||
@@ -1020,14 +1060,14 @@ fn createTypeName(sema: *Sema, block: *Scope.Block, name_strategy: Zir.Inst.Name
|
||||
// semantically analyzed.
|
||||
const name_index = sema.mod.getNextAnonNameIndex();
|
||||
return std.fmt.allocPrintZ(sema.gpa, "{s}__anon_{d}", .{
|
||||
sema.owner_decl.name, name_index,
|
||||
block.src_decl.name, name_index,
|
||||
});
|
||||
},
|
||||
.parent => return sema.gpa.dupeZ(u8, mem.spanZ(sema.owner_decl.name)),
|
||||
.parent => return sema.gpa.dupeZ(u8, mem.spanZ(block.src_decl.name)),
|
||||
.func => {
|
||||
const name_index = sema.mod.getNextAnonNameIndex();
|
||||
const name = try std.fmt.allocPrintZ(sema.gpa, "{s}__anon_{d}", .{
|
||||
sema.owner_decl.name, name_index,
|
||||
block.src_decl.name, name_index,
|
||||
});
|
||||
log.warn("TODO: handle NameStrategy.func correctly instead of using anon name '{s}'", .{
|
||||
name,
|
||||
@@ -1083,17 +1123,6 @@ fn zirEnumDecl(
|
||||
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
|
||||
errdefer new_decl_arena.deinit();
|
||||
|
||||
const tag_ty = blk: {
|
||||
if (tag_type_ref != .none) {
|
||||
// TODO better source location
|
||||
// TODO (needs AstGen fix too) move this eval to the block so it gets allocated
|
||||
// in the new decl arena.
|
||||
break :blk try sema.resolveType(block, src, tag_type_ref);
|
||||
}
|
||||
const bits = std.math.log2_int_ceil(usize, fields_len);
|
||||
break :blk try Type.Tag.int_unsigned.create(&new_decl_arena.allocator, bits);
|
||||
};
|
||||
|
||||
const enum_obj = try new_decl_arena.allocator.create(Module.EnumFull);
|
||||
const enum_ty_payload = try new_decl_arena.allocator.create(Type.Payload.EnumFull);
|
||||
enum_ty_payload.* = .{
|
||||
@@ -1112,7 +1141,7 @@ fn zirEnumDecl(
|
||||
|
||||
enum_obj.* = .{
|
||||
.owner_decl = new_decl,
|
||||
.tag_ty = tag_ty,
|
||||
.tag_ty = Type.initTag(.@"null"),
|
||||
.fields = .{},
|
||||
.values = .{},
|
||||
.node_offset = src.node_offset,
|
||||
@@ -1140,16 +1169,6 @@ fn zirEnumDecl(
|
||||
const body_end = extra_index;
|
||||
extra_index += bit_bags_count;
|
||||
|
||||
try enum_obj.fields.ensureTotalCapacity(&new_decl_arena.allocator, fields_len);
|
||||
const any_values = for (sema.code.extra[body_end..][0..bit_bags_count]) |bag| {
|
||||
if (bag != 0) break true;
|
||||
} else false;
|
||||
if (any_values) {
|
||||
try enum_obj.values.ensureTotalCapacityContext(&new_decl_arena.allocator, fields_len, .{
|
||||
.ty = tag_ty,
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
// We create a block for the field type instructions because they
|
||||
// may need to reference Decls from inside the enum namespace.
|
||||
@@ -1172,10 +1191,14 @@ fn zirEnumDecl(
|
||||
sema.func = null;
|
||||
defer sema.func = prev_func;
|
||||
|
||||
var wip_captures = try WipCaptureScope.init(gpa, sema.perm_arena, new_decl.src_scope);
|
||||
defer wip_captures.deinit();
|
||||
|
||||
var enum_block: Scope.Block = .{
|
||||
.parent = null,
|
||||
.sema = sema,
|
||||
.src_decl = new_decl,
|
||||
.wip_capture_scope = wip_captures.scope,
|
||||
.instructions = .{},
|
||||
.inlining = null,
|
||||
.is_comptime = true,
|
||||
@@ -1185,7 +1208,30 @@ fn zirEnumDecl(
|
||||
if (body.len != 0) {
|
||||
_ = try sema.analyzeBody(&enum_block, body);
|
||||
}
|
||||
|
||||
try wip_captures.finalize();
|
||||
|
||||
const tag_ty = blk: {
|
||||
if (tag_type_ref != .none) {
|
||||
// TODO better source location
|
||||
break :blk try sema.resolveType(block, src, tag_type_ref);
|
||||
}
|
||||
const bits = std.math.log2_int_ceil(usize, fields_len);
|
||||
break :blk try Type.Tag.int_unsigned.create(&new_decl_arena.allocator, bits);
|
||||
};
|
||||
enum_obj.tag_ty = tag_ty;
|
||||
}
|
||||
|
||||
try enum_obj.fields.ensureTotalCapacity(&new_decl_arena.allocator, fields_len);
|
||||
const any_values = for (sema.code.extra[body_end..][0..bit_bags_count]) |bag| {
|
||||
if (bag != 0) break true;
|
||||
} else false;
|
||||
if (any_values) {
|
||||
try enum_obj.values.ensureTotalCapacityContext(&new_decl_arena.allocator, fields_len, .{
|
||||
.ty = enum_obj.tag_ty,
|
||||
});
|
||||
}
|
||||
|
||||
var bit_bag_index: usize = body_end;
|
||||
var cur_bit_bag: u32 = undefined;
|
||||
var field_i: u32 = 0;
|
||||
@@ -1224,10 +1270,10 @@ fn zirEnumDecl(
|
||||
// that points to this default value expression rather than the struct.
|
||||
// But only resolve the source location if we need to emit a compile error.
|
||||
const tag_val = (try sema.resolveInstConst(block, src, tag_val_ref)).val;
|
||||
enum_obj.values.putAssumeCapacityNoClobberContext(tag_val, {}, .{ .ty = tag_ty });
|
||||
enum_obj.values.putAssumeCapacityNoClobberContext(tag_val, {}, .{ .ty = enum_obj.tag_ty });
|
||||
} else if (any_values) {
|
||||
const tag_val = try Value.Tag.int_u64.create(&new_decl_arena.allocator, field_i);
|
||||
enum_obj.values.putAssumeCapacityNoClobberContext(tag_val, {}, .{ .ty = tag_ty });
|
||||
enum_obj.values.putAssumeCapacityNoClobberContext(tag_val, {}, .{ .ty = enum_obj.tag_ty });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1305,20 +1351,14 @@ fn zirUnionDecl(
|
||||
fn zirOpaqueDecl(
|
||||
sema: *Sema,
|
||||
block: *Scope.Block,
|
||||
extended: Zir.Inst.Extended.InstData,
|
||||
inst: Zir.Inst.Index,
|
||||
name_strategy: Zir.Inst.NameStrategy,
|
||||
) CompileError!Air.Inst.Ref {
|
||||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
|
||||
const src = inst_data.src();
|
||||
const extra = sema.code.extraData(Zir.Inst.Block, inst_data.payload_index);
|
||||
|
||||
_ = name_strategy;
|
||||
_ = inst_data;
|
||||
_ = src;
|
||||
_ = extra;
|
||||
_ = extended;
|
||||
_ = inst;
|
||||
return sema.mod.fail(&block.base, sema.src, "TODO implement zirOpaqueDecl", .{});
|
||||
}
|
||||
|
||||
@@ -2160,6 +2200,7 @@ fn zirCImport(sema: *Sema, parent_block: *Scope.Block, inst: Zir.Inst.Index) Com
|
||||
.parent = parent_block,
|
||||
.sema = sema,
|
||||
.src_decl = parent_block.src_decl,
|
||||
.wip_capture_scope = parent_block.wip_capture_scope,
|
||||
.instructions = .{},
|
||||
.inlining = parent_block.inlining,
|
||||
.is_comptime = parent_block.is_comptime,
|
||||
@@ -2214,7 +2255,7 @@ fn zirCImport(sema: *Sema, parent_block: *Scope.Block, inst: Zir.Inst.Index) Com
|
||||
try sema.mod.semaFile(result.file);
|
||||
const file_root_decl = result.file.root_decl.?;
|
||||
try sema.mod.declareDeclDependency(sema.owner_decl, file_root_decl);
|
||||
return sema.addType(file_root_decl.ty);
|
||||
return sema.addConstant(file_root_decl.ty, file_root_decl.val);
|
||||
}
|
||||
|
||||
fn zirSuspendBlock(sema: *Sema, parent_block: *Scope.Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
|
||||
@@ -2259,6 +2300,7 @@ fn zirBlock(
|
||||
.parent = parent_block,
|
||||
.sema = sema,
|
||||
.src_decl = parent_block.src_decl,
|
||||
.wip_capture_scope = parent_block.wip_capture_scope,
|
||||
.instructions = .{},
|
||||
.label = &label,
|
||||
.inlining = parent_block.inlining,
|
||||
@@ -2866,10 +2908,14 @@ fn analyzeCall(
|
||||
sema.func = module_fn;
|
||||
defer sema.func = parent_func;
|
||||
|
||||
var wip_captures = try WipCaptureScope.init(gpa, sema.perm_arena, module_fn.owner_decl.src_scope);
|
||||
defer wip_captures.deinit();
|
||||
|
||||
var child_block: Scope.Block = .{
|
||||
.parent = null,
|
||||
.sema = sema,
|
||||
.src_decl = module_fn.owner_decl,
|
||||
.wip_capture_scope = wip_captures.scope,
|
||||
.instructions = .{},
|
||||
.label = null,
|
||||
.inlining = &inlining,
|
||||
@@ -3034,6 +3080,9 @@ fn analyzeCall(
|
||||
|
||||
break :res2 result;
|
||||
};
|
||||
|
||||
try wip_captures.finalize();
|
||||
|
||||
break :res res2;
|
||||
} else if (func_ty_info.is_generic) res: {
|
||||
const func_val = try sema.resolveConstValue(block, func_src, func);
|
||||
@@ -3116,7 +3165,8 @@ fn analyzeCall(
try namespace.anon_decls.ensureUnusedCapacity(gpa, 1);

// Create a Decl for the new function.
const new_decl = try mod.allocateNewDecl(namespace, module_fn.owner_decl.src_node);
const src_decl = namespace.getDecl();
const new_decl = try mod.allocateNewDecl(namespace, module_fn.owner_decl.src_node, src_decl.src_scope);
// TODO better names for generic function instantiations
const name_index = mod.getNextAnonNameIndex();
new_decl.name = try std.fmt.allocPrintZ(gpa, "{s}__anon_{d}", .{
@@ -3147,6 +3197,7 @@ fn analyzeCall(
.mod = mod,
.gpa = gpa,
.arena = sema.arena,
.perm_arena = &new_decl_arena.allocator,
.code = fn_zir,
.owner_decl = new_decl,
.namespace = namespace,
@@ -3159,10 +3210,14 @@ fn analyzeCall(
};
defer child_sema.deinit();

var wip_captures = try WipCaptureScope.init(gpa, sema.perm_arena, new_decl.src_scope);
defer wip_captures.deinit();

var child_block: Scope.Block = .{
.parent = null,
.sema = &child_sema,
.src_decl = new_decl,
.wip_capture_scope = wip_captures.scope,
.instructions = .{},
.inlining = null,
.is_comptime = true,
@@ -3250,6 +3305,8 @@ fn analyzeCall(
arg_i += 1;
}

try wip_captures.finalize();

// Populate the Decl ty/val with the function and its type.
new_decl.ty = try child_sema.typeOf(new_func_inst).copy(&new_decl_arena.allocator);
new_decl.val = try Value.Tag.function.create(&new_decl_arena.allocator, new_func);
@@ -5164,6 +5221,7 @@ fn analyzeSwitch(
.parent = block,
.sema = sema,
.src_decl = block.src_decl,
.wip_capture_scope = block.wip_capture_scope,
.instructions = .{},
.label = &label,
.inlining = block.inlining,
@@ -5268,12 +5326,19 @@ fn analyzeSwitch(
const body = sema.code.extra[extra_index..][0..body_len];
extra_index += body_len;

var wip_captures = try WipCaptureScope.init(gpa, sema.perm_arena, child_block.wip_capture_scope);
defer wip_captures.deinit();

case_block.instructions.shrinkRetainingCapacity(0);
case_block.wip_capture_scope = wip_captures.scope;

const item = sema.resolveInst(item_ref);
// `item` is already guaranteed to be constant known.

_ = try sema.analyzeBody(&case_block, body);

try wip_captures.finalize();

try cases_extra.ensureUnusedCapacity(gpa, 3 + case_block.instructions.items.len);
cases_extra.appendAssumeCapacity(1); // items_len
cases_extra.appendAssumeCapacity(@intCast(u32, case_block.instructions.items.len));
@@ -5301,6 +5366,7 @@ fn analyzeSwitch(
extra_index += items_len;

case_block.instructions.shrinkRetainingCapacity(0);
case_block.wip_capture_scope = child_block.wip_capture_scope;

var any_ok: Air.Inst.Ref = .none;

@@ -5379,11 +5445,18 @@ fn analyzeSwitch(
var cond_body = case_block.instructions.toOwnedSlice(gpa);
defer gpa.free(cond_body);

var wip_captures = try WipCaptureScope.init(gpa, sema.perm_arena, child_block.wip_capture_scope);
defer wip_captures.deinit();

case_block.instructions.shrinkRetainingCapacity(0);
case_block.wip_capture_scope = wip_captures.scope;

const body = sema.code.extra[extra_index..][0..body_len];
extra_index += body_len;
_ = try sema.analyzeBody(&case_block, body);

try wip_captures.finalize();

if (is_first) {
is_first = false;
first_else_body = cond_body;
@@ -5409,9 +5482,16 @@ fn analyzeSwitch(

var final_else_body: []const Air.Inst.Index = &.{};
if (special.body.len != 0) {
var wip_captures = try WipCaptureScope.init(gpa, sema.perm_arena, child_block.wip_capture_scope);
defer wip_captures.deinit();

case_block.instructions.shrinkRetainingCapacity(0);
case_block.wip_capture_scope = wip_captures.scope;

_ = try sema.analyzeBody(&case_block, special.body);

try wip_captures.finalize();

if (is_first) {
final_else_body = case_block.instructions.items;
} else {
@@ -5693,7 +5773,7 @@ fn zirImport(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileErro
try mod.semaFile(result.file);
const file_root_decl = result.file.root_decl.?;
try sema.mod.declareDeclDependency(sema.owner_decl, file_root_decl);
return sema.addType(file_root_decl.ty);
return sema.addConstant(file_root_decl.ty, file_root_decl.val);
}

fn zirRetErrValueCode(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
@@ -6536,8 +6616,45 @@ fn zirThis(
block: *Scope.Block,
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const this_decl = block.base.namespace().getDecl();
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
return sema.mod.fail(&block.base, src, "TODO: implement Sema.zirThis", .{});
return sema.analyzeDeclVal(block, src, this_decl);
}

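With zirThis implemented, @This() resolves to the Decl of the innermost enclosing namespace instead of failing with a TODO. A short illustrative sketch of the user-facing behavior (not taken from this commit's test suite):

const std = @import("std");

const Counter = struct {
    count: u32 = 0,

    // Inside the struct, @This() is the enclosing container type itself.
    const Self = @This();

    fn increment(self: *Self) void {
        self.count += 1;
    }
};

test "@This refers to the enclosing container" {
    var c = Counter{};
    c.increment();
    try std.testing.expect(c.count == 1);
    try std.testing.expect(Counter.Self == Counter);
}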
fn zirClosureCapture(
sema: *Sema,
block: *Scope.Block,
inst: Zir.Inst.Index,
) CompileError!void {
// TODO: Compile error when closed over values are modified
const inst_data = sema.code.instructions.items(.data)[inst].un_tok;
const tv = try sema.resolveInstConst(block, inst_data.src(), inst_data.operand);
try block.wip_capture_scope.captures.putNoClobber(sema.gpa, inst, .{
.ty = try tv.ty.copy(sema.perm_arena),
.val = try tv.val.copy(sema.perm_arena),
});
}

fn zirClosureGet(
sema: *Sema,
block: *Scope.Block,
inst: Zir.Inst.Index,
) CompileError!Air.Inst.Ref {
// TODO CLOSURE: Test this with inline functions
const inst_data = sema.code.instructions.items(.data)[inst].inst_node;
var scope: *CaptureScope = block.src_decl.src_scope.?;
// Note: The target closure must be in this scope list.
// If it's not here, the zir is invalid, or the list is broken.
const tv = while (true) {
// Note: We don't need to add a dependency here, because
// decls always depend on their lexical parents.
if (scope.captures.getPtr(inst_data.inst)) |tv| {
break tv;
}
scope = scope.parent.?;
} else unreachable;

return sema.addConstant(tv.ty, tv.val);
}

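For context, the kind of source that exercises this closure_capture / closure_get pair looks like the sketch below, a variant of the Point test updated at the end of this commit: the returned struct's method refers to the comptime parameter T from the enclosing function, so the outer scope records a capture of T and each use inside the struct is resolved through the capture-scope chain walked by zirClosureGet.

const std = @import("std");

fn Point(comptime T: type) type {
    return struct {
        x: T,
        y: T,

        // `T` here is a comptime value closed over from Point's scope.
        fn addOne(self: *@This()) void {
            self.x += 1;
            self.y += 1;
        }
    };
}

test "comptime closure over a type parameter" {
    var pt = Point(i32){ .x = 12, .y = 34 };
    Point(i32).addOne(&pt);
    try std.testing.expect(pt.x == 13);
    try std.testing.expect(pt.y == 35);
}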
fn zirRetAddr(
@@ -8615,6 +8732,7 @@ fn addSafetyCheck(
var fail_block: Scope.Block = .{
.parent = parent_block,
.sema = sema,
.wip_capture_scope = parent_block.wip_capture_scope,
.src_decl = parent_block.src_decl,
.instructions = .{},
.inlining = parent_block.inlining,
@@ -8714,7 +8832,7 @@ fn safetyPanic(
block: *Scope.Block,
src: LazySrcLoc,
panic_id: PanicId,
) !Zir.Inst.Index {
) CompileError!Zir.Inst.Index {
const msg = switch (panic_id) {
.unreach => "reached unreachable code",
.unwrap_null => "attempt to use null value",
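The safetyPanic hunk above tightens an inferred error set (`!Zir.Inst.Index`) to the explicit `CompileError` set. As a generic, self-contained illustration of that distinction (the names below are invented for the example and do not appear in the compiler):

const std = @import("std");

const ParseError = error{ Empty, NotADigit };

// Explicit error set: callers can see and exhaustively handle every possible
// error, and the function cannot silently grow new error cases.
fn parseDigitExplicit(s: []const u8) ParseError!u8 {
    if (s.len == 0) return error.Empty;
    if (s[0] < '0' or s[0] > '9') return error.NotADigit;
    return s[0] - '0';
}

// Inferred error set: the compiler computes the set from the function body.
fn parseDigitInferred(s: []const u8) !u8 {
    return parseDigitExplicit(s);
}

test "explicit vs. inferred error sets" {
    try std.testing.expectEqual(@as(u8, 7), try parseDigitExplicit("7"));
    try std.testing.expectError(error.Empty, parseDigitInferred(""));
}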
@@ -10666,6 +10784,10 @@ pub fn resolveDeclFields(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, ty:
sema.namespace = &struct_obj.namespace;
defer sema.namespace = prev_namespace;

const old_src = block.src_decl;
defer block.src_decl = old_src;
block.src_decl = struct_obj.owner_decl;

struct_obj.status = .field_types_wip;
try sema.analyzeStructFields(block, struct_obj);
struct_obj.status = .have_field_types;
@@ -10684,6 +10806,10 @@ pub fn resolveDeclFields(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, ty:
sema.namespace = &union_obj.namespace;
defer sema.namespace = prev_namespace;

const old_src = block.src_decl;
defer block.src_decl = old_src;
block.src_decl = union_obj.owner_decl;

union_obj.status = .field_types_wip;
try sema.analyzeUnionFields(block, union_obj);
union_obj.status = .have_field_types;
@@ -10885,9 +11011,11 @@ fn analyzeUnionFields(
const src: LazySrcLoc = .{ .node_offset = union_obj.node_offset };
extra_index += @boolToInt(small.has_src_node);

if (small.has_tag_type) {
const tag_type_ref: Zir.Inst.Ref = if (small.has_tag_type) blk: {
const ty_ref = @intToEnum(Zir.Inst.Ref, zir.extra[extra_index]);
extra_index += 1;
}
break :blk ty_ref;
} else .none;

const body_len = if (small.has_body_len) blk: {
const body_len = zir.extra[extra_index];
@@ -10996,6 +11124,7 @@ fn analyzeUnionFields(
}

// TODO resolve the union tag_type_ref
_ = tag_type_ref;
}

fn getBuiltin(

99
src/Zir.zig
@@ -49,8 +49,6 @@ pub const Header = extern struct {
};

pub const ExtraIndex = enum(u32) {
/// Ref. The main struct decl for this file.
main_struct,
/// If this is 0, no compile errors. Otherwise there is a `CompileErrors`
/// payload at this index.
compile_errors,
@@ -61,11 +59,6 @@ pub const ExtraIndex = enum(u32) {
_,
};

pub fn getMainStruct(zir: Zir) Inst.Index {
return zir.extra[@enumToInt(ExtraIndex.main_struct)] -
@intCast(u32, Inst.Ref.typed_value_map.len);
}

/// Returns the requested data, as well as the new index which is at the start of the
/// trailers for the object.
pub fn extraData(code: Zir, comptime T: type, index: usize) struct { data: T, end: usize } {
@@ -112,6 +105,10 @@ pub fn deinit(code: *Zir, gpa: *Allocator) void {
code.* = undefined;
}

/// ZIR is structured so that the outermost "main" struct of any file
/// is always at index 0.
pub const main_struct_inst: Inst.Index = 0;

/// These are untyped instructions generated from an Abstract Syntax Tree.
/// The data here is immutable because it is possible to have multiple
/// analyses on the same ZIR happening at the same time.
@@ -267,11 +264,6 @@ pub const Inst = struct {
/// only the taken branch is analyzed. The then block and else block must
/// terminate with an "inline" variant of a noreturn instruction.
condbr_inline,
/// An opaque type definition. Provides an AST node only.
/// Uses the `pl_node` union field. Payload is `OpaqueDecl`.
opaque_decl,
opaque_decl_anon,
opaque_decl_func,
/// An error set type definition. Contains a list of field names.
/// Uses the `pl_node` union field. Payload is `ErrorSetDecl`.
error_set_decl,
@@ -941,6 +933,17 @@ pub const Inst = struct {
@"await",
await_nosuspend,

/// When a type or function refers to a comptime value from an outer
/// scope, that forms a closure over a comptime value. The outer scope
/// will record a capture of that value, which encodes its current state
/// and marks it to persist. Uses `un_tok` field. Operand is the
/// instruction value to capture.
closure_capture,
/// The inner scope of a closure uses closure_get to retrieve the value
/// stored by the outer scope. Uses `inst_node` field. Operand is the
/// closure_capture instruction ref.
closure_get,

/// The ZIR instruction tag is one of the `Extended` ones.
/// Uses the `extended` union field.
extended,
@@ -996,9 +999,6 @@ pub const Inst = struct {
.cmp_gt,
.cmp_neq,
.coerce_result_ptr,
.opaque_decl,
.opaque_decl_anon,
.opaque_decl_func,
.error_set_decl,
.error_set_decl_anon,
.error_set_decl_func,
@@ -1191,6 +1191,8 @@ pub const Inst = struct {
.await_nosuspend,
.ret_err_value_code,
.extended,
.closure_get,
.closure_capture,
=> false,

.@"break",
@@ -1258,9 +1260,6 @@ pub const Inst = struct {
.coerce_result_ptr = .bin,
.condbr = .pl_node,
.condbr_inline = .pl_node,
.opaque_decl = .pl_node,
.opaque_decl_anon = .pl_node,
.opaque_decl_func = .pl_node,
.error_set_decl = .pl_node,
.error_set_decl_anon = .pl_node,
.error_set_decl_func = .pl_node,
@@ -1478,6 +1477,9 @@ pub const Inst = struct {
.@"await" = .un_node,
.await_nosuspend = .un_node,

.closure_capture = .un_tok,
.closure_get = .inst_node,

.extended = .extended,
});
};
@@ -1510,6 +1512,10 @@ pub const Inst = struct {
/// `operand` is payload index to `UnionDecl`.
/// `small` is `UnionDecl.Small`.
union_decl,
/// An opaque type definition. Contains references to decls and captures.
/// `operand` is payload index to `OpaqueDecl`.
/// `small` is `OpaqueDecl.Small`.
opaque_decl,
/// Obtains a pointer to the return value.
/// `operand` is `src_node: i32`.
ret_ptr,
@@ -2194,6 +2200,18 @@ pub const Inst = struct {
line: u32,
column: u32,
},
/// Used for unary operators which reference an inst,
/// with an AST node source location.
inst_node: struct {
/// Offset from Decl AST node index.
src_node: i32,
/// The meaning of this operand depends on the corresponding `Tag`.
inst: Index,

pub fn src(self: @This()) LazySrcLoc {
return .{ .node_offset = self.src_node };
}
},

// Make sure we don't accidentally add a field to make this union
// bigger than expected. Note that in Debug builds, Zig is allowed
@@ -2231,6 +2249,7 @@ pub const Inst = struct {
@"break",
switch_capture,
dbg_stmt,
inst_node,
};
};
@@ -2662,13 +2681,15 @@ pub const Inst = struct {
};

/// Trailing:
/// 0. decl_bits: u32 // for every 8 decls
/// 0. src_node: i32, // if has_src_node
/// 1. decls_len: u32, // if has_decls_len
/// 2. decl_bits: u32 // for every 8 decls
/// - sets of 4 bits:
/// 0b000X: whether corresponding decl is pub
/// 0b00X0: whether corresponding decl is exported
/// 0b0X00: whether corresponding decl has an align expression
/// 0bX000: whether corresponding decl has a linksection or an address space expression
/// 1. decl: { // for every decls_len
/// 3. decl: { // for every decls_len
/// src_hash: [4]u32, // hash of source bytes
/// line: u32, // line number of decl, relative to parent
/// name: u32, // null terminated string index
@@ -2685,7 +2706,12 @@ pub const Inst = struct {
/// }
/// }
pub const OpaqueDecl = struct {
decls_len: u32,
pub const Small = packed struct {
has_src_node: bool,
has_decls_len: bool,
name_strategy: NameStrategy,
_: u12 = undefined,
};
};

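The decl_bits layout documented above packs four boolean flags per decl, eight decls per u32 word. A small, self-contained sketch of decoding such a word follows; the helper is illustrative only, and the exact nibble ordering is an assumption rather than the compiler's actual decoding code:

const std = @import("std");

const DeclFlags = struct {
    is_pub: bool,
    is_exported: bool,
    has_align: bool,
    has_section_or_addrspace: bool,
};

// Assumed layout: decl N uses bits [4*N .. 4*N+3] of word N/8, with the
// lowest bit of each nibble meaning "is pub".
fn declFlags(decl_bits: []const u32, decl_index: usize) DeclFlags {
    const word = decl_bits[decl_index / 8];
    const shift = @intCast(u5, (decl_index % 8) * 4);
    const nibble = @truncate(u4, word >> shift);
    return .{
        .is_pub = (nibble & 0b0001) != 0,
        .is_exported = (nibble & 0b0010) != 0,
        .has_align = (nibble & 0b0100) != 0,
        .has_section_or_addrspace = (nibble & 0b1000) != 0,
    };
}

test "decode packed decl flags" {
    // decl 0: pub (0b0001); decl 1: exported + align (0b0110)
    const bits = [_]u32{0b0110_0001};
    try std.testing.expect(declFlags(&bits, 0).is_pub);
    try std.testing.expect(declFlags(&bits, 1).is_exported);
    try std.testing.expect(declFlags(&bits, 1).has_align);
    try std.testing.expect(!declFlags(&bits, 1).is_pub);
}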
/// Trailing: field_name: u32 // for every field: null terminated string index
@@ -2937,15 +2963,6 @@ pub fn declIterator(zir: Zir, decl_inst: u32) DeclIterator {
const tags = zir.instructions.items(.tag);
const datas = zir.instructions.items(.data);
switch (tags[decl_inst]) {
.opaque_decl,
.opaque_decl_anon,
.opaque_decl_func,
=> {
const inst_data = datas[decl_inst].pl_node;
const extra = zir.extraData(Inst.OpaqueDecl, inst_data.payload_index);
return declIteratorInner(zir, extra.end, extra.data.decls_len);
},

// Functions are allowed and yield no iterations.
// There is one case matching this in the extended instruction set below.
.func,
@@ -3000,6 +3017,18 @@ pub fn declIterator(zir: Zir, decl_inst: u32) DeclIterator {

return declIteratorInner(zir, extra_index, decls_len);
},
.opaque_decl => {
const small = @bitCast(Inst.OpaqueDecl.Small, extended.small);
var extra_index: usize = extended.operand;
extra_index += @boolToInt(small.has_src_node);
const decls_len = if (small.has_decls_len) decls_len: {
const decls_len = zir.extra[extra_index];
extra_index += 1;
break :decls_len decls_len;
} else 0;

return declIteratorInner(zir, extra_index, decls_len);
},
else => unreachable,
}
},
@@ -3037,13 +3066,6 @@ fn findDeclsInner(
const datas = zir.instructions.items(.data);

switch (tags[inst]) {
// Decl instructions are interesting but have no body.
// TODO yes they do have a body actually. recurse over them just like block instructions.
.opaque_decl,
.opaque_decl_anon,
.opaque_decl_func,
=> return list.append(inst),

// Function instructions are interesting and have a body.
.func,
.func_inferred,
@@ -3071,9 +3093,12 @@ fn findDeclsInner(
return zir.findDeclsBody(list, body);
},

// Decl instructions are interesting but have no body.
// TODO yes they do have a body actually. recurse over them just like block instructions.
.struct_decl,
.union_decl,
.enum_decl,
.opaque_decl,
=> return list.append(inst),

else => return,

@@ -26,7 +26,7 @@ pub fn renderAsTextToFile(
.parent_decl_node = 0,
};

const main_struct_inst = scope_file.zir.getMainStruct();
const main_struct_inst = Zir.main_struct_inst;
try fs_file.writer().print("%{d} ", .{main_struct_inst});
try writer.writeInstToStream(fs_file.writer(), main_struct_inst);
try fs_file.writeAll("\n");
@@ -171,6 +171,7 @@ const Writer = struct {
.ref,
.ret_coerce,
.ensure_err_payload_void,
.closure_capture,
=> try self.writeUnTok(stream, inst),

.bool_br_and,
@@ -307,10 +308,6 @@ const Writer = struct {
.condbr_inline,
=> try self.writePlNodeCondBr(stream, inst),

.opaque_decl => try self.writeOpaqueDecl(stream, inst, .parent),
.opaque_decl_anon => try self.writeOpaqueDecl(stream, inst, .anon),
.opaque_decl_func => try self.writeOpaqueDecl(stream, inst, .func),

.error_set_decl => try self.writeErrorSetDecl(stream, inst, .parent),
.error_set_decl_anon => try self.writeErrorSetDecl(stream, inst, .anon),
.error_set_decl_func => try self.writeErrorSetDecl(stream, inst, .func),
@@ -371,6 +368,8 @@ const Writer = struct {

.dbg_stmt => try self.writeDbgStmt(stream, inst),

.closure_get => try self.writeInstNode(stream, inst),

.extended => try self.writeExtended(stream, inst),
}
}
@@ -412,6 +411,7 @@ const Writer = struct {
.struct_decl => try self.writeStructDecl(stream, extended),
.union_decl => try self.writeUnionDecl(stream, extended),
.enum_decl => try self.writeEnumDecl(stream, extended),
.opaque_decl => try self.writeOpaqueDecl(stream, extended),

.c_undef, .c_include => {
const inst_data = self.code.extraData(Zir.Inst.UnNode, extended.operand).data;
@@ -745,6 +745,17 @@ const Writer = struct {
try self.writeSrc(stream, src);
}

fn writeInstNode(
self: *Writer,
stream: anytype,
inst: Zir.Inst.Index,
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const inst_data = self.code.instructions.items(.data)[inst].inst_node;
try self.writeInstIndex(stream, inst_data.inst);
try stream.writeAll(") ");
try self.writeSrc(stream, inst_data.src());
}

fn writeAsm(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const extra = self.code.extraData(Zir.Inst.Asm, extended.operand);
const src: LazySrcLoc = .{ .node_offset = extra.data.src_node };
@@ -1365,26 +1376,36 @@ const Writer = struct {
fn writeOpaqueDecl(
self: *Writer,
stream: anytype,
inst: Zir.Inst.Index,
name_strategy: Zir.Inst.NameStrategy,
extended: Zir.Inst.Extended.InstData,
) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const extra = self.code.extraData(Zir.Inst.OpaqueDecl, inst_data.payload_index);
const decls_len = extra.data.decls_len;
const small = @bitCast(Zir.Inst.OpaqueDecl.Small, extended.small);
var extra_index: usize = extended.operand;

try stream.print("{s}, ", .{@tagName(name_strategy)});
const src_node: ?i32 = if (small.has_src_node) blk: {
const src_node = @bitCast(i32, self.code.extra[extra_index]);
extra_index += 1;
break :blk src_node;
} else null;

const decls_len = if (small.has_decls_len) blk: {
const decls_len = self.code.extra[extra_index];
extra_index += 1;
break :blk decls_len;
} else 0;

try stream.print("{s}, ", .{@tagName(small.name_strategy)});

if (decls_len == 0) {
try stream.writeAll("}) ");
try stream.writeAll("{})");
} else {
try stream.writeAll("\n");
try stream.writeAll("{\n");
self.indent += 2;
_ = try self.writeDecls(stream, decls_len, extra.end);
_ = try self.writeDecls(stream, decls_len, extra_index);
self.indent -= 2;
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
try stream.writeAll("})");
}
try self.writeSrc(stream, inst_data.src());
try self.writeSrcNode(stream, src_node);
}

fn writeErrorSetDecl(

@@ -1,20 +1,22 @@
const builtin = @import("builtin");

test {
_ = @import("behavior/bool.zig");
_ = @import("behavior/basic.zig");
_ = @import("behavior/generics.zig");
_ = @import("behavior/eval.zig");
_ = @import("behavior/pointers.zig");
_ = @import("behavior/if.zig");
_ = @import("behavior/cast.zig");
// Tests that pass for both.
_ = @import("behavior/array.zig");
_ = @import("behavior/usingnamespace.zig");
_ = @import("behavior/atomics.zig");
_ = @import("behavior/basic.zig");
_ = @import("behavior/bool.zig");
_ = @import("behavior/cast.zig");
_ = @import("behavior/eval.zig");
_ = @import("behavior/generics.zig");
_ = @import("behavior/if.zig");
_ = @import("behavior/pointers.zig");
_ = @import("behavior/sizeof_and_typeof.zig");
_ = @import("behavior/translate_c_macros.zig");
_ = @import("behavior/struct.zig");
_ = @import("behavior/this.zig");
_ = @import("behavior/translate_c_macros.zig");
_ = @import("behavior/union.zig");
_ = @import("behavior/usingnamespace.zig");
_ = @import("behavior/widening.zig");

if (builtin.zig_is_stage2) {
@@ -142,7 +144,6 @@ test {
_ = @import("behavior/switch.zig");
_ = @import("behavior/switch_prong_err_enum.zig");
_ = @import("behavior/switch_prong_implicit_cast.zig");
_ = @import("behavior/this.zig");
_ = @import("behavior/truncate.zig");
_ = @import("behavior/try.zig");
_ = @import("behavior/tuple.zig");

@@ -24,11 +24,10 @@ test "this refer to module call private fn" {
}

test "this refer to container" {
var pt = Point(i32){
.x = 12,
.y = 34,
};
pt.addOne();
var pt: Point(i32) = undefined;
pt.x = 12;
pt.y = 34;
Point(i32).addOne(&pt);
try expect(pt.x == 13);
try expect(pt.y == 35);
}