Merge pull request #6660 from Vexu/stage2

Stage2 switch and package imports
Veikka Tuominen, 2020-10-31 09:39:28 +02:00 (committed by GitHub)
14 changed files with 1379 additions and 30 deletions

View File

@@ -660,6 +660,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
.source = .{ .unloaded = {} },
.contents = .{ .not_available = {} },
.status = .never_loaded,
.pkg = root_pkg,
.root_container = .{
.file_scope = root_scope,
.decls = .{},

View File

@@ -469,6 +469,22 @@ pub const Scope = struct {
}
}
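/// Returns the Package that owns this scope, walking parent scopes until a
/// file or container scope is reached.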
pub fn getOwnerPkg(base: *Scope) *Package {
var cur = base;
while (true) {
cur = switch (cur.tag) {
.container => return @fieldParentPtr(Container, "base", cur).file_scope.pkg,
.file => return @fieldParentPtr(File, "base", cur).pkg,
.zir_module => unreachable, // TODO are zir modules allowed to import packages?
.gen_zir => @fieldParentPtr(GenZIR, "base", cur).parent,
.local_val => @fieldParentPtr(LocalVal, "base", cur).parent,
.local_ptr => @fieldParentPtr(LocalPtr, "base", cur).parent,
.block => @fieldParentPtr(Block, "base", cur).decl.scope,
.decl => @fieldParentPtr(DeclAnalysis, "base", cur).decl.scope,
};
}
}
/// Asserts the scope is a namespace Scope and removes the Decl from the namespace.
pub fn removeDecl(base: *Scope, child: *Decl) void {
switch (base.tag) {
@@ -576,6 +592,8 @@ pub const Scope = struct {
unloaded_parse_failure,
loaded_success,
},
/// Package that this file is a part of, managed externally.
pkg: *Package,
root_container: Container,
@@ -614,7 +632,7 @@ pub const Scope = struct {
pub fn getSource(self: *File, module: *Module) ![:0]const u8 {
switch (self.source) {
.unloaded => {
- const source = try module.root_pkg.root_src_directory.handle.readFileAllocOptions(
+ const source = try self.pkg.root_src_directory.handle.readFileAllocOptions(
module.gpa,
self.sub_file_path,
std.math.maxInt(u32),
@@ -1036,6 +1054,10 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.param_types = param_types,
}, .{});
if (self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {};
}
// We need the memory for the Type to go into the arena for the Decl
var decl_arena = std.heap.ArenaAllocator.init(self.gpa);
errdefer decl_arena.deinit();
@@ -1109,6 +1131,10 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
_ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .returnvoid);
}
if (self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {};
}
const fn_zir = try gen_scope_arena.allocator.create(Fn.ZIR);
fn_zir.* = .{
.body = .{
@@ -1240,6 +1266,9 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const src = tree.token_locs[init_node.firstToken()].start;
const init_inst = try astgen.expr(self, &gen_scope.base, init_result_loc, init_node);
if (self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {};
}
var inner_block: Scope.Block = .{
.parent = null,
@@ -1281,6 +1310,10 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.val = Value.initTag(.type_type),
});
const var_type = try astgen.expr(self, &type_scope.base, .{ .ty = type_type }, type_node);
if (self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "var_type", decl.name, type_scope.instructions.items) catch {};
}
const ty = try zir_sema.analyzeBodyValueAsType(self, &block_scope, var_type, .{
.instructions = type_scope.instructions.items,
});
@@ -1354,6 +1387,9 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
defer gen_scope.instructions.deinit(self.gpa);
_ = try astgen.comptimeExpr(self, &gen_scope.base, .none, comptime_decl.expr);
if (self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "comptime_block", decl.name, gen_scope.instructions.items) catch {};
}
var block_scope: Scope.Block = .{
.parent = null,
@@ -2080,6 +2116,29 @@ pub fn addCall(
return &inst.base;
}
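/// Appends a switchbr instruction to the block. Its result type is noreturn:
/// every case body, and the else body, must terminate control flow itself.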
pub fn addSwitchBr(
self: *Module,
block: *Scope.Block,
src: usize,
target_ptr: *Inst,
cases: []Inst.SwitchBr.Case,
else_body: ir.Body,
) !*Inst {
const inst = try block.arena.create(Inst.SwitchBr);
inst.* = .{
.base = .{
.tag = .switchbr,
.ty = Type.initTag(.noreturn),
.src = src,
},
.target_ptr = target_ptr,
.cases = cases,
.else_body = else_body,
};
try block.instructions.append(self.gpa, &inst.base);
return &inst.base;
}
pub fn constInst(self: *Module, scope: *Scope, src: usize, typed_value: TypedValue) !*Inst {
const const_inst = try scope.arena().create(Inst.Constant);
const_inst.* = .{
@@ -2400,28 +2459,43 @@ pub fn analyzeSlice(self: *Module, scope: *Scope, src: usize, array_ptr: *Inst,
}
pub fn analyzeImport(self: *Module, scope: *Scope, src: usize, target_string: []const u8) !*Scope.File {
- // TODO if (package_table.get(target_string)) |pkg|
- if (self.import_table.get(target_string)) |some| {
+ const cur_pkg = scope.getOwnerPkg();
+ const cur_pkg_dir_path = cur_pkg.root_src_directory.path orelse ".";
+ const found_pkg = cur_pkg.table.get(target_string);
+ const resolved_path = if (found_pkg) |pkg|
+ try std.fs.path.resolve(self.gpa, &[_][]const u8{ pkg.root_src_directory.path orelse ".", pkg.root_src_path })
+ else
+ try std.fs.path.resolve(self.gpa, &[_][]const u8{ cur_pkg_dir_path, target_string });
+ errdefer self.gpa.free(resolved_path);
+ if (self.import_table.get(resolved_path)) |some| {
+ self.gpa.free(resolved_path);
return some;
}
- // TODO check for imports outside of pkg path
- if (false) return error.ImportOutsidePkgPath;
+ if (found_pkg == null) {
+ const resolved_root_path = try std.fs.path.resolve(self.gpa, &[_][]const u8{cur_pkg_dir_path});
+ defer self.gpa.free(resolved_root_path);
+ if (!mem.startsWith(u8, resolved_path, resolved_root_path)) {
+ return error.ImportOutsidePkgPath;
+ }
+ }
// TODO Scope.Container arena for ty and sub_file_path
const struct_payload = try self.gpa.create(Type.Payload.EmptyStruct);
errdefer self.gpa.destroy(struct_payload);
const file_scope = try self.gpa.create(Scope.File);
errdefer self.gpa.destroy(file_scope);
- const file_path = try self.gpa.dupe(u8, target_string);
- errdefer self.gpa.free(file_path);
struct_payload.* = .{ .scope = &file_scope.root_container };
file_scope.* = .{
- .sub_file_path = file_path,
+ .sub_file_path = resolved_path,
.source = .{ .unloaded = {} },
.contents = .{ .not_available = {} },
.status = .never_loaded,
+ .pkg = found_pkg orelse cur_pkg,
.root_container = .{
.file_scope = file_scope,
.decls = .{},
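The resolution rule above, sketched on hypothetical paths (a package rooted at /proj/src whose package table maps "clap" to another package):

// @import("util.zig")      -> /proj/src/util.zig (same package, pkg = cur_pkg)
// @import("sub/thing.zig") -> /proj/src/sub/thing.zig
// @import("../secret.zig") -> /proj/secret.zig, which fails the startsWith
//                             check with error.ImportOutsidePkgPath
// @import("clap")          -> the mapped package's root source file,
//                             pkg = found_pkg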

src/RangeSet.zig (new file)
View File

@@ -0,0 +1,76 @@
const std = @import("std");
const Order = std.math.Order;
const Value = @import("value.zig").Value;
const RangeSet = @This();
ranges: std.ArrayList(Range),
pub const Range = struct {
start: Value,
end: Value,
src: usize,
};
pub fn init(allocator: *std.mem.Allocator) RangeSet {
return .{
.ranges = std.ArrayList(Range).init(allocator),
};
}
pub fn deinit(self: *RangeSet) void {
self.ranges.deinit();
}
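/// Attempts to add the inclusive range [start, end]. Returns the src of a
/// previously added overlapping range, or null if there is no conflict.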
pub fn add(self: *RangeSet, start: Value, end: Value, src: usize) !?usize {
for (self.ranges.items) |range| {
if ((start.compare(.gte, range.start) and start.compare(.lte, range.end)) or
(end.compare(.gte, range.start) and end.compare(.lte, range.end)) or
(start.compare(.lte, range.start) and end.compare(.gte, range.end)))
{
// The ranges overlap; the last condition catches a new range that
// fully contains an existing one.
return range.src;
}
}
try self.ranges.append(.{
.start = start,
.end = end,
.src = src,
});
return null;
}
/// Assumes a and b do not overlap
fn lessThan(_: void, a: Range, b: Range) bool {
return a.start.compare(.lt, b.start);
}
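/// Returns true if the added ranges cover [start, end] exactly, with no gaps.
/// Sorts the ranges as a side effect.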
pub fn spans(self: *RangeSet, start: Value, end: Value) !bool {
if (self.ranges.items.len == 0) return false;
std.sort.sort(Range, self.ranges.items, {}, lessThan);
if (!self.ranges.items[0].start.eql(start) or
!self.ranges.items[self.ranges.items.len - 1].end.eql(end))
{
return false;
}
var space: Value.BigIntSpace = undefined;
var counter = try std.math.big.int.Managed.init(self.ranges.allocator);
defer counter.deinit();
// look for gaps
for (self.ranges.items[1..]) |cur, i| {
// i is the index of the element before cur, since the loop slice starts at 1.
const prev = self.ranges.items[i];
// prev.end + 1 == cur.start
try counter.copy(prev.end.toBigInt(&space));
try counter.addScalar(counter.toConst(), 1);
const cur_start_int = cur.start.toBigInt(&space);
if (!cur_start_int.eq(counter.toConst())) {
return false;
}
}
return true;
}
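A minimal usage sketch of the two operations above, assuming a hypothetical helper intValue that wraps an integer in a Value (no such helper exists in this commit):

var set = RangeSet.init(gpa);
defer set.deinit();
_ = try set.add(intValue(1), intValue(3), src_a); // no overlap -> null
_ = try set.add(intValue(5), intValue(9), src_b); // no overlap -> null
const dup = try set.add(intValue(2), intValue(6), src_c);
// 2...6 overlaps 1...3, so dup.? == src_a, the location validateSwitch
// uses to report "duplicate switch value".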

View File

@@ -183,6 +183,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.VarDecl => unreachable, // Handled in `blockExpr`.
.SwitchCase => unreachable, // Handled in `switchExpr`.
.SwitchElse => unreachable, // Handled in `switchExpr`.
.Range => unreachable, // Handled in `switchExpr`.
.Else => unreachable, // Handled explicitly the control flow expression functions.
.Payload => unreachable, // Handled explicitly.
.PointerPayload => unreachable, // Handled explicitly.
@@ -279,9 +280,9 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.Catch => return catchExpr(mod, scope, rl, node.castTag(.Catch).?),
.Comptime => return comptimeKeyword(mod, scope, rl, node.castTag(.Comptime).?),
.OrElse => return orelseExpr(mod, scope, rl, node.castTag(.OrElse).?),
+ .Switch => return switchExpr(mod, scope, rl, node.castTag(.Switch).?),
.Defer => return mod.failNode(scope, node, "TODO implement astgen.expr for .Defer", .{}),
- .Range => return mod.failNode(scope, node, "TODO implement astgen.expr for .Range", .{}),
.Await => return mod.failNode(scope, node, "TODO implement astgen.expr for .Await", .{}),
.Resume => return mod.failNode(scope, node, "TODO implement astgen.expr for .Resume", .{}),
.Try => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}),
@@ -289,7 +290,6 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.ArrayInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializerDot", .{}),
.StructInitializer => return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializer", .{}),
.StructInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializerDot", .{}),
- .Switch => return mod.failNode(scope, node, "TODO implement astgen.expr for .Switch", .{}),
.Suspend => return mod.failNode(scope, node, "TODO implement astgen.expr for .Suspend", .{}),
.Continue => return mod.failNode(scope, node, "TODO implement astgen.expr for .Continue", .{}),
.AnyType => return mod.failNode(scope, node, "TODO implement astgen.expr for .AnyType", .{}),
@@ -1561,6 +1561,245 @@ fn forExpr(mod: *Module, scope: *Scope, rl: ResultLoc, for_node: *ast.Node.For)
return &for_block.base;
}
fn getRangeNode(node: *ast.Node) ?*ast.Node.SimpleInfixOp {
var cur = node;
while (true) {
switch (cur.tag) {
.Range => return @fieldParentPtr(ast.Node.SimpleInfixOp, "base", cur),
.GroupedExpression => cur = @fieldParentPtr(ast.Node.GroupedExpression, "base", cur).expr,
else => return null,
}
}
}
fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node.Switch) InnerError!*zir.Inst {
var block_scope: Scope.GenZIR = .{
.parent = scope,
.decl = scope.decl().?,
.arena = scope.arena(),
.instructions = .{},
};
defer block_scope.instructions.deinit(mod.gpa);
const tree = scope.tree();
const switch_src = tree.token_locs[switch_node.switch_token].start;
const target_ptr = try expr(mod, &block_scope.base, .ref, switch_node.expr);
const target = try addZIRUnOp(mod, &block_scope.base, target_ptr.src, .deref, target_ptr);
// Add the switch instruction here so that it comes before any range checks.
const switch_inst = (try addZIRInst(mod, &block_scope.base, switch_src, zir.Inst.SwitchBr, .{
.target_ptr = target_ptr,
.cases = undefined, // populated below
.items = &[_]*zir.Inst{}, // populated below
.else_body = undefined, // populated below
}, .{})).castTag(.switchbr).?;
var items = std.ArrayList(*zir.Inst).init(mod.gpa);
defer items.deinit();
var cases = std.ArrayList(zir.Inst.SwitchBr.Case).init(mod.gpa);
defer cases.deinit();
// Add comptime block containing all prong items first,
const item_block = try addZIRInstBlock(mod, scope, switch_src, .block_comptime_flat, .{
.instructions = undefined, // populated below
});
// then add block containing the switch.
const block = try addZIRInstBlock(mod, scope, switch_src, .block, .{
.instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
});
// Most result location types can be forwarded directly; however
// if we need to write to a pointer which has an inferred type,
// proper type inference requires peer type resolution on the switch case.
const case_rl: ResultLoc = switch (rl) {
.discard, .none, .ty, .ptr, .ref => rl,
.inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block },
};
var item_scope: Scope.GenZIR = .{
.parent = scope,
.decl = scope.decl().?,
.arena = scope.arena(),
.instructions = .{},
};
defer item_scope.instructions.deinit(mod.gpa);
var case_scope: Scope.GenZIR = .{
.parent = scope,
.decl = block_scope.decl,
.arena = block_scope.arena,
.instructions = .{},
};
defer case_scope.instructions.deinit(mod.gpa);
var else_scope: Scope.GenZIR = .{
.parent = scope,
.decl = block_scope.decl,
.arena = block_scope.arena,
.instructions = .{},
};
defer else_scope.instructions.deinit(mod.gpa);
// First gather all the switch items and check the else/'_' prongs.
var else_src: ?usize = null;
var underscore_src: ?usize = null;
var first_range: ?*zir.Inst = null;
var special_case: ?*ast.Node.SwitchCase = null;
for (switch_node.cases()) |uncasted_case| {
const case = uncasted_case.castTag(.SwitchCase).?;
const case_src = tree.token_locs[case.firstToken()].start;
// reset without freeing to reduce allocations.
case_scope.instructions.items.len = 0;
assert(case.items_len != 0);
// Check for else/_ prong, those are handled last.
if (case.items_len == 1 and case.items()[0].tag == .SwitchElse) {
if (else_src) |src| {
return mod.fail(scope, case_src, "multiple else prongs in switch expression", .{});
// TODO notes "previous else prong is here"
}
else_src = case_src;
special_case = case;
continue;
} else if (case.items_len == 1 and case.items()[0].tag == .Identifier and
mem.eql(u8, tree.tokenSlice(case.items()[0].firstToken()), "_"))
{
if (underscore_src) |src| {
return mod.fail(scope, case_src, "multiple '_' prongs in switch expression", .{});
// TODO notes "previous '_' prong is here"
}
underscore_src = case_src;
special_case = case;
continue;
}
if (else_src) |some_else| {
if (underscore_src) |some_underscore| {
return mod.fail(scope, switch_src, "else and '_' prong in switch expression", .{});
// TODO notes "else prong is here"
// TODO notes "'_' prong is here"
}
}
// If this is a simple one item prong then it is handled by the switchbr.
if (case.items_len == 1 and getRangeNode(case.items()[0]) == null) {
const item = try expr(mod, &item_scope.base, .none, case.items()[0]);
try items.append(item);
try switchCaseExpr(mod, &case_scope.base, case_rl, block, case);
try cases.append(.{
.item = item,
.body = .{ .instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items) },
});
continue;
}
// TODO if the case has few items and no ranges it might be better
// to just handle them as switch prongs.
// Check if the target matches any of the items.
// 1, 2, 3..6 will result in
// target == 1 or target == 2 or (target >= 3 and target <= 6)
var any_ok: ?*zir.Inst = null;
for (case.items()) |item| {
if (getRangeNode(item)) |range| {
const start = try expr(mod, &item_scope.base, .none, range.lhs);
const end = try expr(mod, &item_scope.base, .none, range.rhs);
const range_src = tree.token_locs[range.op_token].start;
const range_inst = try addZIRBinOp(mod, &item_scope.base, range_src, .switch_range, start, end);
try items.append(range_inst);
if (first_range == null) first_range = range_inst;
// target >= start and target <= end
const range_start_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .cmp_gte, target, start);
const range_end_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .cmp_lte, target, end);
const range_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .booland, range_start_ok, range_end_ok);
if (any_ok) |some| {
any_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .boolor, some, range_ok);
} else {
any_ok = range_ok;
}
continue;
}
const item_inst = try expr(mod, &item_scope.base, .none, item);
try items.append(item_inst);
const cmp_ok = try addZIRBinOp(mod, &else_scope.base, item_inst.src, .cmp_eq, target, item_inst);
if (any_ok) |some| {
any_ok = try addZIRBinOp(mod, &else_scope.base, item_inst.src, .boolor, some, cmp_ok);
} else {
any_ok = cmp_ok;
}
}
const condbr = try addZIRInstSpecial(mod, &case_scope.base, case_src, zir.Inst.CondBr, .{
.condition = any_ok.?,
.then_body = undefined, // populated below
.else_body = undefined, // populated below
}, .{});
const cond_block = try addZIRInstBlock(mod, &else_scope.base, case_src, .block, .{
.instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items),
});
// reset case_scope for then_body
case_scope.instructions.items.len = 0;
try switchCaseExpr(mod, &case_scope.base, case_rl, block, case);
condbr.positionals.then_body = .{
.instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items),
};
// reset case_scope for else_body
case_scope.instructions.items.len = 0;
_ = try addZIRInst(mod, &case_scope.base, case_src, zir.Inst.BreakVoid, .{
.block = cond_block,
}, .{});
condbr.positionals.else_body = .{
.instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items),
};
}
// Generate the else/'_' prong last, or an unreachable marker to finish the block.
if (special_case) |case| {
try switchCaseExpr(mod, &else_scope.base, case_rl, block, case);
} else {
// Not handling all possible cases is a compile error.
_ = try addZIRNoOp(mod, &else_scope.base, switch_src, .unreach_nocheck);
}
// All items have been generated, add the instructions to the comptime block.
item_block.positionals.body = .{
.instructions = try block_scope.arena.dupe(*zir.Inst, item_scope.instructions.items),
};
// Actually populate switch instruction values.
if (else_src != null) switch_inst.kw_args.special_prong = .@"else";
if (underscore_src != null) switch_inst.kw_args.special_prong = .underscore;
switch_inst.positionals.cases = try block_scope.arena.dupe(zir.Inst.SwitchBr.Case, cases.items);
switch_inst.positionals.items = try block_scope.arena.dupe(*zir.Inst, items.items);
switch_inst.kw_args.range = first_range;
switch_inst.positionals.else_body = .{
.instructions = try block_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
};
return &block.base;
}
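To make the lowering above concrete, a prong list like the one in the comments lowers as follows (source sketch, names hypothetical):

// switch (x) {
//     1, 2, 3...6 => a(), // multi-item prong: compiled to a condbr guarded by
//                         // x == 1 or x == 2 or (x >= 3 and x <= 6)
//     7 => b(),           // single-item prong: a plain switchbr case
//     else => c(),        // special prong: becomes the switchbr else_body
// }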
fn switchCaseExpr(mod: *Module, scope: *Scope, rl: ResultLoc, block: *zir.Inst.Block, case: *ast.Node.SwitchCase) !void {
const tree = scope.tree();
const case_src = tree.token_locs[case.firstToken()].start;
if (case.payload != null) {
return mod.fail(scope, case_src, "TODO switch case payload capture", .{});
}
const case_body = try expr(mod, scope, rl, case.expr);
if (!case_body.tag.isNoReturn()) {
_ = try addZIRInst(mod, scope, case_src, zir.Inst.Break, .{
.block = block,
.operand = case_body,
}, .{});
}
}
fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[cfe.ltoken].start;

View File

@@ -758,6 +758,8 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.br => return self.genBr(inst.castTag(.br).?),
.breakpoint => return self.genBreakpoint(inst.src),
.brvoid => return self.genBrVoid(inst.castTag(.brvoid).?),
.booland => return self.genBoolOp(inst.castTag(.booland).?),
.boolor => return self.genBoolOp(inst.castTag(.boolor).?),
.call => return self.genCall(inst.castTag(.call).?),
.cmp_lt => return self.genCmp(inst.castTag(.cmp_lt).?, .lt),
.cmp_lte => return self.genCmp(inst.castTag(.cmp_lte).?, .lte),
@@ -782,6 +784,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.retvoid => return self.genRetVoid(inst.castTag(.retvoid).?),
.store => return self.genStore(inst.castTag(.store).?),
.sub => return self.genSub(inst.castTag(.sub).?),
.switchbr => return self.genSwitch(inst.castTag(.switchbr).?),
.unreach => return MCValue{ .unreach = {} },
.unwrap_optional => return self.genUnwrapOptional(inst.castTag(.unwrap_optional).?),
.wrap_optional => return self.genWrapOptional(inst.castTag(.wrap_optional).?),
@@ -1989,6 +1992,12 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
return @bitCast(MCValue, inst.codegen.mcv);
}
fn genSwitch(self: *Self, inst: *ir.Inst.SwitchBr) !MCValue {
switch (arch) {
else => return self.fail(inst.base.src, "TODO genSwitch for {}", .{self.target.cpu.arch}),
}
}
fn performReloc(self: *Self, src: usize, reloc: Reloc) !void {
switch (reloc) {
.rel32 => |pos| {
@@ -2023,6 +2032,21 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
return self.brVoid(inst.base.src, inst.block);
}
fn genBoolOp(self: *Self, inst: *ir.Inst.BinOp) !MCValue {
if (inst.base.isUnused())
return MCValue.dead;
switch (arch) {
.x86_64 => if (inst.base.tag == .booland) {
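// The 4/0x20 and 1/0x08 arguments below are the x86 group-1 immediate digit
// and MR-form opcode for AND and OR respectively.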
// lhs AND rhs
return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 4, 0x20);
} else {
// lhs OR rhs
return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 1, 0x08);
},
else => return self.fail(inst.base.src, "TODO implement sub for {}", .{self.target.cpu.arch}),
}
}
fn brVoid(self: *Self, src: usize, block: *ir.Inst.Block) !MCValue {
// Emit a jump with a relocation. It will be patched up after the block ends.
try block.codegen.relocs.ensureCapacity(self.gpa, block.codegen.relocs.items.len + 1);

View File

@@ -74,6 +74,8 @@ pub const Inst = struct {
isnonnull,
isnull,
iserr,
booland,
boolor,
/// Read a value from a pointer.
load,
loop,
@@ -91,6 +93,7 @@ pub const Inst = struct {
intcast,
unwrap_optional,
wrap_optional,
switchbr,
pub fn Type(tag: Tag) type {
return switch (tag) {
@@ -125,6 +128,8 @@ pub const Inst = struct {
.cmp_gt,
.cmp_neq,
.store,
.booland,
.boolor,
=> BinOp,
.arg => Arg,
@@ -137,6 +142,7 @@ pub const Inst = struct {
.constant => Constant,
.loop => Loop,
.varptr => VarPtr,
.switchbr => SwitchBr,
};
}
@@ -458,6 +464,47 @@ pub const Inst = struct {
return null;
}
};
pub const SwitchBr = struct {
pub const base_tag = Tag.switchbr;
base: Inst,
target_ptr: *Inst,
cases: []Case,
/// Set of instructions whose lifetimes end at the start of one of the cases.
/// Laid out in case order: deaths[0 .. case_0_count], then
/// deaths[case_0_count .. case_0_count + case_1_count], and so on,
/// with the else deaths at the end.
deaths: [*]*Inst = undefined,
else_index: u32 = 0,
else_deaths: u32 = 0,
else_body: Body,
pub const Case = struct {
item: Value,
body: Body,
index: u32 = 0,
deaths: u32 = 0,
};
pub fn operandCount(self: *const SwitchBr) usize {
return 1;
}
pub fn getOperand(self: *const SwitchBr, index: usize) ?*Inst {
var i = index;
if (i < 1)
return self.target_ptr;
i -= 1;
return null;
}
pub fn caseDeaths(self: *const SwitchBr, case_index: usize) []*Inst {
const case = self.cases[case_index];
return (self.deaths + case.index)[0..case.deaths];
}
pub fn elseDeaths(self: *const SwitchBr) []*Inst {
return (self.deaths + self.else_index)[0..self.else_deaths];
}
};
};
pub const Body = struct {

View File

@@ -144,6 +144,92 @@ fn analyzeInst(
// instruction, and the deaths flag for the CondBr instruction will indicate whether the
// condition's lifetime ends immediately before entering any branch.
},
.switchbr => {
const inst = base.castTag(.switchbr).?;
const Table = std.AutoHashMap(*ir.Inst, void);
const case_tables = try table.allocator.alloc(Table, inst.cases.len + 1); // +1 for else
defer table.allocator.free(case_tables);
std.mem.set(Table, case_tables, Table.init(table.allocator));
defer for (case_tables) |*ct| ct.deinit();
for (inst.cases) |case, i| {
try analyzeWithTable(arena, table, &case_tables[i], case.body);
// Reset the table back to its state from before the case.
var it = case_tables[i].iterator();
while (it.next()) |entry| {
table.removeAssertDiscard(entry.key);
}
}
{ // else
try analyzeWithTable(arena, table, &case_tables[case_tables.len - 1], inst.else_body);
// Reset the table back to its state from before the case.
var it = case_tables[case_tables.len - 1].iterator();
while (it.next()) |entry| {
table.removeAssertDiscard(entry.key);
}
}
const List = std.ArrayList(*ir.Inst);
const case_deaths = try table.allocator.alloc(List, case_tables.len); // already includes the else entry
defer table.allocator.free(case_deaths);
std.mem.set(List, case_deaths, List.init(table.allocator));
defer for (case_deaths) |*cd| cd.deinit();
var total_deaths: u32 = 0;
for (case_tables) |*ct, i| {
total_deaths += ct.count();
var it = ct.iterator();
while (it.next()) |entry| {
const case_death = entry.key;
for (case_tables) |*ct_inner, j| {
if (i == j) continue;
if (!ct_inner.contains(case_death)) {
// instruction is not referenced in this case
try case_deaths[j].append(case_death);
}
}
// undo resetting the table
_ = try table.put(case_death, {});
}
}
// Now we have to correctly populate new_set.
if (new_set) |ns| {
try ns.ensureCapacity(@intCast(u32, ns.count() + total_deaths));
for (case_tables) |*ct| {
var it = ct.iterator();
while (it.next()) |entry| {
_ = ns.putAssumeCapacity(entry.key, {});
}
}
}
total_deaths = 0;
for (case_deaths[0 .. case_deaths.len - 1]) |*cd, i| {
inst.cases[i].index = total_deaths;
const len = std.math.cast(@TypeOf(inst.else_deaths), cd.items.len) catch return error.OutOfMemory;
inst.cases[i].deaths = len;
total_deaths += len;
}
{ // else
const else_deaths = std.math.cast(@TypeOf(inst.else_deaths), case_deaths[case_deaths.len - 1].items.len) catch return error.OutOfMemory;
inst.else_index = total_deaths;
inst.else_deaths = else_deaths;
total_deaths += else_deaths;
}
const allocated_slice = try arena.alloc(*ir.Inst, total_deaths);
inst.deaths = allocated_slice.ptr;
for (case_deaths[0 .. case_deaths.len - 1]) |*cd, i| {
std.mem.copy(*ir.Inst, inst.caseDeaths(i), cd.items);
}
std.mem.copy(*ir.Inst, inst.elseDeaths(), case_deaths[case_deaths.len - 1].items);
},
else => {},
}
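A worked example of the death propagation above (ZIR names hypothetical): given a switchbr on %0 with cases A and B, where %1 is referenced only in A's body, analyzing A records %1 in A's table while B's and the else table never see it; the pairwise loop therefore appends %1 to the death lists of B and of else, encoding that %1's lifetime ends on entry to every branch that does not use it.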

View File

@@ -2421,6 +2421,7 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
var stdin_flag: bool = false;
var check_flag: bool = false;
var input_files = ArrayList([]const u8).init(gpa);
defer input_files.deinit();
{
var i: usize = 0;

View File

@@ -463,10 +463,10 @@ pub const TestContext = struct {
var cache_dir = try tmp.dir.makeOpenPath("zig-cache", .{});
defer cache_dir.close();
- const bogus_path = "bogus"; // TODO this will need to be fixed before we can test LLVM extensions
+ const tmp_path = try std.fs.path.join(arena, &[_][]const u8{ ".", "zig-cache", "tmp", &tmp.sub_path });
const zig_cache_directory: Compilation.Directory = .{
.handle = cache_dir,
- .path = try std.fs.path.join(arena, &[_][]const u8{ bogus_path, "zig-cache" }),
+ .path = try std.fs.path.join(arena, &[_][]const u8{ tmp_path, "zig-cache" }),
};
const tmp_src_path = switch (case.extension) {
@@ -475,7 +475,7 @@ pub const TestContext = struct {
};
var root_pkg: Package = .{
- .root_src_directory = .{ .path = bogus_path, .handle = tmp.dir },
+ .root_src_directory = .{ .path = tmp_path, .handle = tmp.dir },
.root_src_path = tmp_src_path,
};
@@ -488,7 +488,7 @@ pub const TestContext = struct {
});
const emit_directory: Compilation.Directory = .{
- .path = bogus_path,
+ .path = tmp_path,
.handle = tmp.dir,
};
const emit_bin: Compilation.EmitLoc = .{

View File

@@ -2863,6 +2863,78 @@ pub const Type = extern union {
};
}
/// Asserts that self.zigTypeTag() == .Int.
pub fn minInt(self: Type, arena: *std.heap.ArenaAllocator, target: Target) !Value {
assert(self.zigTypeTag() == .Int);
const info = self.intInfo(target);
if (!info.signed) {
return Value.initTag(.zero);
}
// A 64-bit int must take the big-int path below: the i64 shortcut would
// overflow when negating (1 << 63).
if ((info.bits - 1) < std.math.maxInt(u6)) {
const payload = try arena.allocator.create(Value.Payload.Int_i64);
payload.* = .{
.int = -(@as(i64, 1) << @truncate(u6, info.bits - 1)),
};
return Value.initPayload(&payload.base);
}
var res = try std.math.big.int.Managed.initSet(&arena.allocator, 1);
try res.shiftLeft(res, info.bits - 1);
res.negate();
const res_const = res.toConst();
if (res_const.positive) {
const val_payload = try arena.allocator.create(Value.Payload.IntBigPositive);
val_payload.* = .{ .limbs = res_const.limbs };
return Value.initPayload(&val_payload.base);
} else {
const val_payload = try arena.allocator.create(Value.Payload.IntBigNegative);
val_payload.* = .{ .limbs = res_const.limbs };
return Value.initPayload(&val_payload.base);
}
}
/// Asserts that self.zigTypeTag() == .Int.
pub fn maxInt(self: Type, arena: *std.heap.ArenaAllocator, target: Target) !Value {
assert(self.zigTypeTag() == .Int);
const info = self.intInfo(target);
// As in minInt, 64-bit signed ints go through the big-int path to avoid overflow.
if (info.signed and (info.bits - 1) < std.math.maxInt(u6)) {
const payload = try arena.allocator.create(Value.Payload.Int_i64);
payload.* = .{
.int = (@as(i64, 1) << @truncate(u6, info.bits - 1)) - 1,
};
return Value.initPayload(&payload.base);
} else if (!info.signed and info.bits <= std.math.maxInt(u6)) {
const payload = try arena.allocator.create(Value.Payload.Int_u64);
payload.* = .{
.int = (@as(u64, 1) << @truncate(u6, info.bits)) - 1,
};
return Value.initPayload(&payload.base);
}
var res = try std.math.big.int.Managed.initSet(&arena.allocator, 1);
try res.shiftLeft(res, info.bits - @boolToInt(info.signed));
const one = std.math.big.int.Const{
.limbs = &[_]std.math.big.Limb{1},
.positive = true,
};
res.sub(res.toConst(), one) catch unreachable;
const res_const = res.toConst();
if (res_const.positive) {
const val_payload = try arena.allocator.create(Value.Payload.IntBigPositive);
val_payload.* = .{ .limbs = res_const.limbs };
return Value.initPayload(&val_payload.base);
} else {
const val_payload = try arena.allocator.create(Value.Payload.IntBigNegative);
val_payload.* = .{ .limbs = res_const.limbs };
return Value.initPayload(&val_payload.base);
}
}
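// Spot checks for the two helpers above (ordinary two's-complement facts):
//   minInt(i8)  == -(1 << 7)    == -128
//   maxInt(u3)  == (1 << 3) - 1 == 7
//   minInt(u32) == 0 (unsigned types always start at zero)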
/// This enum does not directly correspond to `std.builtin.TypeId` because
/// it has extra enum tags in it, as a way of using less memory. For example,
/// even though Zig recognizes `*align(10) i32` and `*i32` both as Pointer types

View File

@@ -565,7 +565,7 @@ pub const Value = extern union {
.int_u64 => return BigIntMutable.init(&space.limbs, self.cast(Payload.Int_u64).?.int).toConst(),
.int_i64 => return BigIntMutable.init(&space.limbs, self.cast(Payload.Int_i64).?.int).toConst(),
.int_big_positive => return self.cast(Payload.IntBigPositive).?.asBigInt(),
- .int_big_negative => return self.cast(Payload.IntBigPositive).?.asBigInt(),
+ .int_big_negative => return self.cast(Payload.IntBigNegative).?.asBigInt(),
}
}
@@ -1233,15 +1233,170 @@ pub const Value = extern union {
}
pub fn eql(a: Value, b: Value) bool {
- if (a.tag() == b.tag() and a.tag() == .enum_literal) {
- const a_name = @fieldParentPtr(Payload.Bytes, "base", a.ptr_otherwise).data;
- const b_name = @fieldParentPtr(Payload.Bytes, "base", b.ptr_otherwise).data;
- return std.mem.eql(u8, a_name, b_name);
+ if (a.tag() == b.tag()) {
+ if (a.tag() == .void_value or a.tag() == .null_value) {
+ return true;
+ } else if (a.tag() == .enum_literal) {
+ const a_name = @fieldParentPtr(Payload.Bytes, "base", a.ptr_otherwise).data;
+ const b_name = @fieldParentPtr(Payload.Bytes, "base", b.ptr_otherwise).data;
+ return std.mem.eql(u8, a_name, b_name);
+ }
}
+ if (a.isType() and b.isType()) {
+ // 128 bytes should be enough to hold both types
+ var buf: [128]u8 = undefined;
+ var fib = std.heap.FixedBufferAllocator.init(&buf);
+ const a_type = a.toType(&fib.allocator) catch unreachable;
+ const b_type = b.toType(&fib.allocator) catch unreachable;
+ return a_type.eql(b_type);
+ }
// TODO non numerical comparisons
return compare(a, .eq, b);
}
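/// Hashes a Value such that semantically equal values hash alike even when
/// their payload representations differ (see the "hash same value different
/// representation" test at the end of this file).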
pub fn hash(self: Value) u64 {
var hasher = std.hash.Wyhash.init(0);
switch (self.tag()) {
.u8_type,
.i8_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_void_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.enum_literal_type,
.anyframe_type,
.ty,
=> {
// Directly return Type.hash; toType can only fail for .int_type and .error_set.
var allocator = std.heap.FixedBufferAllocator.init(&[_]u8{});
return (self.toType(&allocator.allocator) catch unreachable).hash();
},
.error_set => {
// Payload.decl should be same for all instances of the type.
const payload = @fieldParentPtr(Payload.ErrorSet, "base", self.ptr_otherwise);
std.hash.autoHash(&hasher, payload.decl);
},
.int_type => {
const payload = self.cast(Payload.IntType).?;
if (payload.signed) {
var new = Type.Payload.IntSigned{ .bits = payload.bits };
return Type.initPayload(&new.base).hash();
} else {
var new = Type.Payload.IntUnsigned{ .bits = payload.bits };
return Type.initPayload(&new.base).hash();
}
},
.empty_struct_value,
.empty_array,
=> {},
.undef,
.null_value,
.void_value,
.unreachable_value,
=> std.hash.autoHash(&hasher, self.tag()),
.zero, .bool_false => std.hash.autoHash(&hasher, @as(u64, 0)),
.one, .bool_true => std.hash.autoHash(&hasher, @as(u64, 1)),
.float_16, .float_32, .float_64, .float_128 => {},
.enum_literal, .bytes => {
const payload = @fieldParentPtr(Payload.Bytes, "base", self.ptr_otherwise);
hasher.update(payload.data);
},
.int_u64 => {
const payload = @fieldParentPtr(Payload.Int_u64, "base", self.ptr_otherwise);
std.hash.autoHash(&hasher, payload.int);
},
.int_i64 => {
const payload = @fieldParentPtr(Payload.Int_i64, "base", self.ptr_otherwise);
std.hash.autoHash(&hasher, payload.int);
},
.repeated => {
const payload = @fieldParentPtr(Payload.Repeated, "base", self.ptr_otherwise);
std.hash.autoHash(&hasher, payload.val.hash());
},
.ref_val => {
const payload = @fieldParentPtr(Payload.RefVal, "base", self.ptr_otherwise);
std.hash.autoHash(&hasher, payload.val.hash());
},
.int_big_positive, .int_big_negative => {
var space: BigIntSpace = undefined;
const big = self.toBigInt(&space);
if (big.limbs.len == 1) {
// handle like {u,i}64 to ensure same hash as with Int{i,u}64
if (big.positive) {
std.hash.autoHash(&hasher, @as(u64, big.limbs[0]));
} else {
std.hash.autoHash(&hasher, @as(u64, @bitCast(usize, -@bitCast(isize, big.limbs[0]))));
}
} else {
std.hash.autoHash(&hasher, big.positive);
for (big.limbs) |limb| {
std.hash.autoHash(&hasher, limb);
}
}
},
.elem_ptr => {
const payload = @fieldParentPtr(Payload.ElemPtr, "base", self.ptr_otherwise);
std.hash.autoHash(&hasher, payload.array_ptr.hash());
std.hash.autoHash(&hasher, payload.index);
},
.decl_ref => {
const payload = @fieldParentPtr(Payload.DeclRef, "base", self.ptr_otherwise);
std.hash.autoHash(&hasher, payload.decl);
},
.function => {
const payload = @fieldParentPtr(Payload.Function, "base", self.ptr_otherwise);
std.hash.autoHash(&hasher, payload.func);
},
.variable => {
const payload = @fieldParentPtr(Payload.Variable, "base", self.ptr_otherwise);
std.hash.autoHash(&hasher, payload.variable);
},
.@"error" => {
const payload = @fieldParentPtr(Payload.Error, "base", self.ptr_otherwise);
hasher.update(payload.name);
std.hash.autoHash(&hasher, payload.value);
},
}
return hasher.final();
}
/// Asserts the value is a pointer and dereferences it.
/// Returns error.AnalysisFail if the pointer points to a Decl that failed semantic analysis.
pub fn pointerDeref(self: Value, allocator: *Allocator) error{ AnalysisFail, OutOfMemory }!Value {
@@ -1521,6 +1676,87 @@ pub const Value = extern union {
};
}
/// Valid for all types. Asserts the value is not undefined.
pub fn isType(self: Value) bool {
return switch (self.tag()) {
.ty,
.int_type,
.u8_type,
.i8_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_void_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.enum_literal_type,
.anyframe_type,
.error_set,
=> true,
.zero,
.one,
.empty_array,
.bool_true,
.bool_false,
.function,
.variable,
.int_u64,
.int_i64,
.int_big_positive,
.int_big_negative,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
.repeated,
.float_16,
.float_32,
.float_64,
.float_128,
.void_value,
.enum_literal,
.@"error",
.empty_struct_value,
.null_value,
=> false,
.undef => unreachable,
.unreachable_value => unreachable,
};
}
/// This type is not copyable since it may contain pointers to its inner data.
pub const Payload = struct {
tag: Tag,
@@ -1655,3 +1891,18 @@ pub const Value = extern union {
limbs: [(@sizeOf(u64) / @sizeOf(std.math.big.Limb)) + 1]std.math.big.Limb,
};
};
test "hash same value different representation" {
const zero_1 = Value.initTag(.zero);
var payload_1 = Value.Payload.Int_u64{ .int = 0 };
const zero_2 = Value.initPayload(&payload_1.base);
std.testing.expectEqual(zero_1.hash(), zero_2.hash());
var payload_2 = Value.Payload.Int_i64{ .int = 0 };
const zero_3 = Value.initPayload(&payload_2.base);
std.testing.expectEqual(zero_2.hash(), zero_3.hash());
var payload_3 = Value.Payload.IntBigNegative{ .limbs = &[_]std.math.big.Limb{0} };
const zero_4 = Value.initPayload(&payload_3.base);
std.testing.expectEqual(zero_3.hash(), zero_4.hash());
}

View File

@@ -85,8 +85,12 @@ pub const Inst = struct {
block_comptime,
/// Same as `block_flat` but additionally makes the inner instructions execute at comptime.
block_comptime_flat,
/// Boolean AND. See also `bitand`.
booland,
/// Boolean NOT. See also `bitnot`.
boolnot,
/// Boolean OR. See also `bitor`.
boolor,
/// Return a value from a `Block`.
@"break",
breakpoint,
@@ -272,6 +276,12 @@ pub const Inst = struct {
ensure_err_payload_void,
/// Enum literal
enum_literal,
/// A switch expression.
switchbr,
/// A range in a switch case, `lhs...rhs`.
/// Only checks that `lhs < rhs` if they are ints; everything else is
/// validated by the `switchbr` instruction.
switch_range,
pub fn Type(tag: Tag) type {
return switch (tag) {
@@ -327,6 +337,8 @@ pub const Inst = struct {
.array_type,
.bitand,
.bitor,
.booland,
.boolor,
.div,
.mod_rem,
.mul,
@@ -351,6 +363,7 @@ pub const Inst = struct {
.error_union_type,
.merge_error_sets,
.slice_start,
.switch_range,
=> BinOp,
.block,
@@ -389,6 +402,7 @@ pub const Inst = struct {
.enum_literal => EnumLiteral,
.error_set => ErrorSet,
.slice => Slice,
.switchbr => SwitchBr,
};
}
@@ -417,6 +431,8 @@ pub const Inst = struct {
.block_comptime,
.block_comptime_flat,
.boolnot,
.booland,
.boolor,
.breakpoint,
.call,
.cmp_lt,
@@ -493,6 +509,7 @@ pub const Inst = struct {
.slice,
.slice_start,
.import,
.switch_range,
=> false,
.@"break",
@@ -504,6 +521,7 @@ pub const Inst = struct {
.unreach_nocheck,
.@"unreachable",
.loop,
.switchbr,
=> true,
};
}
@@ -987,6 +1005,33 @@ pub const Inst = struct {
sentinel: ?*Inst = null,
},
};
pub const SwitchBr = struct {
pub const base_tag = Tag.switchbr;
base: Inst,
positionals: struct {
target_ptr: *Inst,
/// List of all individual items and ranges
items: []*Inst,
cases: []Case,
else_body: Module.Body,
},
kw_args: struct {
/// Pointer to first range if such exists.
range: ?*Inst = null,
special_prong: enum {
none,
@"else",
underscore,
} = .none,
},
pub const Case = struct {
item: *Inst,
body: Module.Body,
};
};
};
pub const ErrorMsg = struct {
@@ -1218,8 +1263,8 @@ const Writer = struct {
bool => return stream.writeByte("01"[@boolToInt(param)]),
[]u8, []const u8 => return stream.print("\"{Z}\"", .{param}),
BigIntConst, usize => return stream.print("{}", .{param}),
- TypedValue => unreachable, // this is a special case
- *IrModule.Decl => unreachable, // this is a special case
+ TypedValue => return stream.print("TypedValue{{ .ty = {}, .val = {}}}", .{ param.ty, param.val }),
+ *IrModule.Decl => return stream.print("Decl({s})", .{param.name}),
*Inst.Block => {
const name = self.block_table.get(param).?;
return stream.print("\"{Z}\"", .{name});
@@ -1238,6 +1283,26 @@ const Writer = struct {
}
try stream.writeByte(']');
},
[]Inst.SwitchBr.Case => {
if (param.len == 0) {
return stream.writeAll("{}");
}
try stream.writeAll("{\n");
for (param) |*case, i| {
if (i != 0) {
try stream.writeAll(",\n");
}
try stream.writeByteNTimes(' ', self.indent);
self.indent += 2;
try self.writeParamToStream(stream, &case.item);
try stream.writeAll(" => ");
try self.writeParamToStream(stream, &case.body);
self.indent -= 2;
}
try stream.writeByte('\n');
try stream.writeByteNTimes(' ', self.indent - 2);
try stream.writeByte('}');
},
else => |T| @compileError("unimplemented: rendering parameter of type " ++ @typeName(T)),
}
}
@@ -1650,6 +1715,26 @@ const Parser = struct {
try requireEatBytes(self, "]");
return strings.toOwnedSlice();
},
[]Inst.SwitchBr.Case => {
try requireEatBytes(self, "{");
skipSpace(self);
if (eatByte(self, '}')) return &[0]Inst.SwitchBr.Case{};
var cases = std.ArrayList(Inst.SwitchBr.Case).init(&self.arena.allocator);
while (true) {
const cur = try cases.addOne();
skipSpace(self);
cur.item = try self.parseParameterGeneric(*Inst, body_ctx);
skipSpace(self);
try requireEatBytes(self, "=>");
cur.body = try self.parseBody(body_ctx);
skipSpace(self);
if (!eatByte(self, ',')) break;
}
skipSpace(self);
try requireEatBytes(self, "}");
return cases.toOwnedSlice();
},
else => @compileError("Unimplemented: ir parseParameterGeneric for type " ++ @typeName(T)),
}
return self.fail("TODO parse parameter {}", .{@typeName(T)});
@@ -1747,7 +1832,7 @@ pub fn dumpFn(old_module: IrModule, module_fn: *IrModule.Fn) void {
.arena = std.heap.ArenaAllocator.init(allocator),
.old_module = &old_module,
.next_auto_name = 0,
- .names = std.StringHashMap(void).init(allocator),
+ .names = std.StringArrayHashMap(void).init(allocator),
.primitive_table = std.AutoHashMap(Inst.Primitive.Builtin, *Decl).init(allocator),
.indent = 0,
.block_table = std.AutoHashMap(*ir.Inst.Block, *Inst.Block).init(allocator),
@@ -2244,6 +2329,8 @@ const EmitZIR = struct {
.cmp_gte => try self.emitBinOp(inst.src, new_body, inst.castTag(.cmp_gte).?, .cmp_gte),
.cmp_gt => try self.emitBinOp(inst.src, new_body, inst.castTag(.cmp_gt).?, .cmp_gt),
.cmp_neq => try self.emitBinOp(inst.src, new_body, inst.castTag(.cmp_neq).?, .cmp_neq),
.booland => try self.emitBinOp(inst.src, new_body, inst.castTag(.booland).?, .booland),
.boolor => try self.emitBinOp(inst.src, new_body, inst.castTag(.boolor).?, .boolor),
.bitcast => try self.emitCast(inst.src, new_body, inst.castTag(.bitcast).?, .bitcast),
.intcast => try self.emitCast(inst.src, new_body, inst.castTag(.intcast).?, .intcast),
@@ -2470,7 +2557,63 @@ const EmitZIR = struct {
};
break :blk &new_inst.base;
},
.switchbr => blk: {
const old_inst = inst.castTag(.switchbr).?;
const cases = try self.arena.allocator.alloc(Inst.SwitchBr.Case, old_inst.cases.len);
const new_inst = try self.arena.allocator.create(Inst.SwitchBr);
new_inst.* = .{
.base = .{
.src = inst.src,
.tag = Inst.SwitchBr.base_tag,
},
.positionals = .{
.target_ptr = try self.resolveInst(new_body, old_inst.target_ptr),
.cases = cases,
.items = &[_]*Inst{}, // TODO this should actually be populated
.else_body = undefined, // populated below
},
.kw_args = .{},
};
var body_tmp = std.ArrayList(*Inst).init(self.allocator);
defer body_tmp.deinit();
for (old_inst.cases) |*case, i| {
body_tmp.items.len = 0;
const case_deaths = try self.arena.allocator.alloc(*Inst, old_inst.caseDeaths(i).len);
for (old_inst.caseDeaths(i)) |death, j| {
case_deaths[j] = try self.resolveInst(new_body, death);
}
try self.body_metadata.put(&cases[i].body, .{ .deaths = case_deaths });
try self.emitBody(case.body, inst_table, &body_tmp);
const item = (try self.emitTypedValue(inst.src, .{
.ty = old_inst.target_ptr.ty.elemType(),
.val = case.item,
})).inst;
cases[i] = .{
.item = item,
.body = .{ .instructions = try self.arena.allocator.dupe(*Inst, body_tmp.items) },
};
}
{ // else
const else_deaths = try self.arena.allocator.alloc(*Inst, old_inst.elseDeaths().len);
for (old_inst.elseDeaths()) |death, j| {
else_deaths[j] = try self.resolveInst(new_body, death);
}
try self.body_metadata.put(&new_inst.positionals.else_body, .{ .deaths = else_deaths });
body_tmp.items.len = 0;
try self.emitBody(old_inst.else_body, inst_table, &body_tmp);
new_inst.positionals.else_body = .{
.instructions = try self.arena.allocator.dupe(*Inst, body_tmp.items),
};
}
break :blk &new_inst.base;
},
.varptr => @panic("TODO"),
};
try self.metadata.put(new_inst, .{
@@ -2703,3 +2846,52 @@ const EmitZIR = struct {
return decl;
}
};
/// For debugging purposes, like dumpFn but for unanalyzed zir blocks
pub fn dumpZir(allocator: *Allocator, kind: []const u8, decl_name: [*:0]const u8, instructions: []*Inst) !void {
var fib = std.heap.FixedBufferAllocator.init(&[_]u8{});
var module = Module{
.decls = &[_]*Decl{},
.arena = std.heap.ArenaAllocator.init(&fib.allocator),
.metadata = std.AutoHashMap(*Inst, Module.MetaData).init(&fib.allocator),
.body_metadata = std.AutoHashMap(*Module.Body, Module.BodyMetaData).init(&fib.allocator),
};
var write = Writer{
.module = &module,
.inst_table = InstPtrTable.init(allocator),
.block_table = std.AutoHashMap(*Inst.Block, []const u8).init(allocator),
.loop_table = std.AutoHashMap(*Inst.Loop, []const u8).init(allocator),
.arena = std.heap.ArenaAllocator.init(allocator),
.indent = 4,
.next_instr_index = 0,
};
defer write.arena.deinit();
defer write.inst_table.deinit();
defer write.block_table.deinit();
defer write.loop_table.deinit();
try write.inst_table.ensureCapacity(@intCast(u32, instructions.len));
const stderr = std.io.getStdErr().outStream();
try stderr.print("{} {s} {{ // unanalyzed\n", .{ kind, decl_name });
for (instructions) |inst| {
const my_i = write.next_instr_index;
write.next_instr_index += 1;
if (inst.cast(Inst.Block)) |block| {
const name = try std.fmt.allocPrint(&write.arena.allocator, "label_{}", .{my_i});
try write.block_table.put(block, name);
} else if (inst.cast(Inst.Loop)) |loop| {
const name = try std.fmt.allocPrint(&write.arena.allocator, "loop_{}", .{my_i});
try write.loop_table.put(loop, name);
}
try write.inst_table.putNoClobber(inst, .{ .inst = inst, .index = my_i, .name = "inst" });
try stderr.print(" %{} ", .{my_i});
try write.writeInstToStream(stderr, inst);
try stderr.writeByte('\n');
}
try stderr.print("}} // {} {s}\n\n", .{ kind, decl_name });
}

View File

@@ -135,6 +135,10 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
.slice => return analyzeInstSlice(mod, scope, old_inst.castTag(.slice).?),
.slice_start => return analyzeInstSliceStart(mod, scope, old_inst.castTag(.slice_start).?),
.import => return analyzeInstImport(mod, scope, old_inst.castTag(.import).?),
.switchbr => return analyzeInstSwitchBr(mod, scope, old_inst.castTag(.switchbr).?),
.switch_range => return analyzeInstSwitchRange(mod, scope, old_inst.castTag(.switch_range).?),
.booland => return analyzeInstBoolOp(mod, scope, old_inst.castTag(.booland).?),
.boolor => return analyzeInstBoolOp(mod, scope, old_inst.castTag(.boolor).?),
}
}
@@ -551,10 +555,13 @@ fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_c
try analyzeBody(mod, &child_block.base, inst.positionals.body);
- const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items);
- try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
+ try parent_block.instructions.appendSlice(mod.gpa, child_block.instructions.items);
- return copied_instructions[copied_instructions.len - 1];
+ // comptime blocks won't generate any runtime values
+ if (child_block.instructions.items.len == 0)
+ return mod.constVoid(scope, inst.base.src);
+ return parent_block.instructions.items[parent_block.instructions.items.len - 1];
}
fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst {
@@ -1204,13 +1211,233 @@ fn analyzeInstSliceStart(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) Inn
return mod.analyzeSlice(scope, inst.base.src, array_ptr, start, null, null);
}
fn analyzeInstSwitchRange(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const start = try resolveInst(mod, scope, inst.positionals.lhs);
const end = try resolveInst(mod, scope, inst.positionals.rhs);
switch (start.ty.zigTypeTag()) {
.Int, .ComptimeInt => {},
else => return mod.constVoid(scope, inst.base.src),
}
switch (end.ty.zigTypeTag()) {
.Int, .ComptimeInt => {},
else => return mod.constVoid(scope, inst.base.src),
}
if (start.value()) |start_val| {
if (end.value()) |end_val| {
if (start_val.compare(.gte, end_val)) {
return mod.fail(scope, inst.base.src, "range start value must be smaller than the end value", .{});
}
}
}
return mod.constVoid(scope, inst.base.src);
}
fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) InnerError!*Inst {
const target_ptr = try resolveInst(mod, scope, inst.positionals.target_ptr);
const target = try mod.analyzeDeref(scope, inst.base.src, target_ptr, inst.positionals.target_ptr.src);
try validateSwitch(mod, scope, target, inst);
if (try mod.resolveDefinedValue(scope, target)) |target_val| {
for (inst.positionals.cases) |case| {
const resolved = try resolveInst(mod, scope, case.item);
const casted = try mod.coerce(scope, target.ty, resolved);
const item = try mod.resolveConstValue(scope, casted);
if (target_val.eql(item)) {
try analyzeBody(mod, scope, case.body);
return mod.constNoReturn(scope, inst.base.src);
}
}
try analyzeBody(mod, scope, inst.positionals.else_body);
return mod.constNoReturn(scope, inst.base.src);
}
if (inst.positionals.cases.len == 0) {
// No cases; just analyze the else_body.
try analyzeBody(mod, scope, inst.positionals.else_body);
return mod.constNoReturn(scope, inst.base.src);
}
const parent_block = try mod.requireRuntimeBlock(scope, inst.base.src);
const cases = try parent_block.arena.alloc(Inst.SwitchBr.Case, inst.positionals.cases.len);
var case_block: Scope.Block = .{
.parent = parent_block,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
.is_comptime = parent_block.is_comptime,
};
defer case_block.instructions.deinit(mod.gpa);
for (inst.positionals.cases) |case, i| {
// Reset without freeing.
case_block.instructions.items.len = 0;
const resolved = try resolveInst(mod, scope, case.item);
const casted = try mod.coerce(scope, target.ty, resolved);
const item = try mod.resolveConstValue(scope, casted);
try analyzeBody(mod, &case_block.base, case.body);
cases[i] = .{
.item = item,
.body = .{ .instructions = try parent_block.arena.dupe(*Inst, case_block.instructions.items) },
};
}
case_block.instructions.items.len = 0;
try analyzeBody(mod, &case_block.base, inst.positionals.else_body);
const else_body: ir.Body = .{
.instructions = try parent_block.arena.dupe(*Inst, case_block.instructions.items),
};
return mod.addSwitchBr(parent_block, inst.base.src, target_ptr, cases, else_body);
}
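// The comptime path above is exactly what the new behavior test exercises:
//   const a: comptime_int = 1;
//   switch (a) { 1 => b = 1, 2 => b = 2, else => unreachable }
// target resolves to a defined value, item 1 matches, only that case body is
// analyzed, and no runtime switchbr instruction is emitted.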
fn validateSwitch(mod: *Module, scope: *Scope, target: *Inst, inst: *zir.Inst.SwitchBr) InnerError!void {
// validate usage of '_' prongs
if (inst.kw_args.special_prong == .underscore and target.ty.zigTypeTag() != .Enum) {
return mod.fail(scope, inst.base.src, "'_' prong only allowed when switching on non-exhaustive enums", .{});
// TODO notes "'_' prong here" inst.positionals.cases[last].src
}
// check that target type supports ranges
if (inst.kw_args.range) |range_inst| {
switch (target.ty.zigTypeTag()) {
.Int, .ComptimeInt => {},
else => {
return mod.fail(scope, target.src, "ranges not allowed when switching on type {}", .{target.ty});
// TODO notes "range used here" range_inst.src
},
}
}
// validate for duplicate items/missing else prong
switch (target.ty.zigTypeTag()) {
.Enum => return mod.fail(scope, inst.base.src, "TODO validateSwitch .Enum", .{}),
.ErrorSet => return mod.fail(scope, inst.base.src, "TODO validateSwitch .ErrorSet", .{}),
.Union => return mod.fail(scope, inst.base.src, "TODO validateSwitch .Union", .{}),
.Int, .ComptimeInt => {
var range_set = @import("RangeSet.zig").init(mod.gpa);
defer range_set.deinit();
for (inst.positionals.items) |item| {
const maybe_src = if (item.castTag(.switch_range)) |range| blk: {
const start_resolved = try resolveInst(mod, scope, range.positionals.lhs);
const start_casted = try mod.coerce(scope, target.ty, start_resolved);
const end_resolved = try resolveInst(mod, scope, range.positionals.rhs);
const end_casted = try mod.coerce(scope, target.ty, end_resolved);
break :blk try range_set.add(
try mod.resolveConstValue(scope, start_casted),
try mod.resolveConstValue(scope, end_casted),
item.src,
);
} else blk: {
const resolved = try resolveInst(mod, scope, item);
const casted = try mod.coerce(scope, target.ty, resolved);
const value = try mod.resolveConstValue(scope, casted);
break :blk try range_set.add(value, value, item.src);
};
if (maybe_src) |previous_src| {
return mod.fail(scope, item.src, "duplicate switch value", .{});
// TODO notes "previous value is here" previous_src
}
}
if (target.ty.zigTypeTag() == .Int) {
var arena = std.heap.ArenaAllocator.init(mod.gpa);
defer arena.deinit();
const start = try target.ty.minInt(&arena, mod.getTarget());
const end = try target.ty.maxInt(&arena, mod.getTarget());
if (try range_set.spans(start, end)) {
if (inst.kw_args.special_prong == .@"else") {
return mod.fail(scope, inst.base.src, "unreachable else prong, all cases already handled", .{});
}
return;
}
}
if (inst.kw_args.special_prong != .@"else") {
return mod.fail(scope, inst.base.src, "switch must handle all possibilities", .{});
}
},
.Bool => {
var true_count: u8 = 0;
var false_count: u8 = 0;
for (inst.positionals.items) |item| {
const resolved = try resolveInst(mod, scope, item);
const casted = try mod.coerce(scope, Type.initTag(.bool), resolved);
if ((try mod.resolveConstValue(scope, casted)).toBool()) {
true_count += 1;
} else {
false_count += 1;
}
if (true_count > 1 or false_count > 1) {
return mod.fail(scope, item.src, "duplicate switch value", .{});
}
}
if ((true_count + false_count < 2) and inst.kw_args.special_prong != .@"else") {
return mod.fail(scope, inst.base.src, "switch must handle all possibilities", .{});
}
if ((true_count + false_count == 2) and inst.kw_args.special_prong == .@"else") {
return mod.fail(scope, inst.base.src, "unreachable else prong, all cases already handled", .{});
}
},
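// For example: switch (b) { true => ..., false => ... } passes; omitting
// false without an else fails "switch must handle all possibilities"; and
// covering both values plus an else fails "unreachable else prong".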
.EnumLiteral, .Void, .Fn, .Pointer, .Type => {
if (inst.kw_args.special_prong != .@"else") {
return mod.fail(scope, inst.base.src, "else prong required when switching on type '{}'", .{target.ty});
}
var seen_values = std.HashMap(Value, usize, Value.hash, Value.eql, std.hash_map.DefaultMaxLoadPercentage).init(mod.gpa);
defer seen_values.deinit();
for (inst.positionals.items) |item| {
const resolved = try resolveInst(mod, scope, item);
const casted = try mod.coerce(scope, target.ty, resolved);
const val = try mod.resolveConstValue(scope, casted);
if (try seen_values.fetchPut(val, item.src)) |prev| {
return mod.fail(scope, item.src, "duplicate switch value", .{});
// TODO notes "previous value here" prev.value
}
}
},
.ErrorUnion,
.NoReturn,
.Array,
.Struct,
.Undefined,
.Null,
.Optional,
.BoundFn,
.Opaque,
.Vector,
.Frame,
.AnyFrame,
.ComptimeFloat,
.Float,
=> {
return mod.fail(scope, target.src, "invalid switch target type '{}'", .{target.ty});
},
}
}
fn analyzeInstImport(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const operand = try resolveConstString(mod, scope, inst.positionals.operand);
const file_scope = mod.analyzeImport(scope, inst.base.src, operand) catch |err| switch (err) {
- // error.ImportOutsidePkgPath => {
- //     return mod.fail(scope, inst.base.src, "import of file outside package path: '{}'", .{operand});
- // },
+ error.ImportOutsidePkgPath => {
+ return mod.fail(scope, inst.base.src, "import of file outside package path: '{}'", .{operand});
+ },
error.FileNotFound => {
return mod.fail(scope, inst.base.src, "unable to find '{}'", .{operand});
},
@@ -1456,6 +1683,28 @@ fn analyzeInstBoolNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerEr
return mod.addUnOp(b, inst.base.src, bool_type, .not, operand);
}
fn analyzeInstBoolOp(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const bool_type = Type.initTag(.bool);
const uncasted_lhs = try resolveInst(mod, scope, inst.positionals.lhs);
const lhs = try mod.coerce(scope, bool_type, uncasted_lhs);
const uncasted_rhs = try resolveInst(mod, scope, inst.positionals.rhs);
const rhs = try mod.coerce(scope, bool_type, uncasted_rhs);
const is_bool_or = inst.base.tag == .boolor;
if (lhs.value()) |lhs_val| {
if (rhs.value()) |rhs_val| {
if (is_bool_or) {
return mod.constBool(scope, inst.base.src, lhs_val.toBool() or rhs_val.toBool());
} else {
return mod.constBool(scope, inst.base.src, lhs_val.toBool() and rhs_val.toBool());
}
}
}
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
return mod.addBinOp(b, inst.base.src, bool_type, if (is_bool_or) .boolor else .booland, lhs, rhs);
}
fn analyzeInstIsNonNull(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
const operand = try resolveInst(mod, scope, inst.positionals.operand);
return mod.analyzeIsNull(scope, inst.base.src, operand, invert_logic);
@@ -1473,7 +1722,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
if (try mod.resolveDefinedValue(scope, cond)) |cond_val| {
const body = if (cond_val.toBool()) &inst.positionals.then_body else &inst.positionals.else_body;
try analyzeBody(mod, scope, body.*);
- return mod.constVoid(scope, inst.base.src);
+ return mod.constNoReturn(scope, inst.base.src);
}
const parent_block = try mod.requireRuntimeBlock(scope, inst.base.src);

View File

@@ -974,6 +974,43 @@ pub fn addCases(ctx: *TestContext) !void {
,
"hello\nhello\nhello\nhello\nhello\n",
);
// comptime switch
case.addCompareOutput(
\\pub export fn _start() noreturn {
\\ assert(foo() == 1);
\\ exit();
\\}
\\
\\fn foo() u32 {
\\ const a: comptime_int = 1;
\\ var b: u32 = 0;
\\ switch (a) {
\\ 1 => b = 1,
\\ 2 => b = 2,
\\ else => unreachable,
\\ }
\\ return b;
\\}
\\
\\pub fn assert(ok: bool) void {
\\ if (!ok) unreachable; // assertion failure
\\}
\\
\\fn exit() noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (0)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
"",
);
}
{