Merge pull request #11699 from ziglang/empty-error-sets

stage2: fixes for error union semantics
This commit is contained in:
Andrew Kelley
2022-05-25 03:12:34 -04:00
committed by GitHub
14 changed files with 1560 additions and 734 deletions

View File

@@ -1798,7 +1798,7 @@ fn resetSegfaultHandler() void {
.mask = os.empty_sigset,
.flags = 0,
};
// do nothing if an error happens to avoid a double-panic
// To avoid a double-panic, do nothing if an error happens here.
updateSegfaultHandler(&act) catch {};
}

View File

@@ -5899,12 +5899,22 @@ fn zirErrorToInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
if (val.isUndef()) {
return sema.addConstUndef(result_ty);
}
const payload = try sema.arena.create(Value.Payload.U64);
payload.* = .{
.base = .{ .tag = .int_u64 },
.data = (try sema.mod.getErrorValue(val.castTag(.@"error").?.data.name)).value,
};
return sema.addConstant(result_ty, Value.initPayload(&payload.base));
switch (val.tag()) {
.@"error" => {
const payload = try sema.arena.create(Value.Payload.U64);
payload.* = .{
.base = .{ .tag = .int_u64 },
.data = (try sema.mod.getErrorValue(val.castTag(.@"error").?.data.name)).value,
};
return sema.addConstant(result_ty, Value.initPayload(&payload.base));
},
// This is not a valid combination with the type `anyerror`.
.the_only_possible_value => unreachable,
// Assume it's already encoded as an integer.
else => return sema.addConstant(result_ty, val),
}
}
try sema.requireRuntimeBlock(block, src);
@@ -6261,19 +6271,24 @@ fn zirErrUnionPayload(
});
}
const result_ty = operand_ty.errorUnionPayload();
if (try sema.resolveDefinedValue(block, src, operand)) |val| {
if (val.getError()) |name| {
return sema.fail(block, src, "caught unexpected error '{s}'", .{name});
}
const data = val.castTag(.eu_payload).?.data;
const result_ty = operand_ty.errorUnionPayload();
return sema.addConstant(result_ty, data);
}
try sema.requireRuntimeBlock(block, src);
if (safety_check and block.wantSafety()) {
// If the error set has no fields then no safety check is needed.
if (safety_check and block.wantSafety() and
operand_ty.errorUnionSet().errorSetCardinality() != .zero)
{
try sema.panicUnwrapError(block, src, operand, .unwrap_errunion_err, .is_non_err);
}
const result_ty = operand_ty.errorUnionPayload();
return block.addTyOp(.unwrap_errunion_payload, result_ty, operand);
}
@@ -6311,7 +6326,8 @@ fn analyzeErrUnionPayloadPtr(
});
}
const payload_ty = operand_ty.elemType().errorUnionPayload();
const err_union_ty = operand_ty.elemType();
const payload_ty = err_union_ty.errorUnionPayload();
const operand_pointer_ty = try Type.ptr(sema.arena, sema.mod, .{
.pointee_type = payload_ty,
.mutable = !operand_ty.isConstPtr(),
@@ -6351,9 +6367,14 @@ fn analyzeErrUnionPayloadPtr(
}
try sema.requireRuntimeBlock(block, src);
if (safety_check and block.wantSafety()) {
// If the error set has no fields then no safety check is needed.
if (safety_check and block.wantSafety() and
err_union_ty.errorUnionSet().errorSetCardinality() != .zero)
{
try sema.panicUnwrapError(block, src, operand, .unwrap_errunion_err_ptr, .is_non_err_ptr);
}
const air_tag: Air.Inst.Tag = if (initializing)
.errunion_payload_ptr_set
else
@@ -20929,6 +20950,11 @@ fn analyzeLoad(
.Pointer => ptr_ty.childType(),
else => return sema.fail(block, ptr_src, "expected pointer, found '{}'", .{ptr_ty.fmt(sema.mod)}),
};
if (try sema.typeHasOnePossibleValue(block, src, elem_ty)) |opv| {
return sema.addConstant(elem_ty, opv);
}
if (try sema.resolveDefinedValue(block, ptr_src, ptr)) |ptr_val| {
if (try sema.pointerDeref(block, ptr_src, ptr_val, ptr_ty)) |elem_val| {
return sema.addConstant(elem_ty, elem_val);
@@ -23295,16 +23321,11 @@ pub fn typeHasOnePossibleValue(
.const_slice,
.mut_slice,
.anyopaque,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
.enum_literal,
.anyerror_void_error_union,
.error_union,
.error_set,
.error_set_single,
.error_set_inferred,
.error_set_merged,
.@"opaque",
.var_args_param,
.manyptr_u8,
@@ -23333,6 +23354,56 @@ pub fn typeHasOnePossibleValue(
.bound_fn,
=> return null,
.optional => {
var buf: Type.Payload.ElemType = undefined;
const child_ty = ty.optionalChild(&buf);
if (child_ty.isNoReturn()) {
return Value.@"null";
} else {
return null;
}
},
.error_union => {
const error_ty = ty.errorUnionSet();
switch (error_ty.errorSetCardinality()) {
.zero => {
const payload_ty = ty.errorUnionPayload();
if (try typeHasOnePossibleValue(sema, block, src, payload_ty)) |payload_val| {
return try Value.Tag.eu_payload.create(sema.arena, payload_val);
} else {
return null;
}
},
.one => {
if (ty.errorUnionPayload().isNoReturn()) {
const error_val = (try typeHasOnePossibleValue(sema, block, src, error_ty)).?;
return error_val;
} else {
return null;
}
},
.many => return null,
}
},
.error_set_single => {
const name = ty.castTag(.error_set_single).?.data;
return try Value.Tag.@"error".create(sema.arena, .{ .name = name });
},
.error_set => {
const err_set_obj = ty.castTag(.error_set).?.data;
const names = err_set_obj.names.keys();
if (names.len > 1) return null;
return try Value.Tag.@"error".create(sema.arena, .{ .name = names[0] });
},
.error_set_merged => {
const name_map = ty.castTag(.error_set_merged).?.data;
const names = name_map.keys();
if (names.len > 1) return null;
return try Value.Tag.@"error".create(sema.arena, .{ .name = names[0] });
},
.@"struct" => {
const resolved_ty = try sema.resolveTypeFields(block, src, ty);
const s = resolved_ty.castTag(.@"struct").?.data;

View File

@@ -3,6 +3,7 @@ const builtin = @import("builtin");
const mem = std.mem;
const math = std.math;
const assert = std.debug.assert;
const codegen = @import("../../codegen.zig");
const Air = @import("../../Air.zig");
const Mir = @import("Mir.zig");
const Emit = @import("Emit.zig");
@@ -22,12 +23,14 @@ const leb128 = std.leb;
const log = std.log.scoped(.codegen);
const build_options = @import("build_options");
const GenerateSymbolError = @import("../../codegen.zig").GenerateSymbolError;
const FnResult = @import("../../codegen.zig").FnResult;
const DebugInfoOutput = @import("../../codegen.zig").DebugInfoOutput;
const GenerateSymbolError = codegen.GenerateSymbolError;
const FnResult = codegen.FnResult;
const DebugInfoOutput = codegen.DebugInfoOutput;
const bits = @import("bits.zig");
const abi = @import("abi.zig");
const errUnionPayloadOffset = codegen.errUnionPayloadOffset;
const errUnionErrorOffset = codegen.errUnionErrorOffset;
const RegisterManager = abi.RegisterManager;
const RegisterLock = RegisterManager.RegisterLock;
const Register = bits.Register;
@@ -3272,7 +3275,14 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
fn ret(self: *Self, mcv: MCValue) !void {
const ret_ty = self.fn_type.fnReturnType();
try self.setRegOrMem(ret_ty, self.ret_mcv, mcv);
switch (self.ret_mcv) {
.immediate => {
assert(ret_ty.isError());
},
else => {
try self.setRegOrMem(ret_ty, self.ret_mcv, mcv);
},
}
// Just add space for an instruction, patch this later
const index = try self.addInst(.{
.tag = .nop,
@@ -3601,30 +3611,39 @@ fn isErr(self: *Self, ty: Type, operand: MCValue) !MCValue {
const error_type = ty.errorUnionSet();
const payload_type = ty.errorUnionPayload();
if (!error_type.hasRuntimeBits()) {
if (error_type.errorSetCardinality() == .zero) {
return MCValue{ .immediate = 0 }; // always false
} else if (!payload_type.hasRuntimeBits()) {
if (error_type.abiSize(self.target.*) <= 8) {
const reg_mcv: MCValue = switch (operand) {
.register => operand,
else => .{ .register = try self.copyToTmpRegister(error_type, operand) },
};
}
const err_off = errUnionErrorOffset(payload_type, self.target.*);
switch (operand) {
.stack_offset => |off| {
const offset = off - @intCast(u32, err_off);
const tmp_reg = try self.copyToTmpRegister(Type.anyerror, .{ .stack_offset = offset });
_ = try self.addInst(.{
.tag = .cmp_immediate,
.data = .{ .r_imm12_sh = .{
.rn = reg_mcv.register,
.rn = tmp_reg,
.imm12 = 0,
} },
});
return MCValue{ .compare_flags_unsigned = .gt };
} else {
return self.fail("TODO isErr for errors with size > 8", .{});
}
} else {
return self.fail("TODO isErr for non-empty payloads", .{});
},
.register => |reg| {
if (err_off > 0 or payload_type.hasRuntimeBitsIgnoreComptime()) {
return self.fail("TODO implement isErr for register operand with payload bits", .{});
}
_ = try self.addInst(.{
.tag = .cmp_immediate,
.data = .{ .r_imm12_sh = .{
.rn = reg,
.imm12 = 0,
} },
});
},
else => return self.fail("TODO implement isErr for {}", .{operand}),
}
return MCValue{ .compare_flags_unsigned = .gt };
}
fn isNonErr(self: *Self, ty: Type, operand: MCValue) !MCValue {
@@ -4483,7 +4502,7 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
const ref_int = @enumToInt(inst);
if (ref_int < Air.Inst.Ref.typed_value_map.len) {
const tv = Air.Inst.Ref.typed_value_map[ref_int];
if (!tv.ty.hasRuntimeBits()) {
if (!tv.ty.hasRuntimeBitsIgnoreComptime() and !tv.ty.isError()) {
return MCValue{ .none = {} };
}
return self.genTypedValue(tv);
@@ -4491,7 +4510,7 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
// If the type has no codegen bits, no need to store it.
const inst_ty = self.air.typeOf(inst);
if (!inst_ty.hasRuntimeBits())
if (!inst_ty.hasRuntimeBitsIgnoreComptime() and !inst_ty.isError())
return MCValue{ .none = {} };
const inst_index = @intCast(Air.Inst.Index, ref_int - Air.Inst.Ref.typed_value_map.len);
@@ -4674,32 +4693,38 @@ fn genTypedValue(self: *Self, typed_value: TypedValue) InnerError!MCValue {
}
},
.ErrorSet => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
switch (typed_value.val.tag()) {
.@"error" => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
},
else => {
// In this case we are rendering an error union which has a 0 bits payload.
return MCValue{ .immediate = 0 };
},
}
},
.ErrorUnion => {
const error_type = typed_value.ty.errorUnionSet();
const payload_type = typed_value.ty.errorUnionPayload();
if (typed_value.val.castTag(.eu_payload)) |pl| {
if (!payload_type.hasRuntimeBits()) {
// We use the error type directly as the type.
return MCValue{ .immediate = 0 };
}
_ = pl;
return self.fail("TODO implement error union const of type '{}' (non-error)", .{typed_value.ty.fmtDebug()});
} else {
if (!payload_type.hasRuntimeBits()) {
// We use the error type directly as the type.
return self.genTypedValue(.{ .ty = error_type, .val = typed_value.val });
}
return self.fail("TODO implement error union const of type '{}' (error)", .{typed_value.ty.fmtDebug()});
if (error_type.errorSetCardinality() == .zero) {
const payload_val = typed_value.val.castTag(.eu_payload).?.data;
return self.genTypedValue(.{ .ty = payload_type, .val = payload_val });
}
const is_pl = typed_value.val.errorUnionIsPayload();
if (!payload_type.hasRuntimeBitsIgnoreComptime()) {
// We use the error type directly as the type.
const err_val = if (!is_pl) typed_value.val else Value.initTag(.zero);
return self.genTypedValue(.{ .ty = error_type, .val = err_val });
}
return self.lowerUnnamedConst(typed_value);
},
.Struct => {
return self.lowerUnnamedConst(typed_value);
@@ -4796,13 +4821,16 @@ fn resolveCallingConventionValues(self: *Self, fn_ty: Type) !CallMCValues {
if (ret_ty.zigTypeTag() == .NoReturn) {
result.return_value = .{ .unreach = {} };
} else if (!ret_ty.hasRuntimeBits()) {
} else if (!ret_ty.hasRuntimeBitsIgnoreComptime() and !ret_ty.isError()) {
result.return_value = .{ .none = {} };
} else switch (cc) {
.Naked => unreachable,
.Unspecified, .C => {
const ret_ty_size = @intCast(u32, ret_ty.abiSize(self.target.*));
if (ret_ty_size <= 8) {
if (ret_ty_size == 0) {
assert(ret_ty.isError());
result.return_value = .{ .immediate = 0 };
} else if (ret_ty_size <= 8) {
result.return_value = .{ .register = registerAlias(c_abi_int_return_regs[0], ret_ty_size) };
} else {
return self.fail("TODO support more return types for ARM backend", .{});

View File

@@ -3,6 +3,7 @@ const builtin = @import("builtin");
const mem = std.mem;
const math = std.math;
const assert = std.debug.assert;
const codegen = @import("../../codegen.zig");
const Air = @import("../../Air.zig");
const Mir = @import("Mir.zig");
const Emit = @import("Emit.zig");
@@ -22,12 +23,14 @@ const leb128 = std.leb;
const log = std.log.scoped(.codegen);
const build_options = @import("build_options");
const FnResult = @import("../../codegen.zig").FnResult;
const GenerateSymbolError = @import("../../codegen.zig").GenerateSymbolError;
const DebugInfoOutput = @import("../../codegen.zig").DebugInfoOutput;
const FnResult = codegen.FnResult;
const GenerateSymbolError = codegen.GenerateSymbolError;
const DebugInfoOutput = codegen.DebugInfoOutput;
const bits = @import("bits.zig");
const abi = @import("abi.zig");
const errUnionPayloadOffset = codegen.errUnionPayloadOffset;
const errUnionErrorOffset = codegen.errUnionErrorOffset;
const RegisterManager = abi.RegisterManager;
const RegisterLock = RegisterManager.RegisterLock;
const Register = bits.Register;
@@ -1763,19 +1766,26 @@ fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
/// Given an error union, returns the error
fn errUnionErr(self: *Self, error_union_mcv: MCValue, error_union_ty: Type) !MCValue {
const err_ty = error_union_ty.errorUnionSet();
const payload_ty = error_union_ty.errorUnionPayload();
if (!payload_ty.hasRuntimeBits()) return error_union_mcv;
if (err_ty.errorSetCardinality() == .zero) {
return MCValue{ .immediate = 0 };
}
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
return error_union_mcv;
}
const err_offset = @intCast(u32, errUnionErrorOffset(payload_ty, self.target.*));
switch (error_union_mcv) {
.register => return self.fail("TODO errUnionErr for registers", .{}),
.stack_argument_offset => |off| {
return MCValue{ .stack_argument_offset = off };
return MCValue{ .stack_argument_offset = off - err_offset };
},
.stack_offset => |off| {
return MCValue{ .stack_offset = off };
return MCValue{ .stack_offset = off - err_offset };
},
.memory => |addr| {
return MCValue{ .memory = addr };
return MCValue{ .memory = addr + err_offset };
},
else => unreachable, // invalid MCValue for an error union
}
@@ -1793,24 +1803,26 @@ fn airUnwrapErrErr(self: *Self, inst: Air.Inst.Index) !void {
/// Given an error union, returns the payload
fn errUnionPayload(self: *Self, error_union_mcv: MCValue, error_union_ty: Type) !MCValue {
const err_ty = error_union_ty.errorUnionSet();
const payload_ty = error_union_ty.errorUnionPayload();
if (!payload_ty.hasRuntimeBits()) return MCValue.none;
const error_ty = error_union_ty.errorUnionSet();
const error_size = @intCast(u32, error_ty.abiSize(self.target.*));
const eu_align = @intCast(u32, error_union_ty.abiAlignment(self.target.*));
const offset = std.mem.alignForwardGeneric(u32, error_size, eu_align);
if (err_ty.errorSetCardinality() == .zero) {
return error_union_mcv;
}
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
return MCValue.none;
}
const payload_offset = @intCast(u32, errUnionPayloadOffset(payload_ty, self.target.*));
switch (error_union_mcv) {
.register => return self.fail("TODO errUnionPayload for registers", .{}),
.stack_argument_offset => |off| {
return MCValue{ .stack_argument_offset = off - offset };
return MCValue{ .stack_argument_offset = off - payload_offset };
},
.stack_offset => |off| {
return MCValue{ .stack_offset = off - offset };
return MCValue{ .stack_offset = off - payload_offset };
},
.memory => |addr| {
return MCValue{ .memory = addr - offset };
return MCValue{ .memory = addr + payload_offset };
},
else => unreachable, // invalid MCValue for an error union
}
@@ -3478,6 +3490,9 @@ fn airRet(self: *Self, inst: Air.Inst.Index) !void {
switch (self.ret_mcv) {
.none => {},
.immediate => {
assert(ret_ty.isError());
},
.register => |reg| {
// Return result by value
try self.genSetReg(ret_ty, reg, operand);
@@ -3867,7 +3882,7 @@ fn isErr(self: *Self, ty: Type, operand: MCValue) !MCValue {
const error_type = ty.errorUnionSet();
const error_int_type = Type.initTag(.u16);
if (!error_type.hasRuntimeBits()) {
if (error_type.errorSetCardinality() == .zero) {
return MCValue{ .immediate = 0 }; // always false
}
@@ -4975,7 +4990,7 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
const ref_int = @enumToInt(inst);
if (ref_int < Air.Inst.Ref.typed_value_map.len) {
const tv = Air.Inst.Ref.typed_value_map[ref_int];
if (!tv.ty.hasRuntimeBits()) {
if (!tv.ty.hasRuntimeBitsIgnoreComptime() and !tv.ty.isError()) {
return MCValue{ .none = {} };
}
return self.genTypedValue(tv);
@@ -4983,7 +4998,7 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
// If the type has no codegen bits, no need to store it.
const inst_ty = self.air.typeOf(inst);
if (!inst_ty.hasRuntimeBits())
if (!inst_ty.hasRuntimeBitsIgnoreComptime() and !inst_ty.isError())
return MCValue{ .none = {} };
const inst_index = @intCast(Air.Inst.Index, ref_int - Air.Inst.Ref.typed_value_map.len);
@@ -5147,26 +5162,35 @@ fn genTypedValue(self: *Self, typed_value: TypedValue) InnerError!MCValue {
}
},
.ErrorSet => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
switch (typed_value.val.tag()) {
.@"error" => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
},
else => {
// In this case we are rendering an error union which has a 0 bits payload.
return MCValue{ .immediate = 0 };
},
}
},
.ErrorUnion => {
const error_type = typed_value.ty.errorUnionSet();
const payload_type = typed_value.ty.errorUnionPayload();
if (typed_value.val.castTag(.eu_payload)) |_| {
if (!payload_type.hasRuntimeBits()) {
// We use the error type directly as the type.
return MCValue{ .immediate = 0 };
}
} else {
if (!payload_type.hasRuntimeBits()) {
// We use the error type directly as the type.
return self.genTypedValue(.{ .ty = error_type, .val = typed_value.val });
}
if (error_type.errorSetCardinality() == .zero) {
const payload_val = typed_value.val.castTag(.eu_payload).?.data;
return self.genTypedValue(.{ .ty = payload_type, .val = payload_val });
}
const is_pl = typed_value.val.errorUnionIsPayload();
if (!payload_type.hasRuntimeBitsIgnoreComptime()) {
// We use the error type directly as the type.
const err_val = if (!is_pl) typed_value.val else Value.initTag(.zero);
return self.genTypedValue(.{ .ty = error_type, .val = err_val });
}
},
@@ -5231,7 +5255,7 @@ fn resolveCallingConventionValues(self: *Self, fn_ty: Type) !CallMCValues {
if (ret_ty.zigTypeTag() == .NoReturn) {
result.return_value = .{ .unreach = {} };
} else if (!ret_ty.hasRuntimeBits()) {
} else if (!ret_ty.hasRuntimeBitsIgnoreComptime()) {
result.return_value = .{ .none = {} };
} else {
const ret_ty_size = @intCast(u32, ret_ty.abiSize(self.target.*));
@@ -5278,11 +5302,14 @@ fn resolveCallingConventionValues(self: *Self, fn_ty: Type) !CallMCValues {
.Unspecified => {
if (ret_ty.zigTypeTag() == .NoReturn) {
result.return_value = .{ .unreach = {} };
} else if (!ret_ty.hasRuntimeBits()) {
} else if (!ret_ty.hasRuntimeBitsIgnoreComptime() and !ret_ty.isError()) {
result.return_value = .{ .none = {} };
} else {
const ret_ty_size = @intCast(u32, ret_ty.abiSize(self.target.*));
if (ret_ty_size <= 4) {
if (ret_ty_size == 0) {
assert(ret_ty.isError());
result.return_value = .{ .immediate = 0 };
} else if (ret_ty_size <= 4) {
result.return_value = .{ .register = .r0 };
} else {
// The result is returned by reference, not by

View File

@@ -22,6 +22,8 @@ const Liveness = @import("../../Liveness.zig");
const Mir = @import("Mir.zig");
const Emit = @import("Emit.zig");
const abi = @import("abi.zig");
const errUnionPayloadOffset = codegen.errUnionPayloadOffset;
const errUnionErrorOffset = codegen.errUnionErrorOffset;
/// Wasm Value, created when generating an instruction
const WValue = union(enum) {
@@ -636,7 +638,7 @@ fn resolveInst(self: *Self, ref: Air.Inst.Ref) InnerError!WValue {
// means we must generate it from a constant.
const val = self.air.value(ref).?;
const ty = self.air.typeOf(ref);
if (!ty.hasRuntimeBitsIgnoreComptime() and !ty.isInt()) {
if (!ty.hasRuntimeBitsIgnoreComptime() and !ty.isInt() and !ty.isError()) {
gop.value_ptr.* = WValue{ .none = {} };
return gop.value_ptr.*;
}
@@ -804,6 +806,8 @@ fn genFunctype(gpa: Allocator, fn_info: Type.Payload.Function.Data, target: std.
} else {
try returns.append(typeToValtype(fn_info.return_type, target));
}
} else if (fn_info.return_type.isError()) {
try returns.append(.i32);
}
// param types
@@ -1373,13 +1377,18 @@ fn isByRef(ty: Type, target: std.Target) bool {
.Int => return ty.intInfo(target).bits > 64,
.Float => return ty.floatBits(target) > 64,
.ErrorUnion => {
const has_tag = ty.errorUnionSet().hasRuntimeBitsIgnoreComptime();
const has_pl = ty.errorUnionPayload().hasRuntimeBitsIgnoreComptime();
if (!has_tag or !has_pl) return false;
return ty.hasRuntimeBitsIgnoreComptime();
const err_ty = ty.errorUnionSet();
const pl_ty = ty.errorUnionPayload();
if (err_ty.errorSetCardinality() == .zero) {
return isByRef(pl_ty, target);
}
if (!pl_ty.hasRuntimeBitsIgnoreComptime()) {
return false;
}
return true;
},
.Optional => {
if (ty.isPtrLikeOptional()) return false;
if (ty.optionalReprIsPayload()) return false;
var buf: Type.Payload.ElemType = undefined;
return ty.optionalChild(&buf).hasRuntimeBitsIgnoreComptime();
},
@@ -1624,13 +1633,14 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
fn airRet(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const operand = try self.resolveInst(un_op);
const ret_ty = self.decl.ty.fnReturnType();
const fn_info = self.decl.ty.fnInfo();
const ret_ty = fn_info.return_type;
// result must be stored in the stack and we return a pointer
// to the stack instead
if (self.return_value != .none) {
try self.store(self.return_value, operand, self.decl.ty.fnReturnType(), 0);
} else if (self.decl.ty.fnInfo().cc == .C and ret_ty.hasRuntimeBitsIgnoreComptime()) {
try self.store(self.return_value, operand, ret_ty, 0);
} else if (fn_info.cc == .C and ret_ty.hasRuntimeBitsIgnoreComptime()) {
switch (ret_ty.zigTypeTag()) {
// Aggregate types can be lowered as a singular value
.Struct, .Union => {
@@ -1650,7 +1660,11 @@ fn airRet(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
else => try self.emitWValue(operand),
}
} else {
try self.emitWValue(operand);
if (!ret_ty.hasRuntimeBitsIgnoreComptime() and ret_ty.isError()) {
try self.addImm32(0);
} else {
try self.emitWValue(operand);
}
}
try self.restoreStackPointer();
try self.addTag(.@"return");
@@ -1675,7 +1689,13 @@ fn airRetLoad(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const operand = try self.resolveInst(un_op);
const ret_ty = self.air.typeOf(un_op).childType();
if (!ret_ty.hasRuntimeBitsIgnoreComptime()) return WValue.none;
if (!ret_ty.hasRuntimeBitsIgnoreComptime()) {
if (ret_ty.isError()) {
try self.addImm32(0);
} else {
return WValue.none;
}
}
if (!firstParamSRet(self.decl.ty.fnInfo(), self.target)) {
const result = try self.load(operand, ret_ty, 0);
@@ -1723,8 +1743,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
const sret = if (first_param_sret) blk: {
const sret_local = try self.allocStack(ret_ty);
const ptr_offset = try self.buildPointerOffset(sret_local, 0, .new);
try self.emitWValue(ptr_offset);
try self.lowerToStack(sret_local);
break :blk sret_local;
} else WValue{ .none = {} };
@@ -1754,7 +1773,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
try self.addLabel(.call_indirect, fn_type_index);
}
if (self.liveness.isUnused(inst) or !ret_ty.hasRuntimeBitsIgnoreComptime()) {
if (self.liveness.isUnused(inst) or (!ret_ty.hasRuntimeBitsIgnoreComptime() and !ret_ty.isError())) {
return WValue.none;
} else if (ret_ty.isNoReturn()) {
try self.addTag(.@"unreachable");
@@ -1796,8 +1815,11 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErro
.ErrorUnion => {
const err_ty = ty.errorUnionSet();
const pl_ty = ty.errorUnionPayload();
if (err_ty.errorSetCardinality() == .zero) {
return self.store(lhs, rhs, pl_ty, 0);
}
if (!pl_ty.hasRuntimeBitsIgnoreComptime()) {
return self.store(lhs, rhs, err_ty, 0);
return self.store(lhs, rhs, Type.anyerror, 0);
}
const len = @intCast(u32, ty.abiSize(self.target));
@@ -1812,6 +1834,9 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErro
if (!pl_ty.hasRuntimeBitsIgnoreComptime()) {
return self.store(lhs, rhs, Type.u8, 0);
}
if (pl_ty.zigTypeTag() == .ErrorSet) {
return self.store(lhs, rhs, Type.anyerror, 0);
}
const len = @intCast(u32, ty.abiSize(self.target));
return self.memcpy(lhs, rhs, .{ .imm32 = len });
@@ -2178,7 +2203,7 @@ fn lowerParentPtr(self: *Self, ptr_val: Value, ptr_child_ty: Type) InnerError!WV
const parent_ptr = try self.lowerParentPtr(payload_ptr.container_ptr, payload_ptr.container_ty);
var buf: Type.Payload.ElemType = undefined;
const payload_ty = payload_ptr.container_ty.optionalChild(&buf);
if (!payload_ty.hasRuntimeBitsIgnoreComptime() or payload_ty.isPtrLikeOptional()) {
if (!payload_ty.hasRuntimeBitsIgnoreComptime() or payload_ty.optionalReprIsPayload()) {
return parent_ptr;
}
@@ -2256,6 +2281,7 @@ fn lowerConstant(self: *Self, val: Value, ty: Type) InnerError!WValue {
const target = self.target;
switch (ty.zigTypeTag()) {
.Void => return WValue{ .none = {} },
.Int => {
const int_info = ty.intInfo(self.target);
switch (int_info.signedness) {
@@ -2324,11 +2350,15 @@ fn lowerConstant(self: *Self, val: Value, ty: Type) InnerError!WValue {
},
.ErrorUnion => {
const error_type = ty.errorUnionSet();
if (error_type.errorSetCardinality() == .zero) {
const pl_val = if (val.castTag(.eu_payload)) |pl| pl.data else Value.initTag(.undef);
return self.lowerConstant(pl_val, ty.errorUnionPayload());
}
const is_pl = val.errorUnionIsPayload();
const err_val = if (!is_pl) val else Value.initTag(.zero);
return self.lowerConstant(err_val, error_type);
},
.Optional => if (ty.isPtrLikeOptional()) {
.Optional => if (ty.optionalReprIsPayload()) {
var buf: Type.Payload.ElemType = undefined;
const pl_ty = ty.optionalChild(&buf);
if (val.castTag(.opt_payload)) |payload| {
@@ -2367,7 +2397,7 @@ fn emitUndefined(self: *Self, ty: Type) InnerError!WValue {
.Optional => {
var buf: Type.Payload.ElemType = undefined;
const pl_ty = ty.optionalChild(&buf);
if (ty.isPtrLikeOptional()) {
if (ty.optionalReprIsPayload()) {
return self.emitUndefined(pl_ty);
}
return WValue{ .imm32 = 0xaaaaaaaa };
@@ -2517,7 +2547,7 @@ fn airCmp(self: *Self, inst: Air.Inst.Index, op: std.math.CompareOperator) Inner
}
fn cmp(self: *Self, lhs: WValue, rhs: WValue, ty: Type, op: std.math.CompareOperator) InnerError!WValue {
if (ty.zigTypeTag() == .Optional and !ty.isPtrLikeOptional()) {
if (ty.zigTypeTag() == .Optional and !ty.optionalReprIsPayload()) {
var buf: Type.Payload.ElemType = undefined;
const payload_ty = ty.optionalChild(&buf);
if (payload_ty.hasRuntimeBitsIgnoreComptime()) {
@@ -2889,15 +2919,22 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
fn airIsErr(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError!WValue {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const operand = try self.resolveInst(un_op);
const err_ty = self.air.typeOf(un_op);
const pl_ty = err_ty.errorUnionPayload();
const err_union_ty = self.air.typeOf(un_op);
const pl_ty = err_union_ty.errorUnionPayload();
if (err_union_ty.errorUnionSet().errorSetCardinality() == .zero) {
switch (opcode) {
.i32_ne => return WValue{ .imm32 = 0 },
.i32_eq => return WValue{ .imm32 = 1 },
else => unreachable,
}
}
// load the error tag value
try self.emitWValue(operand);
if (pl_ty.hasRuntimeBitsIgnoreComptime()) {
try self.addMemArg(.i32_load16_u, .{
.offset = operand.offset(),
.alignment = err_ty.errorUnionSet().abiAlignment(self.target),
.offset = operand.offset() + @intCast(u32, errUnionErrorOffset(pl_ty, self.target)),
.alignment = Type.anyerror.abiAlignment(self.target),
});
}
@@ -2905,7 +2942,7 @@ fn airIsErr(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError!W
try self.addImm32(0);
try self.addTag(Mir.Inst.Tag.fromOpcode(opcode));
const is_err_tmp = try self.allocLocal(Type.initTag(.i32)); // result is always an i32
const is_err_tmp = try self.allocLocal(Type.i32);
try self.addLabel(.local_set, is_err_tmp.local);
return is_err_tmp;
}
@@ -2917,14 +2954,18 @@ fn airUnwrapErrUnionPayload(self: *Self, inst: Air.Inst.Index, op_is_ptr: bool)
const op_ty = self.air.typeOf(ty_op.operand);
const err_ty = if (op_is_ptr) op_ty.childType() else op_ty;
const payload_ty = err_ty.errorUnionPayload();
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) return WValue{ .none = {} };
const err_align = err_ty.abiAlignment(self.target);
const set_size = err_ty.errorUnionSet().abiSize(self.target);
const offset = mem.alignForwardGeneric(u64, set_size, err_align);
if (op_is_ptr or isByRef(payload_ty, self.target)) {
return self.buildPointerOffset(operand, offset, .new);
if (err_ty.errorUnionSet().errorSetCardinality() == .zero) {
return operand;
}
return self.load(operand, payload_ty, @intCast(u32, offset));
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) return WValue{ .none = {} };
const pl_offset = @intCast(u32, errUnionPayloadOffset(payload_ty, self.target));
if (op_is_ptr or isByRef(payload_ty, self.target)) {
return self.buildPointerOffset(operand, pl_offset, .new);
}
return self.load(operand, payload_ty, pl_offset);
}
fn airUnwrapErrUnionError(self: *Self, inst: Air.Inst.Index, op_is_ptr: bool) InnerError!WValue {
@@ -2935,11 +2976,16 @@ fn airUnwrapErrUnionError(self: *Self, inst: Air.Inst.Index, op_is_ptr: bool) In
const op_ty = self.air.typeOf(ty_op.operand);
const err_ty = if (op_is_ptr) op_ty.childType() else op_ty;
const payload_ty = err_ty.errorUnionPayload();
if (err_ty.errorUnionSet().errorSetCardinality() == .zero) {
return WValue{ .imm32 = 0 };
}
if (op_is_ptr or !payload_ty.hasRuntimeBitsIgnoreComptime()) {
return operand;
}
return self.load(operand, err_ty.errorUnionSet(), 0);
return self.load(operand, Type.anyerror, @intCast(u32, errUnionErrorOffset(payload_ty, self.target)));
}
fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
@@ -2947,22 +2993,26 @@ fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand = try self.resolveInst(ty_op.operand);
const err_ty = self.air.typeOfIndex(inst);
const op_ty = self.air.typeOf(ty_op.operand);
if (!op_ty.hasRuntimeBitsIgnoreComptime()) return operand;
const err_union_ty = self.air.getRefType(ty_op.ty);
const err_align = err_union_ty.abiAlignment(self.target);
const set_size = err_union_ty.errorUnionSet().abiSize(self.target);
const offset = mem.alignForwardGeneric(u64, set_size, err_align);
if (err_ty.errorUnionSet().errorSetCardinality() == .zero) {
return operand;
}
const err_union = try self.allocStack(err_union_ty);
const payload_ptr = try self.buildPointerOffset(err_union, offset, .new);
try self.store(payload_ptr, operand, op_ty, 0);
const pl_ty = self.air.typeOf(ty_op.operand);
if (!pl_ty.hasRuntimeBitsIgnoreComptime()) {
return operand;
}
const err_union = try self.allocStack(err_ty);
const payload_ptr = try self.buildPointerOffset(err_union, @intCast(u32, errUnionPayloadOffset(pl_ty, self.target)), .new);
try self.store(payload_ptr, operand, pl_ty, 0);
// ensure we also write '0' to the error part, so any present stack value gets overwritten by it.
try self.emitWValue(err_union);
try self.addImm32(0);
try self.addMemArg(.i32_store16, .{ .offset = err_union.offset(), .alignment = 2 });
const err_val_offset = @intCast(u32, errUnionErrorOffset(pl_ty, self.target));
try self.addMemArg(.i32_store16, .{ .offset = err_union.offset() + err_val_offset, .alignment = 2 });
return err_union;
}
@@ -2973,17 +3023,18 @@ fn airWrapErrUnionErr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand = try self.resolveInst(ty_op.operand);
const err_ty = self.air.getRefType(ty_op.ty);
const pl_ty = err_ty.errorUnionPayload();
if (!err_ty.errorUnionPayload().hasRuntimeBitsIgnoreComptime()) return operand;
if (!pl_ty.hasRuntimeBitsIgnoreComptime()) {
return operand;
}
const err_union = try self.allocStack(err_ty);
try self.store(err_union, operand, err_ty.errorUnionSet(), 0);
// store error value
try self.store(err_union, operand, Type.anyerror, @intCast(u32, errUnionErrorOffset(pl_ty, self.target)));
// write 'undefined' to the payload
const err_align = err_ty.abiAlignment(self.target);
const set_size = err_ty.errorUnionSet().abiSize(self.target);
const offset = mem.alignForwardGeneric(u64, set_size, err_align);
const payload_ptr = try self.buildPointerOffset(err_union, offset, .new);
const payload_ptr = try self.buildPointerOffset(err_union, @intCast(u32, errUnionPayloadOffset(pl_ty, self.target)), .new);
const len = @intCast(u32, err_ty.errorUnionPayload().abiSize(self.target));
try self.memset(payload_ptr, .{ .imm32 = len }, .{ .imm32 = 0xaaaaaaaa });
@@ -3074,7 +3125,7 @@ fn airIsNull(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode, op_kind: en
fn isNull(self: *Self, operand: WValue, optional_ty: Type, opcode: wasm.Opcode) InnerError!WValue {
try self.emitWValue(operand);
if (!optional_ty.isPtrLikeOptional()) {
if (!optional_ty.optionalReprIsPayload()) {
var buf: Type.Payload.ElemType = undefined;
const payload_ty = optional_ty.optionalChild(&buf);
// When payload is zero-bits, we can treat operand as a value, rather than
@@ -3100,7 +3151,7 @@ fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const opt_ty = self.air.typeOf(ty_op.operand);
const payload_ty = self.air.typeOfIndex(inst);
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) return WValue{ .none = {} };
if (opt_ty.isPtrLikeOptional()) return operand;
if (opt_ty.optionalReprIsPayload()) return operand;
const offset = opt_ty.abiSize(self.target) - payload_ty.abiSize(self.target);
@@ -3120,7 +3171,7 @@ fn airOptionalPayloadPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
var buf: Type.Payload.ElemType = undefined;
const payload_ty = opt_ty.optionalChild(&buf);
if (!payload_ty.hasRuntimeBitsIgnoreComptime() or opt_ty.isPtrLikeOptional()) {
if (!payload_ty.hasRuntimeBitsIgnoreComptime() or opt_ty.optionalReprIsPayload()) {
return operand;
}
@@ -3138,7 +3189,7 @@ fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) InnerError!WValue
return self.fail("TODO: Implement OptionalPayloadPtrSet for optional with zero-sized type {}", .{payload_ty.fmtDebug()});
}
if (opt_ty.isPtrLikeOptional()) {
if (opt_ty.optionalReprIsPayload()) {
return operand;
}
@@ -3169,7 +3220,7 @@ fn airWrapOptional(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const operand = try self.resolveInst(ty_op.operand);
const op_ty = self.air.typeOfIndex(inst);
if (op_ty.isPtrLikeOptional()) {
if (op_ty.optionalReprIsPayload()) {
return operand;
}
const offset = std.math.cast(u32, op_ty.abiSize(self.target) - payload_ty.abiSize(self.target)) catch {
@@ -3927,12 +3978,16 @@ fn airFptrunc(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const err_set_ty = self.air.typeOf(ty_op.operand).childType();
const err_ty = err_set_ty.errorUnionSet();
const payload_ty = err_set_ty.errorUnionPayload();
const operand = try self.resolveInst(ty_op.operand);
// set error-tag to '0' to annotate error union is non-error
try self.store(operand, .{ .imm32 = 0 }, err_ty, 0);
try self.store(
operand,
.{ .imm32 = 0 },
Type.anyerror,
@intCast(u32, errUnionErrorOffset(payload_ty, self.target)),
);
if (self.liveness.isUnused(inst)) return WValue{ .none = {} };
@@ -3940,11 +3995,7 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) InnerError!WValue
return operand;
}
const err_align = err_set_ty.abiAlignment(self.target);
const set_size = err_ty.abiSize(self.target);
const offset = mem.alignForwardGeneric(u64, set_size, err_align);
return self.buildPointerOffset(operand, @intCast(u32, offset), .new);
return self.buildPointerOffset(operand, @intCast(u32, errUnionPayloadOffset(payload_ty, self.target)), .new);
}
fn airFieldParentPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {

View File

@@ -2,6 +2,7 @@ const std = @import("std");
const build_options = @import("build_options");
const builtin = @import("builtin");
const assert = std.debug.assert;
const codegen = @import("../../codegen.zig");
const leb128 = std.leb;
const link = @import("../../link.zig");
const log = std.log.scoped(.codegen);
@@ -12,11 +13,11 @@ const trace = @import("../../tracy.zig").trace;
const Air = @import("../../Air.zig");
const Allocator = mem.Allocator;
const Compilation = @import("../../Compilation.zig");
const DebugInfoOutput = @import("../../codegen.zig").DebugInfoOutput;
const DebugInfoOutput = codegen.DebugInfoOutput;
const DW = std.dwarf;
const ErrorMsg = Module.ErrorMsg;
const FnResult = @import("../../codegen.zig").FnResult;
const GenerateSymbolError = @import("../../codegen.zig").GenerateSymbolError;
const FnResult = codegen.FnResult;
const GenerateSymbolError = codegen.GenerateSymbolError;
const Emit = @import("Emit.zig");
const Liveness = @import("../../Liveness.zig");
const Mir = @import("Mir.zig");
@@ -28,6 +29,8 @@ const Value = @import("../../value.zig").Value;
const bits = @import("bits.zig");
const abi = @import("abi.zig");
const errUnionPayloadOffset = codegen.errUnionPayloadOffset;
const errUnionErrorOffset = codegen.errUnionErrorOffset;
const callee_preserved_regs = abi.callee_preserved_regs;
const caller_preserved_regs = abi.caller_preserved_regs;
@@ -854,7 +857,7 @@ fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
const ptr_ty = self.air.typeOfIndex(inst);
const elem_ty = ptr_ty.elemType();
if (!elem_ty.hasRuntimeBits()) {
if (!elem_ty.hasRuntimeBitsIgnoreComptime()) {
return self.allocMem(inst, @sizeOf(usize), @alignOf(usize));
}
@@ -1786,21 +1789,34 @@ fn airUnwrapErrErr(self: *Self, inst: Air.Inst.Index) !void {
const err_ty = err_union_ty.errorUnionSet();
const payload_ty = err_union_ty.errorUnionPayload();
const operand = try self.resolveInst(ty_op.operand);
const operand_lock: ?RegisterLock = switch (operand) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
else => null,
};
defer if (operand_lock) |lock| self.register_manager.unlockReg(lock);
const result: MCValue = result: {
if (!payload_ty.hasRuntimeBits()) break :result operand;
if (err_ty.errorSetCardinality() == .zero) {
break :result MCValue{ .immediate = 0 };
}
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
break :result operand;
}
const err_off = errUnionErrorOffset(payload_ty, self.target.*);
switch (operand) {
.stack_offset => |off| {
break :result MCValue{ .stack_offset = off };
const offset = off - @intCast(i32, err_off);
break :result MCValue{ .stack_offset = offset };
},
.register => {
.register => |reg| {
// TODO reuse operand
break :result try self.copyToRegisterWithInstTracking(inst, err_ty, operand);
const lock = self.register_manager.lockRegAssumeUnused(reg);
defer self.register_manager.unlockReg(lock);
const result = try self.copyToRegisterWithInstTracking(inst, err_union_ty, operand);
if (err_off > 0) {
const shift = @intCast(u6, err_off * 8);
try self.genShiftBinOpMir(.shr, err_union_ty, result.register, .{ .immediate = shift });
} else {
try self.truncateRegister(Type.anyerror, result.register);
}
break :result result;
},
else => return self.fail("TODO implement unwrap_err_err for {}", .{operand}),
}
@@ -1815,32 +1831,37 @@ fn airUnwrapErrPayload(self: *Self, inst: Air.Inst.Index) !void {
}
const err_union_ty = self.air.typeOf(ty_op.operand);
const payload_ty = err_union_ty.errorUnionPayload();
const err_ty = err_union_ty.errorUnionSet();
const operand = try self.resolveInst(ty_op.operand);
const result: MCValue = result: {
if (!payload_ty.hasRuntimeBits()) break :result MCValue.none;
if (err_ty.errorSetCardinality() == .zero) {
// TODO check if we can reuse
break :result operand;
}
const operand = try self.resolveInst(ty_op.operand);
const operand_lock: ?RegisterLock = switch (operand) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
else => null,
};
defer if (operand_lock) |lock| self.register_manager.unlockReg(lock);
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
break :result MCValue.none;
}
const abi_align = err_union_ty.abiAlignment(self.target.*);
const err_ty = err_union_ty.errorUnionSet();
const err_abi_size = mem.alignForwardGeneric(u32, @intCast(u32, err_ty.abiSize(self.target.*)), abi_align);
const payload_off = errUnionPayloadOffset(payload_ty, self.target.*);
switch (operand) {
.stack_offset => |off| {
const offset = off - @intCast(i32, err_abi_size);
const offset = off - @intCast(i32, payload_off);
break :result MCValue{ .stack_offset = offset };
},
.register => {
.register => |reg| {
// TODO reuse operand
const shift = @intCast(u6, err_abi_size * @sizeOf(usize));
const lock = self.register_manager.lockRegAssumeUnused(reg);
defer self.register_manager.unlockReg(lock);
const result = try self.copyToRegisterWithInstTracking(inst, err_union_ty, operand);
try self.genShiftBinOpMir(.shr, Type.usize, result.register, .{ .immediate = shift });
break :result MCValue{
.register = registerAlias(result.register, @intCast(u32, payload_ty.abiSize(self.target.*))),
};
if (payload_off > 0) {
const shift = @intCast(u6, payload_off * 8);
try self.genShiftBinOpMir(.shr, err_union_ty, result.register, .{ .immediate = shift });
} else {
try self.truncateRegister(payload_ty, result.register);
}
break :result result;
},
else => return self.fail("TODO implement unwrap_err_payload for {}", .{operand}),
}
@@ -1935,24 +1956,37 @@ fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
/// T to E!T
fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
if (self.liveness.isUnused(inst)) {
return self.finishAir(inst, .dead, .{ ty_op.operand, .none, .none });
}
const error_union_ty = self.air.getRefType(ty_op.ty);
const error_ty = error_union_ty.errorUnionSet();
const payload_ty = error_union_ty.errorUnionPayload();
const operand = try self.resolveInst(ty_op.operand);
assert(payload_ty.hasRuntimeBits());
const abi_size = @intCast(u32, error_union_ty.abiSize(self.target.*));
const abi_align = error_union_ty.abiAlignment(self.target.*);
const err_abi_size = @intCast(u32, error_ty.abiSize(self.target.*));
const stack_offset = @intCast(i32, try self.allocMem(inst, abi_size, abi_align));
const offset = mem.alignForwardGeneric(u32, err_abi_size, abi_align);
try self.genSetStack(error_ty, stack_offset, .{ .immediate = 0 }, .{});
try self.genSetStack(payload_ty, stack_offset - @intCast(i32, offset), operand, .{});
const result: MCValue = result: {
if (error_ty.errorSetCardinality() == .zero) {
break :result operand;
}
return self.finishAir(inst, .{ .stack_offset = stack_offset }, .{ ty_op.operand, .none, .none });
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
break :result operand;
}
const abi_size = @intCast(u32, error_union_ty.abiSize(self.target.*));
const abi_align = error_union_ty.abiAlignment(self.target.*);
const stack_offset = @intCast(i32, try self.allocMem(inst, abi_size, abi_align));
const payload_off = errUnionPayloadOffset(payload_ty, self.target.*);
const err_off = errUnionErrorOffset(payload_ty, self.target.*);
try self.genSetStack(payload_ty, stack_offset - @intCast(i32, payload_off), operand, .{});
try self.genSetStack(Type.anyerror, stack_offset - @intCast(i32, err_off), .{ .immediate = 0 }, .{});
break :result MCValue{ .stack_offset = stack_offset };
};
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
}
/// E to E!T
@@ -1962,19 +1996,22 @@ fn airWrapErrUnionErr(self: *Self, inst: Air.Inst.Index) !void {
return self.finishAir(inst, .dead, .{ ty_op.operand, .none, .none });
}
const error_union_ty = self.air.getRefType(ty_op.ty);
const error_ty = error_union_ty.errorUnionSet();
const payload_ty = error_union_ty.errorUnionPayload();
const err = try self.resolveInst(ty_op.operand);
const operand = try self.resolveInst(ty_op.operand);
const result: MCValue = result: {
if (!payload_ty.hasRuntimeBits()) break :result err;
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
break :result operand;
}
const abi_size = @intCast(u32, error_union_ty.abiSize(self.target.*));
const abi_align = error_union_ty.abiAlignment(self.target.*);
const err_abi_size = @intCast(u32, error_ty.abiSize(self.target.*));
const stack_offset = @intCast(i32, try self.allocMem(inst, abi_size, abi_align));
const offset = mem.alignForwardGeneric(u32, err_abi_size, abi_align);
try self.genSetStack(error_ty, stack_offset, err, .{});
try self.genSetStack(payload_ty, stack_offset - @intCast(i32, offset), .undef, .{});
const payload_off = errUnionPayloadOffset(payload_ty, self.target.*);
const err_off = errUnionErrorOffset(payload_ty, self.target.*);
try self.genSetStack(Type.anyerror, stack_offset - @intCast(i32, err_off), operand, .{});
try self.genSetStack(payload_ty, stack_offset - @intCast(i32, payload_off), .undef, .{});
break :result MCValue{ .stack_offset = stack_offset };
};
@@ -2535,7 +2572,7 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const elem_ty = self.air.typeOfIndex(inst);
const result: MCValue = result: {
if (!elem_ty.hasRuntimeBits())
if (!elem_ty.hasRuntimeBitsIgnoreComptime())
break :result MCValue.none;
const ptr = try self.resolveInst(ty_op.operand);
@@ -4102,6 +4139,9 @@ fn airRet(self: *Self, inst: Air.Inst.Index) !void {
const operand = try self.resolveInst(un_op);
const ret_ty = self.fn_type.fnReturnType();
switch (self.ret_mcv) {
.immediate => {
assert(ret_ty.isError());
},
.stack_offset => {
const reg = try self.copyToTmpRegister(Type.usize, self.ret_mcv);
const reg_lock = self.register_manager.lockRegAssumeUnused(reg);
@@ -4134,6 +4174,9 @@ fn airRetLoad(self: *Self, inst: Air.Inst.Index) !void {
const ptr_ty = self.air.typeOf(un_op);
const elem_ty = ptr_ty.elemType();
switch (self.ret_mcv) {
.immediate => {
assert(elem_ty.isError());
},
.stack_offset => {
const reg = try self.copyToTmpRegister(Type.usize, self.ret_mcv);
const reg_lock = self.register_manager.lockRegAssumeUnused(reg);
@@ -4377,7 +4420,6 @@ fn genVarDbgInfo(
fn addDbgInfoTypeReloc(self: *Self, ty: Type) !void {
switch (self.debug_output) {
.dwarf => |dw| {
assert(ty.hasRuntimeBits());
const dbg_info = &dw.dbg_info;
const index = dbg_info.items.len;
try dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
@@ -4604,7 +4646,7 @@ fn isNull(self: *Self, inst: Air.Inst.Index, ty: Type, operand: MCValue) !MCValu
const cmp_ty: Type = if (!ty.isPtrLikeOptional()) blk: {
var buf: Type.Payload.ElemType = undefined;
const payload_ty = ty.optionalChild(&buf);
break :blk if (payload_ty.hasRuntimeBits()) Type.bool else ty;
break :blk if (payload_ty.hasRuntimeBitsIgnoreComptime()) Type.bool else ty;
} else ty;
try self.genBinOpMir(.cmp, cmp_ty, operand, MCValue{ .immediate = 0 });
@@ -4620,25 +4662,36 @@ fn isNonNull(self: *Self, inst: Air.Inst.Index, ty: Type, operand: MCValue) !MCV
fn isErr(self: *Self, inst: Air.Inst.Index, ty: Type, operand: MCValue) !MCValue {
const err_type = ty.errorUnionSet();
const payload_type = ty.errorUnionPayload();
if (!err_type.hasRuntimeBits()) {
if (err_type.errorSetCardinality() == .zero) {
return MCValue{ .immediate = 0 }; // always false
}
try self.spillCompareFlagsIfOccupied();
self.compare_flags_inst = inst;
if (!payload_type.hasRuntimeBits()) {
if (err_type.abiSize(self.target.*) <= 8) {
try self.genBinOpMir(.cmp, err_type, operand, MCValue{ .immediate = 0 });
return MCValue{ .compare_flags_unsigned = .gt };
} else {
return self.fail("TODO isErr for errors with size larger than register size", .{});
}
} else {
try self.genBinOpMir(.cmp, err_type, operand, MCValue{ .immediate = 0 });
return MCValue{ .compare_flags_unsigned = .gt };
const err_off = errUnionErrorOffset(ty.errorUnionPayload(), self.target.*);
switch (operand) {
.stack_offset => |off| {
const offset = off - @intCast(i32, err_off);
try self.genBinOpMir(.cmp, Type.anyerror, .{ .stack_offset = offset }, .{ .immediate = 0 });
},
.register => |reg| {
const maybe_lock = self.register_manager.lockReg(reg);
defer if (maybe_lock) |lock| self.register_manager.unlockReg(lock);
const tmp_reg = try self.copyToTmpRegister(ty, operand);
if (err_off > 0) {
const shift = @intCast(u6, err_off * 8);
try self.genShiftBinOpMir(.shr, ty, tmp_reg, .{ .immediate = shift });
} else {
try self.truncateRegister(Type.anyerror, tmp_reg);
}
try self.genBinOpMir(.cmp, Type.anyerror, .{ .register = tmp_reg }, .{ .immediate = 0 });
},
else => return self.fail("TODO implement isErr for {}", .{operand}),
}
return MCValue{ .compare_flags_unsigned = .gt };
}
fn isNonErr(self: *Self, inst: Air.Inst.Index, ty: Type, operand: MCValue) !MCValue {
@@ -5461,6 +5514,21 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue, opts: Inl
.immediate => |x_big| {
const base_reg = opts.dest_stack_base orelse .rbp;
switch (abi_size) {
0 => {
assert(ty.isError());
const payload = try self.addExtra(Mir.ImmPair{
.dest_off = @bitCast(u32, -stack_offset),
.operand = @truncate(u32, x_big),
});
_ = try self.addInst(.{
.tag = .mov_mem_imm,
.ops = Mir.Inst.Ops.encode(.{
.reg1 = base_reg,
.flags = 0b00,
}),
.data = .{ .payload = payload },
});
},
1, 2, 4 => {
const payload = try self.addExtra(Mir.ImmPair{
.dest_off = @bitCast(u32, -stack_offset),
@@ -6643,7 +6711,7 @@ pub fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
const ref_int = @enumToInt(inst);
if (ref_int < Air.Inst.Ref.typed_value_map.len) {
const tv = Air.Inst.Ref.typed_value_map[ref_int];
if (!tv.ty.hasRuntimeBits()) {
if (!tv.ty.hasRuntimeBitsIgnoreComptime() and !tv.ty.isError()) {
return MCValue{ .none = {} };
}
return self.genTypedValue(tv);
@@ -6651,7 +6719,7 @@ pub fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
// If the type has no codegen bits, no need to store it.
const inst_ty = self.air.typeOf(inst);
if (!inst_ty.hasRuntimeBits())
if (!inst_ty.hasRuntimeBitsIgnoreComptime() and !inst_ty.isError())
return MCValue{ .none = {} };
const inst_index = @intCast(Air.Inst.Index, ref_int - Air.Inst.Ref.typed_value_map.len);
@@ -6780,6 +6848,7 @@ fn genTypedValue(self: *Self, typed_value: TypedValue) InnerError!MCValue {
const target = self.target.*;
switch (typed_value.ty.zigTypeTag()) {
.Void => return MCValue{ .none = {} },
.Pointer => switch (typed_value.ty.ptrSize()) {
.Slice => {},
else => {
@@ -6841,26 +6910,35 @@ fn genTypedValue(self: *Self, typed_value: TypedValue) InnerError!MCValue {
}
},
.ErrorSet => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
switch (typed_value.val.tag()) {
.@"error" => {
const err_name = typed_value.val.castTag(.@"error").?.data.name;
const module = self.bin_file.options.module.?;
const global_error_set = module.global_error_set;
const error_index = global_error_set.get(err_name).?;
return MCValue{ .immediate = error_index };
},
else => {
// In this case we are rendering an error union which has a 0 bits payload.
return MCValue{ .immediate = 0 };
},
}
},
.ErrorUnion => {
const error_type = typed_value.ty.errorUnionSet();
const payload_type = typed_value.ty.errorUnionPayload();
if (typed_value.val.castTag(.eu_payload)) |_| {
if (!payload_type.hasRuntimeBits()) {
// We use the error type directly as the type.
return MCValue{ .immediate = 0 };
}
} else {
if (!payload_type.hasRuntimeBits()) {
// We use the error type directly as the type.
return self.genTypedValue(.{ .ty = error_type, .val = typed_value.val });
}
if (error_type.errorSetCardinality() == .zero) {
const payload_val = typed_value.val.castTag(.eu_payload).?.data;
return self.genTypedValue(.{ .ty = payload_type, .val = payload_val });
}
const is_pl = typed_value.val.errorUnionIsPayload();
if (!payload_type.hasRuntimeBitsIgnoreComptime()) {
// We use the error type directly as the type.
const err_val = if (!is_pl) typed_value.val else Value.initTag(.zero);
return self.genTypedValue(.{ .ty = error_type, .val = err_val });
}
},
@@ -6868,7 +6946,6 @@ fn genTypedValue(self: *Self, typed_value: TypedValue) InnerError!MCValue {
.ComptimeFloat => unreachable,
.Type => unreachable,
.EnumLiteral => unreachable,
.Void => unreachable,
.NoReturn => unreachable,
.Undefined => unreachable,
.Null => unreachable,
@@ -6922,11 +6999,14 @@ fn resolveCallingConventionValues(self: *Self, fn_ty: Type) !CallMCValues {
// Return values
if (ret_ty.zigTypeTag() == .NoReturn) {
result.return_value = .{ .unreach = {} };
} else if (!ret_ty.hasRuntimeBits()) {
} else if (!ret_ty.hasRuntimeBitsIgnoreComptime() and !ret_ty.isError()) {
result.return_value = .{ .none = {} };
} else {
const ret_ty_size = @intCast(u32, ret_ty.abiSize(self.target.*));
if (ret_ty_size <= 8) {
if (ret_ty_size == 0) {
assert(ret_ty.isError());
result.return_value = .{ .immediate = 0 };
} else if (ret_ty_size <= 8) {
const aliased_reg = registerAlias(c_abi_int_return_regs[0], ret_ty_size);
result.return_value = .{ .register = aliased_reg };
} else {

View File

@@ -442,7 +442,10 @@ pub fn generateSymbol(
.Int => {
const info = typed_value.ty.intInfo(target);
if (info.bits <= 8) {
const x = @intCast(u8, typed_value.val.toUnsignedInt(target));
const x: u8 = switch (info.signedness) {
.unsigned => @intCast(u8, typed_value.val.toUnsignedInt(target)),
.signed => @bitCast(u8, @intCast(i8, typed_value.val.toSignedInt())),
};
try code.append(x);
return Result{ .appended = {} };
}
@@ -654,7 +657,7 @@ pub fn generateSymbol(
return Result{ .appended = {} };
}
if (typed_value.ty.isPtrLikeOptional()) {
if (typed_value.ty.optionalReprIsPayload()) {
if (typed_value.val.castTag(.opt_payload)) |payload| {
switch (try generateSymbol(bin_file, src_loc, .{
.ty = payload_type,
@@ -702,16 +705,50 @@ pub fn generateSymbol(
.ErrorUnion => {
const error_ty = typed_value.ty.errorUnionSet();
const payload_ty = typed_value.ty.errorUnionPayload();
if (error_ty.errorSetCardinality() == .zero) {
const payload_val = typed_value.val.castTag(.eu_payload).?.data;
return generateSymbol(bin_file, src_loc, .{
.ty = payload_ty,
.val = payload_val,
}, code, debug_output, reloc_info);
}
const is_payload = typed_value.val.errorUnionIsPayload();
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
const err_val = if (is_payload) Value.initTag(.zero) else typed_value.val;
return generateSymbol(bin_file, src_loc, .{
.ty = error_ty,
.val = err_val,
}, code, debug_output, reloc_info);
}
const payload_align = payload_ty.abiAlignment(target);
const error_align = Type.anyerror.abiAlignment(target);
const abi_align = typed_value.ty.abiAlignment(target);
{
const error_val = if (!is_payload) typed_value.val else Value.initTag(.zero);
const begin = code.items.len;
// error value first when its type is larger than the error union's payload
if (error_align > payload_align) {
switch (try generateSymbol(bin_file, src_loc, .{
.ty = error_ty,
.val = error_val,
.val = if (is_payload) Value.initTag(.zero) else typed_value.val,
}, code, debug_output, reloc_info)) {
.appended => {},
.externally_managed => |external_slice| {
code.appendSliceAssumeCapacity(external_slice);
},
.fail => |em| return Result{ .fail = em },
}
}
// emit payload part of the error union
{
const begin = code.items.len;
const payload_val = if (typed_value.val.castTag(.eu_payload)) |val| val.data else Value.initTag(.undef);
switch (try generateSymbol(bin_file, src_loc, .{
.ty = payload_ty,
.val = payload_val,
}, code, debug_output, reloc_info)) {
.appended => {},
.externally_managed => |external_slice| {
@@ -728,12 +765,12 @@ pub fn generateSymbol(
}
}
if (payload_ty.hasRuntimeBits()) {
// Payload size is larger than error set, so emit our error set last
if (error_align <= payload_align) {
const begin = code.items.len;
const payload_val = if (typed_value.val.castTag(.eu_payload)) |val| val.data else Value.initTag(.undef);
switch (try generateSymbol(bin_file, src_loc, .{
.ty = payload_ty,
.val = payload_val,
.ty = error_ty,
.val = if (is_payload) Value.initTag(.zero) else typed_value.val,
}, code, debug_output, reloc_info)) {
.appended => {},
.externally_managed => |external_slice| {
@@ -760,7 +797,7 @@ pub fn generateSymbol(
try code.writer().writeInt(u32, kv.value, endian);
},
else => {
try code.writer().writeByteNTimes(0, @intCast(usize, typed_value.ty.abiSize(target)));
try code.writer().writeByteNTimes(0, @intCast(usize, Type.anyerror.abiSize(target)));
},
}
return Result{ .appended = {} };
@@ -853,3 +890,23 @@ fn lowerDeclRef(
return Result{ .appended = {} };
}
pub fn errUnionPayloadOffset(payload_ty: Type, target: std.Target) u64 {
const payload_align = payload_ty.abiAlignment(target);
const error_align = Type.anyerror.abiAlignment(target);
if (payload_align >= error_align) {
return 0;
} else {
return mem.alignForwardGeneric(u64, Type.anyerror.abiSize(target), payload_align);
}
}
pub fn errUnionErrorOffset(payload_ty: Type, target: std.Target) u64 {
const payload_align = payload_ty.abiAlignment(target);
const error_align = Type.anyerror.abiAlignment(target);
if (payload_align >= error_align) {
return mem.alignForwardGeneric(u64, payload_ty.abiSize(target), error_align);
} else {
return 0;
}
}

View File

@@ -711,21 +711,24 @@ pub const DeclGen = struct {
.Bool => return writer.print("{}", .{val.toBool()}),
.Optional => {
var opt_buf: Type.Payload.ElemType = undefined;
const payload_type = ty.optionalChild(&opt_buf);
if (ty.isPtrLikeOptional()) {
return dg.renderValue(writer, payload_type, val, location);
}
if (payload_type.abiSize(target) == 0) {
const payload_ty = ty.optionalChild(&opt_buf);
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
const is_null = val.castTag(.opt_payload) == null;
return writer.print("{}", .{is_null});
}
if (ty.optionalReprIsPayload()) {
return dg.renderValue(writer, payload_ty, val, location);
}
try writer.writeByte('(');
try dg.renderTypecast(writer, ty);
try writer.writeAll("){");
if (val.castTag(.opt_payload)) |pl| {
const payload_val = pl.data;
try writer.writeAll(" .is_null = false, .payload = ");
try dg.renderValue(writer, payload_type, payload_val, location);
try dg.renderValue(writer, payload_ty, payload_val, location);
try writer.writeAll(" }");
} else {
try writer.writeAll(" .is_null = true }");
@@ -749,6 +752,12 @@ pub const DeclGen = struct {
const error_type = ty.errorUnionSet();
const payload_type = ty.errorUnionPayload();
if (error_type.errorSetCardinality() == .zero) {
// We use the payload directly as the type.
const payload_val = val.castTag(.eu_payload).?.data;
return dg.renderValue(writer, payload_type, payload_val, location);
}
if (!payload_type.hasRuntimeBits()) {
// We use the error type directly as the type.
const err_val = if (val.errorUnionIsPayload()) Value.initTag(.zero) else val;
@@ -894,10 +903,12 @@ pub const DeclGen = struct {
try w.writeAll("ZIG_COLD ");
}
}
const return_ty = dg.decl.ty.fnReturnType();
if (return_ty.hasRuntimeBits()) {
try dg.renderType(w, return_ty);
} else if (return_ty.zigTypeTag() == .NoReturn) {
const fn_info = dg.decl.ty.fnInfo();
if (fn_info.return_type.hasRuntimeBits()) {
try dg.renderType(w, fn_info.return_type);
} else if (fn_info.return_type.isError()) {
try dg.renderType(w, Type.anyerror);
} else if (fn_info.return_type.zigTypeTag() == .NoReturn) {
try w.writeAll("zig_noreturn void");
} else {
try w.writeAll("void");
@@ -905,22 +916,19 @@ pub const DeclGen = struct {
try w.writeAll(" ");
try dg.renderDeclName(w, dg.decl_index);
try w.writeAll("(");
const param_len = dg.decl.ty.fnParamLen();
var index: usize = 0;
var params_written: usize = 0;
while (index < param_len) : (index += 1) {
const param_type = dg.decl.ty.fnParamType(index);
for (fn_info.param_types) |param_type, index| {
if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
if (params_written > 0) {
try w.writeAll(", ");
}
const name = CValue{ .arg = index };
try dg.renderTypeAndName(w, dg.decl.ty.fnParamType(index), name, .Mut, 0);
try dg.renderTypeAndName(w, param_type, name, .Mut, 0);
params_written += 1;
}
if (dg.decl.ty.fnIsVarArgs()) {
if (fn_info.is_var_args) {
if (params_written != 0) try w.writeAll(", ");
try w.writeAll("...");
} else if (params_written == 0) {
@@ -1156,26 +1164,36 @@ pub const DeclGen = struct {
}
fn renderErrorUnionTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
const child_type = t.errorUnionPayload();
const err_set_type = t.errorUnionSet();
const payload_ty = t.errorUnionPayload();
const error_ty = t.errorUnionSet();
var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
defer buffer.deinit();
const bw = buffer.writer();
try bw.writeAll("typedef struct { ");
const payload_name = CValue{ .bytes = "payload" };
try dg.renderTypeAndName(bw, child_type, payload_name, .Mut, 0);
try bw.writeAll("; uint16_t error; } ");
const target = dg.module.getTarget();
const payload_align = payload_ty.abiAlignment(target);
const error_align = Type.anyerror.abiAlignment(target);
if (error_align > payload_align) {
try bw.writeAll("typedef struct { ");
try dg.renderTypeAndName(bw, payload_ty, payload_name, .Mut, 0);
try bw.writeAll("; uint16_t error; } ");
} else {
try bw.writeAll("typedef struct { uint16_t error; ");
try dg.renderTypeAndName(bw, payload_ty, payload_name, .Mut, 0);
try bw.writeAll("; } ");
}
const name_index = buffer.items.len;
if (err_set_type.castTag(.error_set_inferred)) |inf_err_set_payload| {
if (error_ty.castTag(.error_set_inferred)) |inf_err_set_payload| {
const func = inf_err_set_payload.data.func;
try bw.writeAll("zig_E_");
try dg.renderDeclName(bw, func.owner_decl);
try bw.writeAll(";\n");
} else {
try bw.print("zig_E_{s}_{s};\n", .{
typeToCIdentifier(err_set_type, dg.module), typeToCIdentifier(child_type, dg.module),
typeToCIdentifier(error_ty, dg.module), typeToCIdentifier(payload_ty, dg.module),
});
}
@@ -1345,12 +1363,12 @@ pub const DeclGen = struct {
var opt_buf: Type.Payload.ElemType = undefined;
const child_type = t.optionalChild(&opt_buf);
if (t.isPtrLikeOptional()) {
return dg.renderType(w, child_type);
if (!child_type.hasRuntimeBitsIgnoreComptime()) {
return w.writeAll("bool");
}
if (child_type.abiSize(target) == 0) {
return w.writeAll("bool");
if (t.optionalReprIsPayload()) {
return dg.renderType(w, child_type);
}
const name = dg.getTypedefName(t) orelse
@@ -1359,12 +1377,19 @@ pub const DeclGen = struct {
return w.writeAll(name);
},
.ErrorSet => {
comptime assert(Type.initTag(.anyerror).abiSize(builtin.target) == 2);
comptime assert(Type.anyerror.abiSize(builtin.target) == 2);
return w.writeAll("uint16_t");
},
.ErrorUnion => {
if (t.errorUnionPayload().abiSize(target) == 0) {
return dg.renderType(w, t.errorUnionSet());
const error_ty = t.errorUnionSet();
const payload_ty = t.errorUnionPayload();
if (error_ty.errorSetCardinality() == .zero) {
return dg.renderType(w, payload_ty);
}
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
return dg.renderType(w, Type.anyerror);
}
const name = dg.getTypedefName(t) orelse
@@ -1794,8 +1819,9 @@ fn genBody(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutO
.not => try airNot (f, inst),
.optional_payload => try airOptionalPayload(f, inst),
.optional_payload_ptr => try airOptionalPayload(f, inst),
.optional_payload_ptr => try airOptionalPayloadPtr(f, inst),
.optional_payload_ptr_set => try airOptionalPayloadPtrSet(f, inst),
.wrap_optional => try airWrapOptional(f, inst),
.is_err => try airIsErr(f, inst, false, "!="),
.is_non_err => try airIsErr(f, inst, false, "=="),
@@ -1824,7 +1850,6 @@ fn genBody(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutO
.cond_br => try airCondBr(f, inst),
.br => try airBr(f, inst),
.switch_br => try airSwitchBr(f, inst),
.wrap_optional => try airWrapOptional(f, inst),
.struct_field_ptr => try airStructFieldPtr(f, inst),
.array_to_slice => try airArrayToSlice(f, inst),
.cmpxchg_weak => try airCmpxchg(f, inst, "weak"),
@@ -1901,8 +1926,8 @@ fn genBody(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutO
.array_elem_val => try airArrayElemVal(f, inst),
.unwrap_errunion_payload => try airUnwrapErrUnionPay(f, inst, ""),
.unwrap_errunion_err => try airUnwrapErrUnionErr(f, inst),
.unwrap_errunion_payload_ptr => try airUnwrapErrUnionPay(f, inst, "&"),
.unwrap_errunion_err => try airUnwrapErrUnionErr(f, inst),
.unwrap_errunion_err_ptr => try airUnwrapErrUnionErr(f, inst),
.wrap_errunion_payload => try airWrapErrUnionPay(f, inst),
.wrap_errunion_err => try airWrapErrUnionErr(f, inst),
@@ -2120,11 +2145,14 @@ fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue {
fn airRet(f: *Function, inst: Air.Inst.Index) !CValue {
const un_op = f.air.instructions.items(.data)[inst].un_op;
const writer = f.object.writer();
if (f.air.typeOf(un_op).isFnOrHasRuntimeBitsIgnoreComptime()) {
const ret_ty = f.air.typeOf(un_op);
if (ret_ty.isFnOrHasRuntimeBitsIgnoreComptime()) {
const operand = try f.resolveInst(un_op);
try writer.writeAll("return ");
try f.writeCValue(writer, operand);
try writer.writeAll(";\n");
} else if (ret_ty.isError()) {
try writer.writeAll("return 0;");
} else {
try writer.writeAll("return;\n");
}
@@ -2136,13 +2164,16 @@ fn airRetLoad(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const ptr_ty = f.air.typeOf(un_op);
const ret_ty = ptr_ty.childType();
if (!ret_ty.isFnOrHasRuntimeBitsIgnoreComptime()) {
if (ret_ty.isFnOrHasRuntimeBitsIgnoreComptime()) {
const ptr = try f.resolveInst(un_op);
try writer.writeAll("return *");
try f.writeCValue(writer, ptr);
try writer.writeAll(";\n");
} else if (ret_ty.isError()) {
try writer.writeAll("return 0;\n");
} else {
try writer.writeAll("return;\n");
}
const ptr = try f.resolveInst(un_op);
try writer.writeAll("return *");
try f.writeCValue(writer, ptr);
try writer.writeAll(";\n");
return CValue.none;
}
@@ -2713,19 +2744,20 @@ fn airCall(
.Pointer => callee_ty.childType(),
else => unreachable,
};
const ret_ty = fn_ty.fnReturnType();
const unused_result = f.liveness.isUnused(inst);
const writer = f.object.writer();
var result_local: CValue = .none;
if (unused_result) {
if (ret_ty.hasRuntimeBits()) {
try writer.print("(void)", .{});
const result_local: CValue = r: {
if (f.liveness.isUnused(inst)) {
if (loweredFnRetTyHasBits(fn_ty)) {
try writer.print("(void)", .{});
}
break :r .none;
} else {
const local = try f.allocLocal(fn_ty.fnReturnType(), .Const);
try writer.writeAll(" = ");
break :r local;
}
} else {
result_local = try f.allocLocal(ret_ty, .Const);
try writer.writeAll(" = ");
}
};
callee: {
known: {
@@ -3116,7 +3148,6 @@ fn airIsNull(
const un_op = f.air.instructions.items(.data)[inst].un_op;
const writer = f.object.writer();
const operand = try f.resolveInst(un_op);
const target = f.object.dg.module.getTarget();
const local = try f.allocLocal(Type.initTag(.bool), .Const);
try writer.writeAll(" = (");
@@ -3124,16 +3155,18 @@ fn airIsNull(
const ty = f.air.typeOf(un_op);
var opt_buf: Type.Payload.ElemType = undefined;
const payload_type = if (ty.zigTypeTag() == .Pointer)
const payload_ty = if (ty.zigTypeTag() == .Pointer)
ty.childType().optionalChild(&opt_buf)
else
ty.optionalChild(&opt_buf);
if (ty.isPtrLikeOptional()) {
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
try writer.print("){s} {s} true;\n", .{ deref_suffix, operator });
} else if (ty.isPtrLikeOptional()) {
// operand is a regular pointer, test `operand !=/== NULL`
try writer.print("){s} {s} NULL;\n", .{ deref_suffix, operator });
} else if (payload_type.abiSize(target) == 0) {
try writer.print("){s} {s} true;\n", .{ deref_suffix, operator });
} else if (payload_ty.zigTypeTag() == .ErrorSet) {
try writer.print("){s} {s} 0;\n", .{ deref_suffix, operator });
} else {
try writer.print("){s}.is_null {s} true;\n", .{ deref_suffix, operator });
}
@@ -3141,34 +3174,58 @@ fn airIsNull(
}
fn airOptionalPayload(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst))
return CValue.none;
if (f.liveness.isUnused(inst)) return CValue.none;
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const writer = f.object.writer();
const operand = try f.resolveInst(ty_op.operand);
const operand_ty = f.air.typeOf(ty_op.operand);
const opt_ty = f.air.typeOf(ty_op.operand);
const opt_ty = if (operand_ty.zigTypeTag() == .Pointer)
operand_ty.elemType()
else
operand_ty;
var buf: Type.Payload.ElemType = undefined;
const payload_ty = opt_ty.optionalChild(&buf);
if (opt_ty.isPtrLikeOptional()) {
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
return CValue.none;
}
if (opt_ty.optionalReprIsPayload()) {
return operand;
}
const inst_ty = f.air.typeOfIndex(inst);
const local = try f.allocLocal(inst_ty, .Const);
try writer.writeAll(" = (");
try f.writeCValue(writer, operand);
try writer.writeAll(").payload;\n");
return local;
}
fn airOptionalPayloadPtr(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) return CValue.none;
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const writer = f.object.writer();
const operand = try f.resolveInst(ty_op.operand);
const ptr_ty = f.air.typeOf(ty_op.operand);
const opt_ty = ptr_ty.childType();
var buf: Type.Payload.ElemType = undefined;
const payload_ty = opt_ty.optionalChild(&buf);
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
return operand;
}
if (opt_ty.optionalReprIsPayload()) {
// the operand is just a regular pointer, no need to do anything special.
// *?*T -> **T and ?*T -> *T are **T -> **T and *T -> *T in C
return operand;
}
const inst_ty = f.air.typeOfIndex(inst);
const maybe_deref = if (operand_ty.zigTypeTag() == .Pointer) "->" else ".";
const maybe_addrof = if (inst_ty.zigTypeTag() == .Pointer) "&" else "";
const local = try f.allocLocal(inst_ty, .Const);
try writer.print(" = {s}(", .{maybe_addrof});
try writer.writeAll(" = &(");
try f.writeCValue(writer, operand);
try writer.print("){s}payload;\n", .{maybe_deref});
try writer.writeAll(")->payload;\n");
return local;
}
@@ -3180,7 +3237,7 @@ fn airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
const opt_ty = operand_ty.elemType();
if (opt_ty.isPtrLikeOptional()) {
if (opt_ty.optionalReprIsPayload()) {
// The payload and the optional are the same value.
// Setting to non-null will be done when the payload is set.
return operand;
@@ -3307,7 +3364,8 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
return local;
}
// *(E!T) -> E NOT *E
/// *(E!T) -> E
/// Note that the result is never a pointer.
fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst))
return CValue.none;
@@ -3319,7 +3377,11 @@ fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
const operand_ty = f.air.typeOf(ty_op.operand);
if (operand_ty.zigTypeTag() == .Pointer) {
if (!operand_ty.childType().errorUnionPayload().hasRuntimeBits()) {
const err_union_ty = operand_ty.childType();
if (err_union_ty.errorUnionSet().errorSetCardinality() == .zero) {
return CValue{ .bytes = "0" };
}
if (!err_union_ty.errorUnionPayload().hasRuntimeBits()) {
return operand;
}
const local = try f.allocLocal(inst_ty, .Const);
@@ -3328,6 +3390,9 @@ fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(";\n");
return local;
}
if (operand_ty.errorUnionSet().errorSetCardinality() == .zero) {
return CValue{ .bytes = "0" };
}
if (!operand_ty.errorUnionPayload().hasRuntimeBits()) {
return operand;
}
@@ -3343,7 +3408,7 @@ fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
return local;
}
fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, maybe_addrof: []const u8) !CValue {
fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, maybe_addrof: [*:0]const u8) !CValue {
if (f.liveness.isUnused(inst))
return CValue.none;
@@ -3351,17 +3416,19 @@ fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, maybe_addrof: []cons
const writer = f.object.writer();
const operand = try f.resolveInst(ty_op.operand);
const operand_ty = f.air.typeOf(ty_op.operand);
const operand_is_ptr = operand_ty.zigTypeTag() == .Pointer;
const error_union_ty = if (operand_is_ptr) operand_ty.childType() else operand_ty;
if (error_union_ty.errorUnionSet().errorSetCardinality() == .zero) {
return operand;
}
const error_union_ty = if (operand_ty.zigTypeTag() == .Pointer)
operand_ty.childType()
else
operand_ty;
if (!error_union_ty.errorUnionPayload().hasRuntimeBits()) {
return CValue.none;
}
const inst_ty = f.air.typeOfIndex(inst);
const maybe_deref = if (operand_ty.zigTypeTag() == .Pointer) "->" else ".";
const maybe_deref = if (operand_is_ptr) "->" else ".";
const local = try f.allocLocal(inst_ty, .Const);
try writer.print(" = {s}(", .{maybe_addrof});
@@ -3380,8 +3447,7 @@ fn airWrapOptional(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(ty_op.operand);
const inst_ty = f.air.typeOfIndex(inst);
if (inst_ty.isPtrLikeOptional()) {
// the operand is just a regular pointer, no need to do anything special.
if (inst_ty.optionalReprIsPayload()) {
return operand;
}
@@ -3421,6 +3487,11 @@ fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
const error_ty = error_union_ty.errorUnionSet();
const payload_ty = error_union_ty.errorUnionPayload();
if (error_ty.errorSetCardinality() == .zero) {
// TODO: write undefined bytes through the pointer here
return operand;
}
// First, set the non-error value.
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
try f.writeCValueDeref(writer, operand);
@@ -3464,6 +3535,9 @@ fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(ty_op.operand);
const inst_ty = f.air.typeOfIndex(inst);
if (inst_ty.errorUnionSet().errorSetCardinality() == .zero) {
return operand;
}
const local = try f.allocLocal(inst_ty, .Const);
try writer.writeAll(" = { .error = 0, .payload = ");
try f.writeCValue(writer, operand);
@@ -3486,16 +3560,23 @@ fn airIsErr(
const operand_ty = f.air.typeOf(un_op);
const local = try f.allocLocal(Type.initTag(.bool), .Const);
const payload_ty = operand_ty.errorUnionPayload();
const error_ty = operand_ty.errorUnionSet();
try writer.writeAll(" = ");
if (is_ptr) {
try f.writeCValueDeref(writer, operand);
if (error_ty.errorSetCardinality() == .zero) {
try writer.print("0 {s} 0;\n", .{op_str});
} else {
try f.writeCValue(writer, operand);
if (is_ptr) {
try f.writeCValueDeref(writer, operand);
} else {
try f.writeCValue(writer, operand);
}
if (payload_ty.hasRuntimeBits()) {
try writer.writeAll(".error");
}
try writer.print(" {s} 0;\n", .{op_str});
}
if (payload_ty.hasRuntimeBits()) {
try writer.writeAll(".error");
}
try writer.print(" {s} 0;\n", .{op_str});
return local;
}
@@ -4129,3 +4210,14 @@ fn intMin(ty: Type, target: std.Target, buf: []u8) []const u8 {
},
}
}
fn loweredFnRetTyHasBits(fn_ty: Type) bool {
const ret_ty = fn_ty.fnReturnType();
if (ret_ty.hasRuntimeBitsIgnoreComptime()) {
return true;
}
if (ret_ty.isError()) {
return true;
}
return false;
}

File diff suppressed because it is too large Load Diff

View File

@@ -498,9 +498,11 @@ pub const DeclState = struct {
.ErrorUnion => {
const error_ty = ty.errorUnionSet();
const payload_ty = ty.errorUnionPayload();
const payload_align = payload_ty.abiAlignment(target);
const error_align = Type.anyerror.abiAlignment(target);
const abi_size = ty.abiSize(target);
const abi_align = ty.abiAlignment(target);
const payload_off = mem.alignForwardGeneric(u64, error_ty.abiSize(target), abi_align);
const payload_off = if (error_align >= payload_align) Type.anyerror.abiSize(target) else 0;
const error_off = if (error_align >= payload_align) 0 else payload_ty.abiSize(target);
// DW.AT.structure_type
try dbg_info_buffer.append(@enumToInt(AbbrevKind.struct_type));
@@ -534,7 +536,7 @@ pub const DeclState = struct {
try dbg_info_buffer.resize(index + 4);
try self.addTypeReloc(atom, error_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.append(0);
try leb128.writeULEB128(dbg_info_buffer.writer(), error_off);
// DW.AT.structure_type delimit children
try dbg_info_buffer.append(0);
@@ -2293,7 +2295,7 @@ fn addDbgInfoErrorSet(
// DW.AT.enumeration_type
try dbg_info_buffer.append(@enumToInt(AbbrevKind.enum_type));
// DW.AT.byte_size, DW.FORM.sdata
const abi_size = ty.abiSize(target);
const abi_size = Type.anyerror.abiSize(target);
try leb128.writeULEB128(dbg_info_buffer.writer(), abi_size);
// DW.AT.name, DW.FORM.string
const name = try ty.nameAllocArena(arena, module);

View File

@@ -2317,10 +2317,7 @@ pub const Type = extern union {
.const_slice_u8_sentinel_0,
.array_u8_sentinel_0,
.anyerror_void_error_union,
.error_set,
.error_set_single,
.error_set_inferred,
.error_set_merged,
.manyptr_u8,
.manyptr_const_u8,
.manyptr_const_u8_sentinel_0,
@@ -2361,12 +2358,23 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.error_set_single,
=> return false,
.error_set => {
const err_set_obj = ty.castTag(.error_set).?.data;
const names = err_set_obj.names.keys();
return names.len > 1;
},
.error_set_merged => {
const name_map = ty.castTag(.error_set_merged).?.data;
const names = name_map.keys();
return names.len > 1;
},
// These types have more than one possible value, so the result is the same as
// asking whether they are comptime-only types.
.anyframe_T,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
.single_const_pointer,
@@ -2388,6 +2396,41 @@ pub const Type = extern union {
}
},
.optional => {
var buf: Payload.ElemType = undefined;
const child_ty = ty.optionalChild(&buf);
if (child_ty.isNoReturn()) {
// Then the optional is comptime-known to be null.
return false;
}
if (ignore_comptime_only) {
return true;
} else if (sema_kit) |sk| {
return !(try sk.sema.typeRequiresComptime(sk.block, sk.src, child_ty));
} else {
return !comptimeOnly(child_ty);
}
},
.error_union => {
// This code needs to be kept in sync with the equivalent switch prong
// in abiSizeAdvanced.
const data = ty.castTag(.error_union).?.data;
switch (data.error_set.errorSetCardinality()) {
.zero => return hasRuntimeBitsAdvanced(data.payload, ignore_comptime_only, sema_kit),
.one => return !data.payload.isNoReturn(),
.many => {
if (ignore_comptime_only) {
return true;
} else if (sema_kit) |sk| {
return !(try sk.sema.typeRequiresComptime(sk.block, sk.src, ty));
} else {
return !comptimeOnly(ty);
}
},
}
},
.@"struct" => {
const struct_obj = ty.castTag(.@"struct").?.data;
if (sema_kit) |sk| {
@@ -2467,12 +2510,6 @@ pub const Type = extern union {
.int_signed, .int_unsigned => return ty.cast(Payload.Bits).?.data != 0,
.error_union => {
const payload = ty.castTag(.error_union).?.data;
return (try payload.error_set.hasRuntimeBitsAdvanced(ignore_comptime_only, sema_kit)) or
(try payload.payload.hasRuntimeBitsAdvanced(ignore_comptime_only, sema_kit));
},
.tuple, .anon_struct => {
const tuple = ty.tupleFields();
for (tuple.types) |field_ty, i| {
@@ -2647,13 +2684,22 @@ pub const Type = extern union {
};
}
pub fn isNoReturn(self: Type) bool {
const definitely_correct_result =
self.tag_if_small_enough != .bound_fn and
self.zigTypeTag() == .NoReturn;
const fast_result = self.tag_if_small_enough == Tag.noreturn;
assert(fast_result == definitely_correct_result);
return fast_result;
/// TODO add enums with no fields here
pub fn isNoReturn(ty: Type) bool {
switch (ty.tag()) {
.noreturn => return true,
.error_set => {
const err_set_obj = ty.castTag(.error_set).?.data;
const names = err_set_obj.names.keys();
return names.len == 0;
},
.error_set_merged => {
const name_map = ty.castTag(.error_set_merged).?.data;
const names = name_map.keys();
return names.len == 0;
},
else => return false,
}
}
/// Returns 0 if the pointer is naturally aligned and the element type is 0-bit.
@@ -2852,13 +2898,30 @@ pub const Type = extern union {
else => unreachable,
},
.error_set,
.error_set_single,
// TODO revisit this when we have the concept of the error tag type
.anyerror_void_error_union,
.anyerror,
.error_set_inferred,
.error_set_merged,
=> return AbiAlignmentAdvanced{ .scalar = 2 }, // TODO revisit this when we have the concept of the error tag type
=> return AbiAlignmentAdvanced{ .scalar = 2 },
.error_set => {
const err_set_obj = ty.castTag(.error_set).?.data;
const names = err_set_obj.names.keys();
if (names.len <= 1) {
return AbiAlignmentAdvanced{ .scalar = 0 };
} else {
return AbiAlignmentAdvanced{ .scalar = 2 };
}
},
.error_set_merged => {
const name_map = ty.castTag(.error_set_merged).?.data;
const names = name_map.keys();
if (names.len <= 1) {
return AbiAlignmentAdvanced{ .scalar = 0 };
} else {
return AbiAlignmentAdvanced{ .scalar = 2 };
}
},
.array, .array_sentinel => return ty.elemType().abiAlignmentAdvanced(target, strat),
@@ -2881,8 +2944,16 @@ pub const Type = extern union {
var buf: Payload.ElemType = undefined;
const child_type = ty.optionalChild(&buf);
if (child_type.zigTypeTag() == .Pointer and !child_type.isCPtr()) {
return AbiAlignmentAdvanced{ .scalar = @divExact(target.cpu.arch.ptrBitWidth(), 8) };
switch (child_type.zigTypeTag()) {
.Pointer => return AbiAlignmentAdvanced{ .scalar = @divExact(target.cpu.arch.ptrBitWidth(), 8) },
.ErrorSet => switch (child_type.errorSetCardinality()) {
// `?error{}` is comptime-known to be null.
.zero => return AbiAlignmentAdvanced{ .scalar = 0 },
.one => return AbiAlignmentAdvanced{ .scalar = 1 },
.many => return abiAlignmentAdvanced(Type.anyerror, target, strat),
},
.NoReturn => return AbiAlignmentAdvanced{ .scalar = 0 },
else => {},
}
switch (strat) {
@@ -2900,31 +2971,35 @@ pub const Type = extern union {
},
.error_union => {
// This code needs to be kept in sync with the equivalent switch prong
// in abiSizeAdvanced.
const data = ty.castTag(.error_union).?.data;
switch (data.error_set.errorSetCardinality()) {
.zero => return abiAlignmentAdvanced(data.payload, target, strat),
.one => {
if (data.payload.isNoReturn()) {
return AbiAlignmentAdvanced{ .scalar = 0 };
}
},
.many => {},
}
const code_align = abiAlignment(Type.anyerror, target);
switch (strat) {
.eager, .sema_kit => {
if (!(try data.error_set.hasRuntimeBitsAdvanced(false, sema_kit))) {
return data.payload.abiAlignmentAdvanced(target, strat);
} else if (!(try data.payload.hasRuntimeBitsAdvanced(false, sema_kit))) {
return data.error_set.abiAlignmentAdvanced(target, strat);
if (!(try data.payload.hasRuntimeBitsAdvanced(false, sema_kit))) {
return AbiAlignmentAdvanced{ .scalar = code_align };
}
return AbiAlignmentAdvanced{ .scalar = @maximum(
code_align,
(try data.payload.abiAlignmentAdvanced(target, strat)).scalar,
(try data.error_set.abiAlignmentAdvanced(target, strat)).scalar,
) };
},
.lazy => |arena| {
switch (try data.payload.abiAlignmentAdvanced(target, strat)) {
.scalar => |payload_align| {
if (payload_align == 0) {
return data.error_set.abiAlignmentAdvanced(target, strat);
}
switch (try data.error_set.abiAlignmentAdvanced(target, strat)) {
.scalar => |err_set_align| {
return AbiAlignmentAdvanced{ .scalar = @maximum(payload_align, err_set_align) };
},
.val => {},
}
return AbiAlignmentAdvanced{
.scalar = @maximum(code_align, payload_align),
};
},
.val => {},
}
@@ -3018,6 +3093,7 @@ pub const Type = extern union {
.@"undefined",
.enum_literal,
.type_info,
.error_set_single,
=> return AbiAlignmentAdvanced{ .scalar = 0 },
.noreturn,
@@ -3136,6 +3212,7 @@ pub const Type = extern union {
.empty_struct_literal,
.empty_struct,
.void,
.error_set_single,
=> return AbiSizeAdvanced{ .scalar = 0 },
.@"struct", .tuple, .anon_struct => switch (ty.containerLayout()) {
@@ -3291,14 +3368,30 @@ pub const Type = extern union {
},
// TODO revisit this when we have the concept of the error tag type
.error_set,
.error_set_single,
.anyerror_void_error_union,
.anyerror,
.error_set_inferred,
.error_set_merged,
=> return AbiSizeAdvanced{ .scalar = 2 },
.error_set => {
const err_set_obj = ty.castTag(.error_set).?.data;
const names = err_set_obj.names.keys();
if (names.len <= 1) {
return AbiSizeAdvanced{ .scalar = 0 };
} else {
return AbiSizeAdvanced{ .scalar = 2 };
}
},
.error_set_merged => {
const name_map = ty.castTag(.error_set_merged).?.data;
const names = name_map.keys();
if (names.len <= 1) {
return AbiSizeAdvanced{ .scalar = 0 };
} else {
return AbiSizeAdvanced{ .scalar = 2 };
}
},
.i16, .u16 => return AbiSizeAdvanced{ .scalar = intAbiSize(16, target) },
.i32, .u32 => return AbiSizeAdvanced{ .scalar = intAbiSize(32, target) },
.i64, .u64 => return AbiSizeAdvanced{ .scalar = intAbiSize(64, target) },
@@ -3312,37 +3405,81 @@ pub const Type = extern union {
.optional => {
var buf: Payload.ElemType = undefined;
const child_type = ty.optionalChild(&buf);
if (child_type.isNoReturn()) {
return AbiSizeAdvanced{ .scalar = 0 };
}
if (!child_type.hasRuntimeBits()) return AbiSizeAdvanced{ .scalar = 1 };
if (child_type.zigTypeTag() == .Pointer and !child_type.isCPtr() and !child_type.isSlice())
return AbiSizeAdvanced{ .scalar = @divExact(target.cpu.arch.ptrBitWidth(), 8) };
switch (child_type.zigTypeTag()) {
.Pointer => {
const ptr_info = child_type.ptrInfo().data;
const has_null = switch (ptr_info.size) {
.Slice, .C => true,
else => ptr_info.@"allowzero",
};
if (!has_null) {
const ptr_size_bytes = @divExact(target.cpu.arch.ptrBitWidth(), 8);
return AbiSizeAdvanced{ .scalar = ptr_size_bytes };
}
},
.ErrorSet => return abiSizeAdvanced(Type.anyerror, target, strat),
else => {},
}
// Optional types are represented as a struct with the child type as the first
// field and a boolean as the second. Since the child type's abi alignment is
// guaranteed to be >= that of bool's (1 byte) the added size is exactly equal
// to the child type's ABI alignment.
return AbiSizeAdvanced{ .scalar = child_type.abiAlignment(target) + child_type.abiSize(target) };
return AbiSizeAdvanced{
.scalar = child_type.abiAlignment(target) + child_type.abiSize(target),
};
},
.error_union => {
// This code needs to be kept in sync with the equivalent switch prong
// in abiAlignmentAdvanced.
const data = ty.castTag(.error_union).?.data;
if (!data.error_set.hasRuntimeBits() and !data.payload.hasRuntimeBits()) {
return AbiSizeAdvanced{ .scalar = 0 };
} else if (!data.error_set.hasRuntimeBits()) {
return AbiSizeAdvanced{ .scalar = data.payload.abiSize(target) };
} else if (!data.payload.hasRuntimeBits()) {
return AbiSizeAdvanced{ .scalar = data.error_set.abiSize(target) };
// Here we need to care whether or not the error set is *empty* or whether
// it only has *one possible value*. In the former case, it means there
// cannot possibly be an error, meaning the ABI size is equivalent to the
// payload ABI size. In the latter case, we need to account for the "tag"
// because even if both the payload type and the error set type of an
// error union have no runtime bits, an error union still has
// 1 bit of data which is whether or not the value is an error.
// Zig still uses the error code encoding at runtime, even when only 1 bit
// would suffice. This prevents coercions from needing to branch.
switch (data.error_set.errorSetCardinality()) {
.zero => return abiSizeAdvanced(data.payload, target, strat),
.one => {
if (data.payload.isNoReturn()) {
return AbiSizeAdvanced{ .scalar = 0 };
}
},
.many => {},
}
const code_align = abiAlignment(data.error_set, target);
const code_size = abiSize(Type.anyerror, target);
if (!data.payload.hasRuntimeBits()) {
// Same as anyerror.
return AbiSizeAdvanced{ .scalar = code_size };
}
const code_align = abiAlignment(Type.anyerror, target);
const payload_align = abiAlignment(data.payload, target);
const big_align = @maximum(code_align, payload_align);
const payload_size = abiSize(data.payload, target);
var size: u64 = 0;
size += abiSize(data.error_set, target);
size = std.mem.alignForwardGeneric(u64, size, payload_align);
size += payload_size;
size = std.mem.alignForwardGeneric(u64, size, big_align);
if (code_align > payload_align) {
size += code_size;
size = std.mem.alignForwardGeneric(u64, size, payload_align);
size += payload_size;
size = std.mem.alignForwardGeneric(u64, size, code_align);
} else {
size += payload_size;
size = std.mem.alignForwardGeneric(u64, size, code_align);
size += code_size;
size = std.mem.alignForwardGeneric(u64, size, payload_align);
}
return AbiSizeAdvanced{ .scalar = size };
},
}
@@ -3832,8 +3969,39 @@ pub const Type = extern union {
return ty.ptrInfo().data.@"allowzero";
}
/// See also `isPtrLikeOptional`.
pub fn optionalReprIsPayload(ty: Type) bool {
switch (ty.tag()) {
.optional_single_const_pointer,
.optional_single_mut_pointer,
.c_const_pointer,
.c_mut_pointer,
=> return true,
.optional => {
const child_ty = ty.castTag(.optional).?.data;
switch (child_ty.zigTypeTag()) {
.Pointer => {
const info = child_ty.ptrInfo().data;
switch (info.size) {
.Slice, .C => return false,
.Many, .One => return !info.@"allowzero",
}
},
.ErrorSet => return true,
else => return false,
}
},
.pointer => return ty.castTag(.pointer).?.data.size == .C,
else => return false,
}
}
/// Returns true if the type is optional and would be lowered to a single pointer
/// address value, using 0 for null. Note that this returns true for C pointers.
/// See also `hasOptionalRepr`.
pub fn isPtrLikeOptional(self: Type) bool {
switch (self.tag()) {
.optional_single_const_pointer,
@@ -4166,6 +4334,35 @@ pub const Type = extern union {
};
}
const ErrorSetCardinality = enum { zero, one, many };
pub fn errorSetCardinality(ty: Type) ErrorSetCardinality {
switch (ty.tag()) {
.anyerror => return .many,
.error_set_inferred => return .many,
.error_set_single => return .one,
.error_set => {
const err_set_obj = ty.castTag(.error_set).?.data;
const names = err_set_obj.names.keys();
switch (names.len) {
0 => return .zero,
1 => return .one,
else => return .many,
}
},
.error_set_merged => {
const name_map = ty.castTag(.error_set_merged).?.data;
const names = name_map.keys();
switch (names.len) {
0 => return .zero,
1 => return .one,
else => return .many,
}
},
else => unreachable,
}
}
/// Returns true if it is an error set that includes anyerror, false otherwise.
/// Note that the result may be a false negative if the type did not get error set
/// resolution prior to this call.
@@ -4658,16 +4855,11 @@ pub const Type = extern union {
.const_slice,
.mut_slice,
.anyopaque,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
.enum_literal,
.anyerror_void_error_union,
.error_union,
.error_set,
.error_set_single,
.error_set_inferred,
.error_set_merged,
.@"opaque",
.var_args_param,
.manyptr_u8,
@@ -4696,6 +4888,52 @@ pub const Type = extern union {
.bound_fn,
=> return null,
.optional => {
var buf: Payload.ElemType = undefined;
const child_ty = ty.optionalChild(&buf);
if (child_ty.isNoReturn()) {
return Value.@"null";
} else {
return null;
}
},
.error_union => {
const error_ty = ty.errorUnionSet();
switch (error_ty.errorSetCardinality()) {
.zero => {
const payload_ty = ty.errorUnionPayload();
if (onePossibleValue(payload_ty)) |payload_val| {
_ = payload_val;
return Value.initTag(.the_only_possible_value);
} else {
return null;
}
},
.one => {
if (ty.errorUnionPayload().isNoReturn()) {
const error_val = onePossibleValue(error_ty).?;
return error_val;
} else {
return null;
}
},
.many => return null,
}
},
.error_set_single => return Value.initTag(.the_only_possible_value),
.error_set => {
const err_set_obj = ty.castTag(.error_set).?.data;
if (err_set_obj.names.count() > 1) return null;
return Value.initTag(.the_only_possible_value);
},
.error_set_merged => {
const name_map = ty.castTag(.error_set_merged).?.data;
if (name_map.count() > 1) return null;
return Value.initTag(.the_only_possible_value);
},
.@"struct" => {
const s = ty.castTag(.@"struct").?.data;
assert(s.haveFieldTypes());

View File

@@ -121,7 +121,7 @@ test "debug info for optional error set" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
const SomeError = error{Hello};
const SomeError = error{ Hello, Hello2 };
var a_local_variable: ?SomeError = null;
_ = a_local_variable;
}
@@ -148,18 +148,46 @@ test "implicit cast to optional to error union to return result loc" {
//comptime S.entry(); TODO
}
test "error: fn returning empty error set can be passed as fn returning any error" {
test "fn returning empty error set can be passed as fn returning any error" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
entry();
comptime entry();
}
test "fn returning empty error set can be passed as fn returning any error - pointer" {
if (builtin.zig_backend == .stage1) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
entryPtr();
comptime entryPtr();
}
fn entry() void {
foo2(bar2);
}
fn entryPtr() void {
var ptr = &bar2;
fooPtr(ptr);
}
fn foo2(f: fn () anyerror!void) void {
const x = f();
x catch {};
x catch {
@panic("fail");
};
}
fn fooPtr(f: *const fn () anyerror!void) void {
const x = f();
x catch {
@panic("fail");
};
}
fn bar2() (error{}!void) {}
@@ -239,7 +267,10 @@ fn testComptimeTestErrorEmptySet(x: EmptyErrorSet!i32) !void {
}
test "comptime err to int of error set with only 1 possible value" {
if (builtin.zig_backend != .stage1) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
testErrToIntWithOnePossibleValue(error.A, @errorToInt(error.A));
comptime testErrToIntWithOnePossibleValue(error.A, @errorToInt(error.A));
@@ -409,9 +440,11 @@ test "return function call to error set from error union function" {
}
test "optional error set is the same size as error set" {
if (builtin.zig_backend != .stage1) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
comptime try expect(@sizeOf(?anyerror) == @sizeOf(anyerror));
comptime try expect(@alignOf(?anyerror) == @alignOf(anyerror));
const S = struct {
fn returnsOptErrSet() ?anyerror {
return null;
@@ -421,6 +454,65 @@ test "optional error set is the same size as error set" {
comptime try expect(S.returnsOptErrSet() == null);
}
test "optional error set with only one error is the same size as bool" {
    if (builtin.zig_backend == .stage1) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    // A one-value error set needs no integer tag inside `?E`: the
    // null/non-null distinction alone identifies the value, so the
    // optional should lay out like a bool.
    const E = error{only};
    comptime try expect(@sizeOf(?E) == @sizeOf(bool));
    comptime try expect(@alignOf(?E) == @alignOf(bool));
    const S = struct {
        fn gimmeNull() ?E {
            return null;
        }
        fn gimmeErr() ?E {
            return error.only;
        }
    };
    // Verify both the null and the error payload round-trip, at runtime
    // and at comptime.
    try expect(S.gimmeNull() == null);
    try expect(error.only == S.gimmeErr().?);
    comptime try expect(S.gimmeNull() == null);
    comptime try expect(error.only == S.gimmeErr().?);
}
test "optional empty error set" {
    if (builtin.zig_backend == .stage1) return error.SkipZigTest;
    // An empty error set contributes no bits to an error union.
    comptime try expect(@sizeOf(error{}!void) == @sizeOf(void));
    comptime try expect(@alignOf(error{}!void) == @alignOf(void));
    // `error{}` has no values, so `?error{}` can only ever be null;
    // `x != null` is therefore comptime-known false and the
    // @compileError branch must never be analyzed -- even though `x`
    // is a runtime `var`.
    var x: ?error{} = undefined;
    if (x != null) {
        @compileError("test failed");
    }
}
test "empty error set plus zero-bit payload" {
    if (builtin.zig_backend == .stage1) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    // No error values and a zero-bit payload: the whole error union
    // must itself be zero-bit.
    comptime try expect(@sizeOf(error{}!void) == @sizeOf(void));
    comptime try expect(@alignOf(error{}!void) == @alignOf(void));
    // With an empty error set the error branch is comptime-known
    // unreachable, and `payload != {}` is comptime-known false (void
    // values always compare equal), so neither @compileError can fire.
    var x: error{}!void = undefined;
    if (x) |payload| {
        if (payload != {}) {
            @compileError("test failed");
        }
    } else |_| {
        @compileError("test failed");
    }
    // An inferred-error-set function forwarding an empty-error-set call
    // must still compile and succeed at runtime.
    const S = struct {
        fn empty() error{}!void {}
        fn inferred() !void {
            return empty();
        }
    };
    try S.inferred();
}
test "nested catch" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO

View File

@@ -425,7 +425,6 @@ test "f64 at compile time is lossy" {
}
test {
    // Skipped on self-hosted macOS backends.
    if (builtin.zig_backend != .stage1 and builtin.os.tag == .macos) return error.SkipZigTest;
    // 1 << 113 is a power of two, exactly representable in f128
    // (113-bit significand), so the comparison must hold precisely.
    comptime try expect(@as(f128, 1 << 113) == 10384593717069655257060992658440192);
}
@@ -573,28 +572,6 @@ test "inlined loop has array literal with elided runtime scope on first iteratio
}
}
test "call method on bound fn referring to var instance" {
    if (builtin.zig_backend != .stage1) {
        // Let's delay solving this one; I want to try to eliminate bound functions from
        // the language.
        return error.SkipZigTest; // TODO
    }
    // 1234 (simple_struct.field) + 3 (SimpleStruct.method) == 1237.
    try expect(bound_fn() == 1237);
}
/// Fixture for the bound-fn test above; `method` reads instance state so
/// the binding must carry the instance with it.
const SimpleStruct = struct {
    field: i32,
    fn method(self: *const SimpleStruct) i32 {
        return self.field + 3;
    }
};
// A mutable global instance plus a container-level binding of its method
// ("bound fn") -- only stage1 runs the test above, so only stage1 is
// expected to resolve this form.
var simple_struct = SimpleStruct{ .field = 1234 };
const bound_fn = simple_struct.method;
test "ptr to local array argument at comptime" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
@@ -669,8 +646,6 @@ pub fn TypeWithCompTimeSlice(comptime field_name: []const u8) type {
}
test "comptime function with mutable pointer is not memoized" {
if (builtin.zig_backend != .stage1) return error.SkipZigTest; // TODO
comptime {
var x: i32 = 1;
const ptr = &x;
@@ -685,8 +660,6 @@ fn increment(value: *i32) void {
}
test "const ptr to comptime mutable data is not memoized" {
if (builtin.zig_backend != .stage1) return error.SkipZigTest; // TODO
comptime {
var foo = SingleFieldStruct{ .x = 1 };
try expect(foo.read_x() == 1);

View File

@@ -0,0 +1,20 @@
export fn entry() void { // compile-error fixture; expected diagnostic listed in the trailing metadata comment
    bad(bound_fn() == 1237); // forces evaluation of the bound-fn binding below
}
const SimpleStruct = struct { // same fixture shape as the runtime test; used only via `bound_fn`
    field: i32,
    fn method(self: *const SimpleStruct) i32 {
        return self.field + 3;
    }
};
var simple_struct = SimpleStruct{ .field = 1234 }; // mutable global: not resolvable at comptime
const bound_fn = simple_struct.method; // presumably the :12:18 "unable to resolve comptime value" site -- verify line numbering before editing this file
fn bad(ok: bool) void { // sink: discards its argument so `entry` type-checks
    _ = ok;
}
// error
// target=native
// backend=stage2
//
// :12:18: error: unable to resolve comptime value