@@ -623,6 +623,10 @@ fn addTag(self: *Self, tag: Mir.Inst.Tag) error{OutOfMemory}!void {
     try self.addInst(.{ .tag = tag, .data = .{ .tag = {} } });
 }
 
+fn addExtended(self: *Self, opcode: wasm.PrefixedOpcode) error{OutOfMemory}!void {
+    try self.addInst(.{ .tag = .extended, .secondary = @enumToInt(opcode), .data = .{ .tag = {} } });
+}
+
 fn addLabel(self: *Self, tag: Mir.Inst.Tag, label: u32) error{OutOfMemory}!void {
     try self.addInst(.{ .tag = tag, .data = .{ .label = label } });
 }
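`memory.fill` and the other bulk-memory instructions are "prefixed" opcodes: a `0xFC` byte followed by a LEB128-encoded sub-opcode. A minimal sketch of the byte-level encoding the emitter ultimately has to produce for an instruction added via `addExtended` (the `writer` and `leb` helpers here are illustrative, not the actual Emit code):

    // memory.fill encodes as: 0xFC, then the LEB128 sub-opcode,
    // then the memory index (always 0 in MVP wasm).
    try writer.writeByte(0xFC);
    try std.leb.writeULEB128(writer, @enumToInt(wasm.PrefixedOpcode.memory_fill));
    try writer.writeByte(0); // memory index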
@@ -746,6 +750,13 @@ fn genFunctype(self: *Self, fn_ty: Type) !wasm.Type {
     defer params.deinit();
     var returns = std.ArrayList(wasm.Valtype).init(self.gpa);
     defer returns.deinit();
+    const return_type = fn_ty.fnReturnType();
+
+    const want_sret = isByRef(return_type);
+    if (want_sret) {
+        try params.append(try self.typeToValtype(Type.usize));
+    }
 
     // param types
     if (fn_ty.fnParamLen() != 0) {
@@ -759,11 +770,8 @@ fn genFunctype(self: *Self, fn_ty: Type) !wasm.Type {
     }
 
     // return type
-    const return_type = fn_ty.fnReturnType();
-    switch (return_type.zigTypeTag()) {
-        .Void, .NoReturn => {},
-        .Struct => return self.fail("TODO: Implement struct as return type for wasm", .{}),
-        else => try returns.append(try self.typeToValtype(return_type)),
-    }
+    if (!want_sret and return_type.hasCodeGenBits()) {
+        try returns.append(try self.typeToValtype(return_type));
+    }
 
     return wasm.Type{
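The net effect of these two genFunctype hunks: a by-ref return type no longer contributes a wasm result and instead becomes a leading pointer parameter. A hypothetical mapping (illustrative only, assuming wasm32 and that `isByRef` holds for the array type):

    // fn make() [4]u64   =>  (func (param i32))               ; param = sret pointer
    // fn add(a: u32) u32 =>  (func (param i32) (result i32))  ; scalar return unchanged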
@@ -785,6 +793,15 @@ pub fn genFunc(self: *Self) InnerError!Result {
     // Generate MIR for function body
     try self.genBody(self.air.getMainBody());
 
+    // In case we have a return value, but the last instruction is a noreturn (such as a while loop),
+    // we emit an unreachable instruction to tell the stack validator that this part will never be reached.
+    if (func_type.returns.len != 0 and self.air.instructions.len > 0) {
+        const inst = @intCast(u32, self.air.instructions.len - 1);
+        if (self.air.typeOfIndex(inst).isNoReturn()) {
+            try self.addTag(.@"unreachable");
+        }
+    }
+
     // End of function body
     try self.addTag(.end);
@@ -1074,6 +1091,15 @@ fn resolveCallingConventionValues(self: *Self, fn_ty: Type) InnerError!CallWValues {
         .return_value = .none,
     };
     errdefer self.gpa.free(result.args);
+    const ret_ty = fn_ty.fnReturnType();
+    // Check if we store the result as a pointer to the stack rather than
+    // by value
+    if (isByRef(ret_ty)) {
+        // the sret arg will be passed as first argument, therefore we
+        // set the `return_value` before allocating locals for regular args.
+        result.return_value = .{ .local = self.local_index };
+        self.local_index += 1;
+    }
     switch (cc) {
         .Naked => return result,
         .Unspecified, .C => {
@@ -1086,27 +1112,6 @@ fn resolveCallingConventionValues(self: *Self, fn_ty: Type) InnerError!CallWValues {
                 result.args[ty_index] = .{ .local = self.local_index };
                 self.local_index += 1;
             }
-
-            const ret_ty = fn_ty.fnReturnType();
-            if (isByRef(ret_ty)) {
-                result.return_value = try self.allocLocal(Type.initTag(.i32));
-            }
-
-            // Check if we store the result as a pointer to the stack rather than
-            // by value
-            if (result.return_value != .none) {
-                if (self.initial_stack_value == .none) try self.initializeStack();
-                const offset = std.math.cast(u32, ret_ty.abiSize(self.target)) catch {
-                    return self.fail("Return type '{}' too big for stack frame", .{ret_ty});
-                };
-                try self.moveStack(offset, result.return_value.local);
-
-                // We want to make sure the return value's stack value doesn't get overwritten,
-                // so set initial stack value to current's position instead.
-                try self.addLabel(.global_get, 0);
-                try self.addLabel(.local_set, self.initial_stack_value.local);
-            }
         },
         else => return self.fail("TODO implement function parameters for cc '{}' on wasm", .{cc}),
     }
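Inside the callee, the sret pointer now occupies the first local slot. A hypothetical walk-through with illustrative numbers: for `fn make() BigStruct` under `.Unspecified`, `resolveCallingConventionValues` yields

    // return_value = .{ .local = 0 }  -- the hidden pointer parameter
    // args[i].local starts at 1       -- user arguments shift up by one

whereas previously the return slot was a stack-derived local created only after all argument locals.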
@@ -1165,10 +1170,16 @@ fn allocStack(self: *Self, ty: Type) !WValue {
     assert(ty.hasCodeGenBits());
 
     // calculate needed stack space
-    const abi_size = std.math.cast(u32, ty.abiSize(self.target)) catch {
+    var abi_size = std.math.cast(u32, ty.abiSize(self.target)) catch {
        return self.fail("Given type '{}' too big to fit into stack frame", .{ty});
     };
 
+    // We store slices as a struct with a pointer field and a length field,
+    // both of 'usize' size.
+    if (ty.isSlice()) {
+        abi_size = self.ptrSize() * 2;
+    }
+
     // allocate a local using wasm's pointer size
     const local = try self.allocLocal(Type.@"usize");
     try self.moveStack(abi_size, local.local);
@@ -1256,6 +1267,28 @@ fn isByRef(ty: Type) bool {
     }
 }
 
+/// Creates a new local for a pointer that points to memory with a given offset.
+/// This can be used to get a pointer to a struct field, error payload, etc.
+fn buildPointerOffset(self: *Self, ptr_value: WValue, offset: u64) InnerError!WValue {
+    // do not perform arithmetic when offset is 0.
+    if (offset == 0) return ptr_value;
+    const result_ptr = try self.allocLocal(Type.usize);
+    try self.emitWValue(ptr_value);
+    switch (self.target.cpu.arch.ptrBitWidth()) {
+        32 => {
+            try self.addImm32(@bitCast(i32, @intCast(u32, offset)));
+            try self.addTag(.i32_add);
+        },
+        64 => {
+            try self.addImm64(offset);
+            try self.addTag(.i64_add);
+        },
+        else => unreachable,
+    }
+    try self.addLabel(.local_set, result_ptr.local);
+    return result_ptr;
+}
+
 fn genInst(self: *Self, inst: Air.Inst.Index) !WValue {
     const air_tags = self.air.instructions.items(.tag);
     return switch (air_tags[inst]) {
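For a 32-bit target and an offset of, say, 8, the sequence `buildPointerOffset` emits corresponds to this wasm (illustrative):

    local.get $ptr_value
    i32.const 8
    i32.add
    local.set $result_ptr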
@@ -1296,16 +1329,17 @@ fn genInst(self: *Self, inst: Air.Inst.Index) !WValue {
         .is_err => self.airIsErr(inst, .i32_ne),
         .is_non_err => self.airIsErr(inst, .i32_eq),
 
-        .is_null => self.airIsNull(inst, .i32_ne),
-        .is_non_null => self.airIsNull(inst, .i32_eq),
-        .is_null_ptr => self.airIsNull(inst, .i32_ne),
-        .is_non_null_ptr => self.airIsNull(inst, .i32_eq),
+        .is_null => self.airIsNull(inst, .i32_eq, .value),
+        .is_non_null => self.airIsNull(inst, .i32_ne, .value),
+        .is_null_ptr => self.airIsNull(inst, .i32_eq, .ptr),
+        .is_non_null_ptr => self.airIsNull(inst, .i32_ne, .ptr),
 
         .load => self.airLoad(inst),
         .loop => self.airLoop(inst),
         .memset => self.airMemset(inst),
         .not => self.airNot(inst),
         .optional_payload => self.airOptionalPayload(inst),
-        .optional_payload_ptr => self.airOptionalPayload(inst),
+        .optional_payload_ptr => self.airOptionalPayloadPtr(inst),
+        .optional_payload_ptr_set => self.airOptionalPayloadPtrSet(inst),
         .ptr_add => self.airPtrBinOp(inst, .add),
         .ptr_sub => self.airPtrBinOp(inst, .sub),
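Note the flipped comparison opcodes: the byte at offset 0 of a non-pointer-like optional now encodes "is non-null" rather than "is null", so `is_null` holds when that byte equals 0. A reading of the new convention (illustrative):

    // byte 0 == 0  =>  null          (is_null:     i32_eq against 0)
    // byte 0 == 1  =>  has payload   (is_non_null: i32_ne against 0)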
@@ -1315,17 +1349,21 @@ fn genInst(self: *Self, inst: Air.Inst.Index) !WValue {
         .ret => self.airRet(inst),
+        .ret_ptr => self.airRetPtr(inst),
+        .ret_load => self.airRetLoad(inst),
 
         .slice => self.airSlice(inst),
         .slice_len => self.airSliceLen(inst),
+        .slice_elem_val => self.airSliceElemVal(inst),
+        .slice_elem_ptr => self.airSliceElemPtr(inst),
         .slice_ptr => self.airSlicePtr(inst),
         .store => self.airStore(inst),
 
         .struct_field_ptr => self.airStructFieldPtr(inst),
         .struct_field_ptr_index_0 => self.airStructFieldPtrIndex(inst, 0),
         .struct_field_ptr_index_1 => self.airStructFieldPtrIndex(inst, 1),
         .struct_field_ptr_index_2 => self.airStructFieldPtrIndex(inst, 2),
         .struct_field_ptr_index_3 => self.airStructFieldPtrIndex(inst, 3),
         .struct_field_val => self.airStructFieldVal(inst),
 
         .switch_br => self.airSwitchBr(inst),
         .trunc => self.airTrunc(inst),
         .unreach => self.airUnreachable(inst),
@@ -1353,7 +1391,6 @@ fn airRet(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
     // to the stack instead
     if (self.return_value != .none) {
         try self.store(self.return_value, operand, self.decl.ty.fnReturnType(), 0);
-        try self.emitWValue(self.return_value);
     } else {
         try self.emitWValue(operand);
     }
@@ -1372,6 +1409,9 @@ fn airRetPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
     if (child_type.abiSize(self.target) == 0) return WValue{ .none = {} };
 
+    if (isByRef(child_type)) {
+        return self.return_value;
+    }
     return self.allocStack(child_type);
 }
@@ -1381,9 +1421,7 @@ fn airRetLoad(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
     const ret_ty = self.air.typeOf(un_op).childType();
     if (!ret_ty.hasCodeGenBits()) return WValue.none;
 
-    if (isByRef(ret_ty)) {
-        try self.emitWValue(operand);
-    } else {
+    if (!isByRef(ret_ty)) {
         const result = try self.load(operand, ret_ty, 0);
         try self.emitWValue(result);
     }
@@ -1404,6 +1442,8 @@ fn airCall(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
         .Pointer => ty.childType(),
         else => unreachable,
     };
+    const ret_ty = fn_ty.fnReturnType();
+    const first_param_sret = isByRef(ret_ty);
 
     const target: ?*Decl = blk: {
         const func_val = self.air.value(pl_op.operand) orelse break :blk null;
@@ -1416,6 +1456,12 @@ fn airCall(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
         return self.fail("Expected a function, but instead found type '{s}'", .{func_val.tag()});
     };
 
+    const sret = if (first_param_sret) blk: {
+        const sret_local = try self.allocStack(ret_ty);
+        try self.emitWValue(sret_local);
+        break :blk sret_local;
+    } else WValue{ .none = {} };
+
     for (args) |arg| {
         const arg_ref = @intToEnum(Air.Inst.Ref, arg);
         const arg_val = self.resolveInst(arg_ref);
@@ -1454,42 +1500,35 @@ fn airCall(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
         try self.addLabel(.call_indirect, fn_type_index);
     }
 
-    const ret_ty = fn_ty.fnReturnType();
-    if (!ret_ty.hasCodeGenBits()) return WValue.none;
-
-    // TODO: Implement this for all aggregate types
-    if (ret_ty.isSlice()) {
-        // first load the values onto the regular stack, before we move the stack pointer
-        // to prevent overwriting the return value.
-        const tmp = try self.allocLocal(ret_ty);
-        try self.addLabel(.local_set, tmp.local);
-        const field_ty = Type.@"usize";
-        const offset = @intCast(u32, field_ty.abiSize(self.target));
-        const ptr_local = try self.load(tmp, field_ty, 0);
-        const len_local = try self.load(tmp, field_ty, offset);
-
-        // As our values are now safe, we reserve space on the virtual stack and
-        // store the values there.
-        const result = try self.allocStack(ret_ty);
-        try self.store(result, ptr_local, field_ty, 0);
-        try self.store(result, len_local, field_ty, offset);
-        return result;
-    }
-
-    const result_local = try self.allocLocal(ret_ty);
-    try self.addLabel(.local_set, result_local.local);
-    return result_local;
+    if (self.liveness.isUnused(inst) or !ret_ty.hasCodeGenBits()) {
+        return WValue.none;
+    } else if (ret_ty.isNoReturn()) {
+        try self.addTag(.@"unreachable");
+        return WValue.none;
+    } else if (first_param_sret) {
+        return sret;
+    } else {
+        const result_local = try self.allocLocal(ret_ty);
+        try self.addLabel(.local_set, result_local.local);
+        return result_local;
+    }
 }
 
 fn airAlloc(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
-    const child_type = self.air.typeOfIndex(inst).childType();
+    const pointee_type = self.air.typeOfIndex(inst).childType();
 
-    // Initialize the stack
-    if (self.initial_stack_value == .none) {
-        try self.initializeStack();
-    }
-    if (child_type.abiSize(self.target) == 0) return WValue{ .none = {} };
-    return self.allocStack(child_type);
+    if (!pointee_type.hasCodeGenBits()) {
+        // When the pointee is zero-sized, we still want to create a pointer,
+        // but instead use a default pointer type as storage.
+        const zero_ptr = try self.allocStack(Type.usize);
+        return zero_ptr;
+    }
+    return self.allocStack(pointee_type);
 }
 
 fn airStore(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
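Together with the sret setup earlier in airCall, the caller-side protocol for a by-ref return becomes: reserve a stack slot via `allocStack`, push its address as the hidden first argument, make the call, and hand that same slot back as the call's result. In wasm terms, roughly (illustrative):

    local.get $sret      ;; hidden first argument, from allocStack
    ...                  ;; the regular arguments follow
    call $callee         ;; callee writes the result through $sret
    ;; the AIR call's result is simply $sret; nothing is left on the value stack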
@@ -1516,6 +1555,8 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerError!void {
     const tag_ty = if (ty.zigTypeTag() == .ErrorUnion) ty.errorUnionSet() else Type.initTag(.u8);
     const payload_offset = if (ty.zigTypeTag() == .ErrorUnion)
         @intCast(u32, tag_ty.abiSize(self.target))
+    else if (ty.isPtrLikeOptional())
+        @as(u32, 0)
     else
         @intCast(u32, ty.abiSize(self.target) - payload_ty.abiSize(self.target));
@@ -1528,6 +1569,10 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerError!void {
             try self.addLabel(.local_set, mem_local.local);
             try self.store(lhs, mem_local, ty, 0);
             return;
+        } else if (ty.isPtrLikeOptional()) {
+            // set the address of rhs to lhs
+            try self.store(lhs, rhs, Type.usize, 0);
+            return;
         }
         // constant will contain both tag and payload,
         // so save those in 2 temporary locals before storing them
@@ -1546,11 +1591,23 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerError!void {
                 return;
             },
             .local => {
+                // When the optional is pointer-like, we simply store the pointer
+                // instead.
+                if (ty.isPtrLikeOptional()) {
+                    try self.store(lhs, rhs, Type.usize, 0);
+                    return;
+                }
                 // Load values from `rhs` stack position and store in `lhs` instead
                 const tag_local = try self.load(rhs, tag_ty, 0);
                 if (payload_ty.hasCodeGenBits()) {
-                    const payload_local = try self.load(rhs, payload_ty, payload_offset);
-                    try self.store(lhs, payload_local, payload_ty, payload_offset);
+                    if (isByRef(payload_ty)) {
+                        const payload_ptr = try self.buildPointerOffset(rhs, payload_offset);
+                        const lhs_payload_ptr = try self.buildPointerOffset(lhs, payload_offset);
+                        try self.store(lhs_payload_ptr, payload_ptr, payload_ty, 0);
+                    } else {
+                        const payload_local = try self.load(rhs, payload_ty, payload_offset);
+                        try self.store(lhs, payload_local, payload_ty, payload_offset);
+                    }
                 }
                 return try self.store(lhs, tag_local, tag_ty, 0);
             },
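The layout these branches assume for a non-pointer-like `?T` (a reading of the code, not a documented guarantee): the non-null byte sits at offset 0 and the payload occupies the trailing `abiSize(T)` bytes, so `payload_offset = abiSize(?T) - abiSize(T)`. For example:

    // ?u32 on wasm32 (illustrative):
    //   offset 0 : non-null byte (0 = null, 1 = payload present)
    //   offset 4 : u32 payload   (payload_offset = 8 - 4)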
@@ -1593,12 +1650,9 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerError!void {
     const len_offset = self.ptrSize();
     if (val.castTag(.decl_ref)) |decl| {
         // for decl references we also need to retrieve the length and the original decl's pointer
-        try self.addMemArg(.i32_load, .{ .offset = 0, .alignment = Type.@"usize".abiAlignment(self.target) });
+        try self.addMemArg(.i32_load, .{ .offset = 0, .alignment = self.ptrSize() });
         try self.addLabel(.memory_address, decl.data.link.wasm.sym_index);
-        try self.addMemArg(
-            .i32_load,
-            .{ .offset = len_offset, .alignment = Type.@"usize".abiAlignment(self.target) },
-        );
+        try self.addMemArg(.i32_load, .{ .offset = len_offset, .alignment = self.ptrSize() });
     }
     try self.addLabel(.local_set, len_local.local);
     try self.addLabel(.local_set, ptr_local.local);
@@ -1630,7 +1684,6 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerError!void {
         .ErrorSet,
         .Enum,
         .Bool,
-        .ErrorUnion,
         => @intCast(u8, ty.abiSize(self.target)),
         else => @as(u8, 4),
     };
@@ -1670,7 +1723,9 @@ fn load(self: *Self, operand: WValue, ty: Type, offset: u32) InnerError!WValue {
     // load local's value from memory by its stack position
     try self.emitWValue(operand);
 
     // Build the opcode with the right bitsize
-    const signedness: std.builtin.Signedness = if (ty.isUnsignedInt() or ty.zigTypeTag() == .ErrorSet)
+    const signedness: std.builtin.Signedness = if (ty.isUnsignedInt() or
+        ty.zigTypeTag() == .ErrorSet or
+        ty.zigTypeTag() == .Bool)
         .unsigned
     else
         .signed;
@@ -1684,6 +1739,10 @@ fn load(self: *Self, operand: WValue, ty: Type, offset: u32) InnerError!WValue {
         .Bool,
         .ErrorUnion,
         => @intCast(u8, ty.abiSize(self.target)),
+        .Optional => blk: {
+            if (ty.isPtrLikeOptional()) break :blk @intCast(u8, self.ptrSize());
+            break :blk @intCast(u8, ty.abiSize(self.target));
+        },
         else => @as(u8, 4),
     };
@@ -1828,7 +1887,7 @@ fn emitConstant(self: *Self, val: Value, ty: Type) InnerError!void {
         }
     } else if (val.castTag(.int_u64)) |int_ptr| {
         try self.addImm32(@bitCast(i32, @intCast(u32, int_ptr.data)));
-    } else if (val.tag() == .zero) {
+    } else if (val.tag() == .zero or val.tag() == .null_value) {
         try self.addImm32(0);
     } else if (val.tag() == .one) {
         try self.addImm32(1);
@@ -1886,18 +1945,19 @@ fn emitConstant(self: *Self, val: Value, ty: Type) InnerError!void {
             var buf: Type.Payload.ElemType = undefined;
             const payload_type = ty.optionalChild(&buf);
             if (ty.isPtrLikeOptional()) {
-                return self.fail("Wasm TODO: emitConstant for optional pointer", .{});
+                try self.emitConstant(val, payload_type);
+                return;
             }
 
             // When constant has value 'null', set is_null local to '1'
             // and payload to '0'
             if (val.castTag(.opt_payload)) |payload| {
-                try self.addImm32(0);
+                try self.addImm32(1);
                 if (payload_type.hasCodeGenBits())
                     try self.emitConstant(payload.data, payload_type);
             } else {
                 // set null-tag
-                try self.addImm32(1);
+                try self.addImm32(0);
                 // null-tag is set, so write a '0' const
                 try self.addImm32(0);
             }
@@ -1908,11 +1968,16 @@ fn emitConstant(self: *Self, val: Value, ty: Type) InnerError!void {
             const result = try self.allocStack(ty);
 
             const fields = ty.structFields();
+            var offset: u32 = 0;
             for (fields.values()) |field, index| {
+                if (isByRef(field.ty)) {
+                    return self.fail("TODO: emitConstant for struct field type {}\n", .{field.ty});
+                }
                 const tmp = try self.allocLocal(field.ty);
                 try self.emitConstant(struct_data.data[index], field.ty);
                 try self.addLabel(.local_set, tmp.local);
-                try self.store(result, tmp, field.ty, field.offset);
+                try self.store(result, tmp, field.ty, offset);
+                offset += @intCast(u32, field.ty.abiSize(self.target));
             }
             try self.addLabel(.local_get, result.local);
         },
@@ -1936,10 +2001,15 @@ fn emitUndefined(self: *Self, ty: Type) InnerError!void {
         // validator will not accept it due to out-of-bounds memory access);
         .Array => try self.addImm32(@bitCast(i32, @as(u32, 0xaa))),
         .Struct => {
-            // TODO: Write 0xaa to each field
+            // TODO: Write 0xaa to the struct's memory
+            const result = try self.allocStack(ty);
+            try self.addLabel(.local_get, result.local);
         },
         .Pointer => switch (self.ptrSize()) {
             4 => try self.addImm32(@bitCast(i32, @as(u32, 0xaaaaaaaa))),
             8 => try self.addImm64(0xaaaaaaaaaaaaaaaa),
             else => unreachable,
         },
         else => return self.fail("Wasm TODO: emitUndefined for type: {}\n", .{ty}),
     }
 }
@@ -2065,23 +2135,34 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
 }
 
 fn airCmp(self: *Self, inst: Air.Inst.Index, op: std.math.CompareOperator) InnerError!WValue {
-    const data: Air.Inst.Data = self.air.instructions.items(.data)[inst];
-    const lhs = self.resolveInst(data.bin_op.lhs);
-    const rhs = self.resolveInst(data.bin_op.rhs);
-    const lhs_ty = self.air.typeOf(data.bin_op.lhs);
+    const bin_op = self.air.instructions.items(.data)[inst].bin_op;
+    const lhs = self.resolveInst(bin_op.lhs);
+    const rhs = self.resolveInst(bin_op.rhs);
+    const operand_ty = self.air.typeOf(bin_op.lhs);
 
     try self.emitWValue(lhs);
     try self.emitWValue(rhs);
 
+    if (operand_ty.zigTypeTag() == .Optional and !operand_ty.isPtrLikeOptional()) {
+        var buf: Type.Payload.ElemType = undefined;
+        const payload_ty = operand_ty.optionalChild(&buf);
+        if (payload_ty.hasCodeGenBits()) {
+            // When we hit this case, we must check the value of optionals
+            // that are not pointers. This means first checking both lhs and rhs
+            // against non-null, as well as checking that the payloads of lhs and rhs match.
+            return self.fail("TODO: Implement airCmp for comparing optionals", .{});
+        }
+    }
+
     const signedness: std.builtin.Signedness = blk: {
         // by default we assume the operand type is unsigned (i.e. bools and enum values)
-        if (lhs_ty.zigTypeTag() != .Int) break :blk .unsigned;
+        if (operand_ty.zigTypeTag() != .Int) break :blk .unsigned;
 
         // in case of an actual integer, we emit the correct signedness
-        break :blk lhs_ty.intInfo(self.target).signedness;
+        break :blk operand_ty.intInfo(self.target).signedness;
     };
     const opcode: wasm.Opcode = buildOpcode(.{
-        .valtype1 = try self.typeToValtype(lhs_ty),
+        .valtype1 = try self.typeToValtype(operand_ty),
         .op = switch (op) {
             .lt => .lt,
             .lte => .le,
@@ -2132,7 +2213,7 @@ fn airNot(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
     try self.addTag(.i32_eq);
 
     // save the result in the local
-    const not_tmp = try self.allocLocal(self.air.getRefType(ty_op.ty));
+    const not_tmp = try self.allocLocal(Type.initTag(.i32));
     try self.addLabel(.local_set, not_tmp.local);
     return not_tmp;
 }
@@ -2173,7 +2254,7 @@ fn airStructFieldPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
             struct_ty.structFieldType(extra.data.field_index),
         });
     };
-    return structFieldPtr(struct_ptr, offset);
+    return self.structFieldPtr(struct_ptr, offset);
 }
 
 fn airStructFieldPtrIndex(self: *Self, inst: Air.Inst.Index, index: u32) InnerError!WValue {
@@ -2186,10 +2267,10 @@ fn airStructFieldPtrIndex(self: *Self, inst: Air.Inst.Index, index: u32) InnerError!WValue {
             field_ty,
         });
     };
-    return structFieldPtr(struct_ptr, offset);
+    return self.structFieldPtr(struct_ptr, offset);
 }
 
-fn structFieldPtr(struct_ptr: WValue, offset: u32) InnerError!WValue {
+fn structFieldPtr(self: *Self, struct_ptr: WValue, offset: u32) InnerError!WValue {
     var final_offset = offset;
     const local = switch (struct_ptr) {
         .local => |local| local,
@@ -2199,7 +2280,7 @@ fn structFieldPtr(struct_ptr: WValue, offset: u32) InnerError!WValue {
         },
        else => unreachable,
     };
-    return WValue{ .local_with_offset = .{ .local = local, .offset = final_offset } };
+    return self.buildPointerOffset(.{ .local = local }, final_offset);
 }
fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
|
|
|
|
|
@@ -2434,24 +2515,13 @@ fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
|
|
|
|
|
const offset = err_ty.errorUnionSet().abiSize(self.target);
|
|
|
|
|
|
|
|
|
|
const err_union = try self.allocStack(err_ty);
|
|
|
|
|
const to_store = switch (op_ty.zigTypeTag()) {
|
|
|
|
|
// for those types we must load the pointer and then store
|
|
|
|
|
// its value
|
|
|
|
|
.Pointer, .Optional => blk: {
|
|
|
|
|
if (!op_ty.isPtrLikeOptional()) {
|
|
|
|
|
return self.fail("TODO: airWrapErrUnionPayload for optional type {}", .{op_ty});
|
|
|
|
|
}
|
|
|
|
|
break :blk try self.load(operand, op_ty, 0);
|
|
|
|
|
},
|
|
|
|
|
.Int => operand,
|
|
|
|
|
else => return self.fail("TODO: airWrapErrUnionPayload for type {}", .{op_ty}),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
try self.store(err_union, to_store, op_ty, @intCast(u32, offset));
|
|
|
|
|
const payload_ptr = try self.buildPointerOffset(err_union, offset);
|
|
|
|
|
try self.store(payload_ptr, operand, op_ty, 0);
|
|
|
|
|
|
|
|
|
|
// ensure we also write '0' to the error part, so any present stack value gets overwritten by it.
|
|
|
|
|
const tmp_local = try self.allocLocal(err_ty.errorUnionSet()); // locals are '0' by default.
|
|
|
|
|
try self.store(err_union, tmp_local, err_ty.errorUnionSet(), 0);
|
|
|
|
|
try self.addLabel(.local_get, err_union.local);
|
|
|
|
|
try self.addImm32(0);
|
|
|
|
|
try self.addMemArg(.i32_store16, .{ .offset = 0, .alignment = 2 });
|
|
|
|
|
|
|
|
|
|
return err_union;
|
|
|
|
|
}
|
|
|
|
|
@@ -2499,64 +2569,140 @@ fn airIntcast(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
     return result;
 }
 
-fn airIsNull(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError!WValue {
+fn airIsNull(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode, op_kind: enum { value, ptr }) InnerError!WValue {
     const un_op = self.air.instructions.items(.data)[inst].un_op;
     const operand = self.resolveInst(un_op);
 
     const op_ty = self.air.typeOf(un_op);
+    const optional_ty = if (op_kind == .ptr) op_ty.childType() else op_ty;
     try self.emitWValue(operand);
-    if (!op_ty.isPtrLikeOptional()) {
-        try self.addMemArg(.i32_load8_u, .{ .offset = 0, .alignment = 1 });
-    }
+    if (!optional_ty.isPtrLikeOptional()) {
+        var buf: Type.Payload.ElemType = undefined;
+        const payload_ty = optional_ty.optionalChild(&buf);
+        // When payload is zero-bits, we can treat operand as a value, rather than a
+        // stack value
+        if (payload_ty.hasCodeGenBits()) {
+            try self.addMemArg(.i32_load8_u, .{ .offset = 0, .alignment = 1 });
+        }
+    }
 
-    // Compare the error value with '0'
+    // Compare the null value with '0'
     try self.addImm32(0);
     try self.addTag(Mir.Inst.Tag.fromOpcode(opcode));
 
-    const is_null_tmp = try self.allocLocal(Type.initTag(.u8));
+    const is_null_tmp = try self.allocLocal(Type.initTag(.i32));
     try self.addLabel(.local_set, is_null_tmp.local);
     return is_null_tmp;
 }
 
 fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
     if (self.liveness.isUnused(inst)) return WValue{ .none = {} };
 
     const ty_op = self.air.instructions.items(.data)[inst].ty_op;
     const operand = self.resolveInst(ty_op.operand);
     const opt_ty = self.air.typeOf(ty_op.operand);
 
-    // For pointers we simply return its stack address, rather than
-    // loading its value
-    if (opt_ty.zigTypeTag() == .Pointer) {
-        return WValue{ .local_with_offset = .{ .local = operand.local, .offset = 1 } };
-    }
+    const payload_ty = self.air.typeOfIndex(inst);
+    if (!payload_ty.hasCodeGenBits()) return WValue{ .none = {} };
     if (opt_ty.isPtrLikeOptional()) return operand;
 
-    var buf: Type.Payload.ElemType = undefined;
-    const child_ty = opt_ty.optionalChild(&buf);
-    const offset = opt_ty.abiSize(self.target) - child_ty.abiSize(self.target);
+    const offset = opt_ty.abiSize(self.target) - payload_ty.abiSize(self.target);
 
-    return self.load(operand, child_ty, @intCast(u32, offset));
+    if (isByRef(payload_ty)) {
+        return self.buildPointerOffset(operand, offset);
+    }
+
+    return self.load(operand, payload_ty, @intCast(u32, offset));
 }
 
+fn airOptionalPayloadPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
+    if (self.liveness.isUnused(inst)) return WValue{ .none = {} };
+
+    const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+    const operand = self.resolveInst(ty_op.operand);
+    const opt_ty = self.air.typeOf(ty_op.operand).childType();
+
+    var buf: Type.Payload.ElemType = undefined;
+    const payload_ty = opt_ty.optionalChild(&buf);
+    if (!payload_ty.hasCodeGenBits() or opt_ty.isPtrLikeOptional()) {
+        return operand;
+    }
+
+    const offset = opt_ty.abiSize(self.target) - payload_ty.abiSize(self.target);
+    return self.buildPointerOffset(operand, offset);
+}
+
 fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
     const ty_op = self.air.instructions.items(.data)[inst].ty_op;
     const operand = self.resolveInst(ty_op.operand);
-    _ = operand;
-    return self.fail("TODO - wasm codegen for optional_payload_ptr_set", .{});
+    const opt_ty = self.air.typeOf(ty_op.operand).childType();
+    var buf: Type.Payload.ElemType = undefined;
+    const payload_ty = opt_ty.optionalChild(&buf);
+    if (!payload_ty.hasCodeGenBits()) {
+        return self.fail("TODO: Implement OptionalPayloadPtrSet for optional with zero-sized type {}", .{payload_ty});
+    }
+
+    if (opt_ty.isPtrLikeOptional()) {
+        return operand;
+    }
+
+    const offset = std.math.cast(u32, opt_ty.abiSize(self.target) - payload_ty.abiSize(self.target)) catch {
+        return self.fail("Optional type {} too big to fit into stack frame", .{opt_ty});
+    };
+
+    try self.emitWValue(operand);
+    try self.addImm32(1);
+    try self.addMemArg(.i32_store8, .{ .offset = 0, .alignment = 1 });
+
+    return self.buildPointerOffset(operand, offset);
+}
+
 fn airWrapOptional(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
     if (self.liveness.isUnused(inst)) return WValue{ .none = {} };
 
     const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+    const payload_ty = self.air.typeOf(ty_op.operand);
+    if (!payload_ty.hasCodeGenBits()) {
+        const non_null_bit = try self.allocStack(Type.initTag(.u1));
+        try self.addLabel(.local_get, non_null_bit.local);
+        try self.addImm32(1);
+        try self.addMemArg(.i32_store8, .{ .offset = 0, .alignment = 1 });
+        return non_null_bit;
+    }
 
     const operand = self.resolveInst(ty_op.operand);
-    const op_ty = self.air.typeOf(ty_op.operand);
-    const optional_ty = self.air.getRefType(ty_op.ty);
-    const offset = optional_ty.abiSize(self.target) - op_ty.abiSize(self.target);
+    const op_ty = self.air.typeOfIndex(inst);
+    if (op_ty.isPtrLikeOptional()) {
+        return operand;
+    }
+    const offset = std.math.cast(u32, op_ty.abiSize(self.target) - payload_ty.abiSize(self.target)) catch {
+        return self.fail("Optional type {} too big to fit into stack frame", .{op_ty});
+    };
 
     // Create optional type, set the non-null bit, and store the operand inside the optional type
     const result = try self.allocStack(op_ty);
     try self.addLabel(.local_get, result.local);
     try self.addImm32(1);
     try self.addMemArg(.i32_store8, .{ .offset = 0, .alignment = 1 });
 
-    return WValue{ .local_with_offset = .{
-        .local = operand.local,
-        .offset = @intCast(u32, offset),
-    } };
+    const payload_ptr = try self.buildPointerOffset(result, offset);
+    try self.store(payload_ptr, operand, payload_ty, 0);
+
+    return result;
 }
 
 fn airSlice(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
     if (self.liveness.isUnused(inst)) return WValue{ .none = {} };
 
     const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
     const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
     const lhs = self.resolveInst(bin_op.lhs);
     const rhs = self.resolveInst(bin_op.rhs);
     const slice_ty = self.air.typeOfIndex(inst);
 
     const slice = try self.allocStack(slice_ty);
     try self.store(slice, lhs, Type.usize, 0);
     try self.store(slice, rhs, Type.usize, self.ptrSize());
 
     return slice;
 }
 
 fn airSliceLen(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
@@ -2809,3 +2955,71 @@ fn airPtrBinOp(self: *Self, inst: Air.Inst.Index, op: Op) InnerError!WValue {
     try self.addLabel(.local_set, result.local);
     return result;
 }
+
+fn airMemset(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
+    const pl_op = self.air.instructions.items(.data)[inst].pl_op;
+    const bin_op = self.air.extraData(Air.Bin, pl_op.payload).data;
+
+    const ptr = self.resolveInst(pl_op.operand);
+    const value = self.resolveInst(bin_op.lhs);
+    const len = self.resolveInst(bin_op.rhs);
+    try self.memSet(ptr, len, value);
+
+    return WValue.none;
+}
+
+/// Sets a region of memory at `ptr` to the value of `value`.
+/// When the user has enabled the bulk_memory feature, we lower
+/// this to wasm's memset instruction. When the feature is not present,
+/// we implement it manually.
+fn memSet(self: *Self, ptr: WValue, len: WValue, value: WValue) InnerError!void {
+    // When bulk_memory is enabled, we lower it to wasm's memset instruction.
+    // If not, we lower it ourselves.
+    if (std.Target.wasm.featureSetHas(self.target.cpu.features, .bulk_memory)) {
+        try self.emitWValue(ptr);
+        try self.emitWValue(value);
+        try self.emitWValue(len);
+        try self.addExtended(.memory_fill);
+        return;
+    }
+
+    // TODO: We should probably lower this to a call to compiler_rt,
+    // but for now we implement it manually.
+    const offset = try self.allocLocal(Type.usize); // local for counter
+    // outer block to jump to when loop is done
+    try self.startBlock(.block, wasm.block_empty);
+    try self.startBlock(.loop, wasm.block_empty);
+    try self.emitWValue(offset);
+    try self.emitWValue(len);
+    switch (self.ptrSize()) {
+        4 => try self.addTag(.i32_eq),
+        8 => try self.addTag(.i64_eq),
+        else => unreachable,
+    }
+    try self.addLabel(.br_if, 1); // jump out of loop into outer block (finished)
+    try self.emitWValue(ptr);
+    try self.emitWValue(offset);
+    switch (self.ptrSize()) {
+        4 => try self.addTag(.i32_add),
+        8 => try self.addTag(.i64_add),
+        else => unreachable,
+    }
+    try self.emitWValue(value);
+    const mem_store_op: Mir.Inst.Tag = switch (self.ptrSize()) {
+        4 => .i32_store8,
+        8 => .i64_store8,
+        else => unreachable,
+    };
+    try self.addMemArg(mem_store_op, .{ .offset = 0, .alignment = 1 });
+    try self.emitWValue(offset);
+    try self.addImm32(1);
+    switch (self.ptrSize()) {
+        4 => try self.addTag(.i32_add),
+        8 => try self.addTag(.i64_add),
+        else => unreachable,
+    }
+    try self.addLabel(.local_set, offset.local);
+    try self.addLabel(.br, 0); // jump to start of loop
+    try self.endBlock();
+    try self.endBlock();
+}
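For readers less familiar with wasm control flow: `br_if 1` branches out of the enclosing outer `block` (terminating the loop), while `br 0` jumps back to the start of the `loop`. The manual fallback above therefore corresponds to roughly the following wasm text (illustrative, wasm32):

    block
      loop
        local.get $offset
        local.get $len
        i32.eq
        br_if 1          ;; counter reached len: done
        local.get $ptr
        local.get $offset
        i32.add
        local.get $value
        i32.store8       ;; *(ptr + offset) = value
        local.get $offset
        i32.const 1
        i32.add
        local.set $offset
        br 0             ;; next byte
      end
    end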