From 7b4f3c7cfce202117f62b12e9036e5c8ca6a0e56 Mon Sep 17 00:00:00 2001
From: joachimschmidt557
Date: Tue, 20 Oct 2020 11:51:55 +0200
Subject: [PATCH] stage2 ARM: genSetStack and genSetReg from stack

---
 src/codegen.zig     | 78 ++++++++++++++++++++++++++++++++++++++++++---
 src/codegen/arm.zig |  2 +-
 2 files changed, 74 insertions(+), 6 deletions(-)

diff --git a/src/codegen.zig b/src/codegen.zig
index 01ebbe85ae..5e5215c992 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -573,17 +573,21 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
                     // sub sp, sp, #reloc
                     mem.writeIntLittle(u32, try self.code.addManyAsArray(4), Instruction.push(.al, .{ .fp, .lr }).toU32());
                     mem.writeIntLittle(u32, try self.code.addManyAsArray(4), Instruction.mov(.al, .fp, Instruction.Operand.reg(.sp, Instruction.Operand.Shift.none)).toU32());
-                    // TODO: prepare stack for local variables
-                    // const backpatch_reloc = try self.code.addManyAsArray(4);
+                    const backpatch_reloc = self.code.items.len;
+                    try self.code.resize(backpatch_reloc + 4);
 
                     try self.dbgSetPrologueEnd();
 
                     try self.genBody(self.mod_fn.analysis.success);
 
                     // Backpatch stack offset
-                    // const stack_end = self.max_end_stack;
-                    // const aligned_stack_end = mem.alignForward(stack_end, self.stack_align);
-                    // mem.writeIntLittle(u32, backpatch_reloc, Instruction.sub(.al, .sp, .sp, Instruction.Operand.imm()));
+                    const stack_end = self.max_end_stack;
+                    const aligned_stack_end = mem.alignForward(stack_end, self.stack_align);
+                    if (Instruction.Operand.fromU32(@intCast(u32, aligned_stack_end))) |op| {
+                        mem.writeIntLittle(u32, self.code.items[backpatch_reloc..][0..4], Instruction.sub(.al, .sp, .sp, op).toU32());
+                    } else {
+                        return self.fail(self.src, "TODO ARM: allow larger stacks", .{});
+                    }
 
                     try self.dbgSetEpilogueBegin();
 
@@ -2196,6 +2200,58 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
 
         fn genSetStack(self: *Self, src: usize, ty: Type, stack_offset: u32, mcv: MCValue) InnerError!void {
             switch (arch) {
+                .arm => switch (mcv) {
+                    .dead => unreachable,
+                    .ptr_stack_offset => unreachable,
+                    .ptr_embedded_in_code => unreachable,
+                    .unreach, .none => return, // Nothing to do.
+                    .undef => {
+                        if (!self.wantSafety())
+                            return; // The already existing value will do just fine.
+                        // TODO Upgrade this to a memset call when we have that available.
+                        switch (ty.abiSize(self.target.*)) {
+                            1 => return self.genSetStack(src, ty, stack_offset, .{ .immediate = 0xaa }),
+                            2 => return self.genSetStack(src, ty, stack_offset, .{ .immediate = 0xaaaa }),
+                            4 => return self.genSetStack(src, ty, stack_offset, .{ .immediate = 0xaaaaaaaa }),
+                            8 => return self.genSetStack(src, ty, stack_offset, .{ .immediate = 0xaaaaaaaaaaaaaaaa }),
+                            else => return self.fail(src, "TODO implement memset", .{}),
+                        }
+                    },
+                    .compare_flags_unsigned => |op| {
+                        return self.fail(src, "TODO implement set stack variable with compare flags value (unsigned)", .{});
+                    },
+                    .compare_flags_signed => |op| {
+                        return self.fail(src, "TODO implement set stack variable with compare flags value (signed)", .{});
+                    },
+                    .immediate => {
+                        const reg = try self.copyToTmpRegister(src, mcv);
+                        return self.genSetStack(src, ty, stack_offset, MCValue{ .register = reg });
+                    },
+                    .embedded_in_code => |code_offset| {
+                        return self.fail(src, "TODO implement set stack variable from embedded_in_code", .{});
+                    },
+                    .register => |reg| {
+                        // TODO: strb, strh
+                        if (stack_offset <= math.maxInt(u12)) {
+                            mem.writeIntLittle(u32, try self.code.addManyAsArray(4), Instruction.str(.al, reg, .fp, .{
+                                .offset = Instruction.Offset.imm(@intCast(u12, stack_offset)),
+                                .positive = false,
+                            }).toU32());
+                        } else {
+                            return self.fail(src, "TODO genSetStack with larger offsets", .{});
+                        }
+                    },
+                    .memory => |vaddr| {
+                        return self.fail(src, "TODO implement set stack variable from memory vaddr", .{});
+                    },
+                    .stack_offset => |off| {
+                        if (stack_offset == off)
+                            return; // Copy stack variable to itself; nothing to do.
+
+                        const reg = try self.copyToTmpRegister(src, mcv);
+                        return self.genSetStack(src, ty, stack_offset, MCValue{ .register = reg });
+                    },
+                },
                 .x86_64 => switch (mcv) {
                     .dead => unreachable,
                     .ptr_stack_offset => unreachable,
@@ -2352,6 +2408,18 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
                         try self.genSetReg(src, reg, .{ .immediate = addr });
                         mem.writeIntLittle(u32, try self.code.addManyAsArray(4), Instruction.ldr(.al, reg, reg, .{ .offset = Instruction.Offset.none }).toU32());
                     },
+                    .stack_offset => |unadjusted_off| {
+                        // TODO: ldrb, ldrh
+                        // TODO: maybe addressing from sp instead of fp
+                        if (unadjusted_off <= math.maxInt(u12)) {
+                            mem.writeIntLittle(u32, try self.code.addManyAsArray(4), Instruction.ldr(.al, reg, .fp, .{
+                                .offset = Instruction.Offset.imm(@intCast(u12, unadjusted_off)),
+                                .positive = false,
+                            }).toU32());
+                        } else {
+                            return self.fail(src, "TODO genSetReg with larger stack offset", .{});
+                        }
+                    },
                     else => return self.fail(src, "TODO implement getSetReg for arm {}", .{mcv}),
                 },
                 .riscv64 => switch (mcv) {
diff --git a/src/codegen/arm.zig b/src/codegen/arm.zig
index 296edabbb2..33ff789648 100644
--- a/src/codegen/arm.zig
+++ b/src/codegen/arm.zig
@@ -395,7 +395,7 @@ pub const Instruction = union(enum) {
             };
         }
 
-        pub fn imm(immediate: u8) Offset {
+        pub fn imm(immediate: u12) Offset {
             return Offset{
                 .Immediate = immediate,
             };