reglock draft 1
Signed-off-by: lizzie <lizzie@eden-emu.dev>
parent 20984c0764
commit 8d27a7c516
@@ -258,9 +258,9 @@ struct Context {
         base[offset++] = (op |
             bitExt(ra.index, 6, 5)
             | bitExt(rs.index, 11, 5)
-            | bitExt(sh, 16, 5)
-            | bitExt(mb, 21, 4)
-            | bitExt(me, 26, 4)
+            | ((sh & 0x1f) << 11)
+            | ((mb & 0x1f) << 6)
+            | ((me & 0x1f) << 1)
             | bitExt(rc, 31, 1)
         );
     }
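For reference, the replacement shift amounts correspond to the PowerPC rotate-instruction (M-form) field layout: SH in bits 16-20, MB in 21-25, ME in 26-30 and Rc in bit 31 (IBM bit numbering), i.e. SH << 11, MB << 6 and ME << 1 when counting from the least significant bit. A quick sanity sketch, not part of the commit:

// Field masks implied by the new shifts, assuming the usual M-form layout.
static_assert((0x1fu << 11) == 0x0000F800u, "SH occupies bits 16-20");
static_assert((0x1fu << 6)  == 0x000007C0u, "MB occupies bits 21-25");
static_assert((0x1fu << 1)  == 0x0000003Eu, "ME occupies bits 26-30");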
@@ -359,6 +359,8 @@ struct Context {
     void CMPDI(GPR const rx, uint32_t si) { CMPI(0, 1, rx, si); }
     void CMPD(GPR const rx, GPR const ry) { CMP(0, 1, rx, ry); }

+    void LI(GPR const rx, uint32_t value) { ADDI(rx, R0, value); }
+
     void BLR() {
         base[offset++] = 0x4e800020; //BCLR(R0, CR0, R0);
     }
@@ -17,41 +17,37 @@
 namespace Dynarmic::Backend::PPC64 {

 template<>
-void EmitIR<IR::Opcode::A64SetCheckBit>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetCheckBit>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetCFlag>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetCFlag>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetNZCVRaw>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetNZCVRaw>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64SetNZCVRaw>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetNZCVRaw>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64SetNZCV>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetNZCV>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetW>(powah::Context&, EmitContext&, IR::Inst*) {
-    ASSERT(false && "unimp");
-}
-
-template<>
-void EmitIR<IR::Opcode::A64GetX>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
+void EmitIR<IR::Opcode::A64GetW>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     if (inst->GetArg(0).GetType() == IR::Type::A64Reg) {
         powah::GPR const result = ctx.reg_alloc.ScratchGpr();
-        code.ADDI(result, PPC64::RJIT, A64::RegNumber(inst->GetArg(0).GetA64RegRef()) * sizeof(u64));
-        code.LD(result, result, offsetof(A64JitState, regs));
+        auto const offs = offsetof(A64JitState, regs)
+            + A64::RegNumber(inst->GetArg(0).GetA64RegRef()) * sizeof(u64);
+        code.LWZ(result, PPC64::RJIT, offs);
         ctx.reg_alloc.DefineValue(inst, result);
     } else {
         ASSERT(false && "unimp");
@@ -59,38 +55,58 @@ void EmitIR<IR::Opcode::A64GetX>(powah::Context& code, EmitContext& ctx, IR::Ins
 }

 template<>
-void EmitIR<IR::Opcode::A64GetS>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetX>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
+    if (inst->GetArg(0).GetType() == IR::Type::A64Reg) {
+        powah::GPR const result = ctx.reg_alloc.ScratchGpr();
+        auto const offs = offsetof(A64JitState, regs)
+            + A64::RegNumber(inst->GetArg(0).GetA64RegRef()) * sizeof(u64);
+        code.LD(result, PPC64::RJIT, offs);
+        ctx.reg_alloc.DefineValue(inst, result);
+    } else {
+        ASSERT(false && "unimp");
+    }
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetS>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetD>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetD>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetQ>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetQ>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetSP>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetSP>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetFPCR>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetFPCR>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetFPSR>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetFPSR>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64SetW>(powah::Context&, EmitContext&, IR::Inst*) {
-    ASSERT(false && "unimp");
+void EmitIR<IR::Opcode::A64SetW>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
+    powah::GPR const value = ctx.reg_alloc.UseGpr(inst->GetArg(1));
+    if (inst->GetArg(0).GetType() == IR::Type::A64Reg) {
+        powah::GPR const addr = ctx.reg_alloc.ScratchGpr();
+        code.ADDI(addr, PPC64::RJIT, A64::RegNumber(inst->GetArg(0).GetA64RegRef()) * sizeof(u64));
+        code.STD(value, addr, offsetof(A64JitState, regs));
+    } else {
+        ASSERT(false && "unimp");
+    }
 }

 template<>
@@ -106,213 +122,213 @@ void EmitIR<IR::Opcode::A64SetX>(powah::Context& code, EmitContext& ctx, IR::Ins
 }

 template<>
-void EmitIR<IR::Opcode::A64SetS>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetS>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64SetD>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetD>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64SetQ>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetQ>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64SetSP>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetSP>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64SetFPCR>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetFPCR>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64SetFPSR>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetFPSR>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64SetPC>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetPC>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64CallSupervisor>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64CallSupervisor>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExceptionRaised>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExceptionRaised>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64DataCacheOperationRaised>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64DataCacheOperationRaised>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64InstructionCacheOperationRaised>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64InstructionCacheOperationRaised>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64DataSynchronizationBarrier>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64DataSynchronizationBarrier>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64DataMemoryBarrier>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64DataMemoryBarrier>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64InstructionSynchronizationBarrier>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64InstructionSynchronizationBarrier>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetCNTFRQ>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetCNTFRQ>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetCNTPCT>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetCNTPCT>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetCTR>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetCTR>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetDCZID>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetDCZID>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetTPIDR>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetTPIDR>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64GetTPIDRRO>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64GetTPIDRRO>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64SetTPIDR>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64SetTPIDR>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 // Memory
 template<>
-void EmitIR<IR::Opcode::A64ClearExclusive>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ClearExclusive>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ReadMemory8>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ReadMemory8>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ReadMemory16>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ReadMemory16>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ReadMemory32>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ReadMemory32>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ReadMemory64>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ReadMemory64>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ReadMemory128>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ReadMemory128>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExclusiveReadMemory8>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExclusiveReadMemory8>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExclusiveReadMemory16>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExclusiveReadMemory16>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExclusiveReadMemory32>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExclusiveReadMemory32>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExclusiveReadMemory64>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExclusiveReadMemory64>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExclusiveReadMemory128>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExclusiveReadMemory128>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64WriteMemory8>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64WriteMemory8>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64WriteMemory16>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64WriteMemory16>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64WriteMemory32>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64WriteMemory32>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64WriteMemory64>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64WriteMemory64>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64WriteMemory128>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64WriteMemory128>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExclusiveWriteMemory8>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExclusiveWriteMemory8>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExclusiveWriteMemory16>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExclusiveWriteMemory16>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExclusiveWriteMemory32>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExclusiveWriteMemory32>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExclusiveWriteMemory64>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExclusiveWriteMemory64>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }

 template<>
-void EmitIR<IR::Opcode::A64ExclusiveWriteMemory128>(powah::Context&, EmitContext&, IR::Inst*) {
+void EmitIR<IR::Opcode::A64ExclusiveWriteMemory128>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     ASSERT(false && "unimp");
 }
@@ -754,26 +754,24 @@ void EmitIR<IR::Opcode::ByteReverseHalf>(powah::Context& code, EmitContext& ctx,

 template<>
 void EmitIR<IR::Opcode::ByteReverseDual>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
-    powah::GPR const result = ctx.reg_alloc.ScratchGpr();
     powah::GPR const source = ctx.reg_alloc.UseGpr(inst->GetArg(0));
     if (false) {
         //code.BRD(result, source);
     } else {
-        powah::GPR const tmp = ctx.reg_alloc.ScratchGpr();
-        code.ROTLDI(tmp, source, 16);
-        code.ROTLDI(result, source, 8);
-        code.RLDIMI(result, tmp, 8, 48);
-        code.ROTLDI(tmp, source, 24);
-        code.RLDIMI(result, tmp, 16, 40);
-        code.ROTLDI(tmp, source, 32);
-        code.RLDIMI(result, tmp, 24, 32);
-        code.ROTLDI(tmp, source, 48);
-        code.RLDIMI(result, tmp, 40, 16);
-        code.ROTLDI(tmp, source, 56);
-        code.RLDIMI(result, tmp, 48, 8);
-        code.RLDIMI(result, source, 56, 0);
+        powah::GPR const tmp10 = ctx.reg_alloc.ScratchGpr();
+        powah::GPR const tmp9 = ctx.reg_alloc.ScratchGpr();
+        powah::GPR const tmp3 = ctx.reg_alloc.ScratchGpr();
+        code.MR(tmp3, source);
+        code.ROTLWI(tmp10, tmp3, 24);
+        code.SRDI(tmp9, tmp3, 32);
+        code.RLWIMI(tmp10, tmp3, 8, 8, 15);
+        code.RLWIMI(tmp10, tmp3, 8, 24, 31);
+        code.ROTLWI(tmp3, tmp9, 24);
+        code.RLWIMI(tmp3, tmp9, 8, 8, 15);
+        code.RLWIMI(tmp3, tmp9, 8, 24, 31);
+        code.RLDIMI(tmp3, tmp10, 32, 0);
+        ctx.reg_alloc.DefineValue(inst, tmp3);
     }
-    ctx.reg_alloc.DefineValue(inst, result);
 }

 // __builtin_clz
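The rotate-and-insert sequence added above byte-reverses a 64-bit value by byte-swapping each 32-bit half and then exchanging the halves. A portable sketch of the same result, for illustration only (byte_reverse_dual is a made-up name, not something from the commit):

#include <cstdint>

// Reference semantics of the ROTLWI/SRDI/RLWIMI/RLDIMI sequence:
// bswap32 each half of the input, then swap the halves.
static inline uint64_t byte_reverse_dual(uint64_t x) {
    auto const bswap32 = [](uint32_t v) -> uint32_t {
        return (v << 24) | ((v & 0x0000ff00u) << 8) | ((v >> 8) & 0x0000ff00u) | (v >> 24);
    };
    uint32_t const lo = static_cast<uint32_t>(x);
    uint32_t const hi = static_cast<uint32_t>(x >> 32);
    return (static_cast<uint64_t>(bswap32(lo)) << 32) | bswap32(hi);
}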
@@ -62,14 +62,14 @@ void RegAlloc::AssertNoMoreUses() const {
     ASSERT(std::all_of(spills.begin(), spills.end(), is_empty));
 }

-std::optional<u32> RegAlloc::AllocateRegister(const std::array<HostLocInfo, 32>& regs, const std::vector<u32>& order) const {
+std::optional<u32> RegAlloc::AllocateRegister(const std::array<HostLocInfo, 32>& regs) const {
+    auto const order = PPC64::GPR_ORDER;
     if (auto const it = std::find_if(order.begin(), order.end(), [&](u32 i) {
         return regs[i].values.empty() && !regs[i].locked;
     }); it != order.end())
         return *it;
     std::vector<u32> candidates;
     std::copy_if(order.begin(), order.end(), std::back_inserter(candidates), [&](u32 i) { return !regs[i].locked; });
+    return candidates.empty() ? std::nullopt : std::optional<u32>{candidates[0]}; // TODO: LRU
-    // TODO: Actual proper LRU
-    return std::nullopt;
 }

 void RegAlloc::SpillGpr(u32 index) {
@@ -151,14 +151,14 @@ HostLocInfo& RegAlloc::ValueInfo(const IR::Inst* value) {
     ASSERT(false && "unimp");
 }

-/// @brief Defines a register temporal to use (and locks it)
-powah::GPR RegAlloc::ScratchGpr() {
-    auto const r = AllocateRegister(gprs, PPC64::GPR_ORDER);
-    return powah::GPR{*r};
+/// @brief Defines a register RegLock to use (and locks it)
+RegLock<powah::GPR> RegAlloc::ScratchGpr() {
+    auto const r = AllocateRegister(gprs);
+    return RegLock(*this, powah::GPR{*r});
 }

 /// @brief Uses the given GPR of the argument
-powah::GPR RegAlloc::UseGpr(IR::Value arg) {
+RegLock<powah::GPR> RegAlloc::UseGpr(IR::Value arg) {
     if (arg.IsImmediate()) {
         // HOLY SHIT EVIL HAXX
         auto const reg = ScratchGpr();
@@ -172,7 +172,7 @@ powah::GPR RegAlloc::UseGpr(IR::Value arg) {
     } else {
         auto const loc = ValueLocation(arg.GetInst());
         ASSERT(loc && HostLocIsGpr(*loc));
-        return std::get<powah::GPR>(HostLocToReg(*loc));
+        return RegLock(*this, std::get<powah::GPR>(HostLocToReg(*loc)));
     }
 }

@@ -184,7 +184,7 @@ void RegAlloc::DefineValue(IR::Inst* inst, powah::GPR const gpr) noexcept {
 void RegAlloc::DefineValue(IR::Inst* inst, IR::Value arg) noexcept {
     ASSERT(!ValueLocation(inst) && "inst has already been defined");
     if (arg.IsImmediate()) {
-        HostLoc const loc{u8(ScratchGpr().index)};
+        HostLoc const loc{u8(ScratchGpr().value.index)};
         ValueInfo(loc).values.push_back(inst);
         auto const value = arg.GetImmediateAsU64();
         if (value >= 0x7fff) {
@@ -22,8 +22,6 @@

 namespace Dynarmic::Backend::PPC64 {

-class RegAlloc;
-
 struct HostLocInfo final {
     std::vector<const IR::Inst*> values;
     size_t uses_this_inst = 0;
@@ -45,8 +43,27 @@ struct HostLocInfo final {
     void UpdateUses();
 };

-class RegAlloc {
-public:
+struct RegAlloc;
+
+/// @brief Allows to use RAII to denote liveness/locking of a given register
+/// this basically means that we can use temporals and not need to go thru
+/// any weird deallocation stuffs :)
+template<typename T> struct RegLock {
+    inline RegLock(RegAlloc& reg_alloc, T const value) noexcept
+        : reg_alloc{reg_alloc}
+        , value{value}
+    {
+        SetLock(true);
+    }
+    inline ~RegLock() noexcept { SetLock(false); }
+    operator T const&() { return value; }
+    operator T() const { return value; }
+    inline void SetLock(bool v) noexcept;
+    RegAlloc& reg_alloc;
+    const T value;
+};
+
+struct RegAlloc {
     explicit RegAlloc(powah::Context& code) : code{code} {}
     bool IsValueLive(IR::Inst* inst) const;
     void DefineAsExisting(IR::Inst* inst, IR::Value arg);
@@ -55,12 +72,15 @@ public:
     void UpdateAllUses();
     void AssertNoMoreUses() const;

-    powah::GPR ScratchGpr();
-    powah::GPR UseGpr(IR::Value arg);
+    RegLock<powah::GPR> ScratchGpr();
+    RegLock<powah::GPR> UseGpr(IR::Value arg);
     void DefineValue(IR::Inst* inst, powah::GPR const gpr) noexcept;
     void DefineValue(IR::Inst* inst, IR::Value arg) noexcept;
 private:
-    std::optional<u32> AllocateRegister(const std::array<HostLocInfo, 32>& regs, const std::vector<u32>& order) const;
+    template<typename T>
+    friend struct RegLock;
+
+    std::optional<u32> AllocateRegister(const std::array<HostLocInfo, 32>& regs) const;
     void SpillGpr(u32 index);
     void SpillFpr(u32 index);
     u32 FindFreeSpill() const;
@@ -74,6 +94,17 @@ private:
     std::array<HostLocInfo, 32> fprs;
     std::array<HostLocInfo, 32> vprs;
     std::array<HostLocInfo, SpillCount> spills;
+    uint32_t lru_counter = 0;
 };

+template<> inline void RegLock<powah::GPR>::SetLock(bool v) noexcept {
+    reg_alloc.gprs[value.index].locked = v;
+}
+template<> inline void RegLock<powah::FPR>::SetLock(bool v) noexcept {
+    reg_alloc.fprs[value.index].locked = v;
+}
+template<> inline void RegLock<powah::VPR>::SetLock(bool v) noexcept {
+    reg_alloc.vprs[value.index].locked = v;
+}
+
 } // namespace Dynarmic::Backend::RV64
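For context on how the RegLock introduced in this draft is meant to be used: ScratchGpr() and UseGpr() now return a RegLock<powah::GPR> whose constructor sets the register's locked flag and whose destructor clears it, so liveness follows C++ scope instead of manual bookkeeping. A minimal usage sketch (EmitSomething is illustrative only, and it assumes powah::Context is the emitter Context shown in the first hunks):

// Illustrative only: RAII behaviour of RegLock as declared in the header above.
void EmitSomething(powah::Context& code, Dynarmic::Backend::PPC64::RegAlloc& reg_alloc) {
    auto const tmp = reg_alloc.ScratchGpr();  // RegLock<powah::GPR>; constructor does SetLock(true)
    code.LI(tmp, 42);                         // converts to powah::GPR via operator T() const
}                                             // ~RegLock() does SetLock(false); register is free again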