first emitted block
Signed-off-by: lizzie <lizzie@eden-emu.dev>
This commit is contained in:
parent e8523ee006
commit 963c86ff74
@@ -23,11 +23,8 @@ public:

protected:
    friend class A64Core;

    void EmitPrelude();
    EmitConfig GetEmitConfig() override;
    void RegisterNewBasicBlock(const IR::Block& block, const EmittedBlockInfo& block_info) override;

    const A64::UserConfig conf;
    BlockRangeInformation<u64> block_ranges;
};
@@ -30,47 +30,29 @@ struct A32JitState {
class A32AddressSpace final {
public:
    explicit A32AddressSpace(const A32::UserConfig& conf);

    IR::Block GenerateIR(IR::LocationDescriptor) const;

    CodePtr Get(IR::LocationDescriptor descriptor);

    CodePtr GetOrEmit(IR::LocationDescriptor descriptor);

    void ClearCache();

private:
    friend class A32Core;

    void EmitPrelude();
    EmittedBlockInfo Emit(IR::Block ir_block);
    void Link(EmittedBlockInfo& block);

    const A32::UserConfig conf;

    CodeBlock cb;
    powah::Context as;

    ankerl::unordered_dense::map<u64, CodePtr> block_entries;
    ankerl::unordered_dense::map<u64, EmittedBlockInfo> block_infos;

    struct PreludeInfo {
        CodePtr end_of_prelude;

        using RunCodeFuncType = HaltReason (*)(CodePtr entry_point, A32JitState* context, volatile u32* halt_reason);
        RunCodeFuncType run_code;
        CodePtr return_from_run_code;
    } prelude_info;
};

class A32Core final {
public:
    explicit A32Core(const A32::UserConfig&) {}

    HaltReason Run(A32AddressSpace& process, A32JitState& thread_ctx, volatile u32* halt_reason) {
        const auto location_descriptor = thread_ctx.GetLocationDescriptor();
        const auto entry_point = process.GetOrEmit(location_descriptor);
        return process.prelude_info.run_code(entry_point, &thread_ctx, halt_reason);
        auto const loc = thread_ctx.GetLocationDescriptor();
        auto const entry = process.GetOrEmit(loc);
        using CodeFn = HaltReason (*)(A32JitState*, volatile u32*);
        return (CodeFn(entry))(&thread_ctx, halt_reason);
    }
};
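A note on the Run() change above: instead of entering emitted code through prelude_info.run_code, the cached entry point is now cast to a function pointer and called directly with the JIT state and the halt-reason word. A minimal sketch of that dispatch shape, using hypothetical stand-in types rather than the real dynarmic definitions, could look like this:

```cpp
#include <cstdint>

// Hypothetical stand-ins for the dynarmic types named in the diff (CodePtr,
// HaltReason, A32JitState); only the call shape is illustrated here.
using CodePtr = const void*;
using HaltReason = std::uint32_t;
struct A32JitState { std::uint64_t regs[16]; };

HaltReason RunEmittedBlock(CodePtr entry, A32JitState* ctx, volatile std::uint32_t* halt_reason) {
    // The emitted block is entered as if it were an ordinary function taking
    // (context, halt_reason); no prelude/trampoline sits in between.
    using CodeFn = HaltReason (*)(A32JitState*, volatile std::uint32_t*);
    return reinterpret_cast<CodeFn>(const_cast<void*>(entry))(ctx, halt_reason);
}
```

Calling the block directly only works if the emitted code behaves like an ordinary function, which is what the non-volatile register saves/restores and the final blr in EmitPPC64 further down are for.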
@@ -23,7 +23,7 @@ A32AddressSpace::A32AddressSpace(const A32::UserConfig& conf)
    : conf(conf)
    , cb(conf.code_cache_size)
    , as(cb.ptr<u8*>(), conf.code_cache_size) {
    EmitPrelude();

}

IR::Block A32AddressSpace::GenerateIR(IR::LocationDescriptor descriptor) const {
@@ -33,15 +33,13 @@ IR::Block A32AddressSpace::GenerateIR(IR::LocationDescriptor descriptor) const {
}

CodePtr A32AddressSpace::Get(IR::LocationDescriptor descriptor) {
    if (const auto iter = block_entries.find(descriptor.Value()); iter != block_entries.end())
        return iter->second;
    return nullptr;
    auto const it = block_entries.find(descriptor.Value());
    return it != block_entries.end() ? it->second : nullptr;
}

CodePtr A32AddressSpace::GetOrEmit(IR::LocationDescriptor descriptor) {
    if (CodePtr block_entry = Get(descriptor)) {
    if (CodePtr block_entry = Get(descriptor); block_entry != nullptr)
        return block_entry;
    }

    IR::Block ir_block = GenerateIR(descriptor);
    const EmittedBlockInfo block_info = Emit(std::move(ir_block));
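GetOrEmit above is a plain look-up-then-compile cache: the location descriptor's value keys a map of entry points, and a miss triggers GenerateIR plus Emit. A generic sketch of that pattern, with hypothetical names and std::unordered_map standing in for ankerl::unordered_dense::map, might read:

```cpp
#include <cstdint>
#include <unordered_map>

// Sketch of the lookup-or-emit pattern; CodePtr and the emit callback are
// placeholders, not the real dynarmic types.
using CodePtr = const void*;

struct BlockCache {
    std::unordered_map<std::uint64_t, CodePtr> block_entries;

    CodePtr Get(std::uint64_t key) const {
        auto const it = block_entries.find(key);
        return it != block_entries.end() ? it->second : nullptr;
    }

    template<typename EmitFn>
    CodePtr GetOrEmit(std::uint64_t key, EmitFn&& emit) {
        if (CodePtr entry = Get(key); entry != nullptr)
            return entry;                        // fast path: already compiled
        CodePtr compiled = emit(key);            // compile the block on a cache miss
        block_entries.emplace(key, compiled);    // remember it for the next lookup
        return compiled;
    }
};
```

In the diff, ClearCache later simply clears both block_entries and block_infos, throwing away every cached entry point at once.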
@@ -56,10 +54,6 @@ void A32AddressSpace::ClearCache() {
    block_infos.clear();
}

void A32AddressSpace::EmitPrelude() {
    UNREACHABLE();
}

EmittedBlockInfo A32AddressSpace::Emit(IR::Block block) {
    EmittedBlockInfo block_info = EmitPPC64(as, std::move(block), {
        .enable_cycle_counting = conf.enable_cycle_counting,
@@ -70,7 +64,7 @@ EmittedBlockInfo A32AddressSpace::Emit(IR::Block block) {
}

void A32AddressSpace::Link(EmittedBlockInfo& block_info) {
    UNREACHABLE();
    //UNREACHABLE();
}

}
@@ -160,7 +154,6 @@ struct Jit::Impl final {
private:
    void RequestCacheInvalidation() {
        // UNREACHABLE();

        invalidate_entire_cache = false;
        invalid_cache_ranges.clear();
    }
@@ -52,12 +52,6 @@ private:
    powah::Context as;
    ankerl::unordered_dense::map<u64, CodePtr> block_entries;
    ankerl::unordered_dense::map<u64, EmittedBlockInfo> block_infos;
    struct PreludeInfo {
        CodePtr end_of_prelude;
        using RunCodeFuncType = HaltReason (*)(CodePtr entry_point, A64JitState* context, volatile u32* halt_reason);
        RunCodeFuncType run_code;
        CodePtr return_from_run_code;
    } prelude_info;
};

class A64Core final {
@@ -65,9 +59,10 @@ public:
    explicit A64Core(const A64::UserConfig&) {}

    HaltReason Run(A64AddressSpace& process, A64JitState& thread_ctx, volatile u32* halt_reason) {
        const auto location_descriptor = thread_ctx.GetLocationDescriptor();
        const auto entry_point = process.GetOrEmit(location_descriptor);
        return process.prelude_info.run_code(entry_point, &thread_ctx, halt_reason);
        const auto loc = thread_ctx.GetLocationDescriptor();
        const auto entry = process.GetOrEmit(loc);
        using CodeFn = HaltReason (*)(A64JitState*, volatile u32*);
        return (CodeFn(entry))(&thread_ctx, halt_reason);
    }
};
@@ -22,7 +22,7 @@ A64AddressSpace::A64AddressSpace(const A64::UserConfig& conf)
    : conf(conf)
    , cb(conf.code_cache_size)
    , as(cb.ptr<u8*>(), conf.code_cache_size) {
    EmitPrelude();

}

CodePtr A64AddressSpace::Get(IR::LocationDescriptor descriptor) {
@@ -52,10 +52,6 @@ void A64AddressSpace::ClearCache() {
    block_infos.clear();
}

void A64AddressSpace::EmitPrelude() {
    UNREACHABLE();
}

EmittedBlockInfo A64AddressSpace::Emit(IR::Block block) {
    EmittedBlockInfo block_info = EmitPPC64(as, std::move(block), {
        .enable_cycle_counting = conf.enable_cycle_counting,
@@ -66,7 +62,8 @@ EmittedBlockInfo A64AddressSpace::Emit(IR::Block block) {
}

void A64AddressSpace::Link(EmittedBlockInfo& block_info) {
    UNREACHABLE();
    // TODO(lizzie): Block linking
    // UNREACHABLE();
}
}
@@ -15,7 +15,9 @@ constexpr powah::GPR ABI_PARAM2 = powah::R4;
constexpr powah::GPR ABI_PARAM3 = powah::R5;
constexpr powah::GPR ABI_PARAM4 = powah::R6;

constexpr std::initializer_list<u32> GPR_ORDER{8, 9, 18, 19, 20, 21, 22, 23, 24, 25, 5, 6, 7, 28, 29, 10, 11, 12, 13, 14, 15, 16, 17};
constexpr std::initializer_list<u32> FPR_ORDER{8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31};
// See https://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#REG
constexpr std::initializer_list<u32> GPR_ORDER{
    14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31
};

} // namespace Dynarmic::Backend::RV64
@@ -15,22 +15,23 @@ namespace Dynarmic::Backend::PPC64 {

class CodeBlock {
public:
    explicit CodeBlock(std::size_t size) noexcept : memsize(size) {
        mem = (u8*)mmap(nullptr, size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        ASSERT(mem != nullptr);
    explicit CodeBlock(size_t size_) noexcept : size{size_} {
        block = (u8*)mmap(nullptr, size, PROT_WRITE | PROT_READ | PROT_EXEC, MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
        //printf("block = %p, size= %zx\n", block, size);
        ASSERT(block != nullptr);
    }
    ~CodeBlock() noexcept {
        if (mem != nullptr)
            munmap(mem, memsize);
        if (block != nullptr)
            munmap(block, size);
    }
    template<typename T>
    T ptr() const noexcept {
        static_assert(std::is_pointer_v<T> || std::is_same_v<T, uintptr_t> || std::is_same_v<T, intptr_t>);
        return reinterpret_cast<T>(mem);
        return reinterpret_cast<T>(block);
    }
protected:
    u8* mem = nullptr;
    size_t memsize = 0;
    void* block = nullptr;
    size_t size = 0;
};

} // namespace Dynarmic::Backend::RV64
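The CodeBlock rewrite above maps the code buffer readable, writable, and executable in a single mmap call and renames mem/memsize to block/size. A minimal RAII sketch of the same idea follows; note that POSIX mmap reports failure with MAP_FAILED rather than nullptr, so that is the value this sketch checks, and whether a single RWX mapping (versus W^X with a later mprotect) is acceptable is left as a platform decision rather than something the diff settles:

```cpp
#include <sys/mman.h>
#include <cassert>
#include <cstddef>
#include <cstdint>

// Minimal RAII owner for an executable code buffer, mirroring the CodeBlock in the diff.
class ExecutableBlock {
public:
    explicit ExecutableBlock(std::size_t size) : size_{size} {
        mem_ = mmap(nullptr, size_, PROT_READ | PROT_WRITE | PROT_EXEC,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        assert(mem_ != MAP_FAILED);  // mmap signals failure with MAP_FAILED, not nullptr
    }
    ExecutableBlock(const ExecutableBlock&) = delete;
    ExecutableBlock& operator=(const ExecutableBlock&) = delete;
    ~ExecutableBlock() {
        if (mem_ != MAP_FAILED)
            munmap(mem_, size_);
    }

    std::uint8_t* data() const { return static_cast<std::uint8_t*>(mem_); }
    std::size_t size() const { return size_; }

private:
    void* mem_ = MAP_FAILED;
    std::size_t size_ = 0;
};
```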
@@ -87,26 +87,33 @@ void EmitIR<IR::Opcode::NZCVFromPackedFlags>(powah::Context&, EmitContext&, IR::
    UNREACHABLE();
}

EmittedBlockInfo EmitPPC64(powah::Context& as, IR::Block block, const EmitConfig& emit_conf) {
EmittedBlockInfo EmitPPC64(powah::Context& code, IR::Block block, const EmitConfig& emit_conf) {
    EmittedBlockInfo ebi;
    RegAlloc reg_alloc{as};
    RegAlloc reg_alloc{code};
    EmitContext ctx{block, reg_alloc, emit_conf, ebi};
    //ebi.entry_point = reinterpret_cast<CodePtr>(as.GetCursorPointer());
    ebi.entry_point = CodePtr(code.base + code.offset);

    code.BLR();

    // Non-volatile saves
    std::vector<u32> gpr_order{GPR_ORDER};
    for (size_t i = 0; i < gpr_order.size(); ++i)
        code.STD(powah::GPR{gpr_order[i]}, powah::R1, -(i * 8));

    for (auto iter = block.begin(); iter != block.end(); ++iter) {
        IR::Inst* inst = &*iter;

        switch (inst->GetOpcode()) {
#define OPCODE(name, type, ...) \
    case IR::Opcode::name: \
        EmitIR<IR::Opcode::name>(as, ctx, inst); \
        EmitIR<IR::Opcode::name>(code, ctx, inst); \
        break;
#define A32OPC(name, type, ...) \
    case IR::Opcode::A32##name: \
        EmitIR<IR::Opcode::A32##name>(as, ctx, inst); \
        EmitIR<IR::Opcode::A32##name>(code, ctx, inst); \
        break;
#define A64OPC(name, type, ...) \
    case IR::Opcode::A64##name: \
        EmitIR<IR::Opcode::A64##name>(as, ctx, inst); \
        EmitIR<IR::Opcode::A64##name>(code, ctx, inst); \
        break;
#include "dynarmic/ir/opcodes.inc"
#undef OPCODE
@@ -116,8 +123,11 @@ EmittedBlockInfo EmitPPC64(powah::Context& as, IR::Block block, const EmitConfig
            UNREACHABLE();
        }
    }
    //UNREACHABLE();
    //ebi.size = reinterpret_cast<CodePtr>(as.GetCursorPointer()) - ebi.entry_point;

    for (size_t i = 0; i < gpr_order.size(); ++i)
        code.LD(powah::GPR{gpr_order[i]}, powah::R1, -(i * 8));
    code.BLR();
    ebi.size = code.offset;
    return ebi;
}
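The epilogue above mirrors the prologue from the earlier hunk: the same GPR_ORDER registers are reloaded from the same stack-relative slots before the final blr, and ebi.size is taken from the emitter's offset. A self-contained sketch of that save/restore pairing, with a hypothetical Assembler standing in for powah::Context, could look like this. The diff uses -(i * 8), whose first slot is 0(r1); the sketch starts at -8 so every slot sits strictly below the stack pointer, which is an assumption about the intended frame layout rather than something the diff states:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical emitter stand-in: it only prints the instructions it would emit.
struct Assembler {
    void STD(std::uint32_t rs, std::uint32_t ra, std::int32_t off) {
        std::printf("std r%u, %d(r%u)\n", static_cast<unsigned>(rs), static_cast<int>(off), static_cast<unsigned>(ra));
    }
    void LD(std::uint32_t rt, std::uint32_t ra, std::int32_t off) {
        std::printf("ld r%u, %d(r%u)\n", static_cast<unsigned>(rt), static_cast<int>(off), static_cast<unsigned>(ra));
    }
    void BLR() { std::printf("blr\n"); }
};

constexpr std::uint32_t R1 = 1;  // PPC64 stack pointer

// Prologue: store each non-volatile GPR below r1.
void EmitSaves(Assembler& as, const std::vector<std::uint32_t>& gprs) {
    for (std::size_t i = 0; i < gprs.size(); ++i)
        as.STD(gprs[i], R1, -static_cast<std::int32_t>((i + 1) * 8));
}

// Epilogue: reload every register from the matching slot, then return.
void EmitRestoresAndReturn(Assembler& as, const std::vector<std::uint32_t>& gprs) {
    for (std::size_t i = 0; i < gprs.size(); ++i)
        as.LD(gprs[i], R1, -static_cast<std::int32_t>((i + 1) * 8));
    as.BLR();
}

int main() {
    Assembler as;
    const std::vector<std::uint32_t> gprs{14, 15, 16};  // subset of the non-volatile GPR_ORDER
    EmitSaves(as, gprs);
    EmitRestoresAndReturn(as, gprs);
}
```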