backend/arm64: Update for oaknut 2.0.0.
Also respect DYNARMIC_ENABLE_NO_EXECUTE_SUPPORT.
parent 6f3b6d35f0
commit bbc058c76b

8 changed files with 99 additions and 84 deletions
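The changes below track oaknut 2.0.0's API, which takes separate write and execute pointers for the code buffer and does cursor bookkeeping by offset (xptr/set_xptr, offset/set_offset) instead of raw pointers. A minimal sketch of that pattern, assuming a small RWX scratch block; the add_one helper and the 4096-byte size are illustrative and not part of this commit, while CodeBlock, the two-pointer CodeGenerator constructor, xptr, offset and protect/unprotect are the oaknut calls exercised by the diff:

// Sketch only: mirrors the oaknut 2.0.0 usage this commit migrates to.
#include <cstdint>

#include <oaknut/code_block.hpp>
#include <oaknut/oaknut.hpp>

int main() {
    oaknut::CodeBlock mem{4096};
    // 2.0.0: the generator takes a write pointer and an execute pointer
    // (identical here, mirroring AddressSpace's code(mem.ptr(), mem.ptr())).
    oaknut::CodeGenerator code{mem.ptr(), mem.ptr()};

    using namespace oaknut::util;

    mem.unprotect();

    // xptr<T>() replaces ptr<T>(): it returns a pointer into the execute view.
    auto add_one = code.xptr<std::uint64_t (*)(std::uint64_t)>();
    code.ADD(X0, X0, 1);
    code.RET();

    // Cursor bookkeeping is now offset-based (cf. end_of_prelude / set_offset).
    const std::ptrdiff_t end = code.offset();
    (void)end;

    mem.invalidate_all();
    mem.protect();

    return add_one(41) == 42 ? 0 : 1;
}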
@@ -28,7 +28,7 @@ static void* EmitCallTrampoline(oaknut::CodeGenerator& code, T* this_) {
 
     oaknut::Label l_addr, l_this;
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     code.LDR(X0, l_this);
     code.LDR(Xscratch0, l_addr);
     code.BR(Xscratch0);
@@ -52,7 +52,7 @@ static void* EmitWrappedReadCallTrampoline(oaknut::CodeGenerator& code, T* this_
 
     constexpr u64 save_regs = ABI_CALLER_SAVE & ~ToRegList(Xscratch0);
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     ABI_PushRegisters(code, save_regs, 0);
     code.LDR(X0, l_this);
     code.MOV(X1, Xscratch0);
@@ -83,7 +83,7 @@ static void* EmitExclusiveReadCallTrampoline(oaknut::CodeGenerator& code, const
         });
     };
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     code.LDR(X0, l_this);
     code.LDR(Xscratch0, l_addr);
     code.BR(Xscratch0);
@@ -107,7 +107,7 @@ static void* EmitWrappedWriteCallTrampoline(oaknut::CodeGenerator& code, T* this
 
     constexpr u64 save_regs = ABI_CALLER_SAVE;
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     ABI_PushRegisters(code, save_regs, 0);
     code.LDR(X0, l_this);
     code.MOV(X1, Xscratch0);
@@ -141,7 +141,7 @@ static void* EmitExclusiveWriteCallTrampoline(oaknut::CodeGenerator& code, const
                    : 1;
     };
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     code.LDR(X0, l_this);
     code.LDR(Xscratch0, l_addr);
     code.BR(Xscratch0);
@@ -188,7 +188,7 @@ void A32AddressSpace::InvalidateCacheRanges(const boost::icl::interval_set<u32>&
 void A32AddressSpace::EmitPrelude() {
     using namespace oaknut::util;
 
-    mem.unprotect();
+    UnprotectCodeMemory();
 
     prelude_info.read_memory_8 = EmitCallTrampoline<&A32::UserCallbacks::MemoryRead8>(code, conf.callbacks);
     prelude_info.read_memory_16 = EmitCallTrampoline<&A32::UserCallbacks::MemoryRead16>(code, conf.callbacks);
@@ -222,7 +222,7 @@ void A32AddressSpace::EmitPrelude() {
 
     oaknut::Label return_from_run_code, l_return_to_dispatcher;
 
-    prelude_info.run_code = code.ptr<PreludeInfo::RunCodeFuncType>();
+    prelude_info.run_code = code.xptr<PreludeInfo::RunCodeFuncType>();
     {
         ABI_PushRegisters(code, ABI_CALLEE_SAVE | (1 << 30), sizeof(StackLayout));
 
@@ -261,7 +261,7 @@ void A32AddressSpace::EmitPrelude() {
         code.BR(X19);
     }
 
-    prelude_info.step_code = code.ptr<PreludeInfo::RunCodeFuncType>();
+    prelude_info.step_code = code.xptr<PreludeInfo::RunCodeFuncType>();
     {
         ABI_PushRegisters(code, ABI_CALLEE_SAVE | (1 << 30), sizeof(StackLayout));
 
@@ -304,7 +304,7 @@ void A32AddressSpace::EmitPrelude() {
         code.BR(X19);
     }
 
-    prelude_info.return_to_dispatcher = code.ptr<void*>();
+    prelude_info.return_to_dispatcher = code.xptr<void*>();
     {
         oaknut::Label l_this, l_addr;
 
@@ -333,7 +333,7 @@ void A32AddressSpace::EmitPrelude() {
         code.dx(mcl::bit_cast<u64>(Common::FptrCast(fn)));
     }
 
-    prelude_info.return_from_run_code = code.ptr<void*>();
+    prelude_info.return_from_run_code = code.xptr<void*>();
     {
         code.l(return_from_run_code);
 
@@ -360,10 +360,10 @@ void A32AddressSpace::EmitPrelude() {
     code.l(l_return_to_dispatcher);
     code.dx(mcl::bit_cast<u64>(prelude_info.return_to_dispatcher));
 
-    prelude_info.end_of_prelude = code.ptr<u32*>();
+    prelude_info.end_of_prelude = code.offset();
 
     mem.invalidate_all();
-    mem.protect();
+    ProtectCodeMemory();
 }
 
 EmitConfig A32AddressSpace::GetEmitConfig() {
@@ -27,7 +27,7 @@ static void* EmitCallTrampoline(oaknut::CodeGenerator& code, T* this_) {
 
     oaknut::Label l_addr, l_this;
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     code.LDR(X0, l_this);
     code.LDR(Xscratch0, l_addr);
     code.BR(Xscratch0);
@@ -51,7 +51,7 @@ static void* EmitWrappedReadCallTrampoline(oaknut::CodeGenerator& code, T* this_
 
     constexpr u64 save_regs = ABI_CALLER_SAVE & ~ToRegList(Xscratch0);
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     ABI_PushRegisters(code, save_regs, 0);
     code.LDR(X0, l_this);
     code.MOV(X1, Xscratch0);
@@ -82,7 +82,7 @@ static void* EmitExclusiveReadCallTrampoline(oaknut::CodeGenerator& code, const
         });
     };
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     code.LDR(X0, l_this);
     code.LDR(Xscratch0, l_addr);
     code.BR(Xscratch0);
@@ -106,7 +106,7 @@ static void* EmitWrappedWriteCallTrampoline(oaknut::CodeGenerator& code, T* this
 
     constexpr u64 save_regs = ABI_CALLER_SAVE;
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     ABI_PushRegisters(code, save_regs, 0);
     code.LDR(X0, l_this);
     code.MOV(X1, Xscratch0);
@@ -140,7 +140,7 @@ static void* EmitExclusiveWriteCallTrampoline(oaknut::CodeGenerator& code, const
                    : 1;
     };
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     code.LDR(X0, l_this);
     code.LDR(Xscratch0, l_addr);
     code.BR(Xscratch0);
@@ -161,7 +161,7 @@ static void* EmitRead128CallTrampoline(oaknut::CodeGenerator& code, A64::UserCal
 
     oaknut::Label l_addr, l_this;
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     ABI_PushRegisters(code, (1ull << 29) | (1ull << 30), 0);
     code.LDR(X0, l_this);
     code.LDR(Xscratch0, l_addr);
@@ -189,7 +189,7 @@ static void* EmitWrappedRead128CallTrampoline(oaknut::CodeGenerator& code, A64::
 
     constexpr u64 save_regs = ABI_CALLER_SAVE & ~ToRegList(Q0);
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     ABI_PushRegisters(code, save_regs, 0);
     code.LDR(X0, l_this);
     code.MOV(X1, Xscratch0);
@@ -220,7 +220,7 @@ static void* EmitExclusiveRead128CallTrampoline(oaknut::CodeGenerator& code, con
         });
     };
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     ABI_PushRegisters(code, (1ull << 29) | (1ull << 30), 0);
     code.LDR(X0, l_this);
     code.LDR(Xscratch0, l_addr);
@@ -246,7 +246,7 @@ static void* EmitWrite128CallTrampoline(oaknut::CodeGenerator& code, A64::UserCa
 
     oaknut::Label l_addr, l_this;
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     code.LDR(X0, l_this);
     code.FMOV(X2, D0);
     code.FMOV(X3, V0.D()[1]);
@@ -271,7 +271,7 @@ static void* EmitWrappedWrite128CallTrampoline(oaknut::CodeGenerator& code, A64:
 
     constexpr u64 save_regs = ABI_CALLER_SAVE;
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     ABI_PushRegisters(code, save_regs, 0);
     code.LDR(X0, l_this);
     code.MOV(X1, Xscratch0);
@@ -305,7 +305,7 @@ static void* EmitExclusiveWrite128CallTrampoline(oaknut::CodeGenerator& code, co
                    : 1;
     };
 
-    void* target = code.ptr<void*>();
+    void* target = code.xptr<void*>();
     code.LDR(X0, l_this);
     code.FMOV(X2, D0);
     code.FMOV(X3, V0.D()[1]);
@@ -357,7 +357,7 @@ void A64AddressSpace::InvalidateCacheRanges(const boost::icl::interval_set<u64>&
 void A64AddressSpace::EmitPrelude() {
     using namespace oaknut::util;
 
-    mem.unprotect();
+    UnprotectCodeMemory();
 
     prelude_info.read_memory_8 = EmitCallTrampoline<&A64::UserCallbacks::MemoryRead8>(code, conf.callbacks);
     prelude_info.read_memory_16 = EmitCallTrampoline<&A64::UserCallbacks::MemoryRead16>(code, conf.callbacks);
@@ -400,7 +400,7 @@ void A64AddressSpace::EmitPrelude() {
 
     oaknut::Label return_from_run_code, l_return_to_dispatcher;
 
-    prelude_info.run_code = code.ptr<PreludeInfo::RunCodeFuncType>();
+    prelude_info.run_code = code.xptr<PreludeInfo::RunCodeFuncType>();
     {
         ABI_PushRegisters(code, ABI_CALLEE_SAVE | (1 << 30), sizeof(StackLayout));
 
@@ -438,7 +438,7 @@ void A64AddressSpace::EmitPrelude() {
         code.BR(X19);
     }
 
-    prelude_info.step_code = code.ptr<PreludeInfo::RunCodeFuncType>();
+    prelude_info.step_code = code.xptr<PreludeInfo::RunCodeFuncType>();
     {
         ABI_PushRegisters(code, ABI_CALLEE_SAVE | (1 << 30), sizeof(StackLayout));
 
@@ -480,7 +480,7 @@ void A64AddressSpace::EmitPrelude() {
         code.BR(X19);
     }
 
-    prelude_info.return_to_dispatcher = code.ptr<void*>();
+    prelude_info.return_to_dispatcher = code.xptr<void*>();
     {
         oaknut::Label l_this, l_addr;
 
@@ -509,7 +509,7 @@ void A64AddressSpace::EmitPrelude() {
         code.dx(mcl::bit_cast<u64>(Common::FptrCast(fn)));
     }
 
-    prelude_info.return_from_run_code = code.ptr<void*>();
+    prelude_info.return_from_run_code = code.xptr<void*>();
     {
         code.l(return_from_run_code);
 
@@ -536,10 +536,10 @@ void A64AddressSpace::EmitPrelude() {
     code.l(l_return_to_dispatcher);
     code.dx(mcl::bit_cast<u64>(prelude_info.return_to_dispatcher));
 
-    prelude_info.end_of_prelude = code.ptr<u32*>();
+    prelude_info.end_of_prelude = code.offset();
 
     mem.invalidate_all();
-    mem.protect();
+    ProtectCodeMemory();
 }
 
 EmitConfig A64AddressSpace::GetEmitConfig() {
@@ -18,7 +18,7 @@ namespace Dynarmic::Backend::Arm64 {
 AddressSpace::AddressSpace(size_t code_cache_size)
         : code_cache_size(code_cache_size)
         , mem(code_cache_size)
-        , code(mem.ptr())
+        , code(mem.ptr(), mem.ptr())
         , fastmem_manager(exception_handler) {
     ASSERT_MSG(code_cache_size <= 128 * 1024 * 1024, "code_cache_size > 128 MiB not currently supported");
 
@@ -66,7 +66,7 @@ CodePtr AddressSpace::GetOrEmit(IR::LocationDescriptor descriptor) {
 }
 
 void AddressSpace::InvalidateBasicBlocks(const tsl::robin_set<IR::LocationDescriptor>& descriptors) {
-    mem.unprotect();
+    UnprotectCodeMemory();
 
     for (const auto& descriptor : descriptors) {
         const auto iter = block_entries.find(descriptor);
@@ -81,7 +81,7 @@ void AddressSpace::InvalidateBasicBlocks(const tsl::robin_set<IR::LocationDescri
         block_entries.erase(iter);
     }
 
-    mem.protect();
+    ProtectCodeMemory();
 }
 
 void AddressSpace::ClearCache() {
@@ -89,11 +89,11 @@ void AddressSpace::ClearCache() {
     reverse_block_entries.clear();
     block_infos.clear();
    block_references.clear();
-    code.set_ptr(prelude_info.end_of_prelude);
+    code.set_offset(prelude_info.end_of_prelude);
 }
 
 size_t AddressSpace::GetRemainingSize() {
-    return code_cache_size - (code.ptr<CodePtr>() - reinterpret_cast<CodePtr>(mem.ptr()));
+    return code_cache_size - static_cast<size_t>(code.offset());
 }
 
 EmittedBlockInfo AddressSpace::Emit(IR::Block block) {
@@ -101,7 +101,7 @@ EmittedBlockInfo AddressSpace::Emit(IR::Block block) {
         ClearCache();
     }
 
-    mem.unprotect();
+    UnprotectCodeMemory();
 
     EmittedBlockInfo block_info = EmitArm64(code, std::move(block), GetEmitConfig(), fastmem_manager);
 
@@ -113,47 +113,20 @@ EmittedBlockInfo AddressSpace::Emit(IR::Block block) {
     RelinkForDescriptor(block.Location(), block_info.entry_point);
 
     mem.invalidate(reinterpret_cast<u32*>(block_info.entry_point), block_info.size);
-    mem.protect();
+    ProtectCodeMemory();
 
     RegisterNewBasicBlock(block, block_info);
 
     return block_info;
 }
 
-static void LinkBlockLinks(const CodePtr entry_point, const CodePtr target_ptr, const std::vector<BlockRelocation>& block_relocations_list, void* return_to_dispatcher) {
-    using namespace oaknut;
-    using namespace oaknut::util;
-
-    for (auto [ptr_offset, type] : block_relocations_list) {
-        CodeGenerator c{reinterpret_cast<u32*>(entry_point + ptr_offset)};
-
-        switch (type) {
-        case BlockRelocationType::Branch:
-            if (target_ptr) {
-                c.B((void*)target_ptr);
-            } else {
-                c.NOP();
-            }
-            break;
-        case BlockRelocationType::MoveToScratch1:
-            if (target_ptr) {
-                c.ADRL(Xscratch1, (void*)target_ptr);
-            } else {
-                c.ADRL(Xscratch1, return_to_dispatcher);
-            }
-            break;
-        default:
-            ASSERT_FALSE("Invalid BlockRelocationType");
-        }
-    }
-}
-
 void AddressSpace::Link(EmittedBlockInfo& block_info) {
     using namespace oaknut;
     using namespace oaknut::util;
 
     for (auto [ptr_offset, target] : block_info.relocations) {
-        CodeGenerator c{reinterpret_cast<u32*>(block_info.entry_point + ptr_offset)};
+        CodeGenerator c{mem.ptr(), mem.ptr()};
+        c.set_xptr(reinterpret_cast<u32*>(block_info.entry_point + ptr_offset));
 
         switch (target) {
         case LinkTarget::ReturnToDispatcher:
@@ -283,7 +256,36 @@ void AddressSpace::Link(EmittedBlockInfo& block_info) {
 
     for (auto [target_descriptor, list] : block_info.block_relocations) {
         block_references[target_descriptor].emplace(block_info.entry_point);
-        LinkBlockLinks(block_info.entry_point, Get(target_descriptor), list, prelude_info.return_to_dispatcher);
+        LinkBlockLinks(block_info.entry_point, Get(target_descriptor), list);
+    }
+}
+
+void AddressSpace::LinkBlockLinks(const CodePtr entry_point, const CodePtr target_ptr, const std::vector<BlockRelocation>& block_relocations_list) {
+    using namespace oaknut;
+    using namespace oaknut::util;
+
+    for (auto [ptr_offset, type] : block_relocations_list) {
+        CodeGenerator c{mem.ptr(), mem.ptr()};
+        c.set_xptr(reinterpret_cast<u32*>(entry_point + ptr_offset));
+
+        switch (type) {
+        case BlockRelocationType::Branch:
+            if (target_ptr) {
+                c.B((void*)target_ptr);
+            } else {
+                c.NOP();
+            }
+            break;
+        case BlockRelocationType::MoveToScratch1:
+            if (target_ptr) {
+                c.ADRL(Xscratch1, (void*)target_ptr);
+            } else {
+                c.ADRL(Xscratch1, prelude_info.return_to_dispatcher);
+            }
+            break;
+        default:
+            ASSERT_FALSE("Invalid BlockRelocationType");
+        }
     }
 }
 
@@ -293,7 +295,7 @@ void AddressSpace::RelinkForDescriptor(IR::LocationDescriptor target_descriptor,
             const EmittedBlockInfo& block_info = block_iter->second;
 
             if (auto relocation_iter = block_info.block_relocations.find(target_descriptor); relocation_iter != block_info.block_relocations.end()) {
-                LinkBlockLinks(block_info.entry_point, target_ptr, relocation_iter->second, prelude_info.return_to_dispatcher);
+                LinkBlockLinks(block_info.entry_point, target_ptr, relocation_iter->second);
             }
 
             mem.invalidate(reinterpret_cast<u32*>(block_info.entry_point), block_info.size);
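The Link()/LinkBlockLinks() changes above follow the same idea: the generator can no longer be constructed directly over an arbitrary patch address, so it is built over the whole code block and then seeked with set_xptr. A standalone sketch of that patching pattern, assuming the caller has already made the block writable; PatchBranch and patch_offset are illustrative names, not part of this commit:

// Sketch only: rewrite one 32-bit instruction slot in an already-emitted block,
// mirroring the set_xptr seek done by LinkBlockLinks in the diff above.
#include <cstddef>
#include <cstdint>

#include <oaknut/code_block.hpp>
#include <oaknut/oaknut.hpp>

void PatchBranch(oaknut::CodeBlock& mem, std::ptrdiff_t patch_offset, void* target) {
    using namespace oaknut;
    using namespace oaknut::util;

    // Generator spans the whole block (write view == execute view here).
    CodeGenerator c{mem.ptr(), mem.ptr()};
    c.set_xptr(reinterpret_cast<std::uint32_t*>(
        reinterpret_cast<std::uintptr_t>(mem.ptr()) + patch_offset));

    if (target) {
        c.B(target);  // direct branch to the now-known target
    } else {
        c.NOP();      // placeholder until the target block exists
    }
}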
@@ -47,9 +47,22 @@ protected:
     virtual EmitConfig GetEmitConfig() = 0;
     virtual void RegisterNewBasicBlock(const IR::Block& block, const EmittedBlockInfo& block_info) = 0;
 
+    void ProtectCodeMemory() {
+#if defined(DYNARMIC_ENABLE_NO_EXECUTE_SUPPORT)
+        mem.protect();
+#endif
+    }
+
+    void UnprotectCodeMemory() {
+#if defined(DYNARMIC_ENABLE_NO_EXECUTE_SUPPORT)
+        mem.unprotect();
+#endif
+    }
+
     size_t GetRemainingSize();
     EmittedBlockInfo Emit(IR::Block ir_block);
     void Link(EmittedBlockInfo& block);
+    void LinkBlockLinks(const CodePtr entry_point, const CodePtr target_ptr, const std::vector<BlockRelocation>& block_relocations_list);
     void RelinkForDescriptor(IR::LocationDescriptor target_descriptor, CodePtr target_ptr);
 
     FakeCall FastmemCallback(u64 host_pc);
@@ -69,7 +82,7 @@ protected:
     FastmemManager fastmem_manager;
 
     struct PreludeInfo {
-        u32* end_of_prelude;
+        std::ptrdiff_t end_of_prelude;
 
         using RunCodeFuncType = HaltReason (*)(CodePtr entry_point, void* jit_state, volatile u32* halt_reason);
         RunCodeFuncType run_code;
@@ -202,7 +202,7 @@ EmittedBlockInfo EmitArm64(oaknut::CodeGenerator& code, IR::Block block, const E
     RegAlloc reg_alloc{code, fpsr_manager, GPR_ORDER, FPR_ORDER};
     EmitContext ctx{block, reg_alloc, conf, ebi, fpsr_manager, fastmem_manager, {}};
 
-    ebi.entry_point = code.ptr<CodePtr>();
+    ebi.entry_point = code.xptr<CodePtr>();
 
     if (ctx.block.GetCondition() == IR::Cond::AL) {
         ASSERT(!ctx.block.HasConditionFailedLocation());
@@ -263,17 +263,17 @@ EmittedBlockInfo EmitArm64(oaknut::CodeGenerator& code, IR::Block block, const E
     }
     code.BRK(0);
 
-    ebi.size = code.ptr<CodePtr>() - ebi.entry_point;
+    ebi.size = code.xptr<CodePtr>() - ebi.entry_point;
     return ebi;
 }
 
 void EmitRelocation(oaknut::CodeGenerator& code, EmitContext& ctx, LinkTarget link_target) {
-    ctx.ebi.relocations.emplace_back(Relocation{code.ptr<CodePtr>() - ctx.ebi.entry_point, link_target});
+    ctx.ebi.relocations.emplace_back(Relocation{code.xptr<CodePtr>() - ctx.ebi.entry_point, link_target});
     code.NOP();
 }
 
 void EmitBlockLinkRelocation(oaknut::CodeGenerator& code, EmitContext& ctx, const IR::LocationDescriptor& descriptor, BlockRelocationType type) {
-    ctx.ebi.block_relocations[descriptor].emplace_back(BlockRelocation{code.ptr<CodePtr>() - ctx.ebi.entry_point, type});
+    ctx.ebi.block_relocations[descriptor].emplace_back(BlockRelocation{code.xptr<CodePtr>() - ctx.ebi.entry_point, type});
     switch (type) {
     case BlockRelocationType::Branch:
         code.NOP();
@@ -287,12 +287,12 @@ CodePtr EmitMemoryLdr(oaknut::CodeGenerator& code, int value_idx, oaknut::XReg X
     const auto add_ext = extend32 ? oaknut::AddSubExt::UXTW : oaknut::AddSubExt::LSL;
     const auto Roffset = extend32 ? oaknut::RReg{Xoffset.toW()} : oaknut::RReg{Xoffset};
 
-    CodePtr fastmem_location = code.ptr<CodePtr>();
+    CodePtr fastmem_location = code.xptr<CodePtr>();
 
     if (ordered) {
         code.ADD(Xscratch0, Xbase, Roffset, add_ext);
 
-        fastmem_location = code.ptr<CodePtr>();
+        fastmem_location = code.xptr<CodePtr>();
 
         switch (bitsize) {
         case 8:
@@ -315,7 +315,7 @@ CodePtr EmitMemoryLdr(oaknut::CodeGenerator& code, int value_idx, oaknut::XReg X
             ASSERT_FALSE("Invalid bitsize");
         }
     } else {
-        fastmem_location = code.ptr<CodePtr>();
+        fastmem_location = code.xptr<CodePtr>();
 
         switch (bitsize) {
         case 8:
@@ -352,7 +352,7 @@ CodePtr EmitMemoryStr(oaknut::CodeGenerator& code, int value_idx, oaknut::XReg X
     if (ordered) {
         code.ADD(Xscratch0, Xbase, Roffset, add_ext);
 
-        fastmem_location = code.ptr<CodePtr>();
+        fastmem_location = code.xptr<CodePtr>();
 
         switch (bitsize) {
         case 8:
@@ -376,7 +376,7 @@ CodePtr EmitMemoryStr(oaknut::CodeGenerator& code, int value_idx, oaknut::XReg X
             ASSERT_FALSE("Invalid bitsize");
         }
     } else {
-        fastmem_location = code.ptr<CodePtr>();
+        fastmem_location = code.xptr<CodePtr>();
 
         switch (bitsize) {
         case 8:
@@ -548,7 +548,7 @@ void FastmemEmitReadMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::In
         FastmemPatchInfo{
             .marker = marker,
             .fc = FakeCall{
-                .call_pc = mcl::bit_cast<u64>(code.ptr<void*>()),
+                .call_pc = mcl::bit_cast<u64>(code.xptr<void*>()),
             },
             .recompile = ctx.conf.recompile_on_fastmem_failure,
         });
@@ -598,7 +598,7 @@ void FastmemEmitWriteMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::I
         FastmemPatchInfo{
             .marker = marker,
             .fc = FakeCall{
-                .call_pc = mcl::bit_cast<u64>(code.ptr<void*>()),
+                .call_pc = mcl::bit_cast<u64>(code.xptr<void*>()),
             },
             .recompile = ctx.conf.recompile_on_fastmem_failure,
         });
@@ -54,16 +54,16 @@ SpinLockImpl impl;
 
 SpinLockImpl::SpinLockImpl()
         : mem{4096}
-        , code{mem.ptr()} {}
+        , code{mem.ptr(), mem.ptr()} {}
 
 void SpinLockImpl::Initialize() {
     mem.unprotect();
 
-    lock = code.ptr<void (*)(volatile int*)>();
+    lock = code.xptr<void (*)(volatile int*)>();
     EmitSpinLockLock(code, X0);
     code.RET();
 
-    unlock = code.ptr<void (*)(volatile int*)>();
+    unlock = code.xptr<void (*)(volatile int*)>();
     EmitSpinLockUnlock(code, X0);
     code.RET();
 
@@ -103,7 +103,7 @@ TEST_CASE("A64: fibonacci", "[a64]") {
     env.cpu = &cpu;
 
     std::vector<u32> instructions(1024);
-    oaknut::CodeGenerator code{instructions.data()};
+    oaknut::CodeGenerator code{instructions.data(), nullptr};
 
     using namespace oaknut::util;
 