address_space: Deduplicate {A32,A64}AddressSpace

Authored by Merry on 2022-11-29 12:22:45 +00:00, committed by Liam
parent 0707aa3a04
commit 8f9d1dbf4e
7 changed files with 327 additions and 496 deletions
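
In outline, the commit hoists the block cache, emission and linking machinery that was previously duplicated between A32AddressSpace and A64AddressSpace into a shared AddressSpace base class; each frontend keeps only its prelude emission plus the two hooks the base class calls back into. A condensed sketch of the resulting shape (not a compilable excerpt; member lists and most methods are omitted, see the full headers below):

    // Condensed from the headers touched by this commit.
    class AddressSpace {
    public:
        explicit AddressSpace(size_t code_cache_size);
        virtual ~AddressSpace();

        virtual IR::Block GenerateIR(IR::LocationDescriptor) const = 0;  // frontend-specific translation
        CodePtr GetOrEmit(IR::LocationDescriptor descriptor);            // shared cache lookup + emission
        void ClearCache();

    protected:
        virtual EmitConfig GetEmitConfig() = 0;  // frontend-specific emitter configuration
        // shared: Emit, Link, RelinkForDescriptor, the code cache, block maps and prelude_info
    };

    class A32AddressSpace final : public AddressSpace { /* EmitPrelude + A32 overrides */ };
    class A64AddressSpace final : public AddressSpace { /* EmitPrelude + A64 overrides */ };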

src/dynarmic/CMakeLists.txt

@@ -373,6 +373,8 @@ elseif(ARCHITECTURE STREQUAL "arm64")
 backend/arm64/a32_jitstate.h
 backend/arm64/abi.cpp
 backend/arm64/abi.h
+backend/arm64/address_space.cpp
+backend/arm64/address_space.h
 backend/arm64/devirtualize.h
 backend/arm64/emit_arm64.cpp
 backend/arm64/emit_arm64.h

src/dynarmic/backend/arm64/a32_address_space.cpp

@@ -14,6 +14,7 @@
 #include "dynarmic/common/fp/fpcr.h"
 #include "dynarmic/frontend/A32/a32_location_descriptor.h"
 #include "dynarmic/frontend/A32/translate/a32_translate.h"
+#include "dynarmic/interface/A32/config.h"
 #include "dynarmic/interface/exclusive_monitor.h"
 #include "dynarmic/ir/opt/passes.h"
@@ -97,9 +98,8 @@ static void* EmitExclusiveWriteCallTrampoline(oaknut::CodeGenerator& code, const
 }
 A32AddressSpace::A32AddressSpace(const A32::UserConfig& conf)
-        : conf(conf)
-        , mem(conf.code_cache_size)
-        , code(mem.ptr()) {
+        : AddressSpace(conf.code_cache_size)
+        , conf(conf) {
     EmitPrelude();
 }
@@ -121,33 +121,6 @@ IR::Block A32AddressSpace::GenerateIR(IR::LocationDescriptor descriptor) const {
    return ir_block;
}
CodePtr A32AddressSpace::Get(IR::LocationDescriptor descriptor) {
if (const auto iter = block_entries.find(descriptor.Value()); iter != block_entries.end()) {
return iter->second;
}
return nullptr;
}
CodePtr A32AddressSpace::GetOrEmit(IR::LocationDescriptor descriptor) {
if (CodePtr block_entry = Get(descriptor)) {
return block_entry;
}
IR::Block ir_block = GenerateIR(descriptor);
const EmittedBlockInfo block_info = Emit(std::move(ir_block));
block_infos.insert_or_assign(descriptor.Value(), block_info);
block_entries.insert_or_assign(descriptor.Value(), block_info.entry_point);
return block_info.entry_point;
}
void A32AddressSpace::ClearCache() {
block_entries.clear();
block_infos.clear();
block_references.clear();
code.set_ptr(prelude_info.end_of_prelude);
}
void A32AddressSpace::EmitPrelude() {
    using namespace oaknut::util;
@@ -291,18 +264,8 @@ void A32AddressSpace::EmitPrelude()
     mem.protect();
 }
-size_t A32AddressSpace::GetRemainingSize() {
-    return conf.code_cache_size - (code.ptr<CodePtr>() - reinterpret_cast<CodePtr>(mem.ptr()));
-}
-EmittedBlockInfo A32AddressSpace::Emit(IR::Block block) {
-    if (GetRemainingSize() < 1024 * 1024) {
-        ClearCache();
-    }
-    mem.unprotect();
-    const EmitConfig emit_conf{
+EmitConfig A32AddressSpace::GetEmitConfig() {
+    return EmitConfig{
         .optimizations = conf.unsafe_optimizations ? conf.optimizations : conf.optimizations & all_safe_optimizations,
         .hook_isb = conf.hook_isb,
@@ -328,132 +291,6 @@ EmittedBlockInfo A32AddressSpace::Emit(IR::Block block) {
        .coprocessors = conf.coprocessors,
    };
EmittedBlockInfo block_info = EmitArm64(code, std::move(block), emit_conf);
Link(block.Location(), block_info);
mem.invalidate(reinterpret_cast<u32*>(block_info.entry_point), block_info.size);
RelinkForDescriptor(block.Location());
mem.protect();
return block_info;
}
static void LinkBlockLinks(const CodePtr entry_point, const CodePtr target_ptr, const std::vector<BlockRelocation>& block_relocations_list) {
using namespace oaknut;
using namespace oaknut::util;
for (auto [ptr_offset] : block_relocations_list) {
CodeGenerator c{reinterpret_cast<u32*>(entry_point + ptr_offset)};
if (target_ptr) {
c.B((void*)target_ptr);
} else {
c.NOP();
}
}
}
void A32AddressSpace::Link(IR::LocationDescriptor block_descriptor, EmittedBlockInfo& block_info) {
using namespace oaknut;
using namespace oaknut::util;
for (auto [ptr_offset, target] : block_info.relocations) {
CodeGenerator c{reinterpret_cast<u32*>(block_info.entry_point + ptr_offset)};
switch (target) {
case LinkTarget::ReturnToDispatcher:
c.B(prelude_info.return_to_dispatcher);
break;
case LinkTarget::ReturnFromRunCode:
c.B(prelude_info.return_from_run_code);
break;
case LinkTarget::ReadMemory8:
c.BL(prelude_info.read_memory_8);
break;
case LinkTarget::ReadMemory16:
c.BL(prelude_info.read_memory_16);
break;
case LinkTarget::ReadMemory32:
c.BL(prelude_info.read_memory_32);
break;
case LinkTarget::ReadMemory64:
c.BL(prelude_info.read_memory_64);
break;
case LinkTarget::ExclusiveReadMemory8:
c.BL(prelude_info.exclusive_read_memory_8);
break;
case LinkTarget::ExclusiveReadMemory16:
c.BL(prelude_info.exclusive_read_memory_16);
break;
case LinkTarget::ExclusiveReadMemory32:
c.BL(prelude_info.exclusive_read_memory_32);
break;
case LinkTarget::ExclusiveReadMemory64:
c.BL(prelude_info.exclusive_read_memory_64);
break;
case LinkTarget::WriteMemory8:
c.BL(prelude_info.write_memory_8);
break;
case LinkTarget::WriteMemory16:
c.BL(prelude_info.write_memory_16);
break;
case LinkTarget::WriteMemory32:
c.BL(prelude_info.write_memory_32);
break;
case LinkTarget::WriteMemory64:
c.BL(prelude_info.write_memory_64);
break;
case LinkTarget::ExclusiveWriteMemory8:
c.BL(prelude_info.exclusive_write_memory_8);
break;
case LinkTarget::ExclusiveWriteMemory16:
c.BL(prelude_info.exclusive_write_memory_16);
break;
case LinkTarget::ExclusiveWriteMemory32:
c.BL(prelude_info.exclusive_write_memory_32);
break;
case LinkTarget::ExclusiveWriteMemory64:
c.BL(prelude_info.exclusive_write_memory_64);
break;
case LinkTarget::CallSVC:
c.BL(prelude_info.call_svc);
break;
case LinkTarget::ExceptionRaised:
c.BL(prelude_info.exception_raised);
break;
case LinkTarget::InstructionSynchronizationBarrierRaised:
c.BL(prelude_info.isb_raised);
break;
case LinkTarget::AddTicks:
c.BL(prelude_info.add_ticks);
break;
case LinkTarget::GetTicksRemaining:
c.BL(prelude_info.get_ticks_remaining);
break;
default:
ASSERT_FALSE("Invalid relocation target");
}
}
for (auto [target_descriptor, list] : block_info.block_relocations) {
block_references[target_descriptor.Value()].emplace(block_descriptor.Value());
LinkBlockLinks(block_info.entry_point, Get(target_descriptor), list);
}
}
void A32AddressSpace::RelinkForDescriptor(IR::LocationDescriptor target_descriptor) {
for (auto block_descriptor : block_references[target_descriptor.Value()]) {
if (auto iter = block_infos.find(block_descriptor); iter != block_infos.end()) {
const EmittedBlockInfo& block_info = iter->second;
LinkBlockLinks(block_info.entry_point, Get(target_descriptor), block_infos[block_descriptor].block_relocations[target_descriptor]);
mem.invalidate(reinterpret_cast<u32*>(block_info.entry_point), block_info.size);
}
}
}
} // namespace Dynarmic::Backend::Arm64

src/dynarmic/backend/arm64/a32_address_space.h

@@ -5,84 +5,24 @@
 #pragma once
-#include <mcl/stdint.hpp>
-#include <oaknut/code_block.hpp>
-#include <oaknut/oaknut.hpp>
-#include <tsl/robin_map.h>
-#include <tsl/robin_set.h>
-#include "dynarmic/backend/arm64/emit_arm64.h"
+#include "dynarmic/backend/arm64/address_space.h"
 #include "dynarmic/interface/A32/config.h"
-#include "dynarmic/interface/halt_reason.h"
-#include "dynarmic/ir/basic_block.h"
-#include "dynarmic/ir/location_descriptor.h"
 namespace Dynarmic::Backend::Arm64 {
-struct A32JitState;
-class A32AddressSpace final {
+class A32AddressSpace final : public AddressSpace {
 public:
     explicit A32AddressSpace(const A32::UserConfig& conf);
-    IR::Block GenerateIR(IR::LocationDescriptor) const;
-    CodePtr Get(IR::LocationDescriptor descriptor);
-    CodePtr GetOrEmit(IR::LocationDescriptor descriptor);
-    void ClearCache();
-private:
+    IR::Block GenerateIR(IR::LocationDescriptor) const override;
+protected:
     friend class A32Core;
     void EmitPrelude();
-    size_t GetRemainingSize();
-    EmittedBlockInfo Emit(IR::Block ir_block);
-    void Link(IR::LocationDescriptor block_descriptor, EmittedBlockInfo& block);
-    void RelinkForDescriptor(IR::LocationDescriptor target_descriptor);
+    EmitConfig GetEmitConfig() override;
     const A32::UserConfig conf;
-    oaknut::CodeBlock mem;
-    oaknut::CodeGenerator code;
-    tsl::robin_map<u64, CodePtr> block_entries;
-    tsl::robin_map<u64, EmittedBlockInfo> block_infos;
-    tsl::robin_map<u64, tsl::robin_set<u64>> block_references;
-    struct PreludeInfo {
-        u32* end_of_prelude;
-        using RunCodeFuncType = HaltReason (*)(CodePtr entry_point, A32JitState* context, volatile u32* halt_reason);
-        RunCodeFuncType run_code;
-        RunCodeFuncType step_code;
-        void* return_to_dispatcher;
-        void* return_from_run_code;
-        void* read_memory_8;
-        void* read_memory_16;
-        void* read_memory_32;
-        void* read_memory_64;
-        void* exclusive_read_memory_8;
-        void* exclusive_read_memory_16;
-        void* exclusive_read_memory_32;
-        void* exclusive_read_memory_64;
-        void* write_memory_8;
-        void* write_memory_16;
-        void* write_memory_32;
-        void* write_memory_64;
-        void* exclusive_write_memory_8;
-        void* exclusive_write_memory_16;
-        void* exclusive_write_memory_32;
-        void* exclusive_write_memory_64;
-        void* call_svc;
-        void* exception_raised;
-        void* isb_raised;
-        void* add_ticks;
-        void* get_ticks_remaining;
-    } prelude_info;
 };
 } // namespace Dynarmic::Backend::Arm64

src/dynarmic/backend/arm64/a64_address_space.cpp

@@ -14,6 +14,7 @@
 #include "dynarmic/common/fp/fpcr.h"
 #include "dynarmic/frontend/A64/a64_location_descriptor.h"
 #include "dynarmic/frontend/A64/translate/a64_translate.h"
+#include "dynarmic/interface/A64/config.h"
 #include "dynarmic/interface/exclusive_monitor.h"
 #include "dynarmic/ir/opt/passes.h"
@@ -96,8 +97,6 @@ static void* EmitExclusiveWriteCallTrampoline(oaknut::CodeGenerator& code, const
     return target;
 }
-/* =========================== 128-bit versions =========================== */
 static void* EmitRead128CallTrampoline(oaknut::CodeGenerator& code, A64::UserCallbacks* this_) {
     using namespace oaknut::util;
@@ -214,9 +213,8 @@ static void* EmitExclusiveWrite128CallTrampoline(oaknut::CodeGenerator& code, co
 }
 A64AddressSpace::A64AddressSpace(const A64::UserConfig& conf)
-        : conf(conf)
-        , mem(conf.code_cache_size)
-        , code(mem.ptr()) {
+        : AddressSpace(conf.code_cache_size)
+        , conf(conf) {
     EmitPrelude();
 }
@@ -242,33 +240,6 @@ IR::Block A64AddressSpace::GenerateIR(IR::LocationDescriptor descriptor) const {
    return ir_block;
}
CodePtr A64AddressSpace::Get(IR::LocationDescriptor descriptor) {
if (const auto iter = block_entries.find(descriptor.Value()); iter != block_entries.end()) {
return iter->second;
}
return nullptr;
}
CodePtr A64AddressSpace::GetOrEmit(IR::LocationDescriptor descriptor) {
if (CodePtr block_entry = Get(descriptor)) {
return block_entry;
}
IR::Block ir_block = GenerateIR(descriptor);
const EmittedBlockInfo block_info = Emit(std::move(ir_block));
block_infos.insert_or_assign(descriptor.Value(), block_info);
block_entries.insert_or_assign(descriptor.Value(), block_info.entry_point);
return block_info.entry_point;
}
void A64AddressSpace::ClearCache() {
block_entries.clear();
block_infos.clear();
block_references.clear();
code.set_ptr(prelude_info.end_of_prelude);
}
void A64AddressSpace::EmitPrelude() {
    using namespace oaknut::util;
@@ -421,18 +392,8 @@ void A64AddressSpace::EmitPrelude() {
     mem.protect();
 }
-size_t A64AddressSpace::GetRemainingSize() {
-    return conf.code_cache_size - (code.ptr<CodePtr>() - reinterpret_cast<CodePtr>(mem.ptr()));
-}
-EmittedBlockInfo A64AddressSpace::Emit(IR::Block block) {
-    if (GetRemainingSize() < 1024 * 1024) {
-        ClearCache();
-    }
-    mem.unprotect();
-    const EmitConfig emit_conf{
+EmitConfig A64AddressSpace::GetEmitConfig() {
+    return EmitConfig{
         .optimizations = conf.unsafe_optimizations ? conf.optimizations : conf.optimizations & all_safe_optimizations,
         .hook_isb = conf.hook_isb,
@@ -458,153 +419,6 @@ EmittedBlockInfo A64AddressSpace::Emit(IR::Block block) {
        .coprocessors{},
    };
EmittedBlockInfo block_info = EmitArm64(code, std::move(block), emit_conf);
Link(block.Location(), block_info);
mem.invalidate(reinterpret_cast<u32*>(block_info.entry_point), block_info.size);
RelinkForDescriptor(block.Location());
mem.protect();
return block_info;
}
static void LinkBlockLinks(const CodePtr entry_point, const CodePtr target_ptr, const std::vector<BlockRelocation>& block_relocations_list) {
using namespace oaknut;
using namespace oaknut::util;
for (auto [ptr_offset] : block_relocations_list) {
CodeGenerator c{reinterpret_cast<u32*>(entry_point + ptr_offset)};
if (target_ptr) {
c.B((void*)target_ptr);
} else {
c.NOP();
}
}
}
void A64AddressSpace::Link(IR::LocationDescriptor block_descriptor, EmittedBlockInfo& block_info) {
using namespace oaknut;
using namespace oaknut::util;
for (auto [ptr_offset, target] : block_info.relocations) {
CodeGenerator c{reinterpret_cast<u32*>(block_info.entry_point + ptr_offset)};
switch (target) {
case LinkTarget::ReturnToDispatcher:
c.B(prelude_info.return_to_dispatcher);
break;
case LinkTarget::ReturnFromRunCode:
c.B(prelude_info.return_from_run_code);
break;
case LinkTarget::ReadMemory8:
c.BL(prelude_info.read_memory_8);
break;
case LinkTarget::ReadMemory16:
c.BL(prelude_info.read_memory_16);
break;
case LinkTarget::ReadMemory32:
c.BL(prelude_info.read_memory_32);
break;
case LinkTarget::ReadMemory64:
c.BL(prelude_info.read_memory_64);
break;
case LinkTarget::ReadMemory128:
c.BL(prelude_info.read_memory_128);
break;
case LinkTarget::ExclusiveReadMemory8:
c.BL(prelude_info.exclusive_read_memory_8);
break;
case LinkTarget::ExclusiveReadMemory16:
c.BL(prelude_info.exclusive_read_memory_16);
break;
case LinkTarget::ExclusiveReadMemory32:
c.BL(prelude_info.exclusive_read_memory_32);
break;
case LinkTarget::ExclusiveReadMemory64:
c.BL(prelude_info.exclusive_read_memory_64);
break;
case LinkTarget::ExclusiveReadMemory128:
c.BL(prelude_info.exclusive_read_memory_128);
break;
case LinkTarget::WriteMemory8:
c.BL(prelude_info.write_memory_8);
break;
case LinkTarget::WriteMemory16:
c.BL(prelude_info.write_memory_16);
break;
case LinkTarget::WriteMemory32:
c.BL(prelude_info.write_memory_32);
break;
case LinkTarget::WriteMemory64:
c.BL(prelude_info.write_memory_64);
break;
case LinkTarget::WriteMemory128:
c.BL(prelude_info.write_memory_128);
break;
case LinkTarget::ExclusiveWriteMemory8:
c.BL(prelude_info.exclusive_write_memory_8);
break;
case LinkTarget::ExclusiveWriteMemory16:
c.BL(prelude_info.exclusive_write_memory_16);
break;
case LinkTarget::ExclusiveWriteMemory32:
c.BL(prelude_info.exclusive_write_memory_32);
break;
case LinkTarget::ExclusiveWriteMemory64:
c.BL(prelude_info.exclusive_write_memory_64);
break;
case LinkTarget::ExclusiveWriteMemory128:
c.BL(prelude_info.exclusive_write_memory_128);
break;
case LinkTarget::CallSVC:
c.BL(prelude_info.call_svc);
break;
case LinkTarget::ExceptionRaised:
c.BL(prelude_info.exception_raised);
break;
case LinkTarget::InstructionSynchronizationBarrierRaised:
c.BL(prelude_info.isb_raised);
break;
case LinkTarget::InstructionCacheOperationRaised:
c.BL(prelude_info.ic_raised);
break;
case LinkTarget::DataCacheOperationRaised:
c.BL(prelude_info.dc_raised);
break;
case LinkTarget::GetCNTPCT:
c.BL(prelude_info.get_cntpct);
break;
case LinkTarget::AddTicks:
c.BL(prelude_info.add_ticks);
break;
case LinkTarget::GetTicksRemaining:
c.BL(prelude_info.get_ticks_remaining);
break;
default:
ASSERT_FALSE("Invalid relocation target");
}
}
for (auto [target_descriptor, list] : block_info.block_relocations) {
block_references[target_descriptor.Value()].emplace(block_descriptor.Value());
LinkBlockLinks(block_info.entry_point, Get(target_descriptor), list);
}
}
void A64AddressSpace::RelinkForDescriptor(IR::LocationDescriptor target_descriptor) {
for (auto block_descriptor : block_references[target_descriptor.Value()]) {
if (auto iter = block_infos.find(block_descriptor); iter != block_infos.end()) {
const EmittedBlockInfo& block_info = iter->second;
LinkBlockLinks(block_info.entry_point, Get(target_descriptor), block_infos[block_descriptor].block_relocations[target_descriptor]);
mem.invalidate(reinterpret_cast<u32*>(block_info.entry_point), block_info.size);
}
}
}
} // namespace Dynarmic::Backend::Arm64

src/dynarmic/backend/arm64/a64_address_space.h

@@ -5,91 +5,24 @@
 #pragma once
-#include <mcl/stdint.hpp>
-#include <oaknut/code_block.hpp>
-#include <oaknut/oaknut.hpp>
-#include <tsl/robin_map.h>
-#include <tsl/robin_set.h>
-#include "dynarmic/backend/arm64/emit_arm64.h"
+#include "dynarmic/backend/arm64/address_space.h"
 #include "dynarmic/interface/A64/config.h"
-#include "dynarmic/interface/halt_reason.h"
-#include "dynarmic/ir/basic_block.h"
-#include "dynarmic/ir/location_descriptor.h"
 namespace Dynarmic::Backend::Arm64 {
-struct A64JitState;
-class A64AddressSpace final {
+class A64AddressSpace final : public AddressSpace {
 public:
     explicit A64AddressSpace(const A64::UserConfig& conf);
-    IR::Block GenerateIR(IR::LocationDescriptor) const;
-    CodePtr Get(IR::LocationDescriptor descriptor);
-    CodePtr GetOrEmit(IR::LocationDescriptor descriptor);
-    void ClearCache();
-private:
+    IR::Block GenerateIR(IR::LocationDescriptor) const override;
+protected:
     friend class A64Core;
     void EmitPrelude();
-    size_t GetRemainingSize();
-    EmittedBlockInfo Emit(IR::Block ir_block);
-    void Link(IR::LocationDescriptor block_descriptor, EmittedBlockInfo& block);
-    void RelinkForDescriptor(IR::LocationDescriptor target_descriptor);
+    EmitConfig GetEmitConfig() override;
     const A64::UserConfig conf;
-    oaknut::CodeBlock mem;
-    oaknut::CodeGenerator code;
-    tsl::robin_map<u64, CodePtr> block_entries;
-    tsl::robin_map<u64, EmittedBlockInfo> block_infos;
-    tsl::robin_map<u64, tsl::robin_set<u64>> block_references;
-    struct PreludeInfo {
-        u32* end_of_prelude;
-        using RunCodeFuncType = HaltReason (*)(CodePtr entry_point, A64JitState* context, volatile u32* halt_reason);
-        RunCodeFuncType run_code;
-        RunCodeFuncType step_code;
-        void* return_to_dispatcher;
-        void* return_from_run_code;
-        void* read_memory_8;
-        void* read_memory_16;
-        void* read_memory_32;
-        void* read_memory_64;
-        void* read_memory_128;
-        void* exclusive_read_memory_8;
-        void* exclusive_read_memory_16;
-        void* exclusive_read_memory_32;
-        void* exclusive_read_memory_64;
-        void* exclusive_read_memory_128;
-        void* write_memory_8;
-        void* write_memory_16;
-        void* write_memory_32;
-        void* write_memory_64;
-        void* write_memory_128;
-        void* exclusive_write_memory_8;
-        void* exclusive_write_memory_16;
-        void* exclusive_write_memory_32;
-        void* exclusive_write_memory_64;
-        void* exclusive_write_memory_128;
-        void* call_svc;
-        void* exception_raised;
-        void* dc_raised;
-        void* ic_raised;
-        void* isb_raised;
-        void* get_cntpct;
-        void* add_ticks;
-        void* get_ticks_remaining;
-    } prelude_info;
 };
 } // namespace Dynarmic::Backend::Arm64

src/dynarmic/backend/arm64/address_space.cpp

@@ -0,0 +1,213 @@
/* This file is part of the dynarmic project.
* Copyright (c) 2022 MerryMage
* SPDX-License-Identifier: 0BSD
*/
#include "dynarmic/backend/arm64/a64_address_space.h"
#include "dynarmic/backend/arm64/a64_jitstate.h"
#include "dynarmic/backend/arm64/abi.h"
#include "dynarmic/backend/arm64/devirtualize.h"
#include "dynarmic/backend/arm64/emit_arm64.h"
#include "dynarmic/backend/arm64/stack_layout.h"
#include "dynarmic/common/cast_util.h"
#include "dynarmic/common/fp/fpcr.h"
#include "dynarmic/interface/exclusive_monitor.h"
#include "dynarmic/ir/opt/passes.h"
namespace Dynarmic::Backend::Arm64 {
AddressSpace::AddressSpace(size_t code_cache_size)
: code_cache_size(code_cache_size)
, mem(code_cache_size)
, code(mem.ptr()) {}
AddressSpace::~AddressSpace() = default;
CodePtr AddressSpace::Get(IR::LocationDescriptor descriptor) {
if (const auto iter = block_entries.find(descriptor.Value()); iter != block_entries.end()) {
return iter->second;
}
return nullptr;
}
CodePtr AddressSpace::GetOrEmit(IR::LocationDescriptor descriptor) {
if (CodePtr block_entry = Get(descriptor)) {
return block_entry;
}
IR::Block ir_block = GenerateIR(descriptor);
const EmittedBlockInfo block_info = Emit(std::move(ir_block));
block_infos.insert_or_assign(descriptor.Value(), block_info);
block_entries.insert_or_assign(descriptor.Value(), block_info.entry_point);
return block_info.entry_point;
}
void AddressSpace::ClearCache() {
block_entries.clear();
block_infos.clear();
block_references.clear();
code.set_ptr(prelude_info.end_of_prelude);
}
size_t AddressSpace::GetRemainingSize() {
return code_cache_size - (code.ptr<CodePtr>() - reinterpret_cast<CodePtr>(mem.ptr()));
}
EmittedBlockInfo AddressSpace::Emit(IR::Block block) {
if (GetRemainingSize() < 1024 * 1024) {
ClearCache();
}
mem.unprotect();
EmittedBlockInfo block_info = EmitArm64(code, std::move(block), GetEmitConfig());
Link(block.Location(), block_info);
mem.invalidate(reinterpret_cast<u32*>(block_info.entry_point), block_info.size);
RelinkForDescriptor(block.Location());
mem.protect();
return block_info;
}
static void LinkBlockLinks(const CodePtr entry_point, const CodePtr target_ptr, const std::vector<BlockRelocation>& block_relocations_list) {
using namespace oaknut;
using namespace oaknut::util;
for (auto [ptr_offset] : block_relocations_list) {
CodeGenerator c{reinterpret_cast<u32*>(entry_point + ptr_offset)};
if (target_ptr) {
c.B((void*)target_ptr);
} else {
c.NOP();
}
}
}
void AddressSpace::Link(IR::LocationDescriptor block_descriptor, EmittedBlockInfo& block_info) {
using namespace oaknut;
using namespace oaknut::util;
for (auto [ptr_offset, target] : block_info.relocations) {
CodeGenerator c{reinterpret_cast<u32*>(block_info.entry_point + ptr_offset)};
switch (target) {
case LinkTarget::ReturnToDispatcher:
c.B(prelude_info.return_to_dispatcher);
break;
case LinkTarget::ReturnFromRunCode:
c.B(prelude_info.return_from_run_code);
break;
case LinkTarget::ReadMemory8:
c.BL(prelude_info.read_memory_8);
break;
case LinkTarget::ReadMemory16:
c.BL(prelude_info.read_memory_16);
break;
case LinkTarget::ReadMemory32:
c.BL(prelude_info.read_memory_32);
break;
case LinkTarget::ReadMemory64:
c.BL(prelude_info.read_memory_64);
break;
case LinkTarget::ReadMemory128:
c.BL(prelude_info.read_memory_128);
break;
case LinkTarget::ExclusiveReadMemory8:
c.BL(prelude_info.exclusive_read_memory_8);
break;
case LinkTarget::ExclusiveReadMemory16:
c.BL(prelude_info.exclusive_read_memory_16);
break;
case LinkTarget::ExclusiveReadMemory32:
c.BL(prelude_info.exclusive_read_memory_32);
break;
case LinkTarget::ExclusiveReadMemory64:
c.BL(prelude_info.exclusive_read_memory_64);
break;
case LinkTarget::ExclusiveReadMemory128:
c.BL(prelude_info.exclusive_read_memory_128);
break;
case LinkTarget::WriteMemory8:
c.BL(prelude_info.write_memory_8);
break;
case LinkTarget::WriteMemory16:
c.BL(prelude_info.write_memory_16);
break;
case LinkTarget::WriteMemory32:
c.BL(prelude_info.write_memory_32);
break;
case LinkTarget::WriteMemory64:
c.BL(prelude_info.write_memory_64);
break;
case LinkTarget::WriteMemory128:
c.BL(prelude_info.write_memory_128);
break;
case LinkTarget::ExclusiveWriteMemory8:
c.BL(prelude_info.exclusive_write_memory_8);
break;
case LinkTarget::ExclusiveWriteMemory16:
c.BL(prelude_info.exclusive_write_memory_16);
break;
case LinkTarget::ExclusiveWriteMemory32:
c.BL(prelude_info.exclusive_write_memory_32);
break;
case LinkTarget::ExclusiveWriteMemory64:
c.BL(prelude_info.exclusive_write_memory_64);
break;
case LinkTarget::ExclusiveWriteMemory128:
c.BL(prelude_info.exclusive_write_memory_128);
break;
case LinkTarget::CallSVC:
c.BL(prelude_info.call_svc);
break;
case LinkTarget::ExceptionRaised:
c.BL(prelude_info.exception_raised);
break;
case LinkTarget::InstructionSynchronizationBarrierRaised:
c.BL(prelude_info.isb_raised);
break;
case LinkTarget::InstructionCacheOperationRaised:
c.BL(prelude_info.ic_raised);
break;
case LinkTarget::DataCacheOperationRaised:
c.BL(prelude_info.dc_raised);
break;
case LinkTarget::GetCNTPCT:
c.BL(prelude_info.get_cntpct);
break;
case LinkTarget::AddTicks:
c.BL(prelude_info.add_ticks);
break;
case LinkTarget::GetTicksRemaining:
c.BL(prelude_info.get_ticks_remaining);
break;
default:
ASSERT_FALSE("Invalid relocation target");
}
}
for (auto [target_descriptor, list] : block_info.block_relocations) {
block_references[target_descriptor.Value()].emplace(block_descriptor.Value());
LinkBlockLinks(block_info.entry_point, Get(target_descriptor), list);
}
}
void AddressSpace::RelinkForDescriptor(IR::LocationDescriptor target_descriptor) {
for (auto block_descriptor : block_references[target_descriptor.Value()]) {
if (auto iter = block_infos.find(block_descriptor); iter != block_infos.end()) {
const EmittedBlockInfo& block_info = iter->second;
LinkBlockLinks(block_info.entry_point, Get(target_descriptor), block_infos[block_descriptor].block_relocations[target_descriptor]);
mem.invalidate(reinterpret_cast<u32*>(block_info.entry_point), block_info.size);
}
}
}
} // namespace Dynarmic::Backend::Arm64

src/dynarmic/backend/arm64/address_space.h

@@ -0,0 +1,92 @@
/* This file is part of the dynarmic project.
* Copyright (c) 2022 MerryMage
* SPDX-License-Identifier: 0BSD
*/
#pragma once
#include <mcl/stdint.hpp>
#include <oaknut/code_block.hpp>
#include <oaknut/oaknut.hpp>
#include <tsl/robin_map.h>
#include <tsl/robin_set.h>
#include "dynarmic/backend/arm64/emit_arm64.h"
#include "dynarmic/interface/halt_reason.h"
#include "dynarmic/ir/basic_block.h"
#include "dynarmic/ir/location_descriptor.h"
namespace Dynarmic::Backend::Arm64 {
class AddressSpace {
public:
explicit AddressSpace(size_t code_cache_size);
virtual ~AddressSpace();
virtual IR::Block GenerateIR(IR::LocationDescriptor) const = 0;
CodePtr Get(IR::LocationDescriptor descriptor);
CodePtr GetOrEmit(IR::LocationDescriptor descriptor);
void ClearCache();
protected:
virtual EmitConfig GetEmitConfig() = 0;
size_t GetRemainingSize();
EmittedBlockInfo Emit(IR::Block ir_block);
void Link(IR::LocationDescriptor block_descriptor, EmittedBlockInfo& block);
void RelinkForDescriptor(IR::LocationDescriptor target_descriptor);
const size_t code_cache_size;
oaknut::CodeBlock mem;
oaknut::CodeGenerator code;
tsl::robin_map<u64, CodePtr> block_entries;
tsl::robin_map<u64, EmittedBlockInfo> block_infos;
tsl::robin_map<u64, tsl::robin_set<u64>> block_references;
struct PreludeInfo {
u32* end_of_prelude;
using RunCodeFuncType = HaltReason (*)(CodePtr entry_point, void* jit_state, volatile u32* halt_reason);
RunCodeFuncType run_code;
RunCodeFuncType step_code;
void* return_to_dispatcher;
void* return_from_run_code;
void* read_memory_8;
void* read_memory_16;
void* read_memory_32;
void* read_memory_64;
void* read_memory_128;
void* exclusive_read_memory_8;
void* exclusive_read_memory_16;
void* exclusive_read_memory_32;
void* exclusive_read_memory_64;
void* exclusive_read_memory_128;
void* write_memory_8;
void* write_memory_16;
void* write_memory_32;
void* write_memory_64;
void* write_memory_128;
void* exclusive_write_memory_8;
void* exclusive_write_memory_16;
void* exclusive_write_memory_32;
void* exclusive_write_memory_64;
void* exclusive_write_memory_128;
void* call_svc;
void* exception_raised;
void* dc_raised;
void* ic_raised;
void* isb_raised;
void* get_cntpct;
void* add_ticks;
void* get_ticks_remaining;
} prelude_info;
};
} // namespace Dynarmic::Backend::Arm64
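
Judging from the new header above, a concrete frontend derives from AddressSpace by supplying IR generation, an emitter configuration, and a prelude that populates prelude_info before any block is emitted; the A32 and A64 classes in this commit follow exactly this pattern. A minimal sketch of such a subclass (MyAddressSpace is a hypothetical name for illustration, not part of the commit):

    // Illustrative only: mirrors what A32AddressSpace/A64AddressSpace do after this change.
    class MyAddressSpace final : public AddressSpace {
    public:
        explicit MyAddressSpace(size_t cache_size)
                : AddressSpace(cache_size) {
            EmitPrelude();  // fills prelude_info and leaves `code` pointing past end_of_prelude
        }

        // Translate the guest code identified by `descriptor` into a Dynarmic IR block.
        IR::Block GenerateIR(IR::LocationDescriptor descriptor) const override;

    protected:
        // Optimization flags, hooks and callbacks handed to EmitArm64 by AddressSpace::Emit.
        EmitConfig GetEmitConfig() override;

    private:
        void EmitPrelude();
    };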