2022-04-23 10:59:50 +02:00
|
|
|
// SPDX-FileCopyrightText: Copyright 2020 yuzu Emulator Project
|
|
|
|
// SPDX-License-Identifier: GPL-2.0-or-later
|
2020-03-02 05:46:10 +01:00
|
|
|
|
|
|
|
#include <cinttypes>
|
|
|
|
#include <memory>
|
2021-05-26 21:09:53 +02:00
|
|
|
#include <dynarmic/interface/A32/a32.h>
|
|
|
|
#include <dynarmic/interface/A32/config.h>
|
|
|
|
#include <dynarmic/interface/A32/context.h>
|
2020-11-04 01:54:53 +01:00
|
|
|
#include "common/assert.h"
|
2021-06-23 23:18:27 +02:00
|
|
|
#include "common/literals.h"
|
2020-06-19 01:56:59 +02:00
|
|
|
#include "common/logging/log.h"
|
|
|
|
#include "common/page_table.h"
|
2021-04-15 01:07:40 +02:00
|
|
|
#include "common/settings.h"
|
2020-03-06 19:56:05 +01:00
|
|
|
#include "core/arm/cpu_interrupt_handler.h"
|
2020-03-02 05:46:10 +01:00
|
|
|
#include "core/arm/dynarmic/arm_dynarmic_32.h"
|
|
|
|
#include "core/arm/dynarmic/arm_dynarmic_cp15.h"
|
2020-06-19 01:56:59 +02:00
|
|
|
#include "core/arm/dynarmic/arm_exclusive_monitor.h"
|
2020-03-02 05:46:10 +01:00
|
|
|
#include "core/core.h"
|
|
|
|
#include "core/core_timing.h"
|
2022-05-31 01:35:01 +02:00
|
|
|
#include "core/debugger/debugger.h"
|
|
|
|
#include "core/hle/kernel/k_process.h"
|
2020-03-02 05:46:10 +01:00
|
|
|
#include "core/hle/kernel/svc.h"
|
|
|
|
#include "core/memory.h"
|
|
|
|
|
|
|
|
namespace Core {
|
|
|
|
|
2021-06-23 23:18:27 +02:00
|
|
|
using namespace Common::Literals;
|
|
|
|
|
2020-03-02 05:46:10 +01:00
|
|
|
/// Callback interface invoked by Dynarmic from JIT-compiled A32 guest code.
/// Routes guest memory accesses through Core::Memory, forwards SVCs and
/// exceptions to the owning ARM_Dynarmic_32, and performs cycle accounting.
/// When the debugger is enabled, every access is first checked against the
/// process watchpoints via CheckMemoryAccess().
class DynarmicCallbacks32 : public Dynarmic::A32::UserCallbacks {
public:
    explicit DynarmicCallbacks32(ARM_Dynarmic_32& parent_)
        : parent{parent_},
          memory(parent.system.Memory()), debugger_enabled{parent.system.DebuggerEnabled()} {}

    // Reads still proceed after a watchpoint hit; CheckMemoryAccess() has
    // already arranged for the JIT to halt at the current instruction.
    u8 MemoryRead8(u32 vaddr) override {
        CheckMemoryAccess(vaddr, 1, Kernel::DebugWatchpointType::Read);
        return memory.Read8(vaddr);
    }
    u16 MemoryRead16(u32 vaddr) override {
        CheckMemoryAccess(vaddr, 2, Kernel::DebugWatchpointType::Read);
        return memory.Read16(vaddr);
    }
    u32 MemoryRead32(u32 vaddr) override {
        CheckMemoryAccess(vaddr, 4, Kernel::DebugWatchpointType::Read);
        return memory.Read32(vaddr);
    }
    u64 MemoryRead64(u32 vaddr) override {
        CheckMemoryAccess(vaddr, 8, Kernel::DebugWatchpointType::Read);
        return memory.Read64(vaddr);
    }

    // Instruction fetch: returning std::nullopt for an unmapped address lets
    // Dynarmic raise a NoExecuteFault instead of faulting on the read.
    std::optional<u32> MemoryReadCode(u32 vaddr) override {
        if (!memory.IsValidVirtualAddressRange(vaddr, sizeof(u32))) {
            return std::nullopt;
        }
        return MemoryRead32(vaddr);
    }

    // Writes are suppressed when a watchpoint matches, so the debugger
    // observes memory as it was before the faulting store.
    void MemoryWrite8(u32 vaddr, u8 value) override {
        if (CheckMemoryAccess(vaddr, 1, Kernel::DebugWatchpointType::Write)) {
            memory.Write8(vaddr, value);
        }
    }
    void MemoryWrite16(u32 vaddr, u16 value) override {
        if (CheckMemoryAccess(vaddr, 2, Kernel::DebugWatchpointType::Write)) {
            memory.Write16(vaddr, value);
        }
    }
    void MemoryWrite32(u32 vaddr, u32 value) override {
        if (CheckMemoryAccess(vaddr, 4, Kernel::DebugWatchpointType::Write)) {
            memory.Write32(vaddr, value);
        }
    }
    void MemoryWrite64(u32 vaddr, u64 value) override {
        if (CheckMemoryAccess(vaddr, 8, Kernel::DebugWatchpointType::Write)) {
            memory.Write64(vaddr, value);
        }
    }

    // Exclusive (LDREX/STREX-style) stores: short-circuit keeps the exclusive
    // write from happening when a watchpoint matched.
    bool MemoryWriteExclusive8(u32 vaddr, u8 value, u8 expected) override {
        return CheckMemoryAccess(vaddr, 1, Kernel::DebugWatchpointType::Write) &&
               memory.WriteExclusive8(vaddr, value, expected);
    }
    bool MemoryWriteExclusive16(u32 vaddr, u16 value, u16 expected) override {
        return CheckMemoryAccess(vaddr, 2, Kernel::DebugWatchpointType::Write) &&
               memory.WriteExclusive16(vaddr, value, expected);
    }
    bool MemoryWriteExclusive32(u32 vaddr, u32 value, u32 expected) override {
        return CheckMemoryAccess(vaddr, 4, Kernel::DebugWatchpointType::Write) &&
               memory.WriteExclusive32(vaddr, value, expected);
    }
    bool MemoryWriteExclusive64(u32 vaddr, u64 value, u64 expected) override {
        return CheckMemoryAccess(vaddr, 8, Kernel::DebugWatchpointType::Write) &&
               memory.WriteExclusive64(vaddr, value, expected);
    }

    // Called when Dynarmic encounters an instruction it cannot JIT.
    void InterpreterFallback(u32 pc, std::size_t num_instructions) override {
        parent.LogBacktrace();
        LOG_ERROR(Core_ARM,
                  "Unimplemented instruction @ 0x{:X} for {} instructions (instr = {:08X})", pc,
                  num_instructions, MemoryRead32(pc));
    }

    // Guest exception reporting. With a debugger attached, exceptions are
    // surfaced as breakpoint halts instead of being logged as fatal.
    void ExceptionRaised(u32 pc, Dynarmic::A32::Exception exception) override {
        switch (exception) {
        case Dynarmic::A32::Exception::NoExecuteFault:
            LOG_CRITICAL(Core_ARM, "Cannot execute instruction at unmapped address {:#08x}", pc);
            ReturnException(pc, ARM_Interface::no_execute);
            return;
        default:
            if (debugger_enabled) {
                ReturnException(pc, ARM_Interface::breakpoint);
                return;
            }

            parent.LogBacktrace();
            LOG_CRITICAL(Core_ARM,
                         "ExceptionRaised(exception = {}, pc = {:08X}, code = {:08X}, thumb = {})",
                         exception, pc, MemoryRead32(pc), parent.IsInThumbMode());
        }
    }

    // Record the SVC number and halt the JIT so the scheduler can service it.
    void CallSVC(u32 swi) override {
        parent.svc_swi = swi;
        parent.jit.load()->HaltExecution(ARM_Interface::svc_call);
    }

    // Cycle accounting; only used when dynarmic's cycle counting is enabled
    // (i.e. wall-clock timing is off — see MakeJit's enable_cycle_counting).
    void AddTicks(u64 ticks) override {
        ASSERT_MSG(!parent.uses_wall_clock, "This should never happen - dynarmic ticking disabled");

        // Divide the number of ticks by the amount of CPU cores. TODO(Subv): This yields only a
        // rough approximation of the amount of executed ticks in the system, it may be thrown off
        // if not all cores are doing a similar amount of work. Instead of doing this, we should
        // device a way so that timing is consistent across all cores without increasing the ticks 4
        // times.
        u64 amortized_ticks =
            (ticks - num_interpreted_instructions) / Core::Hardware::NUM_CPU_CORES;
        // Always execute at least one tick.
        amortized_ticks = std::max<u64>(amortized_ticks, 1);

        parent.system.CoreTiming().AddTicks(amortized_ticks);
        num_interpreted_instructions = 0;
    }

    u64 GetTicksRemaining() override {
        ASSERT_MSG(!parent.uses_wall_clock, "This should never happen - dynarmic ticking disabled");

        // Clamp to zero: a negative downcount means the slice already expired.
        return std::max<s64>(parent.system.CoreTiming().GetDowncount(), 0);
    }

    // Returns true if the access may proceed. On a watchpoint match, records
    // the watchpoint, halts the JIT at the current pc, and returns false.
    bool CheckMemoryAccess(VAddr addr, u64 size, Kernel::DebugWatchpointType type) {
        if (!debugger_enabled) {
            return true;
        }

        const auto match{parent.MatchingWatchpoint(addr, size, type)};
        if (match) {
            parent.halted_watchpoint = match;
            ReturnException(parent.jit.load()->Regs()[15], ARM_Interface::watchpoint);
            return false;
        }

        return true;
    }

    // Snapshot the CPU state (with pc forced to the faulting instruction)
    // into breakpoint_context, then halt the JIT with the given reason.
    void ReturnException(u32 pc, Dynarmic::HaltReason hr) {
        parent.SaveContext(parent.breakpoint_context);
        parent.breakpoint_context.cpu_registers[15] = pc;
        parent.jit.load()->HaltExecution(hr);
    }

    ARM_Dynarmic_32& parent;
    Core::Memory::Memory& memory;
    // Subtracted from the tick count in AddTicks (interpreted instructions
    // are accounted separately).
    std::size_t num_interpreted_instructions{};
    // Cached at construction; gates all watchpoint checks.
    bool debugger_enabled{};
    static constexpr u64 minimum_run_cycles = 1000U;
};
|
|
|
|
|
2021-03-24 12:08:41 +01:00
|
|
|
/// Builds a Dynarmic A32 JIT instance configured for this core.
/// @param page_table  Guest page table to back fastmem/page-table lookups, or
///                    nullptr to build the lightweight "null" JIT used before
///                    an address space exists.
/// @return Owning pointer to the newly constructed JIT.
std::shared_ptr<Dynarmic::A32::Jit> ARM_Dynarmic_32::MakeJit(Common::PageTable* page_table) const {
    Dynarmic::A32::UserConfig config;
    config.callbacks = cb.get();
    // CP15 provides TLS registers (uro/uprw) and other system-control ops.
    config.coprocessors[15] = cp15;
    config.define_unpredictable_behaviour = true;
    // A32 has a 32-bit address space with 4 KiB pages.
    static constexpr std::size_t PAGE_BITS = 12;
    static constexpr std::size_t NUM_PAGE_TABLE_ENTRIES = 1 << (32 - PAGE_BITS);
    if (page_table) {
        config.page_table = reinterpret_cast<std::array<std::uint8_t*, NUM_PAGE_TABLE_ENTRIES>*>(
            page_table->pointers.data());
        config.fastmem_pointer = page_table->fastmem_arena;
    }
    config.absolute_offset_page_table = true;
    // Page-table entries pack attribute bits into the low pointer bits; tell
    // dynarmic to mask them off on load.
    config.page_table_pointer_mask_bits = Common::PageTable::ATTRIBUTE_BITS;
    config.detect_misaligned_access_via_page_table = 16 | 32 | 64 | 128;
    config.only_detect_misalignment_via_page_table_on_page_boundary = true;
    config.fastmem_exclusive_access = true;
    config.recompile_on_exclusive_fastmem_failure = true;

    // Multi-process state
    config.processor_id = core_index;
    config.global_monitor = &exclusive_monitor.monitor;

    // Timing: wall-clock CNTPCT and cycle counting are mutually exclusive.
    config.wall_clock_cntpct = uses_wall_clock;
    config.enable_cycle_counting = !uses_wall_clock;

    // Code cache size
    config.code_cache_size = 512_MiB;
    config.far_code_offset = 400_MiB;

    // Allow memory fault handling to work
    if (system.DebuggerEnabled()) {
        config.check_halt_on_memory_access = true;
    }

    // null_jit
    if (!page_table) {
        // Don't waste too much memory on null_jit
        config.code_cache_size = 8_MiB;
        config.far_code_offset = 4_MiB;
    }

    // Safe optimizations: in CPU debug mode each optimization is opt-in via
    // its individual setting; otherwise accuracy presets drive the flags.
    if (Settings::values.cpu_debug_mode) {
        if (!Settings::values.cpuopt_page_tables) {
            config.page_table = nullptr;
        }
        if (!Settings::values.cpuopt_block_linking) {
            config.optimizations &= ~Dynarmic::OptimizationFlag::BlockLinking;
        }
        if (!Settings::values.cpuopt_return_stack_buffer) {
            config.optimizations &= ~Dynarmic::OptimizationFlag::ReturnStackBuffer;
        }
        if (!Settings::values.cpuopt_fast_dispatcher) {
            config.optimizations &= ~Dynarmic::OptimizationFlag::FastDispatch;
        }
        if (!Settings::values.cpuopt_context_elimination) {
            config.optimizations &= ~Dynarmic::OptimizationFlag::GetSetElimination;
        }
        if (!Settings::values.cpuopt_const_prop) {
            config.optimizations &= ~Dynarmic::OptimizationFlag::ConstProp;
        }
        if (!Settings::values.cpuopt_misc_ir) {
            config.optimizations &= ~Dynarmic::OptimizationFlag::MiscIROpt;
        }
        if (!Settings::values.cpuopt_reduce_misalign_checks) {
            config.only_detect_misalignment_via_page_table_on_page_boundary = false;
        }
        if (!Settings::values.cpuopt_fastmem) {
            config.fastmem_pointer = nullptr;
        }
        if (!Settings::values.cpuopt_fastmem_exclusives) {
            config.fastmem_exclusive_access = false;
        }
        if (!Settings::values.cpuopt_recompile_exclusives) {
            config.recompile_on_exclusive_fastmem_failure = false;
        }
    } else {
        // Unsafe optimizations
        if (Settings::values.cpu_accuracy.GetValue() == Settings::CPUAccuracy::Unsafe) {
            config.unsafe_optimizations = true;
            if (Settings::values.cpuopt_unsafe_unfuse_fma) {
                config.optimizations |= Dynarmic::OptimizationFlag::Unsafe_UnfuseFMA;
            }
            if (Settings::values.cpuopt_unsafe_reduce_fp_error) {
                config.optimizations |= Dynarmic::OptimizationFlag::Unsafe_ReducedErrorFP;
            }
            if (Settings::values.cpuopt_unsafe_ignore_standard_fpcr) {
                config.optimizations |= Dynarmic::OptimizationFlag::Unsafe_IgnoreStandardFPCRValue;
            }
            if (Settings::values.cpuopt_unsafe_inaccurate_nan) {
                config.optimizations |= Dynarmic::OptimizationFlag::Unsafe_InaccurateNaN;
            }
            if (Settings::values.cpuopt_unsafe_ignore_global_monitor) {
                config.optimizations |= Dynarmic::OptimizationFlag::Unsafe_IgnoreGlobalMonitor;
            }
        }

        // Curated optimizations: the "Auto" preset enables a fixed, vetted
        // subset of the unsafe flags.
        if (Settings::values.cpu_accuracy.GetValue() == Settings::CPUAccuracy::Auto) {
            config.unsafe_optimizations = true;
            config.optimizations |= Dynarmic::OptimizationFlag::Unsafe_UnfuseFMA;
            config.optimizations |= Dynarmic::OptimizationFlag::Unsafe_IgnoreStandardFPCRValue;
            config.optimizations |= Dynarmic::OptimizationFlag::Unsafe_InaccurateNaN;
            config.optimizations |= Dynarmic::OptimizationFlag::Unsafe_IgnoreGlobalMonitor;
        }

        // Paranoia mode for debugging optimizations: disable everything.
        if (Settings::values.cpu_accuracy.GetValue() == Settings::CPUAccuracy::Paranoid) {
            config.unsafe_optimizations = false;
            config.optimizations = Dynarmic::no_optimizations;
        }
    }

    // unique_ptr converts implicitly to the shared_ptr return type.
    return std::make_unique<Dynarmic::A32::Jit>(config);
}
|
|
|
|
|
2022-05-31 20:37:37 +02:00
|
|
|
// Execute guest code on the currently installed JIT until something
// (SVC, interrupt, breakpoint, ...) halts it.
Dynarmic::HaltReason ARM_Dynarmic_32::RunJit() {
    Dynarmic::A32::Jit* const current_jit = jit.load();
    return current_jit->Run();
}
|
2022-05-31 01:35:01 +02:00
|
|
|
|
2022-05-31 20:37:37 +02:00
|
|
|
// Execute a single guest instruction on the currently installed JIT.
Dynarmic::HaltReason ARM_Dynarmic_32::StepJit() {
    Dynarmic::A32::Jit* const current_jit = jit.load();
    return current_jit->Step();
}
|
2022-05-31 01:35:01 +02:00
|
|
|
|
2022-05-31 20:37:37 +02:00
|
|
|
// Returns the SVC number recorded by DynarmicCallbacks32::CallSVC when the
// JIT last halted with ARM_Interface::svc_call.
u32 ARM_Dynarmic_32::GetSvcNumber() const {
    return svc_swi;
}
|
|
|
|
|
2022-06-06 18:56:01 +02:00
|
|
|
// Returns the watchpoint that caused the last halt (set by
// DynarmicCallbacks32::CheckMemoryAccess), or the previously stored value.
const Kernel::DebugWatchpoint* ARM_Dynarmic_32::HaltedWatchpoint() const {
    return halted_watchpoint;
}
|
|
|
|
|
|
|
|
// Restores the context snapshot taken by ReturnException, rewinding the CPU
// to the instruction that triggered the breakpoint/watchpoint.
void ARM_Dynarmic_32::RewindBreakpointInstruction() {
    LoadContext(breakpoint_context);
}
|
|
|
|
|
2021-05-04 10:04:05 +02:00
|
|
|
// Constructs the 32-bit dynarmic backend for one core. A minimal "null" JIT
// (built without a page table) is installed initially; PageTableChanged()
// swaps in a real JIT once an address space is available.
ARM_Dynarmic_32::ARM_Dynarmic_32(System& system_, CPUInterrupts& interrupt_handlers_,
                                 bool uses_wall_clock_, ExclusiveMonitor& exclusive_monitor_,
                                 std::size_t core_index_)
    : ARM_Interface{system_, interrupt_handlers_, uses_wall_clock_},
      cb(std::make_unique<DynarmicCallbacks32>(*this)),
      cp15(std::make_shared<DynarmicCP15>(*this)), core_index{core_index_},
      // dynamic_cast: this backend requires the dynarmic exclusive monitor.
      exclusive_monitor{dynamic_cast<DynarmicExclusiveMonitor&>(exclusive_monitor_)},
      null_jit{MakeJit(nullptr)}, jit{null_jit.get()} {}
|
2020-03-02 05:46:10 +01:00
|
|
|
|
|
|
|
// Defaulted out of line, where members' complete types are visible in this TU.
ARM_Dynarmic_32::~ARM_Dynarmic_32() = default;
|
|
|
|
|
|
|
|
// Sets the program counter (R15). The 64-bit value is truncated to the
// 32-bit A32 address space.
void ARM_Dynarmic_32::SetPC(u64 pc) {
    Dynarmic::A32::Jit* const current_jit = jit.load();
    current_jit->Regs()[15] = static_cast<u32>(pc);
}
|
|
|
|
|
|
|
|
// Returns the program counter (R15), zero-extended to 64 bits.
u64 ARM_Dynarmic_32::GetPC() const {
    Dynarmic::A32::Jit* const current_jit = jit.load();
    return current_jit->Regs()[15];
}
|
|
|
|
|
2022-04-21 02:17:48 +02:00
|
|
|
// Returns the stack pointer (R13), zero-extended to 64 bits.
u64 ARM_Dynarmic_32::GetSP() const {
    Dynarmic::A32::Jit* const current_jit = jit.load();
    return current_jit->Regs()[13];
}
|
|
|
|
|
2020-10-21 04:07:39 +02:00
|
|
|
// Reads general-purpose register `index`, zero-extended to 64 bits.
u64 ARM_Dynarmic_32::GetReg(int index) const {
    Dynarmic::A32::Jit* const current_jit = jit.load();
    return current_jit->Regs()[index];
}
|
|
|
|
|
2020-10-21 04:07:39 +02:00
|
|
|
void ARM_Dynarmic_32::SetReg(int index, u64 value) {
|
2022-04-10 16:46:29 +02:00
|
|
|
jit.load()->Regs()[index] = static_cast<u32>(value);
|
2020-03-02 05:46:10 +01:00
|
|
|
}
|
|
|
|
|
2020-10-21 04:07:39 +02:00
|
|
|
// No 128-bit vector registers are exposed through the 32-bit interface;
// always reports zero.
u128 ARM_Dynarmic_32::GetVectorReg(int index) const {
    return u128{};
}
|
|
|
|
|
2020-10-21 04:07:39 +02:00
|
|
|
// No-op: no 128-bit vector registers are exposed through the 32-bit interface.
void ARM_Dynarmic_32::SetVectorReg(int index, u128 value) {}
|
2020-03-02 05:46:10 +01:00
|
|
|
|
|
|
|
// Returns the current program status register (CPSR).
u32 ARM_Dynarmic_32::GetPSTATE() const {
    Dynarmic::A32::Jit* const current_jit = jit.load();
    return current_jit->Cpsr();
}
|
|
|
|
|
|
|
|
// Replaces the current program status register (CPSR).
void ARM_Dynarmic_32::SetPSTATE(u32 cpsr) {
    Dynarmic::A32::Jit* const current_jit = jit.load();
    current_jit->SetCpsr(cpsr);
}
|
|
|
|
|
|
|
|
// TLS base is stored in the CP15 user read-only thread register (TPIDRURO).
u64 ARM_Dynarmic_32::GetTlsAddress() const {
    return cp15->uro;
}
|
|
|
|
|
|
|
|
// Stores the TLS base into the CP15 user read-only thread register
// (TPIDRURO), truncating to the 32-bit address space.
void ARM_Dynarmic_32::SetTlsAddress(VAddr address) {
    const u32 address32 = static_cast<u32>(address);
    cp15->uro = address32;
}
|
|
|
|
|
|
|
|
// TPIDR_EL0 maps to the CP15 user read/write thread register (TPIDRURW).
u64 ARM_Dynarmic_32::GetTPIDR_EL0() const {
    return cp15->uprw;
}
|
|
|
|
|
|
|
|
// Writes the CP15 user read/write thread register (TPIDRURW); the 64-bit
// value is truncated to 32 bits.
void ARM_Dynarmic_32::SetTPIDR_EL0(u64 value) {
    const u32 value32 = static_cast<u32>(value);
    cp15->uprw = value32;
}
|
|
|
|
|
|
|
|
// Snapshots the JIT's live A32 state into the kernel thread context.
void ARM_Dynarmic_32::SaveContext(ThreadContext32& ctx) {
    Dynarmic::A32::Context jit_ctx;
    jit.load()->SaveContext(jit_ctx);

    ctx.cpu_registers = jit_ctx.Regs();
    ctx.extension_registers = jit_ctx.ExtRegs();
    ctx.cpsr = jit_ctx.Cpsr();
    ctx.fpscr = jit_ctx.Fpscr();
}
|
|
|
|
|
|
|
|
void ARM_Dynarmic_32::LoadContext(const ThreadContext32& ctx) {
|
|
|
|
Dynarmic::A32::Context context;
|
|
|
|
context.Regs() = ctx.cpu_registers;
|
2020-06-20 01:40:07 +02:00
|
|
|
context.ExtRegs() = ctx.extension_registers;
|
2020-03-02 05:46:10 +01:00
|
|
|
context.SetCpsr(ctx.cpsr);
|
2020-06-20 01:40:07 +02:00
|
|
|
context.SetFpscr(ctx.fpscr);
|
2022-04-10 16:46:29 +02:00
|
|
|
jit.load()->LoadContext(context);
|
2020-03-02 05:46:10 +01:00
|
|
|
}
|
|
|
|
|
2022-04-03 17:29:05 +02:00
|
|
|
void ARM_Dynarmic_32::SignalInterrupt() {
|
2022-04-10 16:46:29 +02:00
|
|
|
jit.load()->HaltExecution(break_loop);
|
2020-03-02 05:46:10 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
void ARM_Dynarmic_32::ClearInstructionCache() {
|
2022-04-10 16:46:29 +02:00
|
|
|
jit.load()->ClearCache();
|
2020-03-02 05:46:10 +01:00
|
|
|
}
|
|
|
|
|
2020-11-14 08:20:32 +01:00
|
|
|
// Invalidates translated code overlapping [addr, addr + size); the address
// is truncated to the 32-bit guest address space.
void ARM_Dynarmic_32::InvalidateCacheRange(VAddr addr, std::size_t size) {
    Dynarmic::A32::Jit* const current_jit = jit.load();
    current_jit->InvalidateCacheRange(static_cast<u32>(addr), size);
}
|
|
|
|
|
2020-06-20 01:40:07 +02:00
|
|
|
void ARM_Dynarmic_32::ClearExclusiveState() {
|
2022-04-10 16:46:29 +02:00
|
|
|
jit.load()->ClearExclusiveState();
|
2020-06-20 01:40:07 +02:00
|
|
|
}
|
2020-03-02 05:46:10 +01:00
|
|
|
|
|
|
|
// Switches the active JIT to one configured for the given address space.
// JIT instances are cached per (page table, address-space-size) pair, so
// returning to a previously seen address space reuses compiled code.
// The guest CPU state is preserved across the swap.
void ARM_Dynarmic_32::PageTableChanged(Common::PageTable& page_table,
                                       std::size_t new_address_space_size_in_bits) {
    // Save state from the outgoing JIT before swapping.
    ThreadContext32 ctx{};
    SaveContext(ctx);

    auto key = std::make_pair(&page_table, new_address_space_size_in_bits);
    auto iter = jit_cache.find(key);
    if (iter != jit_cache.end()) {
        jit.store(iter->second.get());
        LoadContext(ctx);
        return;
    }
    // First time this address space is seen: build, install, then cache.
    std::shared_ptr new_jit = MakeJit(&page_table);
    jit.store(new_jit.get());
    LoadContext(ctx);
    jit_cache.emplace(key, std::move(new_jit));
}
|
|
|
|
|
2022-04-21 02:17:48 +02:00
|
|
|
// Walks the A32 frame-record chain starting at (fp, lr, pc) and produces a
// symbolicated backtrace. Stops at a null, misaligned, or unmapped frame
// pointer.
std::vector<ARM_Interface::BacktraceEntry> ARM_Dynarmic_32::GetBacktrace(Core::System& system,
                                                                         u64 fp, u64 lr, u64 pc) {
    auto& memory = system.Memory();
    std::vector<BacktraceEntry> entries;

    // The current pc is always the innermost frame.
    entries.push_back({"", 0, pc, 0, ""});

    // fp (= r11) points to the last frame record.
    // Frame records are two words long:
    // fp+0 : pointer to previous frame record
    // fp+4 : value of lr for frame
    for (;;) {
        entries.push_back({"", 0, lr, 0, ""});
        const bool frame_valid =
            fp != 0 && (fp % 4 == 0) && memory.IsValidVirtualAddressRange(fp, 8);
        if (!frame_valid) {
            break;
        }
        lr = memory.Read32(fp + 4);
        fp = memory.Read32(fp);
    }

    SymbolicateBacktrace(system, entries);

    return entries;
}
|
|
|
|
|
|
|
|
// Backtrace from a saved thread context: r11 = frame pointer, r14 = link
// register, r15 = program counter.
std::vector<ARM_Interface::BacktraceEntry> ARM_Dynarmic_32::GetBacktraceFromContext(
    System& system, const ThreadContext32& ctx) {
    const auto& regs = ctx.cpu_registers;
    return GetBacktrace(system, regs[11], regs[14], regs[15]);
}
|
|
|
|
|
|
|
|
std::vector<ARM_Interface::BacktraceEntry> ARM_Dynarmic_32::GetBacktrace() const {
|
2022-06-25 18:54:24 +02:00
|
|
|
return GetBacktrace(system, GetReg(11), GetReg(14), GetReg(15));
|
2022-04-21 02:17:48 +02:00
|
|
|
}
|
|
|
|
|
2020-03-02 05:46:10 +01:00
|
|
|
} // namespace Core
|