ARM/Memory: Correct Exclusive Monitor and Implement Exclusive Memory Writes.
commit cd1c38be8d
parent 535c542d84

12 changed files with 325 additions and 24 deletions
@@ -98,6 +98,8 @@ add_library(common STATIC
     algorithm.h
     alignment.h
     assert.h
+    atomic_ops.cpp
+    atomic_ops.h
     detached_tasks.cpp
     detached_tasks.h
     bit_field.h
src/common/atomic_ops.cpp (new file, 70 lines)
@@ -0,0 +1,70 @@
+// Copyright 2020 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <cstring>
+
+#include "common/atomic_ops.h"
+
+#if _MSC_VER
+#include <intrin.h>
+#endif
+
+namespace Common {
+
+#if _MSC_VER
+
+bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected) {
+    u8 result = _InterlockedCompareExchange8((char*)pointer, value, expected);
+    return result == expected;
+}
+
+bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected) {
+    u16 result = _InterlockedCompareExchange16((short*)pointer, value, expected);
+    return result == expected;
+}
+
+bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected) {
+    u32 result = _InterlockedCompareExchange((long*)pointer, value, expected);
+    return result == expected;
+}
+
+bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected) {
+    u64 result = _InterlockedCompareExchange64((__int64*)pointer, value, expected);
+    return result == expected;
+}
+
+bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected) {
+    return _InterlockedCompareExchange128((__int64*)pointer, value[1], value[0],
+                                          (__int64*)expected.data()) != 0;
+}
+
+#else
+
+bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected) {
+    return __sync_bool_compare_and_swap(pointer, expected, value);
+}
+
+bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected) {
+    return __sync_bool_compare_and_swap(pointer, expected, value);
+}
+
+bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected) {
+    return __sync_bool_compare_and_swap(pointer, expected, value);
+}
+
+bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected) {
+    return __sync_bool_compare_and_swap(pointer, expected, value);
+}
+
+bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected) {
+    unsigned __int128 value_a;
+    unsigned __int128 expected_a;
+    std::memcpy(&value_a, value.data(), sizeof(u128));
+    std::memcpy(&expected_a, expected.data(), sizeof(u128));
+    return __sync_bool_compare_and_swap((unsigned __int128*)pointer, expected_a, value_a);
+}
+
+#endif
+
+} // namespace Common
src/common/atomic_ops.h (new file, 17 lines)
@@ -0,0 +1,17 @@
+// Copyright 2020 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include "common/common_types.h"
+
+namespace Common {
+
+bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected);
+bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected);
+bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected);
+bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected);
+bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected);
+
+} // namespace Common
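All of these overloads implement the same contract: atomically compare the value at pointer with expected, store value only on a match, and return whether the store happened. A minimal standalone sketch of that contract (not part of the commit; it uses the same GCC/Clang builtin as the #else branch above, with std::uint32_t standing in for Common's u32):

    #include <cstdint>
    #include <cstdio>

    // Same shape as Common::AtomicCompareAndSwap(u32 volatile*, u32, u32).
    static bool CompareAndSwap(std::uint32_t volatile* pointer, std::uint32_t value,
                               std::uint32_t expected) {
        // The builtin takes (ptr, oldval, newval), so expected comes before value.
        return __sync_bool_compare_and_swap(pointer, expected, value);
    }

    int main() {
        std::uint32_t word = 10;
        const bool first = CompareAndSwap(&word, 11, 10);  // matches: stores 11, returns true
        const bool second = CompareAndSwap(&word, 12, 10); // stale expected: stores nothing
        std::printf("%d %d %u\n", first, second, word);    // prints: 1 0 11
        return 0;
    }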
@@ -66,6 +66,22 @@ public:
         memory.Write64(vaddr + 8, value[1]);
     }
 
+    bool MemoryWriteExclusive8(u64 vaddr, std::uint8_t value, std::uint8_t expected) override {
+        return parent.system.Memory().WriteExclusive8(vaddr, value, expected);
+    }
+    bool MemoryWriteExclusive16(u64 vaddr, std::uint16_t value, std::uint16_t expected) override {
+        return parent.system.Memory().WriteExclusive16(vaddr, value, expected);
+    }
+    bool MemoryWriteExclusive32(u64 vaddr, std::uint32_t value, std::uint32_t expected) override {
+        return parent.system.Memory().WriteExclusive32(vaddr, value, expected);
+    }
+    bool MemoryWriteExclusive64(u64 vaddr, std::uint64_t value, std::uint64_t expected) override {
+        return parent.system.Memory().WriteExclusive64(vaddr, value, expected);
+    }
+    bool MemoryWriteExclusive128(u64 vaddr, Vector value, Vector expected) override {
+        return parent.system.Memory().WriteExclusive128(vaddr, value, expected);
+    }
+
     void InterpreterFallback(u64 pc, std::size_t num_instructions) override {
         LOG_INFO(Core_ARM, "Unicorn fallback @ 0x{:X} for {} instructions (instr = {:08X})", pc,
                  num_instructions, MemoryReadCode(pc));
@@ -284,9 +300,29 @@ DynarmicExclusiveMonitor::DynarmicExclusiveMonitor(Memory::Memory& memory, std::
 
 DynarmicExclusiveMonitor::~DynarmicExclusiveMonitor() = default;
 
-void DynarmicExclusiveMonitor::SetExclusive(std::size_t core_index, VAddr addr) {
-    // Size doesn't actually matter.
-    monitor.Mark(core_index, addr, 16);
+void DynarmicExclusiveMonitor::SetExclusive8(std::size_t core_index, VAddr addr) {
+    monitor.Mark<u8>(core_index, addr, 1, [&]() -> u8 { return memory.Read8(addr); });
+}
+
+void DynarmicExclusiveMonitor::SetExclusive16(std::size_t core_index, VAddr addr) {
+    monitor.Mark<u16>(core_index, addr, 2, [&]() -> u16 { return memory.Read16(addr); });
+}
+
+void DynarmicExclusiveMonitor::SetExclusive32(std::size_t core_index, VAddr addr) {
+    monitor.Mark<u32>(core_index, addr, 4, [&]() -> u32 { return memory.Read32(addr); });
+}
+
+void DynarmicExclusiveMonitor::SetExclusive64(std::size_t core_index, VAddr addr) {
+    monitor.Mark<u64>(core_index, addr, 8, [&]() -> u64 { return memory.Read64(addr); });
+}
+
+void DynarmicExclusiveMonitor::SetExclusive128(std::size_t core_index, VAddr addr) {
+    monitor.Mark<u128>(core_index, addr, 16, [&]() -> u128 {
+        u128 result;
+        result[0] = memory.Read64(addr);
+        result[1] = memory.Read64(addr + 8);
+        return result;
+    });
 }
 
 void DynarmicExclusiveMonitor::ClearExclusive() {
@@ -294,28 +330,32 @@ void DynarmicExclusiveMonitor::ClearExclusive() {
 }
 
 bool DynarmicExclusiveMonitor::ExclusiveWrite8(std::size_t core_index, VAddr vaddr, u8 value) {
-    return monitor.DoExclusiveOperation(core_index, vaddr, 1, [&] { memory.Write8(vaddr, value); });
+    return monitor.DoExclusiveOperation<u8>(core_index, vaddr, 1, [&](u8 expected) -> bool {
+        return memory.WriteExclusive8(vaddr, value, expected);
+    });
 }
 
 bool DynarmicExclusiveMonitor::ExclusiveWrite16(std::size_t core_index, VAddr vaddr, u16 value) {
-    return monitor.DoExclusiveOperation(core_index, vaddr, 2,
-                                        [&] { memory.Write16(vaddr, value); });
+    return monitor.DoExclusiveOperation<u16>(core_index, vaddr, 2, [&](u16 expected) -> bool {
+        return memory.WriteExclusive16(vaddr, value, expected);
+    });
 }
 
 bool DynarmicExclusiveMonitor::ExclusiveWrite32(std::size_t core_index, VAddr vaddr, u32 value) {
-    return monitor.DoExclusiveOperation(core_index, vaddr, 4,
-                                        [&] { memory.Write32(vaddr, value); });
+    return monitor.DoExclusiveOperation<u32>(core_index, vaddr, 4, [&](u32 expected) -> bool {
+        return memory.WriteExclusive32(vaddr, value, expected);
+    });
 }
 
 bool DynarmicExclusiveMonitor::ExclusiveWrite64(std::size_t core_index, VAddr vaddr, u64 value) {
-    return monitor.DoExclusiveOperation(core_index, vaddr, 8,
-                                        [&] { memory.Write64(vaddr, value); });
+    return monitor.DoExclusiveOperation<u64>(core_index, vaddr, 8, [&](u64 expected) -> bool {
+        return memory.WriteExclusive64(vaddr, value, expected);
+    });
 }
 
 bool DynarmicExclusiveMonitor::ExclusiveWrite128(std::size_t core_index, VAddr vaddr, u128 value) {
-    return monitor.DoExclusiveOperation(core_index, vaddr, 16, [&] {
-        memory.Write64(vaddr + 0, value[0]);
-        memory.Write64(vaddr + 8, value[1]);
-    });
+    return monitor.DoExclusiveOperation<u128>(core_index, vaddr, 16, [&](u128 expected) -> bool {
+        return memory.WriteExclusive128(vaddr, value, expected);
+    });
 }
 
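The kernel callers later in this commit (address_arbiter.cpp, mutex.cpp, svc.cpp) drive these two halves as a load-linked/store-conditional style loop: arm the monitor for the address, read and compute, then retry if the exclusive write reports that another core intervened. Condensed from Mutex::Unlock below (system and address are assumed to be in scope, as they are there):

    auto& monitor = system.Monitor();
    const std::size_t current_core = system.CurrentCoreIndex();
    do {
        // Arm the monitor; this also snapshots the current value of the word.
        monitor.SetExclusive32(current_core, address);
    } while (!monitor.ExclusiveWrite32(current_core, address, 0)); // retry on interference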
@@ -82,7 +82,11 @@ public:
     explicit DynarmicExclusiveMonitor(Memory::Memory& memory, std::size_t core_count);
     ~DynarmicExclusiveMonitor() override;
 
-    void SetExclusive(std::size_t core_index, VAddr addr) override;
+    void SetExclusive8(std::size_t core_index, VAddr addr) override;
+    void SetExclusive16(std::size_t core_index, VAddr addr) override;
+    void SetExclusive32(std::size_t core_index, VAddr addr) override;
+    void SetExclusive64(std::size_t core_index, VAddr addr) override;
+    void SetExclusive128(std::size_t core_index, VAddr addr) override;
     void ClearExclusive() override;
 
     bool ExclusiveWrite8(std::size_t core_index, VAddr vaddr, u8 value) override;
@@ -18,7 +18,11 @@ class ExclusiveMonitor {
 public:
     virtual ~ExclusiveMonitor();
 
-    virtual void SetExclusive(std::size_t core_index, VAddr addr) = 0;
+    virtual void SetExclusive8(std::size_t core_index, VAddr addr) = 0;
+    virtual void SetExclusive16(std::size_t core_index, VAddr addr) = 0;
+    virtual void SetExclusive32(std::size_t core_index, VAddr addr) = 0;
+    virtual void SetExclusive64(std::size_t core_index, VAddr addr) = 0;
+    virtual void SetExclusive128(std::size_t core_index, VAddr addr) = 0;
     virtual void ClearExclusive() = 0;
 
     virtual bool ExclusiveWrite8(std::size_t core_index, VAddr vaddr, u8 value) = 0;
@@ -90,7 +90,7 @@ ResultCode AddressArbiter::IncrementAndSignalToAddressIfEqual(VAddr address, s32
     auto& monitor = system.Monitor();
     u32 current_value;
     do {
-        monitor.SetExclusive(current_core, address);
+        monitor.SetExclusive32(current_core, address);
         current_value = memory.Read32(address);
 
         if (current_value != value) {
@@ -120,7 +120,7 @@ ResultCode AddressArbiter::ModifyByWaitingCountAndSignalToAddressIfEqual(VAddr a
     auto& monitor = system.Monitor();
     s32 updated_value;
     do {
-        monitor.SetExclusive(current_core, address);
+        monitor.SetExclusive32(current_core, address);
         updated_value = memory.Read32(address);
 
         if (updated_value != value) {
@@ -191,7 +191,7 @@ ResultCode AddressArbiter::WaitForAddressIfLessThan(VAddr address, s32 value, s6
     const std::size_t current_core = system.CurrentCoreIndex();
     auto& monitor = system.Monitor();
     do {
-        monitor.SetExclusive(current_core, address);
+        monitor.SetExclusive32(current_core, address);
         current_value = static_cast<s32>(memory.Read32(address));
         if (should_decrement) {
             decrement_value = current_value - 1;
@@ -10,6 +10,7 @@
 #include "common/logging/log.h"
 #include "core/arm/exclusive_monitor.h"
+#include "core/core.h"
 #include "core/hle/kernel/errors.h"
 #include "core/hle/kernel/handle_table.h"
 #include "core/hle/kernel/kernel.h"
@@ -138,7 +139,7 @@ std::pair<ResultCode, std::shared_ptr<Thread>> Mutex::Unlock(std::shared_ptr<Thr
     const std::size_t current_core = system.CurrentCoreIndex();
     if (new_owner == nullptr) {
         do {
-            monitor.SetExclusive(current_core, address);
+            monitor.SetExclusive32(current_core, address);
         } while (!monitor.ExclusiveWrite32(current_core, address, 0));
         return {RESULT_SUCCESS, nullptr};
     }
@@ -154,7 +155,7 @@ std::pair<ResultCode, std::shared_ptr<Thread>> Mutex::Unlock(std::shared_ptr<Thr
     new_owner->ResumeFromWait();
 
     do {
-        monitor.SetExclusive(current_core, address);
+        monitor.SetExclusive32(current_core, address);
     } while (!monitor.ExclusiveWrite32(current_core, address, mutex_value));
     return {RESULT_SUCCESS, new_owner};
 }
@@ -1641,7 +1641,7 @@ static void SignalProcessWideKey(Core::System& system, VAddr condition_variable_
     u32 update_val = 0;
     const VAddr mutex_address = thread->GetMutexWaitAddress();
     do {
-        monitor.SetExclusive(current_core, mutex_address);
+        monitor.SetExclusive32(current_core, mutex_address);
 
         // If the mutex is not yet acquired, acquire it.
         mutex_val = memory.Read32(mutex_address);
@@ -412,12 +412,12 @@ ResultCode Thread::SetActivity(ThreadActivity value) {
     }
 
     if (value == ThreadActivity::Paused) {
-        if (pausing_state & static_cast<u32>(ThreadSchedFlags::ThreadPauseFlag) != 0) {
+        if ((pausing_state & static_cast<u32>(ThreadSchedFlags::ThreadPauseFlag)) != 0) {
             return ERR_INVALID_STATE;
         }
         AddSchedulingFlag(ThreadSchedFlags::ThreadPauseFlag);
     } else {
-        if (pausing_state & static_cast<u32>(ThreadSchedFlags::ThreadPauseFlag) == 0) {
+        if ((pausing_state & static_cast<u32>(ThreadSchedFlags::ThreadPauseFlag)) == 0) {
            return ERR_INVALID_STATE;
         }
         RemoveSchedulingFlag(ThreadSchedFlags::ThreadPauseFlag);
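The added parentheses here are a behaviour fix rather than a style fix: in C++, != and == bind more tightly than &, so the old condition masked pausing_state with the boolean result of the comparison instead of testing the masked flag bit. A standalone illustration (the flag value 1u << 5 is an assumption for the example only; any flag above bit 0 exhibits the problem):

    #include <cstdint>
    #include <cstdio>

    int main() {
        const std::uint32_t pause_flag = 1u << 5;        // assumed flag value for illustration
        const std::uint32_t pausing_state = pause_flag;  // the pause flag is set

        // Old form parses as pausing_state & (pause_flag != 0), i.e. a mask with 1.
        const bool old_form = (pausing_state & pause_flag != 0);   // false: the set flag is missed
        // Fixed form masks first, then compares.
        const bool new_form = ((pausing_state & pause_flag) != 0); // true
        std::printf("old=%d new=%d\n", old_form, new_form);
        return 0;
    }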
@@ -8,6 +8,7 @@
 #include <utility>
 
 #include "common/assert.h"
+#include "common/atomic_ops.h"
 #include "common/common_types.h"
 #include "common/logging/log.h"
 #include "common/page_table.h"
@@ -176,6 +177,22 @@ struct Memory::Impl {
         }
     }
 
+    bool WriteExclusive8(const VAddr addr, const u8 data, const u8 expected) {
+        return WriteExclusive<u8>(addr, data, expected);
+    }
+
+    bool WriteExclusive16(const VAddr addr, const u16 data, const u16 expected) {
+        return WriteExclusive<u16_le>(addr, data, expected);
+    }
+
+    bool WriteExclusive32(const VAddr addr, const u32 data, const u32 expected) {
+        return WriteExclusive<u32_le>(addr, data, expected);
+    }
+
+    bool WriteExclusive64(const VAddr addr, const u64 data, const u64 expected) {
+        return WriteExclusive<u64_le>(addr, data, expected);
+    }
+
     std::string ReadCString(VAddr vaddr, std::size_t max_length) {
         std::string string;
         string.reserve(max_length);
@@ -679,6 +696,67 @@ struct Memory::Impl {
         }
     }
 
+    template <typename T>
+    bool WriteExclusive(const VAddr vaddr, const T data, const T expected) {
+        u8* page_pointer = current_page_table->pointers[vaddr >> PAGE_BITS];
+        if (page_pointer != nullptr) {
+            // NOTE: Avoid adding any extra logic to this fast-path block
+            T volatile* pointer = reinterpret_cast<T volatile*>(&page_pointer[vaddr]);
+            return Common::AtomicCompareAndSwap(pointer, data, expected);
+        }
+
+        const Common::PageType type = current_page_table->attributes[vaddr >> PAGE_BITS];
+        switch (type) {
+        case Common::PageType::Unmapped:
+            LOG_ERROR(HW_Memory, "Unmapped Write{} 0x{:08X} @ 0x{:016X}", sizeof(data) * 8,
+                      static_cast<u32>(data), vaddr);
+            return true;
+        case Common::PageType::Memory:
+            ASSERT_MSG(false, "Mapped memory page without a pointer @ {:016X}", vaddr);
+            break;
+        case Common::PageType::RasterizerCachedMemory: {
+            u8* host_ptr{GetPointerFromVMA(vaddr)};
+            system.GPU().InvalidateRegion(ToCacheAddr(host_ptr), sizeof(T));
+            T volatile* pointer = reinterpret_cast<T volatile*>(host_ptr);
+            return Common::AtomicCompareAndSwap(pointer, data, expected);
+        }
+        default:
+            UNREACHABLE();
+        }
+        return true;
+    }
+
+    bool WriteExclusive128(const VAddr vaddr, const u128 data, const u128 expected) {
+        u8* const page_pointer = current_page_table->pointers[vaddr >> PAGE_BITS];
+        if (page_pointer != nullptr) {
+            // NOTE: Avoid adding any extra logic to this fast-path block
+            u64 volatile* pointer = reinterpret_cast<u64 volatile*>(&page_pointer[vaddr]);
+            return Common::AtomicCompareAndSwap(pointer, data, expected);
+        }
+
+        const Common::PageType type = current_page_table->attributes[vaddr >> PAGE_BITS];
+        switch (type) {
+        case Common::PageType::Unmapped:
+            LOG_ERROR(HW_Memory, "Unmapped Write{} 0x{:08X} @ 0x{:016X}{:016X}", sizeof(data) * 8,
+                      static_cast<u64>(data[1]), static_cast<u64>(data[0]), vaddr);
+            return true;
+        case Common::PageType::Memory:
+            ASSERT_MSG(false, "Mapped memory page without a pointer @ {:016X}", vaddr);
+            break;
+        case Common::PageType::RasterizerCachedMemory: {
+            u8* host_ptr{GetPointerFromVMA(vaddr)};
+            system.GPU().InvalidateRegion(ToCacheAddr(host_ptr), sizeof(u128));
+            u64 volatile* pointer = reinterpret_cast<u64 volatile*>(host_ptr);
+            return Common::AtomicCompareAndSwap(pointer, data, expected);
+        }
+        default:
+            UNREACHABLE();
+        }
+        return true;
+    }
+
     Common::PageTable* current_page_table = nullptr;
     Core::System& system;
 };
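Taken together, these routines resolve an exclusive store entirely on the host side: the guest virtual address is translated through the page table (or the rasterizer-cached path), and the store only lands if the location still holds the value captured when the monitor was armed. The effect is the same as a standard compare-exchange, sketched below for the 32-bit case (illustration only; the commit uses Common::AtomicCompareAndSwap so it can operate directly on raw page-table pointers):

    #include <atomic>
    #include <cstdint>

    // `snapshot` is the value read when SetExclusive32 armed the monitor.
    bool StoreExclusive32(std::atomic<std::uint32_t>& word, std::uint32_t desired,
                          std::uint32_t snapshot) {
        // Writes `desired` only if `word` still equals the snapshot; on failure the
        // caller loops back to SetExclusive32 and tries again.
        return word.compare_exchange_strong(snapshot, desired);
    }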
@@ -761,6 +839,26 @@ void Memory::Write64(VAddr addr, u64 data) {
     impl->Write64(addr, data);
 }
 
+bool Memory::WriteExclusive8(VAddr addr, u8 data, u8 expected) {
+    return impl->WriteExclusive8(addr, data, expected);
+}
+
+bool Memory::WriteExclusive16(VAddr addr, u16 data, u16 expected) {
+    return impl->WriteExclusive16(addr, data, expected);
+}
+
+bool Memory::WriteExclusive32(VAddr addr, u32 data, u32 expected) {
+    return impl->WriteExclusive32(addr, data, expected);
+}
+
+bool Memory::WriteExclusive64(VAddr addr, u64 data, u64 expected) {
+    return impl->WriteExclusive64(addr, data, expected);
+}
+
+bool Memory::WriteExclusive128(VAddr addr, u128 data, u128 expected) {
+    return impl->WriteExclusive128(addr, data, expected);
+}
+
 std::string Memory::ReadCString(VAddr vaddr, std::size_t max_length) {
     return impl->ReadCString(vaddr, max_length);
 }
|
@ -244,6 +244,71 @@ public:
|
||||||
*/
|
*/
|
||||||
void Write64(VAddr addr, u64 data);
|
void Write64(VAddr addr, u64 data);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes a 8-bit unsigned integer to the given virtual address in
|
||||||
|
* the current process' address space if and only if the address contains
|
||||||
|
* the expected value. This operation is atomic.
|
||||||
|
*
|
||||||
|
* @param addr The virtual address to write the 8-bit unsigned integer to.
|
||||||
|
* @param data The 8-bit unsigned integer to write to the given virtual address.
|
||||||
|
* @param expected The 8-bit unsigned integer to check against the given virtual address.
|
||||||
|
*
|
||||||
|
* @post The memory range [addr, sizeof(data)) contains the given data value.
|
||||||
|
*/
|
||||||
|
bool WriteExclusive8(VAddr addr, u8 data, u8 expected);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes a 16-bit unsigned integer to the given virtual address in
|
||||||
|
* the current process' address space if and only if the address contains
|
||||||
|
* the expected value. This operation is atomic.
|
||||||
|
*
|
||||||
|
* @param addr The virtual address to write the 16-bit unsigned integer to.
|
||||||
|
* @param data The 16-bit unsigned integer to write to the given virtual address.
|
||||||
|
* @param expected The 16-bit unsigned integer to check against the given virtual address.
|
||||||
|
*
|
||||||
|
* @post The memory range [addr, sizeof(data)) contains the given data value.
|
||||||
|
*/
|
||||||
|
bool WriteExclusive16(VAddr addr, u16 data, u16 expected);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes a 32-bit unsigned integer to the given virtual address in
|
||||||
|
* the current process' address space if and only if the address contains
|
||||||
|
* the expected value. This operation is atomic.
|
||||||
|
*
|
||||||
|
* @param addr The virtual address to write the 32-bit unsigned integer to.
|
||||||
|
* @param data The 32-bit unsigned integer to write to the given virtual address.
|
||||||
|
* @param expected The 32-bit unsigned integer to check against the given virtual address.
|
||||||
|
*
|
||||||
|
* @post The memory range [addr, sizeof(data)) contains the given data value.
|
||||||
|
*/
|
||||||
|
bool WriteExclusive32(VAddr addr, u32 data, u32 expected);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes a 64-bit unsigned integer to the given virtual address in
|
||||||
|
* the current process' address space if and only if the address contains
|
||||||
|
* the expected value. This operation is atomic.
|
||||||
|
*
|
||||||
|
* @param addr The virtual address to write the 64-bit unsigned integer to.
|
||||||
|
* @param data The 64-bit unsigned integer to write to the given virtual address.
|
||||||
|
* @param expected The 64-bit unsigned integer to check against the given virtual address.
|
||||||
|
*
|
||||||
|
* @post The memory range [addr, sizeof(data)) contains the given data value.
|
||||||
|
*/
|
||||||
|
bool WriteExclusive64(VAddr addr, u64 data, u64 expected);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes a 128-bit unsigned integer to the given virtual address in
|
||||||
|
* the current process' address space if and only if the address contains
|
||||||
|
* the expected value. This operation is atomic.
|
||||||
|
*
|
||||||
|
* @param addr The virtual address to write the 128-bit unsigned integer to.
|
||||||
|
* @param data The 128-bit unsigned integer to write to the given virtual address.
|
||||||
|
* @param expected The 128-bit unsigned integer to check against the given virtual address.
|
||||||
|
*
|
||||||
|
* @post The memory range [addr, sizeof(data)) contains the given data value.
|
||||||
|
*/
|
||||||
|
bool WriteExclusive128(VAddr addr, u128 data, u128 expected);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Reads a null-terminated string from the given virtual address.
|
* Reads a null-terminated string from the given virtual address.
|
||||||
* This function will continually read characters until either:
|
* This function will continually read characters until either:
|
||||||
|
|