a32_interface: Clear cache invalidation flag prior to performing cache invalidation

Merry 2022-11-12 13:07:17 +00:00
parent 9d6758b4ae
commit 068519b2cd
2 changed files with 40 additions and 16 deletions
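The change gates cache invalidation on the CacheInvalidation bit of halt_reason and clears that bit, under invalidation_mutex, before the cache is actually flushed. With that ordering, a request that arrives while a flush is in progress simply re-raises the bit and is serviced on the next Run/Step, rather than being wiped out by a clear that happens after the work is done. The snippet below is a minimal standalone sketch of that pattern, not dynarmic code: std::atomic and std::mutex stand in for the Dynarmic::Atomic helpers and invalidation_mutex, and the names (pending, ranges, FlushCache) are hypothetical.

#include <atomic>
#include <cstdint>
#include <mutex>
#include <utility>
#include <vector>

std::atomic<std::uint32_t> pending{0};          // stands in for halt_reason
constexpr std::uint32_t CacheInvalidation = 2;  // illustrative bit value
std::mutex invalidation_mutex;
std::vector<std::pair<std::uint32_t, std::size_t>> ranges;

void FlushCache() { /* stands in for current_address_space.ClearCache() */ }

// Requesting thread: record the request, then raise the flag.
void RequestInvalidation(std::uint32_t start, std::size_t length) {
    std::scoped_lock lock{invalidation_mutex};
    ranges.emplace_back(start, length);
    pending.fetch_or(CacheInvalidation);
}

// Executing thread: clear the flag *before* flushing, so a request that lands
// mid-flush re-raises the flag and is picked up on the next call.
void PerformRequestedInvalidation() {
    if ((pending.load() & CacheInvalidation) == 0)
        return;
    std::scoped_lock lock{invalidation_mutex};
    pending.fetch_and(~CacheInvalidation);
    FlushCache();
    ranges.clear();
}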

View file

@@ -83,7 +83,7 @@ struct Jit::Impl final {
     HaltReason Run() {
         ASSERT(!jit_interface->is_executing);
-        PerformRequestedCacheInvalidation();
+        PerformRequestedCacheInvalidation(static_cast<HaltReason>(Atomic::Load(&halt_reason)));
 
         jit_interface->is_executing = true;
         SCOPE_EXIT {
@@ -92,14 +92,14 @@ struct Jit::Impl final {
         HaltReason hr = core.Run(current_address_space, current_state, &halt_reason);
 
-        PerformRequestedCacheInvalidation();
+        PerformRequestedCacheInvalidation(hr);
 
         return hr;
     }
 
     HaltReason Step() {
         ASSERT(!jit_interface->is_executing);
-        PerformRequestedCacheInvalidation();
+        PerformRequestedCacheInvalidation(static_cast<HaltReason>(Atomic::Load(&halt_reason)));
 
         jit_interface->is_executing = true;
         SCOPE_EXIT {
@@ -108,7 +108,7 @@ struct Jit::Impl final {
         HaltReason hr = core.Step(current_address_space, current_state, &halt_reason);
 
-        PerformRequestedCacheInvalidation();
+        PerformRequestedCacheInvalidation(hr);
 
         return hr;
     }
@@ -131,10 +131,12 @@ struct Jit::Impl final {
     void HaltExecution(HaltReason hr) {
         Atomic::Or(&halt_reason, static_cast<u32>(hr));
+        Atomic::Barrier();
     }
 
     void ClearHalt(HaltReason hr) {
         Atomic::And(&halt_reason, ~static_cast<u32>(hr));
+        Atomic::Barrier();
     }
 
     std::array<std::uint32_t, 16>& Regs() {
@@ -192,21 +194,27 @@ struct Jit::Impl final {
     }
 
 private:
-    void PerformRequestedCacheInvalidation() {
-        if (invalidate_entire_cache) {
-            current_address_space.ClearCache();
-            invalidate_entire_cache = false;
-            invalid_cache_ranges.clear();
-            return;
-        }
+    void PerformRequestedCacheInvalidation(HaltReason hr) {
+        if (Has(hr, HaltReason::CacheInvalidation)) {
+            std::unique_lock lock{invalidation_mutex};
+
+            ClearHalt(HaltReason::CacheInvalidation);
 
-        if (!invalid_cache_ranges.empty()) {
-            // TODO: Optimize
-            current_address_space.ClearCache();
-            invalid_cache_ranges.clear();
-            return;
+            if (invalidate_entire_cache) {
+                current_address_space.ClearCache();
+                invalidate_entire_cache = false;
+                invalid_cache_ranges.clear();
+                return;
+            }
+
+            if (!invalid_cache_ranges.empty()) {
+                // TODO: Optimize
+                current_address_space.ClearCache();
+                invalid_cache_ranges.clear();
+                return;
+            }
         }
     }
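Note that Run() and Step() now pass a HaltReason in two places: a snapshot of halt_reason read with the new Atomic::Load before entering the core, so invalidations requested while the JIT was idle are applied before any stale code executes, and the hr returned by the core afterwards, so requests raised during execution are serviced before returning to the caller. Has() is the bitflag test used to gate on HaltReason::CacheInvalidation; a minimal equivalent is sketched below, assuming HaltReason is a u32-backed flag enum (the enumerator values are illustrative; the real definitions live in dynarmic's halt_reason header).

#include <cstdint>
using u32 = std::uint32_t;

// Illustrative flag enum; the real enumerators are defined by dynarmic.
enum class HaltReason : u32 {
    Step              = 0x00000001,
    CacheInvalidation = 0x00000002,
};

// Bitwise "is this flag set?" test, as used by PerformRequestedCacheInvalidation.
constexpr bool Has(HaltReason value, HaltReason required) {
    return (static_cast<u32>(value) & static_cast<u32>(required)) != 0;
}

static_assert(Has(static_cast<HaltReason>(0x3), HaltReason::CacheInvalidation));
static_assert(!Has(HaltReason::Step, HaltReason::CacheInvalidation));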

View file

@@ -9,6 +9,14 @@
 
 namespace Dynarmic::Atomic {
 
+inline u32 Load(volatile u32* ptr) {
+#ifdef _MSC_VER
+    return _InterlockedOr(reinterpret_cast<volatile long*>(ptr), 0);
+#else
+    return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
+#endif
+}
+
 inline void Or(volatile u32* ptr, u32 value) {
 #ifdef _MSC_VER
     _InterlockedOr(reinterpret_cast<volatile long*>(ptr), value);
@@ -25,4 +33,12 @@ inline void And(volatile u32* ptr, u32 value) {
 #endif
 }
 
+inline void Barrier() {
+#ifdef _MSC_VER
+    _ReadWriteBarrier();
+#else
+    __atomic_thread_fence(__ATOMIC_SEQ_CST);
+#endif
+}
+
 }  // namespace Dynarmic::Atomic
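The two additions to the atomic helper header provide a sequentially consistent load (built on _InterlockedOr with 0 on MSVC, __atomic_load_n elsewhere) and a full fence, which Run()/Step(), HaltExecution() and ClearHalt() above now use around halt_reason. Below is a small self-contained usage sketch; the include path and the 0x2 bit standing in for HaltReason::CacheInvalidation are assumptions.

#include <cassert>
#include <cstdint>

#include "dynarmic/common/atomic.h"  // assumed path of the header changed above

int main() {
    volatile std::uint32_t halt_reason = 0;

    Dynarmic::Atomic::Or(&halt_reason, 0x2);     // raise a flag bit, as HaltExecution does
    Dynarmic::Atomic::Barrier();                 // full fence, now issued after the RMW
    assert(Dynarmic::Atomic::Load(&halt_reason) == 0x2);

    Dynarmic::Atomic::And(&halt_reason, ~std::uint32_t{0x2});  // clear it, as ClearHalt does
    Dynarmic::Atomic::Barrier();
    assert(Dynarmic::Atomic::Load(&halt_reason) == 0);
}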