backend/arm64: Add InvalidateCacheRanges

Merry 2023-02-11 11:01:56 +00:00
parent 165621a872
commit 082167feeb
13 changed files with 69 additions and 28 deletions

View file

@@ -1,6 +1,8 @@
 include(TargetArchitectureSpecificSources)
 add_library(dynarmic
+    backend/block_range_information.cpp
+    backend/block_range_information.h
     backend/exception_handler.h
     common/always_false.h
     common/cast_util.h
@@ -271,8 +273,6 @@ if ("x86_64" IN_LIST ARCHITECTURE)
         backend/x64/abi.h
         backend/x64/block_of_code.cpp
         backend/x64/block_of_code.h
-        backend/x64/block_range_information.cpp
-        backend/x64/block_range_information.h
         backend/x64/callback.cpp
         backend/x64/callback.h
         backend/x64/constant_pool.cpp

View file

@@ -181,6 +181,10 @@ IR::Block A32AddressSpace::GenerateIR(IR::LocationDescriptor descriptor) const {
     return ir_block;
 }
+void A32AddressSpace::InvalidateCacheRanges(const boost::icl::interval_set<u32>& ranges) {
+    InvalidateBasicBlocks(block_ranges.InvalidateRanges(ranges));
+}
 void A32AddressSpace::EmitPrelude() {
     using namespace oaknut::util;
@@ -392,4 +396,11 @@ EmitConfig A32AddressSpace::GetEmitConfig() {
     };
 }
+void A32AddressSpace::RegisterNewBasicBlock(const IR::Block& block, const EmittedBlockInfo&) {
+    const A32::LocationDescriptor descriptor{block.Location()};
+    const A32::LocationDescriptor end_location{block.EndLocation()};
+    const auto range = boost::icl::discrete_interval<u32>::closed(descriptor.PC(), end_location.PC() - 1);
+    block_ranges.AddRange(range, descriptor);
+}
 } // namespace Dynarmic::Backend::Arm64
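In RegisterNewBasicBlock above, `end_location.PC()` is reduced by one before building the interval, so the registered range ends on the last guest address the block occupies (with `EndLocation()` reading as one past the block's final byte). A small worked example with made-up addresses:

    // Hypothetical block: 16 bytes of code starting at guest address 0x1000.
    //   block.Location().PC()    == 0x1000   (first byte of the block)
    //   block.EndLocation().PC() == 0x1010   (first address after the block)
    const auto range = boost::icl::discrete_interval<u32>::closed(0x1000, 0x1010 - 1);
    // range covers [0x1000, 0x100F]: an invalidation touching 0x100F hits this
    // block, while one starting at 0x1010 does not.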

View file

@@ -6,23 +6,30 @@
 #pragma once
 #include "dynarmic/backend/arm64/address_space.h"
+#include "dynarmic/backend/block_range_information.h"
 #include "dynarmic/interface/A32/config.h"
 namespace Dynarmic::Backend::Arm64 {
+struct EmittedBlockInfo;
 class A32AddressSpace final : public AddressSpace {
 public:
     explicit A32AddressSpace(const A32::UserConfig& conf);
     IR::Block GenerateIR(IR::LocationDescriptor) const override;
+    void InvalidateCacheRanges(const boost::icl::interval_set<u32>& ranges);
 protected:
     friend class A32Core;
     void EmitPrelude();
     EmitConfig GetEmitConfig() override;
+    void RegisterNewBasicBlock(const IR::Block& block, const EmittedBlockInfo& block_info) override;
     const A32::UserConfig conf;
+    BlockRangeInformation<u32> block_ranges;
 };
 } // namespace Dynarmic::Backend::Arm64

View file

@@ -209,8 +209,7 @@ private:
         }
         if (!invalid_cache_ranges.empty()) {
-            // TODO: Optimize
-            current_address_space.ClearCache();
+            current_address_space.InvalidateCacheRanges(invalid_cache_ranges);
             invalid_cache_ranges.clear();
             return;
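For context, `invalid_cache_ranges` is the interval set accumulated by the user-facing invalidation request before execution resumes. A hedged sketch of the producer side, assuming it mirrors the x64 backend; the helper `ToInterval` is illustrative and not a dynarmic function:

    #include <cstddef>
    #include <cstdint>
    #include <boost/icl/discrete_interval.hpp>
    #include <boost/icl/interval_set.hpp>

    // Illustrative: turn an InvalidateCacheRange(start, length) request into the
    // closed guest-address interval that ends up in invalid_cache_ranges.
    boost::icl::discrete_interval<std::uint32_t> ToInterval(std::uint32_t start, std::size_t length) {
        return boost::icl::discrete_interval<std::uint32_t>::closed(
            start, static_cast<std::uint32_t>(start + length - 1));
    }

    boost::icl::interval_set<std::uint32_t> invalid_cache_ranges;
    // invalid_cache_ranges.add(ToInterval(0x1000, 0x100));  // marks [0x1000, 0x10FF]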

View file

@@ -350,6 +350,10 @@ IR::Block A64AddressSpace::GenerateIR(IR::LocationDescriptor descriptor) const {
     return ir_block;
 }
+void A64AddressSpace::InvalidateCacheRanges(const boost::icl::interval_set<u64>& ranges) {
+    InvalidateBasicBlocks(block_ranges.InvalidateRanges(ranges));
+}
 void A64AddressSpace::EmitPrelude() {
     using namespace oaknut::util;
@@ -568,4 +572,11 @@ EmitConfig A64AddressSpace::GetEmitConfig() {
     };
 }
+void A64AddressSpace::RegisterNewBasicBlock(const IR::Block& block, const EmittedBlockInfo&) {
+    const A64::LocationDescriptor descriptor{block.Location()};
+    const A64::LocationDescriptor end_location{block.EndLocation()};
+    const auto range = boost::icl::discrete_interval<u64>::closed(descriptor.PC(), end_location.PC() - 1);
+    block_ranges.AddRange(range, descriptor);
+}
 } // namespace Dynarmic::Backend::Arm64

View file

@@ -6,23 +6,30 @@
 #pragma once
 #include "dynarmic/backend/arm64/address_space.h"
+#include "dynarmic/backend/block_range_information.h"
 #include "dynarmic/interface/A64/config.h"
 namespace Dynarmic::Backend::Arm64 {
+struct EmittedBlockInfo;
 class A64AddressSpace final : public AddressSpace {
 public:
     explicit A64AddressSpace(const A64::UserConfig& conf);
     IR::Block GenerateIR(IR::LocationDescriptor) const override;
+    void InvalidateCacheRanges(const boost::icl::interval_set<u64>& ranges);
 protected:
     friend class A64Core;
     void EmitPrelude();
     EmitConfig GetEmitConfig() override;
+    void RegisterNewBasicBlock(const IR::Block& block, const EmittedBlockInfo& block_info) override;
     const A64::UserConfig conf;
+    BlockRangeInformation<u64> block_ranges;
 };
 } // namespace Dynarmic::Backend::Arm64

View file

@@ -30,7 +30,7 @@ struct Jit::Impl final {
     HaltReason Run() {
         ASSERT(!is_executing);
-        PerformRequestedCacheInvalidation();
+        PerformRequestedCacheInvalidation(static_cast<HaltReason>(Atomic::Load(&halt_reason)));
         is_executing = true;
         SCOPE_EXIT {
@@ -39,14 +39,14 @@ struct Jit::Impl final {
         HaltReason hr = core.Run(current_address_space, current_state, &halt_reason);
-        PerformRequestedCacheInvalidation();
+        PerformRequestedCacheInvalidation(hr);
         return hr;
     }
     HaltReason Step() {
         ASSERT(!is_executing);
-        PerformRequestedCacheInvalidation();
+        PerformRequestedCacheInvalidation(static_cast<HaltReason>(Atomic::Load(&halt_reason)));
         is_executing = true;
         SCOPE_EXIT {
@@ -55,7 +55,7 @@ struct Jit::Impl final {
         HaltReason hr = core.Step(current_address_space, current_state, &halt_reason);
-        PerformRequestedCacheInvalidation();
+        PerformRequestedCacheInvalidation(hr);
         return hr;
     }
@@ -157,7 +157,10 @@ struct Jit::Impl final {
     }
 private:
-    void PerformRequestedCacheInvalidation() {
+    void PerformRequestedCacheInvalidation(HaltReason hr) {
+        if (Has(hr, HaltReason::CacheInvalidation)) {
             std::unique_lock lock{invalidation_mutex};
+            ClearHalt(HaltReason::CacheInvalidation);
             if (invalidate_entire_cache) {
@@ -169,13 +172,13 @@
             }
             if (!invalid_cache_ranges.empty()) {
-                // TODO: Optimize
-                current_address_space.ClearCache();
+                current_address_space.InvalidateCacheRanges(invalid_cache_ranges);
                 invalid_cache_ranges.clear();
                 return;
             }
+        }
     }
     A64::UserConfig conf;
     A64JitState current_state{};
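The reworked `PerformRequestedCacheInvalidation` above only does work when the CacheInvalidation bit of the observed halt reason is set, instead of unconditionally taking the invalidation path. A minimal sketch of that flag test, assuming dynarmic's bitmask-style `HaltReason`; the enumerator values shown are illustrative:

    #include <cstdint>

    // Simplified stand-in for the HaltReason flags used above.
    enum class HaltReason : std::uint32_t {
        Step              = 0x00000001,
        CacheInvalidation = 0x00000002,  // raised when a cache invalidation is requested
    };

    constexpr bool Has(HaltReason hr, HaltReason bit) {
        return (static_cast<std::uint32_t>(hr) & static_cast<std::uint32_t>(bit)) != 0;
    }

    // Run()/Step() forward the halt reason they observed, so the range
    // invalidation only runs when it was actually requested:
    //   if (Has(hr, HaltReason::CacheInvalidation)) { /* flush affected ranges */ }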

View file

@@ -115,6 +115,8 @@ EmittedBlockInfo AddressSpace::Emit(IR::Block block) {
     mem.invalidate(reinterpret_cast<u32*>(block_info.entry_point), block_info.size);
     mem.protect();
+    RegisterNewBasicBlock(block, block_info);
     return block_info;
 }

View file

@@ -45,6 +45,7 @@ public:
 protected:
     virtual EmitConfig GetEmitConfig() = 0;
+    virtual void RegisterNewBasicBlock(const IR::Block& block, const EmittedBlockInfo& block_info) = 0;
     size_t GetRemainingSize();
     EmittedBlockInfo Emit(IR::Block ir_block);

View file

@@ -3,14 +3,14 @@
  * SPDX-License-Identifier: 0BSD
  */
-#include "dynarmic/backend/x64/block_range_information.h"
+#include "dynarmic/backend/block_range_information.h"
 #include <boost/icl/interval_map.hpp>
 #include <boost/icl/interval_set.hpp>
 #include <mcl/stdint.hpp>
 #include <tsl/robin_set.h>
-namespace Dynarmic::Backend::X64 {
+namespace Dynarmic::Backend {
 template<typename ProgramCounterType>
 void BlockRangeInformation<ProgramCounterType>::AddRange(boost::icl::discrete_interval<ProgramCounterType> range, IR::LocationDescriptor location) {
@@ -40,4 +40,4 @@ tsl::robin_set<IR::LocationDescriptor> BlockRangeInformation<ProgramCounterType>
 template class BlockRangeInformation<u32>;
 template class BlockRangeInformation<u64>;
-} // namespace Dynarmic::Backend::X64
+} // namespace Dynarmic::Backend
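The bodies of `AddRange` and `InvalidateRanges` are unchanged by this commit and not shown; the structure behind them is a `boost::icl::interval_map` whose codomain aggregates on overlap, which is why it maps ranges to a set of locations rather than a single one (see the header below). A small illustrative example; the `Block` type and values are made up:

    #include <cstdint>
    #include <set>
    #include <boost/icl/discrete_interval.hpp>
    #include <boost/icl/interval_map.hpp>

    using Block = int;  // stand-in for IR::LocationDescriptor
    using Interval = boost::icl::discrete_interval<std::uint32_t>;

    boost::icl::interval_map<std::uint32_t, std::set<Block>> ranges_to_blocks;

    void Demo() {
        // add() aggregates by set union where intervals overlap.
        ranges_to_blocks.add(std::make_pair(Interval::closed(0x1000, 0x100F), std::set<Block>{1}));
        ranges_to_blocks.add(std::make_pair(Interval::closed(0x1008, 0x1017), std::set<Block>{2}));
        // The overlapping span [0x1008, 0x100F] now maps to {1, 2}, so an
        // invalidation hitting it reports both blocks.
    }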

View file

@@ -13,7 +13,7 @@
 #include "dynarmic/ir/location_descriptor.h"
-namespace Dynarmic::Backend::X64 {
+namespace Dynarmic::Backend {
 template<typename ProgramCounterType>
 class BlockRangeInformation {
@@ -26,4 +26,4 @@ private:
     boost::icl::interval_map<ProgramCounterType, std::set<IR::LocationDescriptor>> block_ranges;
 };
-} // namespace Dynarmic::Backend::X64
+} // namespace Dynarmic::Backend
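Since the template now lives in the shared Dynarmic::Backend namespace, the x64 and arm64 backends use the same bookkeeping type. A short usage sketch against the interface declared above; the addresses and descriptor value are made up:

    #include <cstdint>
    #include <boost/icl/discrete_interval.hpp>
    #include <boost/icl/interval_set.hpp>

    #include "dynarmic/backend/block_range_information.h"
    #include "dynarmic/ir/location_descriptor.h"

    void Example() {
        using namespace Dynarmic;
        Backend::BlockRangeInformation<std::uint32_t> block_ranges;

        // At emit time: record the closed PC range the emitted block occupies.
        const IR::LocationDescriptor descriptor{0};
        block_ranges.AddRange(boost::icl::discrete_interval<std::uint32_t>::closed(0x1000, 0x100F), descriptor);

        // At invalidation time: recover every recorded location the request overlaps.
        boost::icl::interval_set<std::uint32_t> invalid;
        invalid.add(boost::icl::discrete_interval<std::uint32_t>::closed(0x1004, 0x1007));
        const auto locations = block_ranges.InvalidateRanges(invalid);  // contains descriptor
        // The backend then erases those blocks (InvalidateBasicBlocks on arm64).
    }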

View file

@@ -12,8 +12,8 @@
 #include <tsl/robin_map.h>
+#include "dynarmic/backend/block_range_information.h"
 #include "dynarmic/backend/x64/a32_jitstate.h"
-#include "dynarmic/backend/x64/block_range_information.h"
 #include "dynarmic/backend/x64/emit_x64.h"
 #include "dynarmic/frontend/A32/a32_location_descriptor.h"
 #include "dynarmic/interface/A32/a32.h"

View file

@@ -10,8 +10,8 @@
 #include <optional>
 #include <tuple>
+#include "dynarmic/backend/block_range_information.h"
 #include "dynarmic/backend/x64/a64_jitstate.h"
-#include "dynarmic/backend/x64/block_range_information.h"
 #include "dynarmic/backend/x64/emit_x64.h"
 #include "dynarmic/frontend/A64/a64_location_descriptor.h"
 #include "dynarmic/interface/A64/a64.h"