emit_x64: Use boost::icl::interval_map to speed up ranged invalidation

This commit is contained in:
MerryMage 2017-12-05 21:34:40 +00:00
parent 6fde29f5d8
commit cb119c2f72
4 changed files with 29 additions and 22 deletions

View file

@@ -117,6 +117,8 @@ EmitX64::BlockDescriptor EmitX64::Emit(IR::Block& block) {
     EmitX64::BlockDescriptor block_desc{entrypoint, size, block.Location(), block.EndLocation().PC()};
     block_descriptors.emplace(descriptor.UniqueHash(), block_desc);
+    block_ranges.add(std::make_pair(boost::icl::discrete_interval<u32>::closed(block.Location().PC(), block.EndLocation().PC() - 1), std::set<IR::LocationDescriptor>{descriptor}));

     return block_desc;
 }
@@ -3541,21 +3543,19 @@ void EmitX64::ClearCache() {
     patch_information.clear();
 }

-void EmitX64::InvalidateCacheRange(const Common::AddressRange& range) {
+void EmitX64::InvalidateCacheRanges(const boost::icl::interval_set<u32>& ranges) {
     // Remove cached block descriptors and patch information overlapping with the given range.
-    for (auto it = block_descriptors.begin(); it != block_descriptors.end();) {
-        IR::LocationDescriptor descriptor = it->second.start_location;
-        u32 start = descriptor.PC();
-        u32 end = it->second.end_location_pc;
-        if (range.Overlaps(start, end)) {
-            it = block_descriptors.erase(it);
-            if (patch_information.count(descriptor.UniqueHash())) {
-                Unpatch(descriptor);
-            }
-        } else {
-            ++it;
-        }
+    for (auto invalidate_interval : ranges) {
+        auto pair = block_ranges.equal_range(invalidate_interval);
+        for (auto it = pair.first; it != pair.second; ++it) {
+            for (const auto& descriptor : it->second) {
+                if (patch_information.count(descriptor.UniqueHash())) {
+                    Unpatch(descriptor);
+                }
+                block_descriptors.erase(descriptor.UniqueHash());
+            }
+        }
+        block_ranges.erase(pair.first, pair.second);
     }
 }

View file

@@ -9,6 +9,8 @@
 #include <unordered_map>
 #include <vector>

+#include <boost/icl/interval_map.hpp>
+#include <boost/icl/interval_set.hpp>
 #include <boost/optional.hpp>
 #include <xbyak_util.h>
@@ -57,10 +59,10 @@ public:
     void ClearCache();

     /**
-     * Invalidate the cache at a range of addresses.
-     * @param range The range of addresses to invalidate the cache at.
+     * Invalidate the cache at a set of ranges of addresses.
+     * @param ranges The set of ranges of addresses to invalidate the cache at.
      */
-    void InvalidateCacheRange(const Common::AddressRange& range);
+    void InvalidateCacheRanges(const boost::icl::interval_set<u32>& ranges);

 private:
     // Microinstruction emitters
@@ -97,6 +99,7 @@ private:
     // State
     BlockOfCode* code;
     UserCallbacks cb;
+    boost::icl::interval_map<u32, std::set<IR::LocationDescriptor>> block_ranges;
     Jit* jit_interface;
     std::unordered_map<u64, BlockDescriptor> block_descriptors;
     std::unordered_map<u64, PatchInformation> patch_information;

View file

@@ -4,9 +4,9 @@
  * General Public License version 2 or any later version.
  */

-#include <deque>
 #include <memory>

+#include <boost/icl/interval_set.hpp>
 #include <fmt/format.h>

 #ifdef DYNARMIC_USE_LLVM
@@ -45,7 +45,7 @@ struct Jit::Impl {
     const UserCallbacks callbacks;

     // Requests made during execution to invalidate the cache are queued up here.
-    std::deque<Common::AddressRange> invalid_cache_ranges;
+    boost::icl::interval_set<u32> invalid_cache_ranges;
     bool invalidate_entire_cache = false;

     void Execute(size_t cycle_count) {
@@ -106,10 +106,8 @@ struct Jit::Impl {
         }

         jit_state.ResetRSB();
-        while (!invalid_cache_ranges.empty()) {
-            emitter.InvalidateCacheRange(invalid_cache_ranges.front());
-            invalid_cache_ranges.pop_front();
-        }
+        emitter.InvalidateCacheRanges(invalid_cache_ranges);
+        invalid_cache_ranges.clear();
     }

     void RequestCacheInvalidation() {
@@ -179,7 +177,7 @@ void Jit::ClearCache() {
 }

 void Jit::InvalidateCacheRange(std::uint32_t start_address, std::size_t length) {
-    impl->invalid_cache_ranges.emplace_back(Common::AddressRange{start_address, length});
+    impl->invalid_cache_ranges.add(boost::icl::discrete_interval<u32>::closed(start_address, start_address + length - 1));
     impl->RequestCacheInvalidation();
 }

View file

@@ -101,6 +101,12 @@ std::ostream& operator<<(std::ostream& o, const LocationDescriptor& descriptor);
 namespace std {
 template <>
+struct less<Dynarmic::IR::LocationDescriptor> {
+    bool operator()(const Dynarmic::IR::LocationDescriptor& x, const Dynarmic::IR::LocationDescriptor& y) const {
+        return x.UniqueHash() < y.UniqueHash();
+    }
+};
+template <>
 struct hash<Dynarmic::IR::LocationDescriptor> {
     size_t operator()(const Dynarmic::IR::LocationDescriptor& x) const {
         return std::hash<u64>()(x.UniqueHash());