diff --git a/src/backend_x64/emit_x64.cpp b/src/backend_x64/emit_x64.cpp
index 07ea1e5b..d40876e0 100644
--- a/src/backend_x64/emit_x64.cpp
+++ b/src/backend_x64/emit_x64.cpp
@@ -117,6 +117,8 @@ EmitX64::BlockDescriptor EmitX64::Emit(IR::Block& block) {
     EmitX64::BlockDescriptor block_desc{entrypoint, size, block.Location(), block.EndLocation().PC()};
     block_descriptors.emplace(descriptor.UniqueHash(), block_desc);
 
+    block_ranges.add(std::make_pair(boost::icl::discrete_interval<u32>::closed(block.Location().PC(), block.EndLocation().PC() - 1), std::set<IR::LocationDescriptor>{descriptor}));
+
     return block_desc;
 }
 
@@ -3541,21 +3543,19 @@ void EmitX64::ClearCache() {
     patch_information.clear();
 }
 
-void EmitX64::InvalidateCacheRange(const Common::AddressRange& range) {
+void EmitX64::InvalidateCacheRanges(const boost::icl::interval_set<u32>& ranges) {
     // Remove cached block descriptors and patch information overlapping with the given range.
-    for (auto it = block_descriptors.begin(); it != block_descriptors.end();) {
-        IR::LocationDescriptor descriptor = it->second.start_location;
-        u32 start = descriptor.PC();
-        u32 end = it->second.end_location_pc;
-        if (range.Overlaps(start, end)) {
-            it = block_descriptors.erase(it);
-
-            if (patch_information.count(descriptor.UniqueHash())) {
-                Unpatch(descriptor);
+    for (auto invalidate_interval : ranges) {
+        auto pair = block_ranges.equal_range(invalidate_interval);
+        for (auto it = pair.first; it != pair.second; ++it) {
+            for (const auto& descriptor : it->second) {
+                if (patch_information.count(descriptor.UniqueHash())) {
+                    Unpatch(descriptor);
+                }
+                block_descriptors.erase(descriptor.UniqueHash());
             }
-        } else {
-            ++it;
         }
+        block_ranges.erase(pair.first, pair.second);
     }
 }
diff --git a/src/backend_x64/emit_x64.h b/src/backend_x64/emit_x64.h
index 91cfde38..1c847aab 100644
--- a/src/backend_x64/emit_x64.h
+++ b/src/backend_x64/emit_x64.h
@@ -9,6 +9,8 @@
 #include <set>
 #include <unordered_map>
 
+#include <boost/icl/interval_map.hpp>
+#include <boost/icl/interval_set.hpp>
 #include <xbyak.h>
 #include <xbyak_util.h>
 
@@ -57,10 +59,10 @@ public:
     void ClearCache();
 
     /**
-     * Invalidate the cache at a range of addresses.
-     * @param range The range of addresses to invalidate the cache at.
+     * Invalidate the cache at a set of ranges of addresses.
+     * @param ranges The set of ranges of addresses to invalidate the cache at.
      */
-    void InvalidateCacheRange(const Common::AddressRange& range);
+    void InvalidateCacheRanges(const boost::icl::interval_set<u32>& ranges);
 
 private:
     // Microinstruction emitters
@@ -97,6 +99,7 @@ private:
     // State
     BlockOfCode* code;
     UserCallbacks cb;
+    boost::icl::interval_map<u32, std::set<IR::LocationDescriptor>> block_ranges;
     Jit* jit_interface;
     std::unordered_map<u64, BlockDescriptor> block_descriptors;
     std::unordered_map<u64, PatchInformation> patch_information;
diff --git a/src/backend_x64/interface_x64.cpp b/src/backend_x64/interface_x64.cpp
index 5de944b7..39585f0b 100644
--- a/src/backend_x64/interface_x64.cpp
+++ b/src/backend_x64/interface_x64.cpp
@@ -4,9 +4,9 @@
  * General Public License version 2 or any later version.
  */
 
-#include <deque>
 #include <memory>
 
+#include <boost/icl/interval_set.hpp>
 #include <dynarmic/dynarmic.h>
 
 #ifdef DYNARMIC_USE_LLVM
@@ -45,7 +45,7 @@ struct Jit::Impl {
     const UserCallbacks callbacks;
 
     // Requests made during execution to invalidate the cache are queued up here.
-    std::deque<Common::AddressRange> invalid_cache_ranges;
+    boost::icl::interval_set<u32> invalid_cache_ranges;
     bool invalidate_entire_cache = false;
 
     void Execute(size_t cycle_count) {
@@ -106,10 +106,8 @@ struct Jit::Impl {
         }
 
         jit_state.ResetRSB();
-        while (!invalid_cache_ranges.empty()) {
-            emitter.InvalidateCacheRange(invalid_cache_ranges.front());
-            invalid_cache_ranges.pop_front();
-        }
+        emitter.InvalidateCacheRanges(invalid_cache_ranges);
+        invalid_cache_ranges.clear();
     }
 
     void RequestCacheInvalidation() {
@@ -179,7 +177,7 @@ void Jit::ClearCache() {
 }
 
 void Jit::InvalidateCacheRange(std::uint32_t start_address, std::size_t length) {
-    impl->invalid_cache_ranges.emplace_back(Common::AddressRange{start_address, length});
+    impl->invalid_cache_ranges.add(boost::icl::discrete_interval<u32>::closed(start_address, start_address + length - 1));
     impl->RequestCacheInvalidation();
 }
diff --git a/src/frontend/ir/location_descriptor.h b/src/frontend/ir/location_descriptor.h
index 3089bf5c..5eaf913f 100644
--- a/src/frontend/ir/location_descriptor.h
+++ b/src/frontend/ir/location_descriptor.h
@@ -101,6 +101,12 @@ std::ostream& operator<<(std::ostream& o, const LocationDescriptor& descriptor);
 namespace std {
 template <>
+struct less<Dynarmic::IR::LocationDescriptor> {
+    bool operator()(const Dynarmic::IR::LocationDescriptor& x, const Dynarmic::IR::LocationDescriptor& y) const {
+        return x.UniqueHash() < y.UniqueHash();
+    }
+};
+template <>
 struct hash<Dynarmic::IR::LocationDescriptor> {
     size_t operator()(const Dynarmic::IR::LocationDescriptor& x) const {
         return std::hash<u64>()(x.UniqueHash());