From 64adc91ca243a61e418eccf1bb70f9d86a278d32 Mon Sep 17 00:00:00 2001
From: merry
Date: Sat, 26 Mar 2022 16:49:14 +0000
Subject: [PATCH] emit_x64_memory: Move EmitFastmemVAddr to common file

---
 .../backend/x64/a64_emit_x64_memory.cpp     | 39 ---------------
 src/dynarmic/backend/x64/emit_x64_memory.h  | 48 +++++++++++++++++++
 2 files changed, 48 insertions(+), 39 deletions(-)

diff --git a/src/dynarmic/backend/x64/a64_emit_x64_memory.cpp b/src/dynarmic/backend/x64/a64_emit_x64_memory.cpp
index a6700d6d..13ae3f68 100644
--- a/src/dynarmic/backend/x64/a64_emit_x64_memory.cpp
+++ b/src/dynarmic/backend/x64/a64_emit_x64_memory.cpp
@@ -302,45 +302,6 @@ FakeCall A64EmitX64::FastmemCallback(u64 rip_) {
 
 namespace {
 
-Xbyak::RegExp EmitFastmemVAddr(BlockOfCode& code, A64EmitContext& ctx, Xbyak::Label& abort, Xbyak::Reg64 vaddr, bool& require_abort_handling, std::optional<Xbyak::Reg64> tmp = std::nullopt) {
-    const size_t unused_top_bits = 64 - ctx.conf.fastmem_address_space_bits;
-
-    if (unused_top_bits == 0) {
-        return r13 + vaddr;
-    } else if (ctx.conf.silently_mirror_fastmem) {
-        if (!tmp) {
-            tmp = ctx.reg_alloc.ScratchGpr();
-        }
-        if (unused_top_bits < 32) {
-            code.mov(*tmp, vaddr);
-            code.shl(*tmp, int(unused_top_bits));
-            code.shr(*tmp, int(unused_top_bits));
-        } else if (unused_top_bits == 32) {
-            code.mov(tmp->cvt32(), vaddr.cvt32());
-        } else {
-            code.mov(tmp->cvt32(), vaddr.cvt32());
-            code.and_(*tmp, u32((1 << ctx.conf.fastmem_address_space_bits) - 1));
-        }
-        return r13 + *tmp;
-    } else {
-        if (ctx.conf.fastmem_address_space_bits < 32) {
-            code.test(vaddr, u32(-(1 << ctx.conf.fastmem_address_space_bits)));
-            code.jnz(abort, code.T_NEAR);
-            require_abort_handling = true;
-        } else {
-            // TODO: Consider having TEST as above but coalesce 64-bit constant in register allocator
-            if (!tmp) {
-                tmp = ctx.reg_alloc.ScratchGpr();
-            }
-            code.mov(*tmp, vaddr);
-            code.shr(*tmp, int(ctx.conf.fastmem_address_space_bits));
-            code.jnz(abort, code.T_NEAR);
-            require_abort_handling = true;
-        }
-        return r13 + vaddr;
-    }
-}
-
 template<std::size_t bitsize>
 void EmitReadMemoryMov(BlockOfCode& code, int value_idx, const Xbyak::RegExp& addr) {
     switch (bitsize) {
diff --git a/src/dynarmic/backend/x64/emit_x64_memory.h b/src/dynarmic/backend/x64/emit_x64_memory.h
index 9cdd13b7..b471e6a4 100644
--- a/src/dynarmic/backend/x64/emit_x64_memory.h
+++ b/src/dynarmic/backend/x64/emit_x64_memory.h
@@ -149,6 +149,54 @@ template<>
     return page + tmp;
 }
 
+template<typename EmitContext>
+Xbyak::RegExp EmitFastmemVAddr(BlockOfCode& code, EmitContext& ctx, Xbyak::Label& abort, Xbyak::Reg64 vaddr, bool& require_abort_handling, std::optional<Xbyak::Reg64> tmp = std::nullopt);
+
+template<>
+[[maybe_unused]] Xbyak::RegExp EmitFastmemVAddr(BlockOfCode&, A32EmitContext&, Xbyak::Label&, Xbyak::Reg64 vaddr, bool&, std::optional<Xbyak::Reg64>) {
+    return r13 + vaddr;
+}
+
+template<>
+[[maybe_unused]] Xbyak::RegExp EmitFastmemVAddr(BlockOfCode& code, A64EmitContext& ctx, Xbyak::Label& abort, Xbyak::Reg64 vaddr, bool& require_abort_handling, std::optional<Xbyak::Reg64> tmp) {
+    const size_t unused_top_bits = 64 - ctx.conf.fastmem_address_space_bits;
+
+    if (unused_top_bits == 0) {
+        return r13 + vaddr;
+    } else if (ctx.conf.silently_mirror_fastmem) {
+        if (!tmp) {
+            tmp = ctx.reg_alloc.ScratchGpr();
+        }
+        if (unused_top_bits < 32) {
+            code.mov(*tmp, vaddr);
+            code.shl(*tmp, int(unused_top_bits));
+            code.shr(*tmp, int(unused_top_bits));
+        } else if (unused_top_bits == 32) {
+            code.mov(tmp->cvt32(), vaddr.cvt32());
+        } else {
+            code.mov(tmp->cvt32(), vaddr.cvt32());
+            code.and_(*tmp, u32((1 << ctx.conf.fastmem_address_space_bits) - 1));
+        }
+        return r13 + *tmp;
+    } else {
+        if (ctx.conf.fastmem_address_space_bits < 32) {
+            code.test(vaddr, u32(-(1 << ctx.conf.fastmem_address_space_bits)));
+            code.jnz(abort, code.T_NEAR);
+            require_abort_handling = true;
+        } else {
+            // TODO: Consider having TEST as above but coalesce 64-bit constant in register allocator
+            if (!tmp) {
+                tmp = ctx.reg_alloc.ScratchGpr();
+            }
+            code.mov(*tmp, vaddr);
+            code.shr(*tmp, int(ctx.conf.fastmem_address_space_bits));
+            code.jnz(abort, code.T_NEAR);
+            require_abort_handling = true;
+        }
+        return r13 + vaddr;
+    }
+}
+
 template<typename UserConfig>
 void EmitExclusiveLock(BlockOfCode& code, const UserConfig& conf, Xbyak::Reg64 pointer, Xbyak::Reg32 tmp) {
     if (conf.HasOptimization(OptimizationFlag::Unsafe_IgnoreGlobalMonitor)) {
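Note (not part of the patch): EmitFastmemVAddr turns a guest virtual address into an x64 address expression of the form r13 + vaddr, where r13 holds the base of the fastmem arena. When silently_mirror_fastmem is set, the emitted shl/shr (or and_) sequence clears the unused top bits so out-of-range guest addresses wrap around the arena instead of branching to the abort label. A minimal host-side sketch of that wrapping, with illustrative names only (wrap_guest_vaddr is not a dynarmic function):

    #include <cstdint>

    // Clears the top (64 - address_space_bits) bits of a guest address,
    // mirroring the shl/shr masking the emitted code performs in the
    // silently_mirror_fastmem case. Assumes 0 < address_space_bits <= 64.
    inline std::uint64_t wrap_guest_vaddr(std::uint64_t vaddr, unsigned address_space_bits) {
        const unsigned unused_top_bits = 64 - address_space_bits;
        return unused_top_bits == 0 ? vaddr : (vaddr << unused_top_bits) >> unused_top_bits;
    }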