From 3d657c450a7d196af2b8d5f4cdc90e461198a653 Mon Sep 17 00:00:00 2001
From: merry
Date: Sat, 26 Mar 2022 16:09:00 +0000
Subject: [PATCH] emit_x64_memory: Share EmitDetectMisalignedVAddr

---
 .../backend/x64/a32_emit_x64_memory.cpp    | 43 +-----------------
 .../backend/x64/a64_emit_x64_memory.cpp    | 45 +------------------
 src/dynarmic/backend/x64/emit_x64_memory.h | 45 +++++++++++++++++++
 3 files changed, 47 insertions(+), 86 deletions(-)

diff --git a/src/dynarmic/backend/x64/a32_emit_x64_memory.cpp b/src/dynarmic/backend/x64/a32_emit_x64_memory.cpp
index 51bdd1ce..6278cde0 100644
--- a/src/dynarmic/backend/x64/a32_emit_x64_memory.cpp
+++ b/src/dynarmic/backend/x64/a32_emit_x64_memory.cpp
@@ -160,52 +160,11 @@ FakeCall A32EmitX64::FastmemCallback(u64 rip_) {
 
 namespace {
 
-void EmitDetectMisaignedVAddr(BlockOfCode& code, A32EmitContext& ctx, size_t bitsize, Xbyak::Label& abort, Xbyak::Reg32 vaddr, Xbyak::Reg32 tmp) {
-    if (bitsize == 8 || (ctx.conf.detect_misaligned_access_via_page_table & bitsize) == 0) {
-        return;
-    }
-
-    const u32 align_mask = [bitsize]() -> u32 {
-        switch (bitsize) {
-        case 16:
-            return 0b1;
-        case 32:
-            return 0b11;
-        case 64:
-            return 0b111;
-        }
-        UNREACHABLE();
-    }();
-
-    code.test(vaddr, align_mask);
-
-    if (!ctx.conf.only_detect_misalignment_via_page_table_on_page_boundary) {
-        code.jnz(abort, code.T_NEAR);
-        return;
-    }
-
-    const u32 page_align_mask = static_cast<u32>(page_size - 1) & ~align_mask;
-
-    Xbyak::Label detect_boundary, resume;
-
-    code.jnz(detect_boundary, code.T_NEAR);
-    code.L(resume);
-
-    code.SwitchToFarCode();
-    code.L(detect_boundary);
-    code.mov(tmp, vaddr);
-    code.and_(tmp, page_align_mask);
-    code.cmp(tmp, page_align_mask);
-    code.jne(resume, code.T_NEAR);
-    // NOTE: We expect to fallthrough into abort code here.
-    code.SwitchToNearCode();
-}
-
 Xbyak::RegExp EmitVAddrLookup(BlockOfCode& code, A32EmitContext& ctx, size_t bitsize, Xbyak::Label& abort, Xbyak::Reg64 vaddr) {
     const Xbyak::Reg64 page = ctx.reg_alloc.ScratchGpr();
     const Xbyak::Reg32 tmp = ctx.conf.absolute_offset_page_table ? page.cvt32() : ctx.reg_alloc.ScratchGpr().cvt32();
 
-    EmitDetectMisaignedVAddr(code, ctx, bitsize, abort, vaddr.cvt32(), tmp);
+    EmitDetectMisalignedVAddr(code, ctx, bitsize, abort, vaddr, tmp.cvt64());
 
     // TODO: This code assumes vaddr has been zext from 32-bits to 64-bits.
 
diff --git a/src/dynarmic/backend/x64/a64_emit_x64_memory.cpp b/src/dynarmic/backend/x64/a64_emit_x64_memory.cpp
index abca5dbb..335178cf 100644
--- a/src/dynarmic/backend/x64/a64_emit_x64_memory.cpp
+++ b/src/dynarmic/backend/x64/a64_emit_x64_memory.cpp
@@ -302,49 +302,6 @@ FakeCall A64EmitX64::FastmemCallback(u64 rip_) {
 
 namespace {
 
-void EmitDetectMisaignedVAddr(BlockOfCode& code, A64EmitContext& ctx, size_t bitsize, Xbyak::Label& abort, Xbyak::Reg64 vaddr, Xbyak::Reg64 tmp) {
-    if (bitsize == 8 || (ctx.conf.detect_misaligned_access_via_page_table & bitsize) == 0) {
-        return;
-    }
-
-    const u32 align_mask = [bitsize]() -> u32 {
-        switch (bitsize) {
-        case 16:
-            return 0b1;
-        case 32:
-            return 0b11;
-        case 64:
-            return 0b111;
-        case 128:
-            return 0b1111;
-        }
-        UNREACHABLE();
-    }();
-
-    code.test(vaddr, align_mask);
-
-    if (!ctx.conf.only_detect_misalignment_via_page_table_on_page_boundary) {
-        code.jnz(abort, code.T_NEAR);
-        return;
-    }
-
-    const u32 page_align_mask = static_cast<u32>(page_size - 1) & ~align_mask;
-
-    Xbyak::Label detect_boundary, resume;
-
-    code.jnz(detect_boundary, code.T_NEAR);
-    code.L(resume);
-
-    code.SwitchToFarCode();
-    code.L(detect_boundary);
-    code.mov(tmp, vaddr);
-    code.and_(tmp, page_align_mask);
-    code.cmp(tmp, page_align_mask);
-    code.jne(resume, code.T_NEAR);
-    // NOTE: We expect to fallthrough into abort code here.
-    code.SwitchToNearCode();
-}
-
 Xbyak::RegExp EmitVAddrLookup(BlockOfCode& code, A64EmitContext& ctx, size_t bitsize, Xbyak::Label& abort, Xbyak::Reg64 vaddr) {
     const size_t valid_page_index_bits = ctx.conf.page_table_address_space_bits - page_bits;
     const size_t unused_top_bits = 64 - ctx.conf.page_table_address_space_bits;
@@ -352,7 +309,7 @@ Xbyak::RegExp EmitVAddrLookup(BlockOfCode& code, A64EmitContext& ctx, size_t bit
     const Xbyak::Reg64 page = ctx.reg_alloc.ScratchGpr();
     const Xbyak::Reg64 tmp = ctx.conf.absolute_offset_page_table ? page : ctx.reg_alloc.ScratchGpr();
 
-    EmitDetectMisaignedVAddr(code, ctx, bitsize, abort, vaddr, tmp);
+    EmitDetectMisalignedVAddr(code, ctx, bitsize, abort, vaddr, tmp);
 
     if (unused_top_bits == 0) {
         code.mov(tmp, vaddr);
diff --git a/src/dynarmic/backend/x64/emit_x64_memory.h b/src/dynarmic/backend/x64/emit_x64_memory.h
index 4d9175b2..320a7798 100644
--- a/src/dynarmic/backend/x64/emit_x64_memory.h
+++ b/src/dynarmic/backend/x64/emit_x64_memory.h
@@ -20,6 +20,51 @@ constexpr size_t page_bits = 12;
 constexpr size_t page_size = 1 << page_bits;
 constexpr size_t page_mask = (1 << page_bits) - 1;
 
+template<typename EmitContext>
+void EmitDetectMisalignedVAddr(BlockOfCode& code, EmitContext& ctx, size_t bitsize, Xbyak::Label& abort, Xbyak::Reg64 vaddr, Xbyak::Reg64 tmp) {
+    if (bitsize == 8 || (ctx.conf.detect_misaligned_access_via_page_table & bitsize) == 0) {
+        return;
+    }
+
+    const u32 align_mask = [bitsize]() -> u32 {
+        switch (bitsize) {
+        case 16:
+            return 0b1;
+        case 32:
+            return 0b11;
+        case 64:
+            return 0b111;
+        case 128:
+            return 0b1111;
+        default:
+            UNREACHABLE();
+        }
+    }();
+
+    code.test(vaddr, align_mask);
+
+    if (!ctx.conf.only_detect_misalignment_via_page_table_on_page_boundary) {
+        code.jnz(abort, code.T_NEAR);
+        return;
+    }
+
+    const u32 page_align_mask = static_cast<u32>(page_size - 1) & ~align_mask;
+
+    Xbyak::Label detect_boundary, resume;
+
+    code.jnz(detect_boundary, code.T_NEAR);
+    code.L(resume);
+
+    code.SwitchToFarCode();
+    code.L(detect_boundary);
+    code.mov(tmp, vaddr);
+    code.and_(tmp, page_align_mask);
+    code.cmp(tmp, page_align_mask);
+    code.jne(resume, code.T_NEAR);
+    // NOTE: We expect to fallthrough into abort code here.
+    code.SwitchToNearCode();
+}
+
 template<typename UserConfig>
 void EmitExclusiveLock(BlockOfCode& code, const UserConfig& conf, Xbyak::Reg64 pointer, Xbyak::Reg32 tmp) {
     if (conf.HasOptimization(OptimizationFlag::Unsafe_IgnoreGlobalMonitor)) {