/* This file is part of the dynarmic project.
 * Copyright (c) 2016 MerryMage
 * This software may be used and distributed according to the terms of the GNU
 * General Public License version 2 or any later version.
 */
|
|
|
|
|
|
|
|
#include <unordered_map>
|
|
|
|
|
2016-12-05 05:14:58 +01:00
|
|
|
#include "backend_x64/block_of_code.h"
|
2016-07-01 15:01:06 +02:00
|
|
|
#include "backend_x64/emit_x64.h"
|
2016-12-05 05:11:34 +01:00
|
|
|
#include "common/assert.h"
|
2018-02-03 14:34:40 +01:00
|
|
|
#include "common/bit_util.h"
|
2017-02-16 19:18:29 +01:00
|
|
|
#include "common/common_types.h"
|
2017-02-16 20:40:51 +01:00
|
|
|
#include "common/variant_util.h"
|
2016-09-03 22:48:03 +02:00
|
|
|
#include "frontend/ir/basic_block.h"
|
|
|
|
#include "frontend/ir/microinstruction.h"
|
2016-12-05 05:11:34 +01:00
|
|
|
#include "frontend/ir/opcodes.h"
|
2016-07-01 15:01:06 +02:00
|
|
|
|
|
|
|
// TODO: Have ARM flags in host flags and not have them use up GPR registers unless necessary.
|
|
|
|
// TODO: Actually implement that proper instruction selector you've always wanted to sweetheart.
|
|
|
|
|
2018-01-26 14:51:48 +01:00
|
|
|
namespace Dynarmic::BackendX64 {
|
2016-07-01 15:01:06 +02:00
|
|
|
|
2017-12-09 16:42:47 +01:00
|
|
|
using namespace Xbyak::util;
|
|
|
|
|
2018-01-02 00:40:34 +01:00
|
|
|
// Bundles the per-block emission state: the register allocator in use and the
// IR block currently being compiled. Both are held by reference; the caller
// guarantees they outlive this context.
EmitContext::EmitContext(RegAlloc& reg_alloc, IR::Block& block)
    : reg_alloc(reg_alloc), block(block) {}
|
|
|
|
|
|
|
|
// Removes inst from the current block's instruction list, then clears its
// arguments (presumably dropping its references to the argument values —
// see IR::Inst::ClearArgs). The erase-then-clear order is deliberate; do not
// reorder without checking the IR use-count bookkeeping.
void EmitContext::EraseInstruction(IR::Inst* inst) {
    block.Instructions().erase(inst);
    inst->ClearArgs();
}
|
|
|
|
|
2018-02-03 15:28:57 +01:00
|
|
|
// Constructs an emitter that writes into the given code buffer. The
// BlockOfCode is held by reference and must outlive this emitter.
EmitX64::EmitX64(BlockOfCode& code)
    : code(code) {}
|
2016-12-05 05:22:56 +01:00
|
|
|
|
2018-01-24 03:11:07 +01:00
|
|
|
// Defaulted out of line — a common idiom so members' complete types are only
// required in this translation unit, not in every includer of the header.
EmitX64::~EmitX64() = default;
|
2016-07-14 13:52:53 +02:00
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Looks up the descriptor of an already-compiled basic block for the given
// location. Returns boost::none when no compiled block exists (never
// compiled, or since invalidated).
boost::optional<typename EmitX64::BlockDescriptor> EmitX64::GetBasicBlock(IR::LocationDescriptor descriptor) const {
    const auto found = block_descriptors.find(descriptor);
    if (found != block_descriptors.end()) {
        return found->second;
    }
    return boost::none;
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// A Void IR instruction produces no value and requires no host code.
void EmitX64::EmitVoid(EmitContext&, IR::Inst*) {
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Emits a host int3 so that a native debugger traps when execution reaches
// this point in the generated code.
void EmitX64::EmitBreakpoint(EmitContext&, IR::Inst*) {
    code.int3();
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Identity forwards its argument as its result. No host code is emitted:
// a non-immediate result is simply aliased to the argument's current
// location; an immediate argument needs no action at all.
void EmitX64::EmitIdentity(EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    if (args[0].IsImmediate()) {
        return;
    }
    ctx.reg_alloc.DefineValue(inst, args[0]);
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Emits code that pushes an entry for `target` onto the return stack buffer
// (RSB) held in the JIT state (addressed via r15). loc_desc_reg and index_reg
// are caller-provided scratch GPRs. Note: rcx is written by the patchable mov
// emitted below, so the caller must have reserved it.
void EmitX64::PushRSBHelper(Xbyak::Reg64 loc_desc_reg, Xbyak::Reg64 index_reg, IR::LocationDescriptor target) {
    using namespace Xbyak::util;

    // If the target block is already compiled, the RSB entry points directly
    // at its entrypoint; otherwise fall back to the return-from-run-code thunk.
    auto iter = block_descriptors.find(target);
    CodePtr target_code_ptr = iter != block_descriptors.end()
                            ? iter->second.entrypoint
                            : code.GetReturnFromRunCodeAddress();

    // Load the current RSB write index.
    code.mov(index_reg.cvt32(), dword[r15 + code.GetJitStateInfo().offsetof_rsb_ptr]);

    // Location descriptor identifying the target block.
    code.mov(loc_desc_reg, target.Value());

    // Record the address of the mov-into-rcx below so it can be re-patched
    // when the target block is (re)compiled or invalidated.
    patch_information[target].mov_rcx.emplace_back(code.getCurr());
    EmitPatchMovRcx(target_code_ptr);

    // Store the (location descriptor, code pointer) pair into the RSB slot.
    code.mov(qword[r15 + index_reg * 8 + code.GetJitStateInfo().offsetof_rsb_location_descriptors], loc_desc_reg);
    code.mov(qword[r15 + index_reg * 8 + code.GetJitStateInfo().offsetof_rsb_codeptrs], rcx);

    // Advance the write index, wrapping it with rsb_ptr_mask, and store it back.
    code.add(index_reg.cvt32(), 1);
    code.and_(index_reg.cvt32(), u32(code.GetJitStateInfo().rsb_ptr_mask));
    code.mov(dword[r15 + code.GetJitStateInfo().offsetof_rsb_ptr], index_reg.cvt32());
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Compiles the PushRSB IR instruction: pushes an entry for the target block
// (identified by an immediate location-descriptor hash) onto the return
// stack buffer.
void EmitX64::EmitPushRSB(EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    ASSERT(args[0].IsImmediate());
    const u64 target_hash = args[0].GetImmediateU64();

    // rcx is clobbered by the patchable mov inside PushRSBHelper, so reserve
    // it first, then grab the two scratch registers the helper needs.
    ctx.reg_alloc.ScratchGpr({HostLoc::RCX});
    const Xbyak::Reg64 location_descriptor_reg = ctx.reg_alloc.ScratchGpr();
    const Xbyak::Reg64 rsb_index_reg = ctx.reg_alloc.ScratchGpr();

    PushRSBHelper(location_descriptor_reg, rsb_index_reg, IR::LocationDescriptor{target_hash});
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// GetCarryFromOp is presumably consumed as a pseudo-operation by the emitter
// of the instruction that produces the carry; reaching this emitter directly
// indicates a bug in instruction selection.
void EmitX64::EmitGetCarryFromOp(EmitContext&, IR::Inst*) {
    ASSERT_MSG(false, "should never happen");
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// GetOverflowFromOp is presumably consumed as a pseudo-operation by the
// producing instruction's emitter; reaching this emitter directly indicates
// a bug in instruction selection.
void EmitX64::EmitGetOverflowFromOp(EmitContext&, IR::Inst*) {
    ASSERT_MSG(false, "should never happen");
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// GetGEFromOp is presumably consumed as a pseudo-operation by the producing
// instruction's emitter; reaching this emitter directly indicates a bug in
// instruction selection.
void EmitX64::EmitGetGEFromOp(EmitContext&, IR::Inst*) {
    ASSERT_MSG(false, "should never happen");
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Materializes host NZCV flags from the operand by comparing it against
// zero. The packed result is produced in AX: lahf loads SF/ZF/CF (among
// others) into AH, and seto puts OF into AL — the packed-flags format other
// emitters in this backend consume.
void EmitX64::EmitGetNZCVFromOp(EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);

    // Map the IR operand type to the host compare width.
    int bitsize = 0;
    switch (args[0].GetType()) {
    case IR::Type::U8:
        bitsize = 8;
        break;
    case IR::Type::U16:
        bitsize = 16;
        break;
    case IR::Type::U32:
        bitsize = 32;
        break;
    case IR::Type::U64:
        bitsize = 64;
        break;
    default:
        ASSERT_MSG(false, "Unreachable");
        break;
    }

    // lahf/seto write AH/AL, so the result must live in rax.
    const Xbyak::Reg64 nzcv = ctx.reg_alloc.ScratchGpr({HostLoc::RAX});
    const Xbyak::Reg value = ctx.reg_alloc.UseGpr(args[0]).changeBit(bitsize);
    code.cmp(value, 0);
    code.lahf();
    code.seto(code.al);
    ctx.reg_alloc.DefineValue(inst, nzcv);
}
|
|
|
|
|
2018-02-03 14:34:40 +01:00
|
|
|
// Converts guest NZCV packed in bits 31-28 of the operand into the host
// packed-flags layout: N -> bit 15 (SF), Z -> bit 14 (ZF), C -> bit 8 (CF),
// V -> bit 0 (OF-in-AL).
void EmitX64::EmitNZCVFromPackedFlags(EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);

    if (args[0].IsImmediate()) {
        // Constant operand: repack the four flag bits at compile time.
        Xbyak::Reg32 nzcv = ctx.reg_alloc.ScratchGpr().cvt32();
        u32 value = 0;
        value |= Common::Bit<31>(args[0].GetImmediateU32()) ? (1 << 15) : 0; // N -> SF
        value |= Common::Bit<30>(args[0].GetImmediateU32()) ? (1 << 14) : 0; // Z -> ZF
        value |= Common::Bit<29>(args[0].GetImmediateU32()) ? (1 << 8) : 0;  // C -> CF
        value |= Common::Bit<28>(args[0].GetImmediateU32()) ? (1 << 0) : 0;  // V -> OF
        code.mov(nzcv, value);
        ctx.reg_alloc.DefineValue(inst, nzcv);
    } else {
        Xbyak::Reg32 nzcv = ctx.reg_alloc.UseScratchGpr(args[0]).cvt32();

        // TODO: Optimize
        // Bring NZCV down to bits 3-0, then use one multiply to fan each bit
        // out to its destination: the product has N at bit 15, Z at bit 14,
        // C at bit 8 and V at bit 0 (plus stray product bits elsewhere).
        code.shr(nzcv, 28);
        code.imul(nzcv, nzcv, 0b00010000'10000001);
        // Mask the low byte to keep only V at bit 0; the remaining stray bits
        // are presumably ignored by consumers of this format — confirm when
        // changing this.
        code.and_(nzcv.cvt8(), 1);
        ctx.reg_alloc.DefineValue(inst, nzcv);
    }
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Emits code that charges `cycles` guest cycles by decrementing the
// cycles-remaining counter in the JIT state (addressed via r15). The count
// must fit in the 32-bit immediate of the sub instruction.
void EmitX64::EmitAddCycles(size_t cycles) {
    ASSERT(cycles < std::numeric_limits<u32>::max());
    code.sub(qword[r15 + code.GetJitStateInfo().offsetof_cycles_remaining], static_cast<u32>(cycles));
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Emits a test of the guest condition `cond` against the stored CPSR NZCV
// flags and returns a label that is jumped to when the condition HOLDS;
// fall-through means the condition failed. Clobbers eax (and additionally
// ebx/esi for the GT/LE cases).
Xbyak::Label EmitX64::EmitCond(IR::Cond cond) {
    Xbyak::Label label;

    const Xbyak::Reg32 cpsr = eax;
    code.mov(cpsr, dword[r15 + code.GetJitStateInfo().offsetof_CPSR_nzcv]);

    // Bit positions of the guest flags within the stored CPSR word.
    constexpr size_t n_shift = 31;
    constexpr size_t z_shift = 30;
    constexpr size_t c_shift = 29;
    constexpr size_t v_shift = 28;
    constexpr u32 n_mask = 1u << n_shift;
    constexpr u32 z_mask = 1u << z_shift;
    constexpr u32 c_mask = 1u << c_shift;
    constexpr u32 v_mask = 1u << v_shift;

    switch (cond) {
    case IR::Cond::EQ: //z
        code.test(cpsr, z_mask);
        code.jnz(label);
        break;
    case IR::Cond::NE: //!z
        code.test(cpsr, z_mask);
        code.jz(label);
        break;
    case IR::Cond::CS: //c
        code.test(cpsr, c_mask);
        code.jnz(label);
        break;
    case IR::Cond::CC: //!c
        code.test(cpsr, c_mask);
        code.jz(label);
        break;
    case IR::Cond::MI: //n
        code.test(cpsr, n_mask);
        code.jnz(label);
        break;
    case IR::Cond::PL: //!n
        code.test(cpsr, n_mask);
        code.jz(label);
        break;
    case IR::Cond::VS: //v
        code.test(cpsr, v_mask);
        code.jnz(label);
        break;
    case IR::Cond::VC: //!v
        code.test(cpsr, v_mask);
        code.jz(label);
        break;
    case IR::Cond::HI: { //c & !z
        // Isolate C and Z; equal to c_mask means C set and Z clear.
        code.and_(cpsr, z_mask | c_mask);
        code.cmp(cpsr, c_mask);
        code.je(label);
        break;
    }
    case IR::Cond::LS: { //!c | z
        // Isolate C and Z; anything other than "C set, Z clear" passes.
        code.and_(cpsr, z_mask | c_mask);
        code.cmp(cpsr, c_mask);
        code.jne(label);
        break;
    }
    case IR::Cond::GE: { // n == v
        // N == V iff the isolated pair is both-clear or both-set.
        code.and_(cpsr, n_mask | v_mask);
        code.jz(label);
        code.cmp(cpsr, n_mask | v_mask);
        code.je(label);
        break;
    }
    case IR::Cond::LT: { // n != v
        // Both-clear means N == V: skip. Otherwise jump unless both are set.
        Xbyak::Label fail;
        code.and_(cpsr, n_mask | v_mask);
        code.jz(fail);
        code.cmp(cpsr, n_mask | v_mask);
        code.jne(label);
        code.L(fail);
        break;
    }
    case IR::Cond::GT: { // !z & (n == v)
        // Compute (n ^ v) | z in bit 0 of tmp1; zero means the condition holds.
        const Xbyak::Reg32 tmp1 = ebx;
        const Xbyak::Reg32 tmp2 = esi;
        code.mov(tmp1, cpsr);
        code.mov(tmp2, cpsr);
        code.shr(tmp1, n_shift);
        code.shr(tmp2, v_shift);
        code.shr(cpsr, z_shift);
        code.xor_(tmp1, tmp2);
        code.or_(tmp1, cpsr);
        code.test(tmp1, 1);
        code.jz(label);
        break;
    }
    case IR::Cond::LE: { // z | (n != v)
        // Same computation as GT, but jump when (n ^ v) | z is nonzero.
        const Xbyak::Reg32 tmp1 = ebx;
        const Xbyak::Reg32 tmp2 = esi;
        code.mov(tmp1, cpsr);
        code.mov(tmp2, cpsr);
        code.shr(tmp1, n_shift);
        code.shr(tmp2, v_shift);
        code.shr(cpsr, z_shift);
        code.xor_(tmp1, tmp2);
        code.or_(tmp1, cpsr);
        code.test(tmp1, 1);
        code.jnz(label);
        break;
    }
    default:
        ASSERT_MSG(false, "Unknown cond {}", static_cast<size_t>(cond));
        break;
    }

    return label;
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Emits the conditional prologue for a block: when the block's condition
// fails, charge the condition-failed cycle count and link to the
// condition-failed location; otherwise execution continues at `pass`.
// Unconditional (AL) blocks emit nothing.
void EmitX64::EmitCondPrelude(const IR::Block& block) {
    if (block.GetCondition() == IR::Cond::AL) {
        // An unconditional block must not carry a fail-path location.
        ASSERT(!block.HasConditionFailedLocation());
        return;
    }

    ASSERT(block.HasConditionFailedLocation());

    // EmitCond returns the label taken when the condition HOLDS, so the code
    // emitted between here and code.L(pass) is the condition-failed path.
    Xbyak::Label pass = EmitCond(block.GetCondition());
    EmitAddCycles(block.ConditionFailedCycleCount());
    EmitTerminal(IR::Term::LinkBlock{block.ConditionFailedLocation()}, block.Location());
    code.L(pass);
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Dispatches on the concrete alternative held by the terminal variant and
// forwards it to the matching EmitTerminalImpl overload. Encountering an
// Invalid terminal indicates a bug in IR construction.
void EmitX64::EmitTerminal(IR::Terminal terminal, IR::LocationDescriptor initial_location) {
    Common::VisitVariant<void>(terminal, [this, &initial_location](auto term) {
        using TermType = std::decay_t<decltype(term)>;
        if constexpr (std::is_same_v<TermType, IR::Term::Invalid>) {
            ASSERT_MSG(false, "Invalid terminal");
        } else {
            this->EmitTerminalImpl(term, initial_location);
        }
    });
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Rewrites every recorded patch site that targets `desc` so that it refers
// to `bb` (a null bb presumably makes the patch emitters fall back to their
// default targets — see Unpatch). The emitter's current code pointer is
// saved up front and restored at the end, since patching temporarily moves
// it to each patch site.
void EmitX64::Patch(const IR::LocationDescriptor& desc, CodePtr bb) {
    const CodePtr save_code_ptr = code.getCurr();
    // NOTE: operator[] default-constructs an empty entry if desc has no
    // recorded patch sites yet.
    const PatchInformation& patch_info = patch_information[desc];

    // Re-emit each conditional-jump patch site.
    for (CodePtr location : patch_info.jg) {
        code.SetCodePtr(location);
        EmitPatchJg(desc, bb);
    }

    // Re-emit each unconditional-jump patch site.
    for (CodePtr location : patch_info.jmp) {
        code.SetCodePtr(location);
        EmitPatchJmp(desc, bb);
    }

    // Re-emit each mov-into-rcx patch site (RSB entries).
    for (CodePtr location : patch_info.mov_rcx) {
        code.SetCodePtr(location);
        EmitPatchMovRcx(bb);
    }

    code.SetCodePtr(save_code_ptr);
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Unlinks all patched jumps to `desc` by re-patching them with a null code
// pointer.
void EmitX64::Unpatch(const IR::LocationDescriptor& desc) {
    Patch(desc, nullptr);
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Forgets all compiled-block descriptors and all patch-site bookkeeping.
// Only the maps are cleared here; the emitted machine code buffer itself is
// not touched by this function.
void EmitX64::ClearCache() {
    block_descriptors.clear();
    patch_information.clear();
}
|
|
|
|
|
2018-01-23 20:16:39 +01:00
|
|
|
// Invalidates the compiled blocks for the given set of locations: any
// patched links into each block are first unlinked, then its descriptor is
// dropped. Locations with no compiled block are ignored.
void EmitX64::InvalidateBasicBlocks(const std::unordered_set<IR::LocationDescriptor>& locations) {
    for (const auto& location : locations) {
        const auto block_it = block_descriptors.find(location);
        if (block_it == block_descriptors.end()) {
            continue;
        }

        // Unlink jumps into this block before forgetting it.
        if (patch_information.count(location) > 0) {
            Unpatch(location);
        }
        block_descriptors.erase(block_it);
    }
}
|
|
|
|
|
2018-01-26 14:51:48 +01:00
|
|
|
} // namespace Dynarmic::BackendX64
|