/* This file is part of the dynarmic project.
 * Copyright (c) 2016 MerryMage
 * This software may be used and distributed according to the terms of the GNU
 * General Public License version 2 or any later version.
 */

#include <algorithm>
#include <numeric>
#include <utility>

#include <xbyak.h>

#include "backend_x64/abi.h"
#include "backend_x64/reg_alloc.h"
#include "common/assert.h"

namespace Dynarmic {
namespace BackendX64 {

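// Zero-extends an immediate IR value of any supported integral width (U1 up to U64) to a u64.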
static u64 ImmediateToU64(const IR::Value& imm) {
    switch (imm.GetType()) {
    case IR::Type::U1:
        return u64(imm.GetU1());
    case IR::Type::U8:
        return u64(imm.GetU8());
    case IR::Type::U16:
        return u64(imm.GetU16());
    case IR::Type::U32:
        return u64(imm.GetU32());
    case IR::Type::U64:
        return u64(imm.GetU64());
    default:
        ASSERT_MSG(false, "This should never happen.");
    }
}

static bool IsSameHostLocClass(HostLoc a, HostLoc b) {
    return (HostLocIsGPR(a) && HostLocIsGPR(b))
        || (HostLocIsXMM(a) && HostLocIsXMM(b))
        || (HostLocIsSpill(a) && HostLocIsSpill(b));
}

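// HostLocInfo tracks how a single host location (GPR, XMM register or spill slot) is used:
// * values          - the IR instructions whose results currently live in this location.
// * is_being_used   - the location is locked while the current instruction is being emitted.
// * is_scratch      - the locked location may additionally be clobbered by the current instruction.
// * total_uses / accumulated_uses / current_references - use counts that let EndOfAllocScope()
//   decide when the stored values are dead and the location can be released.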
bool HostLocInfo::IsLocked() const {
    return is_being_used;
}

bool HostLocInfo::IsEmpty() const {
    return !is_being_used && values.empty();
}

bool HostLocInfo::IsLastUse() const {
    return !is_being_used && current_references == 1 && accumulated_uses + 1 == total_uses;
}

bool HostLocInfo::ContainsValue(const IR::Inst* inst) const {
    return std::find(values.begin(), values.end(), inst) != values.end();
}

void HostLocInfo::ReadLock() {
    ASSERT(!is_scratch);
    is_being_used = true;
}

void HostLocInfo::WriteLock() {
    ASSERT(!is_being_used);
    is_being_used = true;
    is_scratch = true;
}

void HostLocInfo::AddValue(IR::Inst* inst) {
    values.push_back(inst);
    total_uses += inst->UseCount();
}

void HostLocInfo::AddArgReference() {
    current_references++;
    ASSERT(accumulated_uses + current_references <= total_uses);
}

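// Called when the current allocation scope ends (typically after one IR instruction has been
// emitted): folds this scope's argument references into accumulated_uses, releases the stored
// values once every use has been accounted for, and clears the lock flags.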
void HostLocInfo::EndOfAllocScope() {
    accumulated_uses += current_references;
    current_references = 0;

    if (total_uses == accumulated_uses) {
        values.clear();
        accumulated_uses = 0;
        total_uses = 0;
    }

    ASSERT(total_uses == std::accumulate(values.begin(), values.end(), size_t(0), [](size_t sum, IR::Inst* inst) { return sum + inst->UseCount(); }));

    is_being_used = false;
    is_scratch = false;
}

IR::Type Argument::GetType() const {
    return value.GetType();
}

bool Argument::IsImmediate() const {
    return value.IsImmediate();
}

bool Argument::GetImmediateU1() const {
    return value.GetU1();
}

u8 Argument::GetImmediateU8() const {
    u64 imm = ImmediateToU64(value);
    ASSERT(imm < 0x100);
    return u8(imm);
}

u16 Argument::GetImmediateU16() const {
    u64 imm = ImmediateToU64(value);
    ASSERT(imm < 0x10000);
    return u16(imm);
}

u32 Argument::GetImmediateU32() const {
    u64 imm = ImmediateToU64(value);
    ASSERT(imm < 0x100000000);
    return u32(imm);
}

u64 Argument::GetImmediateU64() const {
    return ImmediateToU64(value);
}

bool Argument::IsInGpr() const {
    return HostLocIsGPR(*reg_alloc.ValueLocation(value.GetInst()));
}

bool Argument::IsInXmm() const {
    return HostLocIsXMM(*reg_alloc.ValueLocation(value.GetInst()));
}

bool Argument::IsInMemory() const {
    return HostLocIsSpill(*reg_alloc.ValueLocation(value.GetInst()));
}

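// Gathers the (up to three) arguments of an IR instruction and registers a reference to each
// non-immediate argument's current host location. A typical emitter uses it roughly like this
// (a sketch only; the emitter-side variable names are illustrative):
//
//     auto args = reg_alloc.GetArgumentInfo(inst);
//     Xbyak::Reg64 result = reg_alloc.UseScratchGpr(args[0]);
//     // ... emit code that modifies `result` in place ...
//     reg_alloc.DefineValue(inst, result);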
std::array<Argument, 3> RegAlloc::GetArgumentInfo(IR::Inst* inst) {
    std::array<Argument, 3> ret = { Argument{*this}, Argument{*this}, Argument{*this} };
    for (size_t i = 0; i < inst->NumArgs(); i++) {
        const IR::Value& arg = inst->GetArg(i);
        ret[i].value = arg;
        if (!arg.IsImmediate()) {
            ASSERT_MSG(ValueLocation(arg.GetInst()), "argument must already be defined");
            LocInfo(*ValueLocation(arg.GetInst())).AddArgReference();
        }
    }
    return ret;
}

Xbyak::Reg64 RegAlloc::UseGpr(Argument& arg) {
    ASSERT(!arg.allocated);
    arg.allocated = true;
    return HostLocToReg64(UseImpl(arg.value, any_gpr));
}

Xbyak::Xmm RegAlloc::UseXmm(Argument& arg) {
    ASSERT(!arg.allocated);
    arg.allocated = true;
    return HostLocToXmm(UseImpl(arg.value, any_xmm));
}

OpArg RegAlloc::UseOpArg(Argument& arg) {
    return UseGpr(arg);
}

void RegAlloc::Use(Argument& arg, HostLoc host_loc) {
    ASSERT(!arg.allocated);
    arg.allocated = true;
    UseImpl(arg.value, {host_loc});
}

Xbyak::Reg64 RegAlloc::UseScratchGpr(Argument& arg) {
    ASSERT(!arg.allocated);
    arg.allocated = true;
    return HostLocToReg64(UseScratchImpl(arg.value, any_gpr));
}

Xbyak::Xmm RegAlloc::UseScratchXmm(Argument& arg) {
    ASSERT(!arg.allocated);
    arg.allocated = true;
    return HostLocToXmm(UseScratchImpl(arg.value, any_xmm));
}

void RegAlloc::UseScratch(Argument& arg, HostLoc host_loc) {
    ASSERT(!arg.allocated);
    arg.allocated = true;
    UseScratchImpl(arg.value, {host_loc});
}

void RegAlloc::DefineValue(IR::Inst* inst, const Xbyak::Reg& reg) {
    ASSERT(reg.getKind() == Xbyak::Operand::XMM || reg.getKind() == Xbyak::Operand::REG);
    HostLoc hostloc = static_cast<HostLoc>(reg.getIdx() + static_cast<size_t>(reg.getKind() == Xbyak::Operand::XMM ? HostLoc::XMM0 : HostLoc::RAX));
    DefineValueImpl(inst, hostloc);
}

void RegAlloc::DefineValue(IR::Inst* inst, Argument& arg) {
    ASSERT(!arg.allocated);
    arg.allocated = true;
    DefineValueImpl(inst, arg.value);
}

Xbyak::Reg64 RegAlloc::ScratchGpr(HostLocList desired_locations) {
    return HostLocToReg64(ScratchImpl(desired_locations));
}

Xbyak::Xmm RegAlloc::ScratchXmm(HostLocList desired_locations) {
    return HostLocToXmm(ScratchImpl(desired_locations));
}

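// Makes use_value available in one of desired_locations for reading. The value keeps its
// identity: if it already sits somewhere acceptable that location is simply read-locked,
// otherwise it is moved (or exchanged) into a suitable register first.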
HostLoc RegAlloc::UseImpl(IR::Value use_value, HostLocList desired_locations) {
    if (use_value.IsImmediate()) {
        return LoadImmediate(use_value, ScratchImpl(desired_locations));
    }

    IR::Inst* use_inst = use_value.GetInst();
    const HostLoc current_location = *ValueLocation(use_inst);

    const bool can_use_current_location = std::find(desired_locations.begin(), desired_locations.end(), current_location) != desired_locations.end();
    if (can_use_current_location) {
        LocInfo(current_location).ReadLock();
        return current_location;
    }

    if (LocInfo(current_location).IsLocked()) {
        return UseScratchImpl(use_value, desired_locations);
    }

    const HostLoc destination_location = SelectARegister(desired_locations);
    if (IsSameHostLocClass(destination_location, current_location)) {
        Exchange(destination_location, current_location);
    } else {
        MoveOutOfTheWay(destination_location);
        Move(destination_location, current_location);
    }
    LocInfo(destination_location).ReadLock();
    return destination_location;
}

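// Like UseImpl, but the returned location is write-locked so the caller may clobber it.
// The value is kept alive elsewhere first unless this is provably its last use.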
HostLoc RegAlloc::UseScratchImpl(IR::Value use_value, HostLocList desired_locations) {
    if (use_value.IsImmediate()) {
        return LoadImmediate(use_value, ScratchImpl(desired_locations));
    }

    IR::Inst* use_inst = use_value.GetInst();
    const HostLoc current_location = *ValueLocation(use_inst);

    const bool can_use_current_location = std::find(desired_locations.begin(), desired_locations.end(), current_location) != desired_locations.end();
    if (can_use_current_location && !LocInfo(current_location).IsLocked()) {
        if (!LocInfo(current_location).IsLastUse()) {
            MoveOutOfTheWay(current_location);
        }
        LocInfo(current_location).WriteLock();
        return current_location;
    }

    const HostLoc destination_location = SelectARegister(desired_locations);
    MoveOutOfTheWay(destination_location);
    CopyToScratch(destination_location, current_location);
    LocInfo(destination_location).WriteLock();
    return destination_location;
}

HostLoc RegAlloc::ScratchImpl(HostLocList desired_locations) {
    HostLoc location = SelectARegister(desired_locations);
    MoveOutOfTheWay(location);
    LocInfo(location).WriteLock();
    return location;
}

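// Prepares for a call to host (C++) code: the result, if any, is defined in ABI_RETURN,
// up to four arguments are moved into the ABI parameter registers, and every remaining
// caller-saved register is scratched so that its contents are spilled before the call.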
void RegAlloc::HostCall(IR::Inst* result_def, boost::optional<Argument&> arg0, boost::optional<Argument&> arg1, boost::optional<Argument&> arg2, boost::optional<Argument&> arg3) {
    constexpr size_t args_count = 4;
    constexpr std::array<HostLoc, args_count> args_hostloc = { ABI_PARAM1, ABI_PARAM2, ABI_PARAM3, ABI_PARAM4 };
    const std::array<boost::optional<Argument&>, args_count> args = { arg0, arg1, arg2, arg3 };

    static const std::vector<HostLoc> other_caller_save = [args_hostloc]() {
        std::vector<HostLoc> ret(ABI_ALL_CALLER_SAVE.begin(), ABI_ALL_CALLER_SAVE.end());

        for (auto hostloc : args_hostloc)
            ret.erase(std::find(ret.begin(), ret.end(), hostloc));

        return ret;
    }();

    ScratchGpr({ABI_RETURN});
    if (result_def) {
        DefineValueImpl(result_def, ABI_RETURN);
    }

    for (size_t i = 0; i < args_count; i++) {
        if (args[i]) {
            UseScratch(*args[i], args_hostloc[i]);
#if defined(__llvm__) && !defined(_WIN32)
            // LLVM puts the burden of zero-extension of 8 and 16 bit values on the caller instead of the callee
            Xbyak::Reg64 reg = HostLocToReg64(args_hostloc[i]);
            switch (args[i]->GetType()) {
            case IR::Type::U8:
                code->movzx(reg.cvt32(), reg.cvt8());
                break;
            case IR::Type::U16:
                code->movzx(reg.cvt32(), reg.cvt16());
                break;
            default:
                break; // Nothing needs to be done
            }
#endif
        }
    }

    for (size_t i = 0; i < args_count; i++) {
        if (!args[i]) {
            // TODO: Force spill
            ScratchGpr({args_hostloc[i]});
        }
    }

    for (HostLoc caller_saved : other_caller_save) {
        ScratchImpl({caller_saved});
    }
}

void RegAlloc::EndOfAllocScope() {
    for (auto& iter : hostloc_info) {
        iter.EndOfAllocScope();
    }
}

void RegAlloc::AssertNoMoreUses() {
    ASSERT(std::all_of(hostloc_info.begin(), hostloc_info.end(), [](const auto& i) { return i.IsEmpty(); }));
}

HostLoc RegAlloc::SelectARegister(HostLocList desired_locations) const {
    std::vector<HostLoc> candidates = desired_locations;

    // Find all locations that have not been allocated.
    auto allocated_locs = std::partition(candidates.begin(), candidates.end(), [this](auto loc){
        return !this->LocInfo(loc).IsLocked();
    });
    candidates.erase(allocated_locs, candidates.end());
    ASSERT_MSG(!candidates.empty(), "All candidate registers have already been allocated");

    // Select the best location out of the available locations.
    // TODO: Actually do LRU or something. Currently we just try to pick something without a value if possible.
    std::partition(candidates.begin(), candidates.end(), [this](auto loc){
        return this->LocInfo(loc).IsEmpty();
    });

    return candidates.front();
}

boost::optional<HostLoc> RegAlloc::ValueLocation(const IR::Inst* value) const {
    for (size_t i = 0; i < hostloc_info.size(); i++)
        if (hostloc_info[i].ContainsValue(value))
            return static_cast<HostLoc>(i);

    return boost::none;
}

void RegAlloc::DefineValueImpl(IR::Inst* def_inst, HostLoc host_loc) {
    ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
    LocInfo(host_loc).AddValue(def_inst);
}

void RegAlloc::DefineValueImpl(IR::Inst* def_inst, const IR::Value& use_inst) {
    ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");

    if (use_inst.IsImmediate()) {
        HostLoc location = ScratchImpl(any_gpr);
        DefineValueImpl(def_inst, location);
        LoadImmediate(use_inst, location);
        return;
    }

    ASSERT_MSG(ValueLocation(use_inst.GetInst()), "use_inst must already be defined");
    HostLoc location = *ValueLocation(use_inst.GetInst());
    DefineValueImpl(def_inst, location);
}

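// Emits code that materialises the immediate into host_loc and returns that location.
// Zero gets the shorter xor/pxor idiom; other values use mov (GPR) or movdqa from a
// constant generated by MConst (XMM).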
HostLoc RegAlloc::LoadImmediate(IR::Value imm, HostLoc host_loc) {
    ASSERT_MSG(imm.IsImmediate(), "imm is not an immediate");

    if (HostLocIsGPR(host_loc)) {
        Xbyak::Reg64 reg = HostLocToReg64(host_loc);
        u64 imm_value = ImmediateToU64(imm);
        if (imm_value == 0)
            code->xor_(reg.cvt32(), reg.cvt32());
        else
            code->mov(reg, imm_value);
        return host_loc;
    }

    if (HostLocIsXMM(host_loc)) {
        Xbyak::Xmm reg = HostLocToXmm(host_loc);
        u64 imm_value = ImmediateToU64(imm);
        if (imm_value == 0)
            code->pxor(reg, reg);
        else
            code->movdqa(reg, code->MConst(imm_value)); // TODO: movaps/movapd more appropriate sometimes
        return host_loc;
    }

    UNREACHABLE();
}

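// Move and Exchange transfer the allocator's bookkeeping (which HostLocInfo owns which values)
// between locations and then emit the corresponding host code; CopyToScratch only emits the
// copy, leaving the bookkeeping untouched.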
void RegAlloc::Move(HostLoc to, HostLoc from) {
    ASSERT(LocInfo(to).IsEmpty() && !LocInfo(from).IsLocked());

    if (LocInfo(from).IsEmpty()) {
        return;
    }

    LocInfo(to) = LocInfo(from);
    LocInfo(from) = {};

    EmitMove(to, from);
}

void RegAlloc::CopyToScratch(HostLoc to, HostLoc from) {
    ASSERT(LocInfo(to).IsEmpty() && !LocInfo(from).IsEmpty());

    EmitMove(to, from);
}

void RegAlloc::Exchange(HostLoc a, HostLoc b) {
    ASSERT(!LocInfo(a).IsLocked() && !LocInfo(b).IsLocked());

    if (LocInfo(a).IsEmpty()) {
        Move(a, b);
        return;
    }

    if (LocInfo(b).IsEmpty()) {
        Move(b, a);
        return;
    }

    std::swap(LocInfo(a), LocInfo(b));

    EmitExchange(a, b);
}

void RegAlloc::MoveOutOfTheWay(HostLoc reg) {
    ASSERT(!LocInfo(reg).IsLocked());
    if (!LocInfo(reg).IsEmpty()) {
        SpillRegister(reg);
    }
}

void RegAlloc::SpillRegister(HostLoc loc) {
    ASSERT_MSG(HostLocIsRegister(loc), "Only registers can be spilled");
    ASSERT_MSG(!LocInfo(loc).IsEmpty(), "There is no need to spill unoccupied registers");
    ASSERT_MSG(!LocInfo(loc).IsLocked(), "Registers that have been allocated must not be spilt");

    HostLoc new_loc = FindFreeSpill();
    Move(new_loc, loc);
}

HostLoc RegAlloc::FindFreeSpill() const {
    for (size_t i = static_cast<size_t>(HostLoc::FirstSpill); i < hostloc_info.size(); i++) {
        HostLoc loc = static_cast<HostLoc>(i);
        if (LocInfo(loc).IsEmpty())
            return loc;
    }

    ASSERT_MSG(false, "All spill locations are full");
}

HostLocInfo& RegAlloc::LocInfo(HostLoc loc) {
    ASSERT(loc != HostLoc::RSP && loc != HostLoc::R15);
    return hostloc_info[static_cast<size_t>(loc)];
}

const HostLocInfo& RegAlloc::LocInfo(HostLoc loc) const {
    ASSERT(loc != HostLoc::RSP && loc != HostLoc::R15);
    return hostloc_info[static_cast<size_t>(loc)];
}

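// Lowers a location-to-location transfer into the appropriate host instruction for the
// GPR/XMM/spill-slot combination involved. Spill slots are addressed via spill_to_addr.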
void RegAlloc::EmitMove(HostLoc to, HostLoc from) {
    if (HostLocIsXMM(to) && HostLocIsXMM(from)) {
        code->movaps(HostLocToXmm(to), HostLocToXmm(from));
    } else if (HostLocIsGPR(to) && HostLocIsGPR(from)) {
        code->mov(HostLocToReg64(to), HostLocToReg64(from));
    } else if (HostLocIsXMM(to) && HostLocIsGPR(from)) {
        code->movq(HostLocToXmm(to), HostLocToReg64(from));
    } else if (HostLocIsGPR(to) && HostLocIsXMM(from)) {
        code->movq(HostLocToReg64(to), HostLocToXmm(from));
    } else if (HostLocIsXMM(to) && HostLocIsSpill(from)) {
        code->movsd(HostLocToXmm(to), spill_to_addr(from));
    } else if (HostLocIsSpill(to) && HostLocIsXMM(from)) {
        code->movsd(spill_to_addr(to), HostLocToXmm(from));
    } else if (HostLocIsGPR(to) && HostLocIsSpill(from)) {
        code->mov(HostLocToReg64(to), spill_to_addr(from));
    } else if (HostLocIsSpill(to) && HostLocIsGPR(from)) {
        code->mov(spill_to_addr(to), HostLocToReg64(from));
    } else {
        ASSERT_MSG(false, "Invalid RegAlloc::EmitMove");
    }
}

void RegAlloc::EmitExchange(HostLoc a, HostLoc b) {
    if (HostLocIsGPR(a) && HostLocIsGPR(b)) {
        code->xchg(HostLocToReg64(a), HostLocToReg64(b));
    } else if (HostLocIsXMM(a) && HostLocIsXMM(b)) {
        ASSERT_MSG(false, "Check your code: Exchanging XMM registers is unnecessary");
    } else {
        ASSERT_MSG(false, "Invalid RegAlloc::EmitExchange");
    }
}

} // namespace BackendX64
} // namespace Dynarmic