/* This file is part of the dynarmic project.
 * Copyright (c) 2016 MerryMage
 * This software may be used and distributed according to the terms of the GNU
 * General Public License version 2 or any later version.
 */

#include <algorithm>
#include <map>
#include <tuple>
#include <type_traits>

#include "backend_x64/reg_alloc.h"
#include "common/assert.h"
#include "common/x64/emitter.h"

namespace Dynarmic {
namespace BackendX64 {

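// Converts an IR immediate into an x64 immediate operand. Only 1-bit, 8-bit and
// 32-bit immediates can appear here; anything else is a logic error.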
static Gen::OpArg ImmediateToOpArg(const IR::Value& imm) {
    switch (imm.GetType()) {
    case IR::Type::U1:
        return Gen::Imm32(imm.GetU1());
    case IR::Type::U8:
        return Gen::Imm32(imm.GetU8());
    case IR::Type::U32:
        return Gen::Imm32(imm.GetU32());
    default:
        ASSERT_MSG(false, "This should never happen.");
        return Gen::R(Gen::INVALID_REG);
    }
}

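// Maps a HostLoc onto the emitter's register numbering: GPRs translate directly,
// XMM locations are rebased so that HostLoc::XMM0 becomes XMM register 0.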
static Gen::X64Reg HostLocToX64(HostLoc loc) {
    DEBUG_ASSERT(HostLocIsRegister(loc));
    DEBUG_ASSERT(loc != HostLoc::RSP);
    // HostLoc is ordered such that the numbers line up.
    if (HostLocIsGPR(loc)) {
        return static_cast<Gen::X64Reg>(loc);
    }
    if (HostLocIsXMM(loc)) {
        return static_cast<Gen::X64Reg>(size_t(loc) - size_t(HostLoc::XMM0));
    }
    ASSERT_MSG(false, "This should never happen.");
    return Gen::INVALID_REG;
}

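// Forms a memory operand for a spill slot. Spill slots live in JitState::Spill,
// which is addressed relative to R15 at runtime.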
static Gen::OpArg SpillToOpArg(HostLoc loc) {
    static_assert(std::is_same<decltype(JitState{nullptr}.Spill[0]), u64&>::value, "Spill must be u64");
    DEBUG_ASSERT(HostLocIsSpill(loc));

    size_t i = static_cast<size_t>(loc) - static_cast<size_t>(HostLoc::FirstSpill);
    return Gen::MDisp(Gen::R15, static_cast<int>(offsetof(JitState, Spill) + i * sizeof(u64)));
}

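// Allocates a register to hold the result of def_inst, spilling the current
// occupant of the chosen location if necessary.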
Gen::X64Reg RegAlloc::DefRegister(IR::Inst* def_inst, HostLocList desired_locations) {
    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
    DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");

    HostLoc location = SelectARegister(desired_locations);

    if (IsRegisterOccupied(location)) {
        SpillRegister(location);
    }

    LocInfo(location).is_being_used = true;
    LocInfo(location).def = def_inst;

    DEBUG_ASSERT(LocInfo(location).IsDef());
    return HostLocToX64(location);
}

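// Defines def_inst as an alias of use_inst: both values end up tracked in the
// same host location. Immediate use values are first materialised into a GPR.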
void RegAlloc::RegisterAddDef(IR::Inst* def_inst, const IR::Value& use_inst) {
    DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");

    if (use_inst.IsImmediate()) {
        LoadImmediateIntoRegister(use_inst, DefRegister(def_inst, any_gpr));
        return;
    }

    DEBUG_ASSERT_MSG(ValueLocation(use_inst.GetInst()), "use_inst must already be defined");
    HostLoc location = *ValueLocation(use_inst.GetInst());
    LocInfo(location).values.emplace_back(def_inst);
    DecrementRemainingUses(use_inst.GetInst());
    DEBUG_ASSERT(LocInfo(location).IsIdle());
}

Gen::X64Reg RegAlloc::UseDefRegister(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations) {
    if (!use_value.IsImmediate()) {
        return UseDefRegister(use_value.GetInst(), def_inst, desired_locations);
    }

    return LoadImmediateIntoRegister(use_value, DefRegister(def_inst, desired_locations));
}

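// Reads use_inst and defines def_inst in a single register. If this is the last
// use of use_inst and its location is idle, that location is reused in place;
// otherwise the value is copied into a freshly defined register.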
Gen::X64Reg RegAlloc::UseDefRegister(IR::Inst* use_inst, IR::Inst* def_inst, HostLocList desired_locations) {
    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
    DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
    DEBUG_ASSERT_MSG(ValueLocation(use_inst), "use_inst has not been defined");

    if (IsLastUse(use_inst)) {
        HostLoc current_location = *ValueLocation(use_inst);
        auto& loc_info = LocInfo(current_location);
        if (loc_info.IsIdle()) {
            loc_info.is_being_used = true;
            loc_info.def = def_inst;
            DEBUG_ASSERT(loc_info.IsUseDef());
            if (HostLocIsSpill(current_location)) {
                HostLoc new_location = SelectARegister(desired_locations);
                if (IsRegisterOccupied(new_location)) {
                    SpillRegister(new_location);
                }
                EmitMove(new_location, current_location);
                LocInfo(new_location) = LocInfo(current_location);
                LocInfo(current_location) = {};
                return HostLocToX64(new_location);
            } else {
                return HostLocToX64(current_location);
            }
        }
    }

    bool is_floating_point = use_inst->GetType() == IR::Type::F32 || use_inst->GetType() == IR::Type::F64;
    Gen::X64Reg use_reg = UseRegister(use_inst, is_floating_point ? any_xmm : any_gpr);
    Gen::X64Reg def_reg = DefRegister(def_inst, desired_locations);
    if (is_floating_point) {
        code->MOVAPD(def_reg, Gen::R(use_reg));
    } else {
        code->MOV(64, Gen::R(def_reg), Gen::R(use_reg));
    }
    return def_reg;
}

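// Like UseDefRegister, but the use may be produced as a memory or immediate
// operand instead of being forced into a register.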
std::tuple<Gen::OpArg, Gen::X64Reg> RegAlloc::UseDefOpArg(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations) {
    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
    DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
    DEBUG_ASSERT_MSG(use_value.IsImmediate() || ValueLocation(use_value.GetInst()), "use_inst has not been defined");

    if (!use_value.IsImmediate()) {
        IR::Inst* use_inst = use_value.GetInst();

        if (IsLastUse(use_inst)) {
            HostLoc current_location = *ValueLocation(use_inst);
            auto& loc_info = LocInfo(current_location);
            if (!loc_info.IsIdle()) {
                if (HostLocIsSpill(current_location)) {
                    loc_info.is_being_used = true;
                    DEBUG_ASSERT(loc_info.IsUse());
                    return std::make_tuple(SpillToOpArg(current_location), DefRegister(def_inst, desired_locations));
                } else {
                    loc_info.is_being_used = true;
                    loc_info.def = def_inst;
                    DEBUG_ASSERT(loc_info.IsUseDef());
                    return std::make_tuple(Gen::R(HostLocToX64(current_location)), HostLocToX64(current_location));
                }
            }
        }
    }

    Gen::OpArg use_oparg = UseOpArg(use_value, any_gpr);
    Gen::X64Reg def_reg = DefRegister(def_inst, desired_locations);
    return std::make_tuple(use_oparg, def_reg);
}

Gen::X64Reg RegAlloc::UseRegister(IR::Value use_value, HostLocList desired_locations) {
    if (!use_value.IsImmediate()) {
        return UseRegister(use_value.GetInst(), desired_locations);
    }

    return LoadImmediateIntoRegister(use_value, ScratchRegister(desired_locations));
}

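// Ensures use_inst is available in one of the desired registers, reloading it
// from its spill slot if necessary.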
Gen::X64Reg RegAlloc::UseRegister(IR::Inst* use_inst, HostLocList desired_locations) {
    HostLoc current_location;
    bool was_being_used;
    std::tie(current_location, was_being_used) = UseHostLoc(use_inst, desired_locations);

    if (HostLocIsRegister(current_location)) {
        return HostLocToX64(current_location);
    } else if (HostLocIsSpill(current_location)) {
        HostLoc new_location = SelectARegister(desired_locations);
        if (IsRegisterOccupied(new_location)) {
            SpillRegister(new_location);
        }
        EmitMove(new_location, current_location);
        if (!was_being_used) {
            LocInfo(new_location) = LocInfo(current_location);
            LocInfo(current_location) = {};
            DEBUG_ASSERT(LocInfo(new_location).IsUse());
        } else {
            LocInfo(new_location).is_being_used = true;
            DEBUG_ASSERT(LocInfo(new_location).IsScratch());
        }
        return HostLocToX64(new_location);
    }

    ASSERT_MSG(false, "Unknown current_location type");
    return Gen::INVALID_REG;
}

Gen::OpArg RegAlloc::UseOpArg(IR::Value use_value, HostLocList desired_locations) {
    if (use_value.IsImmediate()) {
        return ImmediateToOpArg(use_value);
    }

    IR::Inst* use_inst = use_value.GetInst();

    HostLoc current_location;
    bool was_being_used;
    std::tie(current_location, was_being_used) = UseHostLoc(use_inst, desired_locations);

    if (HostLocIsRegister(current_location)) {
        return Gen::R(HostLocToX64(current_location));
    } else if (HostLocIsSpill(current_location)) {
        return SpillToOpArg(current_location);
    }

    ASSERT_MSG(false, "Unknown current_location type");
    return Gen::R(Gen::INVALID_REG);
}

Gen::X64Reg RegAlloc::UseScratchRegister(IR::Value use_value, HostLocList desired_locations) {
    if (!use_value.IsImmediate()) {
        return UseScratchRegister(use_value.GetInst(), desired_locations);
    }

    return LoadImmediateIntoRegister(use_value, ScratchRegister(desired_locations));
}

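// Copies use_inst into a register the caller may freely clobber; afterwards the
// chosen location is treated as scratch and no longer tracks any value.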
Gen::X64Reg RegAlloc::UseScratchRegister(IR::Inst* use_inst, HostLocList desired_locations) {
    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
    DEBUG_ASSERT_MSG(ValueLocation(use_inst), "use_inst has not been defined");
    ASSERT_MSG(use_inst->HasUses(), "use_inst ran out of uses. (Use-d an IR::Inst* too many times)");

    HostLoc current_location = *ValueLocation(use_inst);
    HostLoc new_location = SelectARegister(desired_locations);
    if (IsRegisterOccupied(new_location)) {
        SpillRegister(new_location);
    }

    if (HostLocIsSpill(current_location)) {
        EmitMove(new_location, current_location);
        LocInfo(new_location).is_being_used = true;
        DecrementRemainingUses(use_inst);
        DEBUG_ASSERT(LocInfo(new_location).IsScratch());
        return HostLocToX64(new_location);
    } else if (HostLocIsRegister(current_location)) {
        ASSERT(LocInfo(current_location).IsIdle()
               || LocInfo(current_location).IsUse()
               || LocInfo(current_location).IsUseDef());

        if (current_location != new_location) {
            EmitMove(new_location, current_location);
        } else {
            ASSERT(LocInfo(current_location).IsIdle());
        }

        LocInfo(new_location).is_being_used = true;
        LocInfo(new_location).values.clear();
        DecrementRemainingUses(use_inst);
        DEBUG_ASSERT(LocInfo(new_location).IsScratch());
        return HostLocToX64(new_location);
    }

    ASSERT_MSG(false, "Invalid current_location");
    return Gen::INVALID_REG;
}

Gen::X64Reg RegAlloc::ScratchRegister(HostLocList desired_locations) {
    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));

    HostLoc location = SelectARegister(desired_locations);

    if (IsRegisterOccupied(location)) {
        SpillRegister(location);
    }

    // Update state
    LocInfo(location).is_being_used = true;

    DEBUG_ASSERT(LocInfo(location).IsScratch());
    return HostLocToX64(location);
}

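// Prepares register state for a call into host code: caller-saved registers are
// flushed, arguments are placed in the ABI argument registers, and the result
// (if any) is defined in the ABI return register.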
void RegAlloc::HostCall(IR::Inst* result_def, IR::Value arg0_use, IR::Value arg1_use, IR::Value arg2_use, IR::Value arg3_use) {
    constexpr HostLoc AbiReturn = HostLoc::RAX;
#ifdef _WIN32
    constexpr std::array<HostLoc, 4> AbiArgs = { HostLoc::RCX, HostLoc::RDX, HostLoc::R8, HostLoc::R9 };
    // Caller-saved registers other than AbiReturn or AbiArgs
    constexpr std::array<HostLoc, 2> OtherCallerSave = { HostLoc::R10, HostLoc::R11 };
#else
    constexpr std::array<HostLoc, 4> AbiArgs = { HostLoc::RDI, HostLoc::RSI, HostLoc::RDX, HostLoc::RCX };
    // Caller-saved registers other than AbiReturn or AbiArgs
    constexpr std::array<HostLoc, 4> OtherCallerSave = { HostLoc::R8, HostLoc::R9, HostLoc::R10, HostLoc::R11 };
#endif

    const std::array<IR::Value*, 4> args = {&arg0_use, &arg1_use, &arg2_use, &arg3_use};

    // TODO: This works but almost certainly leads to suboptimal generated code.
    for (HostLoc caller_save : OtherCallerSave) {
        ScratchRegister({caller_save});
    }

    if (result_def) {
        DefRegister(result_def, {AbiReturn});
    } else {
        ScratchRegister({AbiReturn});
    }

    for (size_t i = 0; i < AbiArgs.size(); i++) {
        if (!args[i]->IsEmpty()) {
            UseScratchRegister(*args[i], {AbiArgs[i]});
        } else {
            ScratchRegister({AbiArgs[i]});
        }
    }

    // Flush all xmm registers
    for (auto xmm : any_xmm) {
        ScratchRegister({xmm});
    }
}

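// Picks a location from desired_locations, preferring locations that are neither
// allocated in the current scope nor currently holding a value.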
HostLoc RegAlloc::SelectARegister(HostLocList desired_locations) const {
    std::vector<HostLoc> candidates = desired_locations;

    // Find all locations that have not been allocated.
    auto allocated_locs = std::partition(candidates.begin(), candidates.end(), [this](auto loc){
        return !this->IsRegisterAllocated(loc);
    });
    candidates.erase(allocated_locs, candidates.end());
    ASSERT_MSG(!candidates.empty(), "All candidate registers have already been allocated");

    // Select the best location out of the available locations.
    // TODO: Actually do LRU or something. Currently we just try to pick something without a value if possible.
    std::partition(candidates.begin(), candidates.end(), [this](auto loc){
        return !this->IsRegisterOccupied(loc);
    });

    return candidates.front();
}

boost::optional<HostLoc> RegAlloc::ValueLocation(const IR::Inst* value) const {
    for (size_t i = 0; i < HostLocCount; i++)
        for (const IR::Inst* v : hostloc_info[i].values)
            if (v == value)
                return boost::make_optional<HostLoc>(static_cast<HostLoc>(i));

    return boost::none;
}

bool RegAlloc::IsRegisterOccupied(HostLoc loc) const {
    const auto& info = LocInfo(loc);

    return !info.values.empty() || info.def;
}

bool RegAlloc::IsRegisterAllocated(HostLoc loc) const {
    return LocInfo(loc).is_being_used;
}

bool RegAlloc::IsLastUse(IR::Inst* inst) const {
    if (inst->use_count > 1)
        return false;
    return LocInfo(*ValueLocation(inst)).values.size() == 1;
}

void RegAlloc::SpillRegister(HostLoc loc) {
    ASSERT_MSG(HostLocIsRegister(loc), "Only registers can be spilled");
    ASSERT_MSG(IsRegisterOccupied(loc), "There is no need to spill unoccupied registers");
    ASSERT_MSG(!IsRegisterAllocated(loc), "Registers that have been allocated must not be spilt");

    HostLoc new_loc = FindFreeSpill();

    EmitMove(new_loc, loc);

    LocInfo(new_loc) = LocInfo(loc);
    LocInfo(loc) = {};
}

HostLoc RegAlloc::FindFreeSpill() const {
    for (size_t i = 0; i < SpillCount; i++)
        if (!IsRegisterOccupied(HostLocSpill(i)))
            return HostLocSpill(i);

    ASSERT_MSG(false, "All spill locations are full");
    return HostLoc::FirstSpill; // Unreachable; satisfies the non-void return.
}

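// Called at the end of an instruction's allocation scope: releases every
// allocation, converts defs into plain values, and drops values that have no
// remaining uses.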
void RegAlloc::EndOfAllocScope() {
    for (auto& iter : hostloc_info) {
        iter.is_being_used = false;
        if (iter.def) {
            iter.values.clear();
            iter.values.emplace_back(iter.def);
            iter.def = nullptr;
        }
        if (!iter.values.empty()) {
            auto to_erase = std::remove_if(iter.values.begin(), iter.values.end(),
                                           [](const auto& inst){ return !inst->HasUses(); });
            iter.values.erase(to_erase, iter.values.end());
        }
    }
}

void RegAlloc::DecrementRemainingUses(IR::Inst* value) {
    ASSERT_MSG(value->HasUses(), "value doesn't have any remaining uses");
    value->use_count--;
}

void RegAlloc::AssertNoMoreUses() {
    ASSERT(std::all_of(hostloc_info.begin(), hostloc_info.end(), [](const auto& i){ return i.values.empty(); }));
}

void RegAlloc::Reset() {
    hostloc_info.fill({});
}

void RegAlloc::EmitMove(HostLoc to, HostLoc from) {
    if (HostLocIsXMM(to) && HostLocIsSpill(from)) {
        code->MOVSD(HostLocToX64(to), SpillToOpArg(from));
    } else if (HostLocIsSpill(to) && HostLocIsXMM(from)) {
        code->MOVSD(SpillToOpArg(to), HostLocToX64(from));
    } else if (HostLocIsXMM(to) && HostLocIsXMM(from)) {
        code->MOVAPS(HostLocToX64(to), Gen::R(HostLocToX64(from)));
    } else if (HostLocIsGPR(to) && HostLocIsSpill(from)) {
        code->MOV(64, Gen::R(HostLocToX64(to)), SpillToOpArg(from));
    } else if (HostLocIsSpill(to) && HostLocIsGPR(from)) {
        code->MOV(64, SpillToOpArg(to), Gen::R(HostLocToX64(from)));
    } else if (HostLocIsGPR(to) && HostLocIsGPR(from)) {
        code->MOV(64, Gen::R(HostLocToX64(to)), Gen::R(HostLocToX64(from)));
    } else {
        ASSERT_MSG(false, "Invalid RegAlloc::EmitMove");
    }
}

void RegAlloc::EmitExchange(HostLoc a, HostLoc b) {
    if (HostLocIsGPR(a) && HostLocIsGPR(b)) {
        code->XCHG(64, Gen::R(HostLocToX64(a)), Gen::R(HostLocToX64(b)));
    } else if (HostLocIsXMM(a) && HostLocIsXMM(b)) {
        ASSERT_MSG(false, "Exchange is unnecessary for XMM registers");
    } else {
        ASSERT_MSG(false, "Invalid RegAlloc::EmitExchange");
    }
}

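// Shared implementation behind the Use* helpers. Returns the location that now
// holds use_inst (exchanging it into a desired register when required) together
// with whether that location was already in use within the current scope.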
std::tuple<HostLoc, bool> RegAlloc::UseHostLoc(IR::Inst* use_inst, HostLocList desired_locations) {
    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
    DEBUG_ASSERT_MSG(ValueLocation(use_inst), "use_inst has not been defined");

    HostLoc current_location = *ValueLocation(use_inst);
    auto iter = std::find(desired_locations.begin(), desired_locations.end(), current_location);
    if (iter != desired_locations.end()) {
        if (LocInfo(current_location).IsDef()) {
            HostLoc new_location = SelectARegister(desired_locations);
            if (IsRegisterOccupied(new_location)) {
                SpillRegister(new_location);
            }
            EmitMove(new_location, current_location);
            LocInfo(new_location).is_being_used = true;
            LocInfo(new_location).values.emplace_back(use_inst);
            DecrementRemainingUses(use_inst);
            DEBUG_ASSERT(LocInfo(new_location).IsUse());
            return std::make_tuple(new_location, false);
        } else {
            bool was_being_used = LocInfo(current_location).is_being_used;
            ASSERT(LocInfo(current_location).IsUse() || LocInfo(current_location).IsIdle());
            LocInfo(current_location).is_being_used = true;
            DecrementRemainingUses(use_inst);
            DEBUG_ASSERT(LocInfo(current_location).IsUse());
            return std::make_tuple(current_location, was_being_used);
        }
    }

    if (HostLocIsSpill(current_location)) {
        bool was_being_used = LocInfo(current_location).is_being_used;
        LocInfo(current_location).is_being_used = true;
        DecrementRemainingUses(use_inst);
        DEBUG_ASSERT(LocInfo(current_location).IsUse());
        return std::make_tuple(current_location, was_being_used);
    } else if (HostLocIsRegister(current_location)) {
        HostLoc new_location = SelectARegister(desired_locations);
        ASSERT(LocInfo(current_location).IsIdle());
        EmitExchange(new_location, current_location);
        std::swap(LocInfo(new_location), LocInfo(current_location));
        LocInfo(new_location).is_being_used = true;
        DecrementRemainingUses(use_inst);
        DEBUG_ASSERT(LocInfo(new_location).IsUse());
        return std::make_tuple(new_location, false);
    }

    ASSERT_MSG(false, "Invalid current_location");
    return std::make_tuple(static_cast<HostLoc>(-1), false);
}

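// Materialises an immediate into reg. Zero is loaded with XOR, which has a
// shorter encoding than MOV.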
Gen::X64Reg RegAlloc::LoadImmediateIntoRegister(IR::Value imm, Gen::X64Reg reg) {
    ASSERT_MSG(imm.IsImmediate(), "imm is not an immediate");

    Gen::OpArg op_arg = ImmediateToOpArg(imm);
    if (op_arg.GetImmValue() == 0)
        code->XOR(32, Gen::R(reg), Gen::R(reg));
    else
        code->MOV(32, Gen::R(reg), op_arg);

    return reg;
}

} // namespace BackendX64
} // namespace Dynarmic