BackendX64/RegAlloc: Add RegisterAddDef, UseDefOpArg, UseOpArg

MerryMage 2016-08-05 14:10:39 +01:00
parent 01cfaf0286
commit 92bd5f214b
2 changed files with 240 additions and 119 deletions

@@ -14,6 +14,22 @@
namespace Dynarmic {
namespace BackendX64 {
static Gen::OpArg ImmediateToOpArg(const IR::Value& imm) {
switch (imm.GetType()) {
case IR::Type::U1:
return Gen::Imm32(imm.GetU1());
break;
case IR::Type::U8:
return Gen::Imm32(imm.GetU8());
break;
case IR::Type::U32:
return Gen::Imm32(imm.GetU32());
break;
default:
ASSERT_MSG(false, "This should never happen.");
}
}
static Gen::X64Reg HostLocToX64(HostLoc loc) {
DEBUG_ASSERT(HostLocIsRegister(loc));
// HostLoc is ordered such that the numbers line up.
@@ -30,7 +46,7 @@ static Gen::OpArg SpillToOpArg(HostLoc loc) {
Gen::X64Reg RegAlloc::DefRegister(IR::Inst* def_inst, HostLocList desired_locations) {
DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
DEBUG_ASSERT_MSG(ValueLocations(def_inst).empty(), "def_inst has already been defined");
DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
HostLoc location = SelectARegister(desired_locations);
@@ -38,11 +54,28 @@ Gen::X64Reg RegAlloc::DefRegister(IR::Inst* def_inst, HostLocList desired_locati
SpillRegister(location);
}
// Update state
LocInfo(location) = HostLocInfo{def_inst, HostLocState::Def};
LocInfo(location).is_being_used = true;
LocInfo(location).def = def_inst;
DEBUG_ASSERT(LocInfo(location).IsDef());
return HostLocToX64(location);
}
void RegAlloc::RegisterAddDef(IR::Inst* def_inst, const IR::Value& use_inst) {
DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
if (use_inst.IsImmediate()) {
LoadImmediateIntoRegister(use_inst, DefRegister(def_inst, any_gpr));
return;
}
DEBUG_ASSERT_MSG(ValueLocation(use_inst.GetInst()), "use_inst must already be defined");
HostLoc location = *ValueLocation(use_inst.GetInst());
LocInfo(location).values.emplace_back(def_inst);
DecrementRemainingUses(use_inst.GetInst());
DEBUG_ASSERT(LocInfo(location).IsIdle());
}
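
For orientation, a sketch of how a caller might use the new RegisterAddDef: an instruction whose result is identical to one of its arguments can share that argument's host location instead of copying it. EmitIdentity, IR::Inst::GetArg and the reg_alloc member are assumed names for illustration, not taken from this commit.

// Hedged sketch of a pass-through emitter built on RegisterAddDef.
void EmitX64::EmitIdentity(IR::Block&, IR::Inst* inst) {
    // The result equals argument 0, so no MOV is emitted: the result is
    // registered against the argument's existing HostLoc (or, if the
    // argument is an immediate, loaded into a freshly defined register).
    reg_alloc.RegisterAddDef(inst, inst->GetArg(0));
}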
Gen::X64Reg RegAlloc::UseDefRegister(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations) {
if (!use_value.IsImmediate()) {
return UseDefRegister(use_value.GetInst(), def_inst, desired_locations);
@@ -53,16 +86,51 @@ Gen::X64Reg RegAlloc::UseDefRegister(IR::Value use_value, IR::Inst* def_inst, Ho
Gen::X64Reg RegAlloc::UseDefRegister(IR::Inst* use_inst, IR::Inst* def_inst, HostLocList desired_locations) {
DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
DEBUG_ASSERT_MSG(ValueLocations(def_inst).empty(), "def_inst has already been defined");
DEBUG_ASSERT_MSG(!ValueLocations(use_inst).empty(), "use_inst has not been defined");
DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
DEBUG_ASSERT_MSG(ValueLocation(use_inst), "use_inst has not been defined");
if (IsLastUse(use_inst)) {
HostLoc current_location = *ValueLocation(use_inst);
auto& loc_info = LocInfo(current_location);
if (!loc_info.IsIdle()) {
loc_info.is_being_used = true;
loc_info.def = def_inst;
DEBUG_ASSERT(loc_info.IsUseDef());
return HostLocToX64(current_location);
}
}
// TODO: Optimize the case when this is the last use_inst use.
Gen::X64Reg use_reg = UseRegister(use_inst, any_gpr);
Gen::X64Reg def_reg = DefRegister(def_inst, desired_locations);
code->MOV(64, Gen::R(def_reg), Gen::R(use_reg));
return def_reg;
}
std::tuple<Gen::OpArg, Gen::X64Reg> RegAlloc::UseDefOpArg(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations) {
DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
DEBUG_ASSERT_MSG(use_value.IsImmediate() || ValueLocation(use_value.GetInst()), "use_inst has not been defined");
if (!use_value.IsImmediate()) {
IR::Inst* use_inst = use_value.GetInst();
if (IsLastUse(use_inst)) {
HostLoc current_location = *ValueLocation(use_inst);
auto& loc_info = LocInfo(current_location);
if (!loc_info.IsIdle()) {
loc_info.is_being_used = true;
loc_info.def = def_inst;
DEBUG_ASSERT(loc_info.IsUseDef());
return std::make_tuple(Gen::R(HostLocToX64(current_location)), HostLocToX64(current_location));
}
}
}
Gen::OpArg use_oparg = UseOpArg(use_value, any_gpr);
Gen::X64Reg def_reg = DefRegister(def_inst, desired_locations);
return std::make_tuple(use_oparg, def_reg);
}
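
For orientation, a sketch of how an emitter might use UseDefOpArg for x64's two-operand instruction form. EmitAdd, GetArg, the code/reg_alloc members and the emitter's MOV/ADD/IsSimpleReg signatures are assumptions here, not taken from this commit.

// Hedged sketch: the result register starts as a copy of arg0, while arg1
// may be consumed directly from a register, a spill slot or an immediate.
void EmitX64::EmitAdd(IR::Block&, IR::Inst* inst) {
    Gen::OpArg lhs;
    Gen::X64Reg result;
    std::tie(lhs, result) = reg_alloc.UseDefOpArg(inst->GetArg(0), inst, any_gpr);
    Gen::OpArg rhs = reg_alloc.UseOpArg(inst->GetArg(1), any_gpr);
    if (!lhs.IsSimpleReg() || lhs.GetSimpleReg() != result) {
        code->MOV(32, Gen::R(result), lhs); // skipped when the fast path aliased use and def
    }
    code->ADD(32, Gen::R(result), rhs);
}

When the use is the last use of a register-resident value, UseDefOpArg returns the same register for both elements of the tuple and the copy above is skipped.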
Gen::X64Reg RegAlloc::UseRegister(IR::Value use_value, HostLocList desired_locations) {
if (!use_value.IsImmediate()) {
return UseRegister(use_value.GetInst(), desired_locations);
@@ -72,48 +140,52 @@ Gen::X64Reg RegAlloc::UseRegister(IR::Value use_value, HostLocList desired_locat
}
Gen::X64Reg RegAlloc::UseRegister(IR::Inst* use_inst, HostLocList desired_locations) {
DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
DEBUG_ASSERT_MSG(!ValueLocations(use_inst).empty(), "use_inst has not been defined");
HostLoc current_location = ValueLocations(use_inst).front();
auto iter = std::find(desired_locations.begin(), desired_locations.end(), current_location);
if (iter != desired_locations.end()) {
ASSERT(LocInfo(current_location).state == HostLocState::Idle ||
LocInfo(current_location).state == HostLocState::Use);
// Update state
LocInfo(current_location).state = HostLocState::Use;
DecrementRemainingUses(use_inst);
HostLoc current_location;
bool was_being_used;
std::tie(current_location, was_being_used) = UseHostLoc(use_inst, desired_locations);
if (HostLocIsRegister(current_location)) {
return HostLocToX64(current_location);
}
} else if (HostLocIsSpill(current_location)) {
HostLoc new_location = SelectARegister(desired_locations);
if (HostLocIsSpill(current_location)) {
if (IsRegisterOccupied(new_location)) {
SpillRegister(new_location);
}
EmitMove(new_location, current_location);
if (!was_being_used) {
LocInfo(new_location) = LocInfo(current_location);
LocInfo(new_location).state = HostLocState::Use;
LocInfo(current_location) = {};
DecrementRemainingUses(use_inst);
} else if (HostLocIsRegister(current_location)) {
ASSERT(LocInfo(current_location).state == HostLocState::Idle);
EmitExchange(new_location, current_location);
std::swap(LocInfo(new_location), LocInfo(current_location));
LocInfo(new_location).state = HostLocState::Use;
DecrementRemainingUses(use_inst);
DEBUG_ASSERT(LocInfo(new_location).IsUse());
} else {
ASSERT_MSG(0, "Invalid current_location");
LocInfo(new_location).is_being_used = true;
DEBUG_ASSERT(LocInfo(new_location).IsScratch());
}
return HostLocToX64(new_location);
}
return HostLocToX64(new_location);
ASSERT_MSG(false, "Unknown current_location type");
return Gen::INVALID_REG;
}
Gen::OpArg RegAlloc::UseOpArg(IR::Value use_value, HostLocList desired_locations) {
if (use_value.IsImmediate()) {
return ImmediateToOpArg(use_value);
}
IR::Inst* use_inst = use_value.GetInst();
HostLoc current_location;
bool was_being_used;
std::tie(current_location, was_being_used) = UseHostLoc(use_inst, desired_locations);
if (HostLocIsRegister(current_location)) {
return Gen::R(HostLocToX64(current_location));
} else if (HostLocIsSpill(current_location)) {
return SpillToOpArg(current_location);
}
ASSERT_MSG(false, "Unknown current_location type");
return Gen::R(Gen::INVALID_REG);
}
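
The point of returning an OpArg is that a spilled or immediate operand can be fed to an instruction without first forcing it into a register. A small hedged fragment follows; the CMP signature is assumed from the Gen emitter and GetArg is assumed on IR::Inst.

// Hedged sketch: rhs is encoded directly, wherever it currently lives.
Gen::X64Reg lhs = reg_alloc.UseRegister(inst->GetArg(0), any_gpr);
Gen::OpArg rhs = reg_alloc.UseOpArg(inst->GetArg(1), any_gpr);
code->CMP(32, Gen::R(lhs), rhs); // rhs may be a register, a spill slot or an immediate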
Gen::X64Reg RegAlloc::UseScratchRegister(IR::Value use_value, HostLocList desired_locations) {
@@ -126,40 +198,37 @@ Gen::X64Reg RegAlloc::UseScratchRegister(IR::Value use_value, HostLocList desire
Gen::X64Reg RegAlloc::UseScratchRegister(IR::Inst* use_inst, HostLocList desired_locations) {
DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
DEBUG_ASSERT_MSG(!ValueLocations(use_inst).empty(), "use_inst has not been defined");
DEBUG_ASSERT_MSG(ValueLocation(use_inst), "use_inst has not been defined");
ASSERT_MSG(use_inst->use_count != 0, "use_inst ran out of uses. (Use-d an IR::Inst* too many times)");
HostLoc current_location = ValueLocations(use_inst).front();
HostLoc current_location = *ValueLocation(use_inst);
HostLoc new_location = SelectARegister(desired_locations);
if (HostLocIsSpill(current_location)) {
if (IsRegisterOccupied(new_location)) {
SpillRegister(new_location);
}
if (HostLocIsSpill(current_location)) {
EmitMove(new_location, current_location);
LocInfo(new_location).state = HostLocState::Scratch;
LocInfo(new_location).is_being_used = true;
DecrementRemainingUses(use_inst);
DEBUG_ASSERT(LocInfo(new_location).IsScratch());
return HostLocToX64(new_location);
} else if (HostLocIsRegister(current_location)) {
ASSERT(LocInfo(current_location).state == HostLocState::Idle);
ASSERT(LocInfo(current_location).IsIdle());
if (IsRegisterOccupied(new_location)) {
SpillRegister(new_location);
if (current_location != new_location) {
EmitMove(new_location, current_location);
}
} else {
EmitMove(new_location, current_location);
}
LocInfo(new_location).state = HostLocState::Scratch;
LocInfo(new_location).is_being_used = true;
LocInfo(new_location).values.clear();
DecrementRemainingUses(use_inst);
} else {
ASSERT_MSG(0, "Invalid current_location");
DEBUG_ASSERT(LocInfo(new_location).IsScratch());
return HostLocToX64(new_location);
}
return HostLocToX64(new_location);
ASSERT_MSG(0, "Invalid current_location");
return Gen::INVALID_REG;
}
Gen::X64Reg RegAlloc::ScratchRegister(HostLocList desired_locations) {
@@ -172,31 +241,12 @@ Gen::X64Reg RegAlloc::ScratchRegister(HostLocList desired_locations) {
}
// Update state
LocInfo(location).state = HostLocState::Scratch;
LocInfo(location).is_being_used = true;
DEBUG_ASSERT(LocInfo(location).IsScratch());
return HostLocToX64(location);
}
Gen::X64Reg RegAlloc::LoadImmediateIntoRegister(IR::Value imm, Gen::X64Reg reg) {
ASSERT_MSG(imm.IsImmediate(), "imm is not an immediate");
switch (imm.GetType()) {
case IR::Type::U1:
code->MOV(32, R(reg), Gen::Imm32(imm.GetU1()));
break;
case IR::Type::U8:
code->MOV(32, R(reg), Gen::Imm32(imm.GetU8()));
break;
case IR::Type::U32:
code->MOV(32, R(reg), Gen::Imm32(imm.GetU32()));
break;
default:
ASSERT_MSG(false, "This should never happen.");
}
return reg;
}
void RegAlloc::HostCall(IR::Inst* result_def, IR::Value arg0_use, IR::Value arg1_use, IR::Value arg2_use, IR::Value arg3_use) {
constexpr HostLoc AbiReturn = HostLoc::RAX;
#ifdef _WIN32
@@ -255,27 +305,31 @@ HostLoc RegAlloc::SelectARegister(HostLocList desired_locations) const {
return candidates.front();
}
std::vector<HostLoc> RegAlloc::ValueLocations(IR::Inst* value) const {
std::vector<HostLoc> locations;
boost::optional<HostLoc> RegAlloc::ValueLocation(IR::Inst* value) const {
for (size_t i = 0; i < HostLocCount; i++)
if (hostloc_info[i].value == value)
locations.emplace_back(static_cast<HostLoc>(i));
for (IR::Inst* v : hostloc_info[i].values)
if (v == value)
return {static_cast<HostLoc>(i)};
return locations;
return {};
}
bool RegAlloc::IsRegisterOccupied(HostLoc loc) const {
return GetLocInfo(loc).value != nullptr;
return !GetLocInfo(loc).values.empty() || GetLocInfo(loc).def;
}
bool RegAlloc::IsRegisterAllocated(HostLoc loc) const {
return GetLocInfo(loc).state != HostLocState::Idle;
return GetLocInfo(loc).is_being_used;
}
bool RegAlloc::IsLastUse(IR::Inst* inst) const {
if (inst->use_count > 1)
return false;
return GetLocInfo(*ValueLocation(inst)).values.size() == 1;
}
void RegAlloc::SpillRegister(HostLoc loc) {
ASSERT_MSG(HostLocIsRegister(loc), "Only registers can be spilled");
ASSERT_MSG(LocInfo(loc).state == HostLocState::Idle, "Allocated registers cannot be spilled");
ASSERT_MSG(IsRegisterOccupied(loc), "There is no need to spill unoccupied registers");
ASSERT_MSG(!IsRegisterAllocated(loc), "Registers that have been allocated must not be spilt");
@@ -283,8 +337,8 @@ void RegAlloc::SpillRegister(HostLoc loc) {
EmitMove(new_loc, loc);
LocInfo(new_loc).value = LocInfo(loc).value;
LocInfo(loc).value = nullptr;
LocInfo(new_loc) = LocInfo(loc);
LocInfo(loc) = {};
}
HostLoc RegAlloc::FindFreeSpill() const {
@@ -297,9 +351,16 @@ HostLoc RegAlloc::FindFreeSpill() const {
void RegAlloc::EndOfAllocScope() {
for (auto& iter : hostloc_info) {
iter.state = HostLocState::Idle;
if (iter.value && iter.value->use_count == 0)
iter.value = nullptr;
iter.is_being_used = false;
if (iter.def) {
iter.values.clear();
iter.values.emplace_back(iter.def);
iter.def = nullptr;
} else if (!iter.values.empty()) {
auto to_erase = std::remove_if(iter.values.begin(), iter.values.end(),
[](const auto& inst){ return inst->use_count <= 0; });
iter.values.erase(to_erase, iter.values.end());
}
}
}
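
A hedged reading of the new end-of-scope behaviour (not text from the commit): a result that was Def'd while emitting this instruction is demoted to an ordinary value so that later instructions can Use it, and values whose use_count has reached zero are dropped.

// Illustration of one hostloc_info entry across EndOfAllocScope(), where %5
// is a hypothetical instruction whose result was just defined here:
//   before: is_being_used = true,  def = %5,      values = {}
//   after:  is_being_used = false, def = nullptr, values = {%5}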
@@ -309,7 +370,7 @@ void RegAlloc::DecrementRemainingUses(IR::Inst* value) {
}
void RegAlloc::AssertNoMoreUses() {
ASSERT(std::all_of(hostloc_info.begin(), hostloc_info.end(), [](const auto& i){ return !i.value; }));
ASSERT(std::all_of(hostloc_info.begin(), hostloc_info.end(), [](const auto& i){ return i.values.empty(); }));
}
void RegAlloc::Reset() {
@@ -317,24 +378,10 @@ void RegAlloc::Reset() {
}
void RegAlloc::EmitMove(HostLoc to, HostLoc from) {
const auto& from_info = LocInfo(from);
if (HostLocIsXMM(to) && HostLocIsSpill(from)) {
if (from_info.GetType() == IR::Type::F64) {
code->MOVSD(HostLocToX64(to), SpillToOpArg(from));
} else if (from_info.GetType() == IR::Type::F32) {
code->MOVSS(HostLocToX64(to), SpillToOpArg(from));
} else {
ASSERT_MSG(false, "Tried to move a non-fp value into an XMM register");
}
} else if (HostLocIsSpill(to) && HostLocIsXMM(from)) {
if (from_info.GetType() == IR::Type::F64) {
code->MOVSD(SpillToOpArg(to), HostLocToX64(from));
} else if (from_info.GetType() == IR::Type::F32) {
code->MOVSS(SpillToOpArg(to), HostLocToX64(from));
} else {
ASSERT_MSG(false, "Tried to move a non-fp value into an XMM register");
}
code->MOVSD(HostLocToX64(to), SpillToOpArg(from));
} else if (HostLocIsXMM(to) && HostLocIsXMM(from)) {
code->MOVAPS(HostLocToX64(to), Gen::R(HostLocToX64(from)));
} else if (HostLocIsGPR(to) && HostLocIsSpill(from)) {
@@ -358,5 +405,60 @@ void RegAlloc::EmitExchange(HostLoc a, HostLoc b) {
}
}
std::tuple<HostLoc, bool> RegAlloc::UseHostLoc(IR::Inst* use_inst, HostLocList desired_locations) {
DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
DEBUG_ASSERT_MSG(ValueLocation(use_inst), "use_inst has not been defined");
HostLoc current_location = *ValueLocation(use_inst);
auto iter = std::find(desired_locations.begin(), desired_locations.end(), current_location);
if (iter != desired_locations.end()) {
if (LocInfo(current_location).IsDef()) {
HostLoc new_location = SelectARegister(desired_locations);
if (IsRegisterOccupied(new_location)) {
SpillRegister(new_location);
}
EmitMove(new_location, current_location);
LocInfo(new_location).is_being_used = true;
LocInfo(new_location).values.emplace_back(use_inst);
DecrementRemainingUses(use_inst);
DEBUG_ASSERT(LocInfo(new_location).IsUse());
return std::make_tuple(new_location, false);
} else {
bool was_being_used = LocInfo(current_location).is_being_used;
ASSERT(LocInfo(current_location).IsUse() || LocInfo(current_location).IsIdle());
LocInfo(current_location).is_being_used = true;
DecrementRemainingUses(use_inst);
DEBUG_ASSERT(LocInfo(current_location).IsUse());
return std::make_tuple(current_location, was_being_used);
}
}
if (HostLocIsSpill(current_location)) {
bool was_being_used = LocInfo(current_location).is_being_used;
LocInfo(current_location).is_being_used = true;
DecrementRemainingUses(use_inst);
DEBUG_ASSERT(LocInfo(current_location).IsUse());
return std::make_tuple(current_location, was_being_used);
} else if (HostLocIsRegister(current_location)) {
HostLoc new_location = SelectARegister(desired_locations);
ASSERT(LocInfo(current_location).IsIdle());
EmitExchange(new_location, current_location);
std::swap(LocInfo(new_location), LocInfo(current_location));
LocInfo(new_location).is_being_used = true;
DecrementRemainingUses(use_inst);
DEBUG_ASSERT(LocInfo(new_location).IsUse());
return std::make_tuple(new_location, false);
}
ASSERT_MSG(0, "Invalid current_location");
return std::make_tuple(static_cast<HostLoc>(-1), false);
}
Gen::X64Reg RegAlloc::LoadImmediateIntoRegister(IR::Value imm, Gen::X64Reg reg) {
ASSERT_MSG(imm.IsImmediate(), "imm is not an immediate");
code->MOV(32, Gen::R(reg), ImmediateToOpArg(imm));
return reg;
}
} // namespace BackendX64
} // namespace Dynarmic

@@ -101,18 +101,20 @@ public:
/// Late-def
Gen::X64Reg DefRegister(IR::Inst* def_inst, HostLocList desired_locations);
void RegisterAddDef(IR::Inst* def_inst, const IR::Value& use_inst);
/// Early-use, Late-def
Gen::X64Reg UseDefRegister(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations);
Gen::X64Reg UseDefRegister(IR::Inst* use_inst, IR::Inst* def_inst, HostLocList desired_locations);
std::tuple<Gen::OpArg, Gen::X64Reg> UseDefOpArg(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations);
/// Early-use
Gen::X64Reg UseRegister(IR::Value use_value, HostLocList desired_locations);
Gen::X64Reg UseRegister(IR::Inst* use_inst, HostLocList desired_locations);
Gen::OpArg UseOpArg(IR::Value use_value, HostLocList desired_locations);
/// Early-use, Destroyed
Gen::X64Reg UseScratchRegister(IR::Value use_value, HostLocList desired_locations);
Gen::X64Reg UseScratchRegister(IR::Inst* use_inst, HostLocList desired_locations);
/// Early-def, Late-use, single-use
Gen::X64Reg ScratchRegister(HostLocList desired_locations);
Gen::X64Reg LoadImmediateIntoRegister(IR::Value imm, Gen::X64Reg reg);
/// Late-def for result register, Early-use for all arguments. Each value is placed into a register according to the host ABI.
void HostCall(IR::Inst* result_def = nullptr, IR::Value arg0_use = {}, IR::Value arg1_use = {}, IR::Value arg2_use = {}, IR::Value arg3_use = {});
@@ -129,24 +131,41 @@ public:
private:
HostLoc SelectARegister(HostLocList desired_locations) const;
std::vector<HostLoc> ValueLocations(IR::Inst* value) const;
boost::optional<HostLoc> ValueLocation(IR::Inst* value) const;
bool IsRegisterOccupied(HostLoc loc) const;
bool IsRegisterAllocated(HostLoc loc) const;
bool IsLastUse(IR::Inst* inst) const;
std::tuple<HostLoc, bool> UseHostLoc(IR::Inst* use_inst, HostLocList desired_locations);
void EmitMove(HostLoc to, HostLoc from);
void EmitExchange(HostLoc a, HostLoc b);
Gen::X64Reg LoadImmediateIntoRegister(IR::Value imm, Gen::X64Reg reg);
void SpillRegister(HostLoc loc);
HostLoc FindFreeSpill() const;
Gen::XEmitter* code = nullptr;
struct HostLocInfo {
HostLocInfo() = default;
HostLocInfo(IR::Inst* value, HostLocState state) : value(value), state(state) {}
IR::Inst* value = nullptr;
HostLocState state = HostLocState::Idle;
IR::Type GetType() const {
return value ? value->GetType() : IR::Type::Void;
std::vector<IR::Inst*> values; // early value
IR::Inst* def = nullptr; // late value
bool is_being_used = false;
bool IsIdle() const {
return !is_being_used;
}
bool IsScratch() const {
return is_being_used && !def && values.empty();
}
bool IsUse() const {
return is_being_used && !def && !values.empty();
}
bool IsDef() const {
return is_being_used && def && values.empty();
}
bool IsUseDef() const {
return is_being_used && def && !values.empty();
}
};
std::array<HostLocInfo, HostLocCount> hostloc_info;
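
For reference, a hedged summary of the state space the new predicates describe, derived from the definitions above rather than stated in the header:

// is_being_used   def        values       predicate
// false           any        any          IsIdle()
// true            nullptr    empty        IsScratch()
// true            nullptr    non-empty    IsUse()
// true            non-null   empty        IsDef()
// true            non-null   non-empty    IsUseDef()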