backend/arm64: Use always_false_v in static_assert

parent ce1ba43fea
commit f3af94bc7c
10 changed files with 52 additions and 64 deletions
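
The pattern this commit applies: each if constexpr chain over a template parameter (an element size, a register width, or a HostLoc kind) used to end in an else branch whose static_assert restated the list of supported values. Those asserts are replaced with static_assert(Common::always_false_v<...>), a value that is always false but formally depends on the template parameter, so it only fires when an unsupported value is actually instantiated. A bare static_assert(false) in that branch would be rejected for every instantiation (before C++23 relaxed this), because it does not depend on the template parameter; the old form worked, but repeated the supported-value list by hand in every else branch. The sketch below illustrates the idiom in isolation; it uses std::integral_constant in place of mcl::mp::lift_value and a made-up helper function, so it is an illustration rather than project code:

    #include <cstddef>
    #include <type_traits>

    // Same shape as the header added by this commit (src/dynarmic/common/always_false.h).
    namespace Dynarmic::Common {
    template<typename T>
    inline constexpr bool always_false_v = false;
    } // namespace Dynarmic::Common

    // Hypothetical helper, for illustration only.
    template<std::size_t esize>
    constexpr int LanesPer128BitVector() {
        if constexpr (esize == 32) {
            return 4;
        } else if constexpr (esize == 64) {
            return 2;
        } else {
            // The condition depends on esize through the template argument, so it is
            // only checked when an unsupported size is actually instantiated; the
            // discarded branches of the chain are never evaluated.
            static_assert(Dynarmic::Common::always_false_v<std::integral_constant<std::size_t, esize>>,
                          "unsupported element size");
            return 0;
        }
    }

    static_assert(LanesPer128BitVector<32>() == 4);
    static_assert(LanesPer128BitVector<64>() == 2);
    // LanesPer128BitVector<16>() would fail to compile with the message above.
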
@@ -2,6 +2,7 @@ include(TargetArchitectureSpecificSources)
 
 add_library(dynarmic
     backend/exception_handler.h
+    common/always_false.h
     common/cast_util.h
     common/crypto/aes.cpp
     common/crypto/aes.h
@@ -23,6 +24,10 @@ add_library(dynarmic
     common/fp/op/FPMulAdd.cpp
     common/fp/op/FPMulAdd.h
     common/fp/op/FPNeg.h
+    common/fp/op/FPRSqrtEstimate.cpp
+    common/fp/op/FPRSqrtEstimate.h
+    common/fp/op/FPRSqrtStepFused.cpp
+    common/fp/op/FPRSqrtStepFused.h
     common/fp/op/FPRecipEstimate.cpp
     common/fp/op/FPRecipEstimate.h
     common/fp/op/FPRecipExponent.cpp
@@ -31,10 +36,6 @@ add_library(dynarmic
     common/fp/op/FPRecipStepFused.h
     common/fp/op/FPRoundInt.cpp
     common/fp/op/FPRoundInt.h
-    common/fp/op/FPRSqrtEstimate.cpp
-    common/fp/op/FPRSqrtEstimate.h
-    common/fp/op/FPRSqrtStepFused.cpp
-    common/fp/op/FPRSqrtStepFused.h
     common/fp/op/FPToFixed.cpp
     common/fp/op/FPToFixed.h
     common/fp/process_exception.cpp

@@ -9,9 +9,12 @@
 #include <stdexcept>
 #include <type_traits>
 
+#include <mcl/mp/metavalue/lift_value.hpp>
 #include <mcl/stdint.hpp>
 #include <oaknut/oaknut.hpp>
 
+#include "dynarmic/common/always_false.h"
+
 namespace Dynarmic::Backend::Arm64 {
 
 constexpr oaknut::XReg Xstate{28};
@@ -30,7 +33,7 @@ constexpr auto Rscratch0() {
     } else if constexpr (bitsize == 64) {
         return Xscratch0;
     } else {
-        static_assert(bitsize == 32 || bitsize == 64);
+        static_assert(Common::always_false_v<mcl::mp::lift_value<bitsize>>);
     }
 }
 
@@ -41,7 +44,7 @@ constexpr auto Rscratch1() {
     } else if constexpr (bitsize == 64) {
         return Xscratch1;
     } else {
-        static_assert(bitsize == 32 || bitsize == 64);
+        static_assert(Common::always_false_v<mcl::mp::lift_value<bitsize>>);
     }
 }
 
@@ -3,6 +3,7 @@
  * SPDX-License-Identifier: 0BSD
  */
 
+#include <mcl/mp/metavalue/lift_value.hpp>
 #include <oaknut/oaknut.hpp>
 
 #include "dynarmic/backend/arm64/a32_jitstate.h"
@@ -11,6 +12,7 @@
 #include "dynarmic/backend/arm64/emit_context.h"
 #include "dynarmic/backend/arm64/fpsr_manager.h"
 #include "dynarmic/backend/arm64/reg_alloc.h"
+#include "dynarmic/common/always_false.h"
 #include "dynarmic/ir/basic_block.h"
 #include "dynarmic/ir/microinstruction.h"
 #include "dynarmic/ir/opcodes.h"
@@ -41,7 +43,7 @@ static void EmitTwoOpArranged(oaknut::CodeGenerator& code, EmitContext& ctx, IR:
         } else if constexpr (size == 64) {
             emit(Qresult->D2(), Qoperand->D2());
         } else {
-            static_assert(size == 8 || size == 16 || size == 32 || size == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -64,7 +66,7 @@ static void EmitTwoOpArrangedWiden(oaknut::CodeGenerator& code, EmitContext& ctx
         } else if constexpr (size == 32) {
             emit(Qresult->D2(), Qoperand->toD().S2());
         } else {
-            static_assert(size == 8 || size == 16 || size == 32);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -79,7 +81,7 @@ static void EmitTwoOpArrangedNarrow(oaknut::CodeGenerator& code, EmitContext& ct
         } else if constexpr (size == 64) {
             emit(Qresult->toD().S2(), Qoperand->D2());
         } else {
-            static_assert(size == 16 || size == 32 || size == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -102,7 +104,7 @@ static void EmitTwoOpArrangedPairWiden(oaknut::CodeGenerator& code, EmitContext&
         } else if constexpr (size == 32) {
             emit(Qresult->D2(), Qoperand->S4());
         } else {
-            static_assert(size == 8 || size == 16 || size == 32);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -117,7 +119,7 @@ static void EmitTwoOpArrangedLower(oaknut::CodeGenerator& code, EmitContext& ctx
         } else if constexpr (size == 32) {
             emit(Qresult->toD().S2(), Qoperand->toD().S2());
         } else {
-            static_assert(size == 8 || size == 16 || size == 32 || size == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -145,7 +147,7 @@ static void EmitThreeOpArranged(oaknut::CodeGenerator& code, EmitContext& ctx, I
         } else if constexpr (size == 64) {
             emit(Qresult->D2(), Qa->D2(), Qb->D2());
         } else {
-            static_assert(size == 8 || size == 16 || size == 32 || size == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -170,7 +172,7 @@ static void EmitThreeOpArrangedWiden(oaknut::CodeGenerator& code, EmitContext& c
         } else if constexpr (size == 64) {
             emit(Qresult->Q1(), Qa->toD().D1(), Qb->toD().D1());
         } else {
-            static_assert(size == 8 || size == 16 || size == 32);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -193,7 +195,7 @@ static void EmitThreeOpArrangedLower(oaknut::CodeGenerator& code, EmitContext& c
         } else if constexpr (size == 32) {
             emit(Qresult->toD().S2(), Qa->toD().S2(), Qb->toD().S2());
         } else {
-            static_assert(size == 8 || size == 16 || size == 32 || size == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -215,7 +217,7 @@ static void EmitSaturatedAccumulate(oaknut::CodeGenerator&, EmitContext& ctx, IR
     } else if constexpr (size == 64) {
         emit(Qaccumulator->D2(), Qoperand->D2());
     } else {
-        static_assert(size == 8 || size == 16 || size == 32 || size == 64);
+        static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
     }
 }
 
@@ -236,7 +238,7 @@ static void EmitImmShift(oaknut::CodeGenerator&, EmitContext& ctx, IR::Inst* ins
     } else if constexpr (size == 64) {
         emit(Qresult->D2(), Qoperand->D2(), shift_amount);
     } else {
-        static_assert(size == 8 || size == 16 || size == 32 || size == 64);
+        static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
     }
 }
 
@@ -264,7 +266,7 @@ static void EmitReduce(oaknut::CodeGenerator&, EmitContext& ctx, IR::Inst* inst,
     } else if constexpr (size == 64) {
         emit(Vresult, Qoperand->D2());
     } else {
-        static_assert(size == 8 || size == 16 || size == 32 || size == 64);
+        static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
     }
 }
 
@@ -571,11 +573,6 @@ void EmitIR<IR::Opcode::VectorDeinterleaveEvenLower32>(oaknut::CodeGenerator& co
     EmitThreeOpArrangedLower<32>(code, ctx, inst, [&](auto Vresult, auto Va, auto Vb) { code.UZP1(Vresult, Va, Vb); });
 }
 
-template<>
-void EmitIR<IR::Opcode::VectorDeinterleaveEvenLower64>(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst) {
-    EmitThreeOpArrangedLower<64>(code, ctx, inst, [&](auto Vresult, auto Va, auto Vb) { code.UZP1(Vresult, Va, Vb); });
-}
-
 template<>
 void EmitIR<IR::Opcode::VectorDeinterleaveOdd8>(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst) {
     EmitThreeOpArranged<8>(code, ctx, inst, [&](auto Vresult, auto Va, auto Vb) { code.UZP2(Vresult, Va, Vb); });
@@ -611,11 +608,6 @@ void EmitIR<IR::Opcode::VectorDeinterleaveOddLower32>(oaknut::CodeGenerator& cod
     EmitThreeOpArrangedLower<32>(code, ctx, inst, [&](auto Vresult, auto Va, auto Vb) { code.UZP2(Vresult, Va, Vb); });
 }
 
-template<>
-void EmitIR<IR::Opcode::VectorDeinterleaveOddLower64>(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst) {
-    EmitThreeOpArrangedLower<64>(code, ctx, inst, [&](auto Vresult, auto Va, auto Vb) { code.UZP2(Vresult, Va, Vb); });
-}
-
 template<>
 void EmitIR<IR::Opcode::VectorEor>(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst) {
     EmitThreeOpArranged<8>(code, ctx, inst, [&](auto Vresult, auto Va, auto Vb) { code.EOR(Vresult, Va, Vb); });

@@ -21,6 +21,7 @@
 #include "dynarmic/backend/arm64/emit_context.h"
 #include "dynarmic/backend/arm64/fpsr_manager.h"
 #include "dynarmic/backend/arm64/reg_alloc.h"
+#include "dynarmic/common/always_false.h"
 #include "dynarmic/common/cast_util.h"
 #include "dynarmic/common/fp/fpcr.h"
 #include "dynarmic/common/fp/fpsr.h"
@@ -80,7 +81,7 @@ static void EmitTwoOpArranged(oaknut::CodeGenerator& code, EmitContext& ctx, IR:
         } else if constexpr (size == 64) {
             emit(Qresult->D2(), Qa->D2());
         } else {
-            static_assert(size == 16 || size == 32 || size == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -108,7 +109,7 @@ static void EmitThreeOpArranged(oaknut::CodeGenerator& code, EmitContext& ctx, I
         } else if constexpr (size == 64) {
             emit(Qresult->D2(), Qa->D2(), Qb->D2());
         } else {
-            static_assert(size == 16 || size == 32 || size == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -131,7 +132,7 @@ static void EmitFMA(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* ins
         } else if constexpr (size == 64) {
             emit(Qresult->D2(), Qm->D2(), Qn->D2());
         } else {
-            static_assert(size == 16 || size == 32 || size == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -153,7 +154,7 @@ static void EmitFromFixed(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Ins
         } else if constexpr (size == 64) {
             emit(Qto->D2(), Qfrom->D2(), fbits);
         } else {
-            static_assert(size == 32 || size == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
         }
     });
 }
@@ -175,7 +176,7 @@ void EmitToFixed(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst)
         } else if constexpr (fsize == 64) {
             return Qto->D2();
         } else {
-            static_assert(fsize == 32 || fsize == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<fsize>>);
         }
     }();
     auto Vfrom = [&] {
@@ -184,7 +185,7 @@ void EmitToFixed(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst)
         } else if constexpr (fsize == 64) {
             return Qfrom->D2();
         } else {
-            static_assert(fsize == 32 || fsize == 64);
+            static_assert(Common::always_false_v<mcl::mp::lift_value<fsize>>);
         }
     }();
 
@@ -3,6 +3,7 @@
  * SPDX-License-Identifier: 0BSD
  */
 
+#include <mcl/mp/metavalue/lift_value.hpp>
 #include <oaknut/oaknut.hpp>
 
 #include "dynarmic/backend/arm64/a32_jitstate.h"
@@ -11,6 +12,7 @@
 #include "dynarmic/backend/arm64/emit_context.h"
 #include "dynarmic/backend/arm64/fpsr_manager.h"
 #include "dynarmic/backend/arm64/reg_alloc.h"
+#include "dynarmic/common/always_false.h"
 #include "dynarmic/ir/basic_block.h"
 #include "dynarmic/ir/microinstruction.h"
 #include "dynarmic/ir/opcodes.h"
@@ -37,7 +39,7 @@ static void Emit(oaknut::CodeGenerator&, EmitContext& ctx, IR::Inst* inst, EmitF
     } else if constexpr (size == 64) {
         emit(Qresult->D2(), Qa->D2(), Qb->D2());
     } else {
-        static_assert(size == 8 || size == 16 || size == 32 || size == 64);
+        static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
     }
 }
 
@@ -10,10 +10,12 @@
 
 #include <mcl/assert.hpp>
 #include <mcl/bit/bit_field.hpp>
+#include <mcl/mp/metavalue/lift_value.hpp>
 #include <mcl/stdint.hpp>
 
 #include "dynarmic/backend/arm64/abi.h"
 #include "dynarmic/backend/arm64/fpsr_manager.h"
+#include "dynarmic/common/always_false.h"
 
 namespace Dynarmic::Backend::Arm64 {
 
@@ -261,7 +263,7 @@ int RegAlloc::GenerateImmediate(const IR::Value& value) {
 
         return 0;
     } else {
-        static_assert(kind == HostLoc::Kind::Fpr || kind == HostLoc::Kind::Gpr || kind == HostLoc::Kind::Flags);
+        static_assert(Common::always_false_v<mcl::mp::lift_value<kind>>);
     }
 }
 
@@ -330,7 +332,7 @@ int RegAlloc::RealizeReadImpl(const IR::Value& value) {
     } else if constexpr (required_kind == HostLoc::Kind::Flags) {
         ASSERT_FALSE("A simple read from flags is likely a logic error.");
     } else {
-        static_assert(required_kind == HostLoc::Kind::Fpr || required_kind == HostLoc::Kind::Gpr || required_kind == HostLoc::Kind::Flags);
+        static_assert(Common::always_false_v<mcl::mp::lift_value<required_kind>>);
     }
 }
 
@@ -353,7 +355,7 @@ int RegAlloc::RealizeWriteImpl(const IR::Inst* value) {
         flags.SetupLocation(value);
         return 0;
     } else {
-        static_assert(kind == HostLoc::Kind::Fpr || kind == HostLoc::Kind::Gpr || kind == HostLoc::Kind::Flags);
+        static_assert(Common::always_false_v<mcl::mp::lift_value<kind>>);
    }
 }
 
@@ -372,7 +374,7 @@ int RegAlloc::RealizeReadWriteImpl(const IR::Value& read_value, const IR::Inst*
     } else if constexpr (kind == HostLoc::Kind::Flags) {
         ASSERT_FALSE("Incorrect function for ReadWrite of flags");
     } else {
-        static_assert(kind == HostLoc::Kind::Fpr || kind == HostLoc::Kind::Gpr || kind == HostLoc::Kind::Flags);
+        static_assert(Common::always_false_v<mcl::mp::lift_value<kind>>);
     }
 }
 
@@ -1155,16 +1155,6 @@ void EmitX64::EmitVectorDeinterleaveEvenLower32(EmitContext& ctx, IR::Inst* inst
     ctx.reg_alloc.DefineValue(inst, lhs);
 }
 
-void EmitX64::EmitVectorDeinterleaveEvenLower64(EmitContext& ctx, IR::Inst* inst) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    const Xbyak::Xmm lhs = ctx.reg_alloc.UseScratchXmm(args[0]);
-    [[maybe_unused]] const Xbyak::Xmm rhs = ctx.reg_alloc.UseXmm(args[1]);
-
-    code.movq(lhs, lhs);
-
-    ctx.reg_alloc.DefineValue(inst, lhs);
-}
-
 void EmitX64::EmitVectorDeinterleaveOdd8(EmitContext& ctx, IR::Inst* inst) {
     auto args = ctx.reg_alloc.GetArgumentInfo(inst);
     const Xbyak::Xmm lhs = ctx.reg_alloc.UseScratchXmm(args[0]);
@@ -1250,16 +1240,6 @@ void EmitX64::EmitVectorDeinterleaveOddLower32(EmitContext& ctx, IR::Inst* inst)
     ctx.reg_alloc.DefineValue(inst, lhs);
 }
 
-void EmitX64::EmitVectorDeinterleaveOddLower64(EmitContext& ctx, IR::Inst* inst) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    [[maybe_unused]] const Xbyak::Xmm lhs = ctx.reg_alloc.UseXmm(args[0]);
-    const Xbyak::Xmm rhs = ctx.reg_alloc.UseScratchXmm(args[1]);
-
-    code.movq(rhs, rhs);
-
-    ctx.reg_alloc.DefineValue(inst, rhs);
-}
-
 void EmitX64::EmitVectorEor(EmitContext& ctx, IR::Inst* inst) {
     EmitVectorOperation(code, ctx, inst, &Xbyak::CodeGenerator::pxor);
 }

src/dynarmic/common/always_false.h (new file, 13 lines)
@@ -0,0 +1,13 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2023 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#pragma once
+
+namespace Dynarmic::Common {
+
+template<typename T>
+inline constexpr bool always_false_v = false;
+
+} // namespace Dynarmic::Common
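
In the hunks above, the type passed to always_false_v is mcl::mp::lift_value<size> (likewise lift_value<bitsize>, lift_value<fsize>, lift_value<kind>, lift_value<required_kind>), which lifts the non-type template parameter into a type, much like std::integral_constant, so that the assert's condition depends on it; this is also why <mcl/mp/metavalue/lift_value.hpp> is now included in each affected file. The resulting assert, as it appears throughout the diff:

    static_assert(Common::always_false_v<mcl::mp::lift_value<size>>);
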
@@ -1126,8 +1126,6 @@ U128 IREmitter::VectorDeinterleaveEvenLower(size_t esize, const U128& a, const U
         return Inst<U128>(Opcode::VectorDeinterleaveEvenLower16, a, b);
     case 32:
         return Inst<U128>(Opcode::VectorDeinterleaveEvenLower32, a, b);
-    case 64:
-        return Inst<U128>(Opcode::VectorDeinterleaveEvenLower64, a, b);
     }
     UNREACHABLE();
 }
@@ -1140,8 +1138,6 @@ U128 IREmitter::VectorDeinterleaveOddLower(size_t esize, const U128& a, const U1
         return Inst<U128>(Opcode::VectorDeinterleaveOddLower16, a, b);
     case 32:
         return Inst<U128>(Opcode::VectorDeinterleaveOddLower32, a, b);
-    case 64:
-        return Inst<U128>(Opcode::VectorDeinterleaveOddLower64, a, b);
     }
     UNREACHABLE();
 }

@@ -329,7 +329,6 @@ OPCODE(VectorDeinterleaveEven64, U128, U128
 OPCODE(VectorDeinterleaveEvenLower8, U128, U128, U128 )
 OPCODE(VectorDeinterleaveEvenLower16, U128, U128, U128 )
 OPCODE(VectorDeinterleaveEvenLower32, U128, U128, U128 )
-OPCODE(VectorDeinterleaveEvenLower64, U128, U128, U128 )
 OPCODE(VectorDeinterleaveOdd8, U128, U128, U128 )
 OPCODE(VectorDeinterleaveOdd16, U128, U128, U128 )
 OPCODE(VectorDeinterleaveOdd32, U128, U128, U128 )
@@ -337,7 +336,6 @@ OPCODE(VectorDeinterleaveOdd64, U128, U128
 OPCODE(VectorDeinterleaveOddLower8, U128, U128, U128 )
 OPCODE(VectorDeinterleaveOddLower16, U128, U128, U128 )
 OPCODE(VectorDeinterleaveOddLower32, U128, U128, U128 )
-OPCODE(VectorDeinterleaveOddLower64, U128, U128, U128 )
 OPCODE(VectorEor, U128, U128, U128 )
 OPCODE(VectorEqual8, U128, U128, U128 )
 OPCODE(VectorEqual16, U128, U128, U128 )