diff --git a/src/dynarmic/CMakeLists.txt b/src/dynarmic/CMakeLists.txt
index 992f3c4f..aa1a5ac6 100644
--- a/src/dynarmic/CMakeLists.txt
+++ b/src/dynarmic/CMakeLists.txt
@@ -386,6 +386,8 @@ elseif(ARCHITECTURE STREQUAL "arm64")
         backend/arm64/emit_arm64_cryptography.cpp
         backend/arm64/emit_arm64_data_processing.cpp
         backend/arm64/emit_arm64_floating_point.cpp
+        backend/arm64/emit_arm64_memory.cpp
+        backend/arm64/emit_arm64_memory.h
         backend/arm64/emit_arm64_packed.cpp
         backend/arm64/emit_arm64_saturation.cpp
         backend/arm64/emit_arm64_vector.cpp
diff --git a/src/dynarmic/backend/arm64/a32_address_space.cpp b/src/dynarmic/backend/arm64/a32_address_space.cpp
index 333d21fa..1957af15 100644
--- a/src/dynarmic/backend/arm64/a32_address_space.cpp
+++ b/src/dynarmic/backend/arm64/a32_address_space.cpp
@@ -288,6 +288,7 @@ EmitConfig A32AddressSpace::GetEmitConfig() {
 
         .state_nzcv_offset = offsetof(A32JitState, cpsr_nzcv),
         .state_fpsr_offset = offsetof(A32JitState, fpsr),
+        .state_exclusive_state_offset = offsetof(A32JitState, exclusive_state),
 
         .coprocessors = conf.coprocessors,
     };
diff --git a/src/dynarmic/backend/arm64/a64_address_space.cpp b/src/dynarmic/backend/arm64/a64_address_space.cpp
index 96ce49b8..d21f6090 100644
--- a/src/dynarmic/backend/arm64/a64_address_space.cpp
+++ b/src/dynarmic/backend/arm64/a64_address_space.cpp
@@ -408,6 +408,7 @@ EmitConfig A64AddressSpace::GetEmitConfig() {
 
         .state_nzcv_offset = offsetof(A64JitState, cpsr_nzcv),
         .state_fpsr_offset = offsetof(A64JitState, fpsr),
+        .state_exclusive_state_offset = offsetof(A64JitState, exclusive_state),
 
         .coprocessors{},
     };
diff --git a/src/dynarmic/backend/arm64/emit_arm64.h b/src/dynarmic/backend/arm64/emit_arm64.h
index 20d10f92..7b5eebec 100644
--- a/src/dynarmic/backend/arm64/emit_arm64.h
+++ b/src/dynarmic/backend/arm64/emit_arm64.h
@@ -120,6 +120,7 @@ struct EmitConfig {
     // State offsets
     size_t state_nzcv_offset;
     size_t state_fpsr_offset;
+    size_t state_exclusive_state_offset;
 
     // A32 specific
     std::array<std::shared_ptr<A32::Coprocessor>, 16> coprocessors{};
diff --git a/src/dynarmic/backend/arm64/emit_arm64_a32_memory.cpp b/src/dynarmic/backend/arm64/emit_arm64_a32_memory.cpp
index ef961d57..4fffa996 100644
--- a/src/dynarmic/backend/arm64/emit_arm64_a32_memory.cpp
+++ b/src/dynarmic/backend/arm64/emit_arm64_a32_memory.cpp
@@ -8,9 +8,9 @@
 #include "dynarmic/backend/arm64/a32_jitstate.h"
 #include "dynarmic/backend/arm64/abi.h"
 #include "dynarmic/backend/arm64/emit_arm64.h"
+#include "dynarmic/backend/arm64/emit_arm64_memory.h"
 #include "dynarmic/backend/arm64/emit_context.h"
 #include "dynarmic/backend/arm64/reg_alloc.h"
-#include "dynarmic/ir/acc_type.h"
 #include "dynarmic/ir/basic_block.h"
 #include "dynarmic/ir/microinstruction.h"
 #include "dynarmic/ir/opcodes.h"
@@ -19,72 +19,6 @@ namespace Dynarmic::Backend::Arm64 {
 
 using namespace oaknut::util;
 
-static bool IsOrdered(IR::AccType acctype) {
-    return acctype == IR::AccType::ORDERED || acctype == IR::AccType::ORDEREDRW || acctype == IR::AccType::LIMITEDORDERED;
-}
-
-static void EmitReadMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    ctx.reg_alloc.PrepareForCall({}, args[1]);
-    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
-
-    EmitRelocation(code, ctx, fn);
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    ctx.reg_alloc.DefineAsRegister(inst, X0);
-}
-
-static void EmitExclusiveReadMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    ctx.reg_alloc.PrepareForCall({}, args[1]);
-    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
-
-    code.MOV(Wscratch0, 1);
-    code.STRB(Wscratch0, Xstate, offsetof(A32JitState, exclusive_state));
-    EmitRelocation(code, ctx, fn);
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    ctx.reg_alloc.DefineAsRegister(inst, X0);
-}
-
-static void EmitWriteMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    ctx.reg_alloc.PrepareForCall({}, args[1], args[2]);
-    const bool ordered = IsOrdered(args[3].GetImmediateAccType());
-
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    EmitRelocation(code, ctx, fn);
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-}
-
-static void EmitExclusiveWriteMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    ctx.reg_alloc.PrepareForCall({}, args[1], args[2]);
-    const bool ordered = IsOrdered(args[3].GetImmediateAccType());
-
-    oaknut::Label end;
-
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    code.MOV(W0, 1);
-    code.LDRB(Wscratch0, Xstate, offsetof(A32JitState, exclusive_state));
-    code.CBZ(Wscratch0, end);
-    code.STRB(WZR, Xstate, offsetof(A32JitState, exclusive_state));
-    EmitRelocation(code, ctx, fn);
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    code.l(end);
-    ctx.reg_alloc.DefineAsRegister(inst, X0);
-}
-
 template<>
 void EmitIR<IR::Opcode::A32ClearExclusive>(oaknut::CodeGenerator& code, EmitContext&, IR::Inst*) {
     code.STR(WZR, Xstate, offsetof(A32JitState, exclusive_state));
diff --git a/src/dynarmic/backend/arm64/emit_arm64_a64_memory.cpp b/src/dynarmic/backend/arm64/emit_arm64_a64_memory.cpp
index 2bab7845..43f0ea4d 100644
--- a/src/dynarmic/backend/arm64/emit_arm64_a64_memory.cpp
+++ b/src/dynarmic/backend/arm64/emit_arm64_a64_memory.cpp
@@ -8,6 +8,7 @@
 #include "dynarmic/backend/arm64/a64_jitstate.h"
 #include "dynarmic/backend/arm64/abi.h"
 #include "dynarmic/backend/arm64/emit_arm64.h"
+#include "dynarmic/backend/arm64/emit_arm64_memory.h"
 #include "dynarmic/backend/arm64/emit_context.h"
 #include "dynarmic/backend/arm64/reg_alloc.h"
 #include "dynarmic/ir/acc_type.h"
@@ -19,100 +20,6 @@ namespace Dynarmic::Backend::Arm64 {
 
 using namespace oaknut::util;
 
-static bool IsOrdered(IR::AccType acctype) {
-    return acctype == IR::AccType::ORDERED || acctype == IR::AccType::ORDEREDRW || acctype == IR::AccType::LIMITEDORDERED;
-}
-
-static void EmitReadMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    ctx.reg_alloc.PrepareForCall({}, args[1]);
-    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
-
-    EmitRelocation(code, ctx, fn);
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    ctx.reg_alloc.DefineAsRegister(inst, X0);
-}
-
-static void EmitReadMemory128(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    ctx.reg_alloc.PrepareForCall({}, args[1]);
-    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
-
-    EmitRelocation(code, ctx, fn);
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    code.MOV(Q8.B16(), Q0.B16());
-    ctx.reg_alloc.DefineAsRegister(inst, Q8);
-}
-
-static void EmitExclusiveReadMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    ctx.reg_alloc.PrepareForCall({}, args[1]);
-    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
-
-    code.MOV(Wscratch0, 1);
-    code.STRB(Wscratch0, Xstate, offsetof(A64JitState, exclusive_state));
-    EmitRelocation(code, ctx, fn);
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    ctx.reg_alloc.DefineAsRegister(inst, X0);
-}
-
-static void EmitExclusiveReadMemory128(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    ctx.reg_alloc.PrepareForCall({}, args[1]);
-    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
-
-    code.MOV(Wscratch0, 1);
-    code.STRB(Wscratch0, Xstate, offsetof(A64JitState, exclusive_state));
-    EmitRelocation(code, ctx, fn);
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    code.MOV(Q8.B16(), Q0.B16());
-    ctx.reg_alloc.DefineAsRegister(inst, Q8);
-}
-
-static void EmitWriteMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    ctx.reg_alloc.PrepareForCall({}, args[1], args[2]);
-    const bool ordered = IsOrdered(args[3].GetImmediateAccType());
-
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    EmitRelocation(code, ctx, fn);
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-}
-
-static void EmitExclusiveWriteMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
-    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    ctx.reg_alloc.PrepareForCall({}, args[1], args[2]);
-    const bool ordered = IsOrdered(args[3].GetImmediateAccType());
-
-    oaknut::Label end;
-
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    code.MOV(W0, 1);
-    code.LDRB(Wscratch0, Xstate, offsetof(A64JitState, exclusive_state));
-    code.CBZ(Wscratch0, end);
-    code.STRB(WZR, Xstate, offsetof(A64JitState, exclusive_state));
-    EmitRelocation(code, ctx, fn);
-    if (ordered) {
-        code.DMB(oaknut::BarrierOp::ISH);
-    }
-    code.l(end);
-    ctx.reg_alloc.DefineAsRegister(inst, X0);
-}
-
 template<>
 void EmitIR<IR::Opcode::A64ClearExclusive>(oaknut::CodeGenerator& code, EmitContext&, IR::Inst*) {
     code.STR(WZR, Xstate, offsetof(A64JitState, exclusive_state));
diff --git a/src/dynarmic/backend/arm64/emit_arm64_memory.cpp b/src/dynarmic/backend/arm64/emit_arm64_memory.cpp
new file mode 100644
index 00000000..cdc078a7
--- /dev/null
+++ b/src/dynarmic/backend/arm64/emit_arm64_memory.cpp
@@ -0,0 +1,117 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2022 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include "dynarmic/backend/arm64/emit_arm64_memory.h"
+
+#include <oaknut/oaknut.hpp>
+
+#include "dynarmic/backend/arm64/abi.h"
+#include "dynarmic/backend/arm64/emit_arm64.h"
+#include "dynarmic/backend/arm64/emit_context.h"
+#include "dynarmic/backend/arm64/reg_alloc.h"
+#include "dynarmic/ir/acc_type.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::Arm64 {
+
+using namespace oaknut::util;
+
+bool IsOrdered(IR::AccType acctype) {
+    return acctype == IR::AccType::ORDERED || acctype == IR::AccType::ORDEREDRW || acctype == IR::AccType::LIMITEDORDERED;
+}
+
+void EmitReadMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
+    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
+    ctx.reg_alloc.PrepareForCall({}, args[1]);
+    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
+
+    EmitRelocation(code, ctx, fn);
+    if (ordered) {
+        code.DMB(oaknut::BarrierOp::ISH);
+    }
+    ctx.reg_alloc.DefineAsRegister(inst, X0);
+}
+
+void EmitReadMemory128(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
+    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
+    ctx.reg_alloc.PrepareForCall({}, args[1]);
+    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
+
+    EmitRelocation(code, ctx, fn);
+    if (ordered) {
+        code.DMB(oaknut::BarrierOp::ISH);
+    }
+    code.MOV(Q8.B16(), Q0.B16());
+    ctx.reg_alloc.DefineAsRegister(inst, Q8);
+}
+
+void EmitExclusiveReadMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
+    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
+    ctx.reg_alloc.PrepareForCall({}, args[1]);
+    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
+
+    code.MOV(Wscratch0, 1);
+    code.STRB(Wscratch0, Xstate, ctx.conf.state_exclusive_state_offset);
+    EmitRelocation(code, ctx, fn);
+    if (ordered) {
+        code.DMB(oaknut::BarrierOp::ISH);
+    }
+    ctx.reg_alloc.DefineAsRegister(inst, X0);
+}
+
+void EmitExclusiveReadMemory128(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
+    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
+    ctx.reg_alloc.PrepareForCall({}, args[1]);
+    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
+
+    code.MOV(Wscratch0, 1);
+    code.STRB(Wscratch0, Xstate, ctx.conf.state_exclusive_state_offset);
+    EmitRelocation(code, ctx, fn);
+    if (ordered) {
+        code.DMB(oaknut::BarrierOp::ISH);
+    }
+    code.MOV(Q8.B16(), Q0.B16());
+    ctx.reg_alloc.DefineAsRegister(inst, Q8);
+}
+
+void EmitWriteMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
+    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
+    ctx.reg_alloc.PrepareForCall({}, args[1], args[2]);
+    const bool ordered = IsOrdered(args[3].GetImmediateAccType());
+
+    if (ordered) {
+        code.DMB(oaknut::BarrierOp::ISH);
+    }
+    EmitRelocation(code, ctx, fn);
+    if (ordered) {
+        code.DMB(oaknut::BarrierOp::ISH);
+    }
+}
+
+void EmitExclusiveWriteMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn) {
+    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
+    ctx.reg_alloc.PrepareForCall({}, args[1], args[2]);
+    const bool ordered = IsOrdered(args[3].GetImmediateAccType());
+
+    oaknut::Label end;
+
+    if (ordered) {
+        code.DMB(oaknut::BarrierOp::ISH);
+    }
+    code.MOV(W0, 1);
+    code.LDRB(Wscratch0, Xstate, ctx.conf.state_exclusive_state_offset);
+    code.CBZ(Wscratch0, end);
+    code.STRB(WZR, Xstate, ctx.conf.state_exclusive_state_offset);
+    EmitRelocation(code, ctx, fn);
+    if (ordered) {
+        code.DMB(oaknut::BarrierOp::ISH);
+    }
+    code.l(end);
+    ctx.reg_alloc.DefineAsRegister(inst, X0);
+}
+
+}  // namespace Dynarmic::Backend::Arm64
diff --git a/src/dynarmic/backend/arm64/emit_arm64_memory.h b/src/dynarmic/backend/arm64/emit_arm64_memory.h
new file mode 100644
index 00000000..ae7ec4c3
--- /dev/null
+++ b/src/dynarmic/backend/arm64/emit_arm64_memory.h
@@ -0,0 +1,32 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2022 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+namespace oaknut {
+struct PointerCodeGeneratorPolicy;
+template<typename>
+class BasicCodeGenerator;
+using CodeGenerator = BasicCodeGenerator<PointerCodeGeneratorPolicy>;
+struct Label;
+}  // namespace oaknut
+
+namespace Dynarmic::IR {
+enum class AccType;
+class Inst;
+}  // namespace Dynarmic::IR
+
+namespace Dynarmic::Backend::Arm64 {
+
+struct EmitContext;
+enum class LinkTarget;
+
+bool IsOrdered(IR::AccType acctype);
+void EmitReadMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn);
+void EmitReadMemory128(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn);
+void EmitExclusiveReadMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn);
+void EmitExclusiveReadMemory128(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn);
+void EmitWriteMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn);
+void EmitExclusiveWriteMemory(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst, LinkTarget fn);
+
+}  // namespace Dynarmic::Backend::Arm64
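
Note (not part of the patch): after this change, the per-architecture files keep only thin EmitIR<> specializations that forward to the shared helpers, while the exclusive-monitor location is taken from EmitConfig::state_exclusive_state_offset instead of a hard-coded offsetof on a specific JitState type. A minimal sketch of that delegation pattern follows; the opcode and link-target names used here (IR::Opcode::A32ExclusiveReadMemory32, LinkTarget::ExclusiveReadMemory32) are assumed from the surrounding backend and do not appear in this diff.

/* Illustrative sketch only, under the assumptions stated above. */
template<>
void EmitIR<IR::Opcode::A32ExclusiveReadMemory32>(oaknut::CodeGenerator& code, EmitContext& ctx, IR::Inst* inst) {
    // The shared helper sets the exclusive monitor through
    // ctx.conf.state_exclusive_state_offset, so it no longer needs to know
    // whether Xstate points at an A32JitState or an A64JitState.
    EmitExclusiveReadMemory(code, ctx, inst, LinkTarget::ExclusiveReadMemory32);
}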