// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <array>
#include <span>
#include <tuple>
#include <type_traits>
#include <utility>
#include <vector>

#include <boost/container/small_vector.hpp>

#include "shader_recompiler/backend/spirv/emit_spirv.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/microinstruction.h"
#include "shader_recompiler/frontend/ir/program.h"

namespace Shader::Backend::SPIRV {
namespace {
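// Compile-time traits of an Emit* function pointer: its return type, number of
// parameters, and the type of each parameter.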
template <class Func>
struct FuncTraits {};

template <class ReturnType_, class... Args>
struct FuncTraits<ReturnType_ (*)(Args...)> {
    using ReturnType = ReturnType_;

    static constexpr size_t NUM_ARGS = sizeof...(Args);

    template <size_t I>
    using ArgType = std::tuple_element_t<I, std::tuple<Args...>>;
};
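
// Invokes func with the given arguments and stores the resulting SPIR-V Id as the
// definition of the IR instruction.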
template <auto func, typename... Args>
void SetDefinition(EmitContext& ctx, IR::Inst* inst, Args... args) {
    inst->SetDefinition<Id>(func(ctx, std::forward<Args>(args)...));
}
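
// Converts an IR operand into the concrete argument type expected by the Emit*
// function: a defined Id, the raw IR::Value, an immediate, a block label, an
// attribute, or a register.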
template <typename ArgType>
ArgType Arg(EmitContext& ctx, const IR::Value& arg) {
    if constexpr (std::is_same_v<ArgType, Id>) {
        return ctx.Def(arg);
    } else if constexpr (std::is_same_v<ArgType, const IR::Value&>) {
        return arg;
    } else if constexpr (std::is_same_v<ArgType, u32>) {
        return arg.U32();
    } else if constexpr (std::is_same_v<ArgType, IR::Block*>) {
        return arg.Label();
    } else if constexpr (std::is_same_v<ArgType, IR::Attribute>) {
        return arg.Attribute();
    } else if constexpr (std::is_same_v<ArgType, IR::Reg>) {
        return arg.Reg();
    }
}
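
// Unpacks the instruction's operands into the callee's parameter types and calls it.
// When the callee returns an Id, the result becomes the instruction's definition.
// I indexes the IR operands; the callee parameter index is shifted by one (for ctx)
// or two (for ctx plus the IR::Inst* itself) depending on is_first_arg_inst.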
template <auto func, bool is_first_arg_inst, size_t... I>
void Invoke(EmitContext& ctx, IR::Inst* inst, std::index_sequence<I...>) {
    using Traits = FuncTraits<decltype(func)>;
    if constexpr (std::is_same_v<typename Traits::ReturnType, Id>) {
        if constexpr (is_first_arg_inst) {
            SetDefinition<func>(
                ctx, inst, inst,
                Arg<typename Traits::template ArgType<I + 2>>(ctx, inst->Arg(I))...);
        } else {
            SetDefinition<func>(
                ctx, inst, Arg<typename Traits::template ArgType<I + 1>>(ctx, inst->Arg(I))...);
        }
    } else {
        if constexpr (is_first_arg_inst) {
            func(ctx, inst, Arg<typename Traits::template ArgType<I + 2>>(ctx, inst->Arg(I))...);
        } else {
            func(ctx, Arg<typename Traits::template ArgType<I + 1>>(ctx, inst->Arg(I))...);
        }
    }
}
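
// Entry point of the dispatch: inspects the Emit* signature through FuncTraits to
// decide whether the callee takes the IR::Inst* and how many operands to forward.
// For example, a callee shaped like the (illustrative) signature
//     Id EmitFoo(EmitContext& ctx, IR::Inst* inst, Id a, Id b);
// is passed the instruction pointer plus two converted operands, and its returned
// Id is stored as the instruction's definition.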
template <auto func>
void Invoke(EmitContext& ctx, IR::Inst* inst) {
    using Traits = FuncTraits<decltype(func)>;
    static_assert(Traits::NUM_ARGS >= 1, "Insufficient arguments");
    if constexpr (Traits::NUM_ARGS == 1) {
        Invoke<func, false>(ctx, inst, std::make_index_sequence<0>{});
    } else {
        using FirstArgType = typename Traits::template ArgType<1>;
        static constexpr bool is_first_arg_inst = std::is_same_v<FirstArgType, IR::Inst*>;
        using Indices = std::make_index_sequence<Traits::NUM_ARGS - (is_first_arg_inst ? 2 : 1)>;
        Invoke<func, is_first_arg_inst>(ctx, inst, Indices{});
    }
}
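
// Lowers a single IR instruction by dispatching on its opcode. The OPCODE X-macro
// expands to one case per entry of opcodes.inc, along the lines of
//     case IR::Opcode::IAdd32:
//         return Invoke<&EmitIAdd32>(ctx, inst);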
void EmitInst(EmitContext& ctx, IR::Inst* inst) {
    switch (inst->GetOpcode()) {
#define OPCODE(name, result_type, ...) \
    case IR::Opcode::name: \
        return Invoke<&Emit##name>(ctx, inst);
#include "shader_recompiler/frontend/ir/opcodes.inc"
#undef OPCODE
    }
    throw LogicError("Invalid opcode {}", inst->GetOpcode());
}
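
// Returns the SPIR-V type id used for a phi node of the given IR type.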
Id TypeId(const EmitContext& ctx, IR::Type type) {
    switch (type) {
    case IR::Type::U1:
        return ctx.U1;
    case IR::Type::U32:
        return ctx.U32[1];
    default:
        throw NotImplementedException("Phi node type {}", type);
    }
}
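
// Defines the void main() function: one label per IR block, with every instruction
// lowered through EmitInst.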
Id DefineMain(EmitContext& ctx, IR::Program& program) {
    const Id void_function{ctx.TypeFunction(ctx.void_id)};
    const Id main{ctx.OpFunction(ctx.void_id, spv::FunctionControlMask::MaskNone, void_function)};
    for (IR::Block* const block : program.blocks) {
        ctx.AddLabel(block->Definition<Id>());
        for (IR::Inst& inst : block->Instructions()) {
            EmitInst(ctx, &inst);
        }
    }
    ctx.OpFunctionEnd();
    return main;
}
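
// Declares the OpEntryPoint for the program's stage along with its interface ids and
// stage-specific execution modes (LocalSize for compute, OriginUpperLeft and optional
// DepthReplacing for fragment).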
void DefineEntryPoint(const IR::Program& program, EmitContext& ctx, Id main) {
    const std::span interfaces(ctx.interfaces.data(), ctx.interfaces.size());
    spv::ExecutionModel execution_model{};
    switch (program.stage) {
    case Shader::Stage::Compute: {
        const std::array<u32, 3> workgroup_size{program.workgroup_size};
        execution_model = spv::ExecutionModel::GLCompute;
        ctx.AddExecutionMode(main, spv::ExecutionMode::LocalSize, workgroup_size[0],
                             workgroup_size[1], workgroup_size[2]);
        break;
    }
    case Shader::Stage::VertexB:
        execution_model = spv::ExecutionModel::Vertex;
        break;
    case Shader::Stage::Fragment:
        execution_model = spv::ExecutionModel::Fragment;
        ctx.AddExecutionMode(main, spv::ExecutionMode::OriginUpperLeft);
        if (program.info.stores_frag_depth) {
            ctx.AddExecutionMode(main, spv::ExecutionMode::DepthReplacing);
        }
        break;
    default:
        throw NotImplementedException("Stage {}", program.stage);
    }
    ctx.AddEntryPoint(execution_model, main, "main", interfaces);
}
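
// Requests the float-controls denormal execution modes implied by the shader's fp32
// and fp16 usage, limited to what the host profile supports.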
void SetupDenormControl(const Profile& profile, const IR::Program& program, EmitContext& ctx,
                        Id main_func) {
    const Info& info{program.info};
    if (info.uses_fp32_denorms_flush && info.uses_fp32_denorms_preserve) {
        // LOG_ERROR(HW_GPU, "Fp32 denorm flush and preserve on the same shader");
    } else if (info.uses_fp32_denorms_flush) {
        if (profile.support_fp32_denorm_flush) {
            ctx.AddCapability(spv::Capability::DenormFlushToZero);
            ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormFlushToZero, 32U);
        } else {
            // Drivers will most likely flush denorms by default, no need to warn
        }
    } else if (info.uses_fp32_denorms_preserve) {
        if (profile.support_fp32_denorm_preserve) {
            ctx.AddCapability(spv::Capability::DenormPreserve);
            ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormPreserve, 32U);
        } else {
            // LOG_WARNING(HW_GPU, "Fp32 denorm preserve used in shader without host support");
        }
    }
    if (!profile.support_separate_denorm_behavior) {
        // Without separate denorm control, fp16 denorm behavior cannot be set
        // independently of the fp32 mode chosen above
        return;
    }
    if (info.uses_fp16_denorms_flush && info.uses_fp16_denorms_preserve) {
        // LOG_ERROR(HW_GPU, "Fp16 denorm flush and preserve on the same shader");
    } else if (info.uses_fp16_denorms_flush) {
        if (profile.support_fp16_denorm_flush) {
            ctx.AddCapability(spv::Capability::DenormFlushToZero);
            ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormFlushToZero, 16U);
        } else {
            // Same as fp32, no need to warn as most drivers will flush by default
        }
    } else if (info.uses_fp16_denorms_preserve) {
        if (profile.support_fp16_denorm_preserve) {
            ctx.AddCapability(spv::Capability::DenormPreserve);
            ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormPreserve, 16U);
        } else {
            // LOG_WARNING(HW_GPU, "Fp16 denorm preserve used in shader without host support");
        }
    }
}
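
// Requests SignedZeroInfNanPreserve for each floating-point width the shader uses,
// when the host profile supports it.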
void SetupSignedNanCapabilities(const Profile& profile, const IR::Program& program,
                                EmitContext& ctx, Id main_func) {
    if (program.info.uses_fp16 && profile.support_fp16_signed_zero_nan_preserve) {
        ctx.AddCapability(spv::Capability::SignedZeroInfNanPreserve);
        ctx.AddExecutionMode(main_func, spv::ExecutionMode::SignedZeroInfNanPreserve, 16U);
    }
    if (profile.support_fp32_signed_zero_nan_preserve) {
        ctx.AddCapability(spv::Capability::SignedZeroInfNanPreserve);
        ctx.AddExecutionMode(main_func, spv::ExecutionMode::SignedZeroInfNanPreserve, 32U);
    }
    if (program.info.uses_fp64 && profile.support_fp64_signed_zero_nan_preserve) {
        ctx.AddCapability(spv::Capability::SignedZeroInfNanPreserve);
        ctx.AddExecutionMode(main_func, spv::ExecutionMode::SignedZeroInfNanPreserve, 64U);
    }
}
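
// Declares the SPIR-V capabilities and extensions implied by the shader's usage
// information and by what the host profile exposes.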
void SetupCapabilities(const Profile& profile, const Info& info, EmitContext& ctx) {
    if (info.uses_sampled_1d) {
        ctx.AddCapability(spv::Capability::Sampled1D);
    }
    if (info.uses_sparse_residency) {
        ctx.AddCapability(spv::Capability::SparseResidency);
    }
    if (info.uses_demote_to_helper_invocation) {
        ctx.AddExtension("SPV_EXT_demote_to_helper_invocation");
        ctx.AddCapability(spv::Capability::DemoteToHelperInvocationEXT);
    }
    if (info.stores_viewport_index) {
        ctx.AddCapability(spv::Capability::MultiViewport);
        if (profile.support_viewport_index_layer_non_geometry &&
            ctx.stage != Shader::Stage::Geometry) {
            ctx.AddExtension("SPV_EXT_shader_viewport_index_layer");
            ctx.AddCapability(spv::Capability::ShaderViewportIndexLayerEXT);
        }
    }
    if (!profile.support_vertex_instance_id && (info.loads_instance_id || info.loads_vertex_id)) {
        ctx.AddExtension("SPV_KHR_shader_draw_parameters");
        ctx.AddCapability(spv::Capability::DrawParameters);
    }
    if ((info.uses_subgroup_vote || info.uses_subgroup_invocation_id) && profile.support_vote) {
        ctx.AddExtension("SPV_KHR_shader_ballot");
        ctx.AddCapability(spv::Capability::SubgroupBallotKHR);
        if (!profile.warp_size_potentially_larger_than_guest) {
            // Vote ops are only used when not taking the long path
            ctx.AddExtension("SPV_KHR_subgroup_vote");
            ctx.AddCapability(spv::Capability::SubgroupVoteKHR);
        }
    }
    if (info.uses_64_bit_atomics && profile.support_int64_atomics) {
        ctx.AddCapability(spv::Capability::Int64Atomics);
    }
    if (info.uses_typeless_image_reads && profile.support_typeless_image_loads) {
        ctx.AddCapability(spv::Capability::StorageImageReadWithoutFormat);
    }
    if (info.uses_typeless_image_writes) {
        ctx.AddCapability(spv::Capability::StorageImageWriteWithoutFormat);
    }
    // TODO: Track this usage
    ctx.AddCapability(spv::Capability::ImageGatherExtended);
    ctx.AddCapability(spv::Capability::ImageQuery);
    ctx.AddCapability(spv::Capability::SampledBuffer);
}
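
// Resolves the operands of the deferred OpPhi instructions now that every block label
// and value definition exists. The callback advances to the next phi instruction in
// program order whenever phi_arg wraps back to zero.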
void PatchPhiNodes(IR::Program& program, EmitContext& ctx) {
    auto inst{program.blocks.front()->begin()};
    size_t block_index{0};
    ctx.PatchDeferredPhi([&](size_t phi_arg) {
        if (phi_arg == 0) {
            ++inst;
            if (inst == program.blocks[block_index]->end() ||
                inst->GetOpcode() != IR::Opcode::Phi) {
                do {
                    ++block_index;
                    inst = program.blocks[block_index]->begin();
                } while (inst->GetOpcode() != IR::Opcode::Phi);
            }
        }
        return ctx.Def(inst->Arg(phi_arg));
    });
}
} // Anonymous namespace
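
// Main entry point of the SPIR-V backend: builds the emission context, defines main,
// declares the entry point and capabilities, patches deferred phi nodes, and assembles
// the final SPIR-V words.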
std::vector<u32> EmitSPIRV(const Profile& profile, IR::Program& program, u32& binding) {
    EmitContext ctx{profile, program, binding};
    const Id main{DefineMain(ctx, program)};
    DefineEntryPoint(program, ctx, main);
    if (profile.support_float_controls) {
        ctx.AddExtension("SPV_KHR_float_controls");
        SetupDenormControl(profile, program, ctx, main);
        SetupSignedNanCapabilities(profile, program, ctx, main);
    }
    SetupCapabilities(profile, program.info, ctx);
    PatchPhiNodes(program, ctx);
    return ctx.Assemble();
}
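
// Emits a deferred OpPhi: the predecessor block labels are known at this point, while
// the operand values are filled in later by PatchPhiNodes.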
Id EmitPhi(EmitContext& ctx, IR::Inst* inst) {
    const size_t num_args{inst->NumArgs()};
    boost::container::small_vector<Id, 32> blocks;
    blocks.reserve(num_args);
    for (size_t index = 0; index < num_args; ++index) {
        blocks.push_back(inst->PhiBlock(index)->Definition<Id>());
    }
    // The type of a phi instruction is stored in its flags
    const Id result_type{TypeId(ctx, inst->Flags<IR::Type>())};
    return ctx.DeferredOpPhi(result_type, std::span(blocks.data(), blocks.size()));
}

void EmitVoid(EmitContext&) {}
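
// Forwards the SPIR-V definition of the aliased value. Forward declarations are not
// supported yet, so the value must already have a definition.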
Id EmitIdentity(EmitContext& ctx, const IR::Value& value) {
    const Id id{ctx.Def(value)};
    if (!Sirit::ValidId(id)) {
        throw NotImplementedException("Forward identity declaration");
    }
    return id;
}
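
// The GetXFromOp pseudo-instructions are expected to be consumed by earlier passes,
// so reaching the backend with one of them is a logic error.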
void EmitGetZeroFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}

void EmitGetSignFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}

void EmitGetCarryFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}

void EmitGetOverflowFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}

void EmitGetSparseFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}

void EmitGetInBoundsFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}

} // namespace Shader::Backend::SPIRV