2021-03-19 23:28:31 +01:00
|
|
|
// Copyright 2021 yuzu Emulator Project
|
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
|
|
|
#include <algorithm>
|
|
|
|
#include <span>
|
|
|
|
|
|
|
|
#include <boost/container/small_vector.hpp>
|
|
|
|
#include <boost/container/static_vector.hpp>
|
|
|
|
|
|
|
|
#include "common/bit_field.h"
|
|
|
|
#include "video_core/renderer_vulkan/maxwell_to_vk.h"
|
|
|
|
#include "video_core/renderer_vulkan/pipeline_helper.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_buffer_cache.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_graphics_pipeline.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_render_pass_cache.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_scheduler.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_texture_cache.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_update_descriptor.h"
|
2021-06-06 05:11:36 +02:00
|
|
|
#include "video_core/shader_notify.h"
|
2021-03-19 23:28:31 +01:00
|
|
|
#include "video_core/vulkan_common/vulkan_device.h"
|
|
|
|
|
2021-05-23 09:28:34 +02:00
|
|
|
#if defined(_MSC_VER) && defined(NDEBUG)
|
2021-04-24 23:28:02 +02:00
|
|
|
#define LAMBDA_FORCEINLINE [[msvc::forceinline]]
|
|
|
|
#else
|
|
|
|
#define LAMBDA_FORCEINLINE
|
|
|
|
#endif
|
|
|
|
|
2021-03-19 23:28:31 +01:00
|
|
|
namespace Vulkan {
|
|
|
|
namespace {
|
|
|
|
using boost::container::small_vector;
|
|
|
|
using boost::container::static_vector;
|
2021-04-24 23:28:02 +02:00
|
|
|
using Shader::ImageBufferDescriptor;
|
2021-05-23 09:28:34 +02:00
|
|
|
using Tegra::Texture::TexturePair;
|
2021-03-19 23:28:31 +01:00
|
|
|
using VideoCore::Surface::PixelFormat;
|
|
|
|
using VideoCore::Surface::PixelFormatFromDepthFormat;
|
|
|
|
using VideoCore::Surface::PixelFormatFromRenderTargetFormat;
|
|
|
|
|
2021-04-24 23:28:02 +02:00
|
|
|
constexpr size_t NUM_STAGES = Maxwell::MaxShaderStage;
|
|
|
|
constexpr size_t MAX_IMAGE_ELEMENTS = 64;
|
|
|
|
|
2021-04-01 08:15:28 +02:00
|
|
|
// Builds a descriptor set layout description covering every graphics stage.
// Each element of 'infos' is mapped in order to the Vulkan stage bit for the
// corresponding Maxwell shader stage (vertex through fragment).
DescriptorLayoutBuilder MakeBuilder(const Device& device, std::span<const Shader::Info> infos) {
    static constexpr std::array stage_bits{
        VK_SHADER_STAGE_VERTEX_BIT,
        VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
        VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
        VK_SHADER_STAGE_GEOMETRY_BIT,
        VK_SHADER_STAGE_FRAGMENT_BIT,
    };
    DescriptorLayoutBuilder builder{device};
    for (size_t stage = 0; stage < infos.size(); ++stage) {
        // .at() guards against a caller passing more stages than exist
        builder.Add(infos[stage], stage_bits.at(stage));
    }
    return builder;
}
|
|
|
|
|
|
|
|
template <class StencilFace>
|
|
|
|
VkStencilOpState GetStencilFaceState(const StencilFace& face) {
|
|
|
|
return {
|
|
|
|
.failOp = MaxwellToVK::StencilOp(face.ActionStencilFail()),
|
|
|
|
.passOp = MaxwellToVK::StencilOp(face.ActionDepthPass()),
|
|
|
|
.depthFailOp = MaxwellToVK::StencilOp(face.ActionDepthFail()),
|
|
|
|
.compareOp = MaxwellToVK::ComparisonOp(face.TestFunc()),
|
|
|
|
.compareMask = 0,
|
|
|
|
.writeMask = 0,
|
|
|
|
.reference = 0,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
// Returns true when primitiveRestartEnable may legally be set for the given
// topology. List topologies (and patch lists) do not support restart.
bool SupportsPrimitiveRestart(VkPrimitiveTopology topology) {
    static constexpr std::array unsupported_topologies{
        VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
        VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
        VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
        VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
        VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
        VK_PRIMITIVE_TOPOLOGY_PATCH_LIST,
        // VK_PRIMITIVE_TOPOLOGY_QUAD_LIST_EXT,
    };
    return std::ranges::none_of(unsupported_topologies, [topology](VkPrimitiveTopology entry) {
        return entry == topology;
    });
}
|
|
|
|
|
|
|
|
// Decodes a packed 16-bit Maxwell viewport swizzle (4 bits per component,
// 3 significant) into the NV viewport swizzle structure.
VkViewportSwizzleNV UnpackViewportSwizzle(u16 swizzle) {
    union Unpacked {
        u32 raw;
        BitField<0, 3, Maxwell::ViewportSwizzle> x;
        BitField<4, 3, Maxwell::ViewportSwizzle> y;
        BitField<8, 3, Maxwell::ViewportSwizzle> z;
        BitField<12, 3, Maxwell::ViewportSwizzle> w;
    };
    const Unpacked fields{swizzle};
    VkViewportSwizzleNV result{};
    result.x = MaxwellToVK::ViewportSwizzle(fields.x);
    result.y = MaxwellToVK::ViewportSwizzle(fields.y);
    result.z = MaxwellToVK::ViewportSwizzle(fields.z);
    result.w = MaxwellToVK::ViewportSwizzle(fields.w);
    return result;
}
|
|
|
|
|
|
|
|
// Converts an encoded render target format byte into the engine pixel format,
// mapping the NONE sentinel to PixelFormat::Invalid.
PixelFormat DecodeFormat(u8 encoded_format) {
    const auto format{static_cast<Tegra::RenderTargetFormat>(encoded_format)};
    return format == Tegra::RenderTargetFormat::NONE ? PixelFormat::Invalid
                                                     : PixelFormatFromRenderTargetFormat(format);
}
|
|
|
|
|
|
|
|
// Derives the render pass cache key (color formats, optional depth format and
// sample count) from the fixed pipeline state.
RenderPassKey MakeRenderPassKey(const FixedPipelineState& state) {
    RenderPassKey key;
    std::ranges::transform(state.color_formats, key.color_formats.begin(), DecodeFormat);
    if (state.depth_enabled == 0) {
        key.depth_format = PixelFormat::Invalid;
    } else {
        key.depth_format = PixelFormatFromDepthFormat(
            static_cast<Tegra::DepthFormat>(state.depth_format.Value()));
    }
    key.samples = MaxwellToVK::MsaaMode(state.msaa_mode);
    return key;
}
|
2021-04-23 02:05:10 +02:00
|
|
|
|
|
|
|
size_t NumAttachments(const FixedPipelineState& state) {
|
|
|
|
size_t num{};
|
|
|
|
for (size_t index = 0; index < Maxwell::NumRenderTargets; ++index) {
|
|
|
|
const auto format{static_cast<Tegra::RenderTargetFormat>(state.color_formats[index])};
|
|
|
|
if (format != Tegra::RenderTargetFormat::NONE) {
|
|
|
|
num = index + 1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return num;
|
|
|
|
}
|
2021-04-24 23:28:02 +02:00
|
|
|
|
|
|
|
template <typename Spec>
|
|
|
|
bool Passes(const std::array<vk::ShaderModule, NUM_STAGES>& modules,
|
|
|
|
const std::array<Shader::Info, NUM_STAGES>& stage_infos) {
|
|
|
|
for (size_t stage = 0; stage < NUM_STAGES; ++stage) {
|
|
|
|
if (!Spec::enabled_stages[stage] && modules[stage]) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
const auto& info{stage_infos[stage]};
|
|
|
|
if constexpr (!Spec::has_storage_buffers) {
|
|
|
|
if (!info.storage_buffers_descriptors.empty()) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if constexpr (!Spec::has_texture_buffers) {
|
|
|
|
if (!info.texture_buffer_descriptors.empty()) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if constexpr (!Spec::has_image_buffers) {
|
|
|
|
if (!info.image_buffer_descriptors.empty()) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if constexpr (!Spec::has_images) {
|
|
|
|
if (!info.image_descriptors.empty()) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
using ConfigureFuncPtr = void (*)(GraphicsPipeline*, bool);
|
|
|
|
|
|
|
|
template <typename Spec, typename... Specs>
|
|
|
|
ConfigureFuncPtr FindSpec(const std::array<vk::ShaderModule, NUM_STAGES>& modules,
|
|
|
|
const std::array<Shader::Info, NUM_STAGES>& stage_infos) {
|
|
|
|
if constexpr (sizeof...(Specs) > 0) {
|
|
|
|
if (!Passes<Spec>(modules, stage_infos)) {
|
|
|
|
return FindSpec<Specs...>(modules, stage_infos);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return GraphicsPipeline::MakeConfigureSpecFunc<Spec>();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Specialization for pipelines with only vertex and fragment stages and no
// storage buffers, texture buffers, image buffers or storage images.
struct SimpleVertexFragmentSpec {
    // Stage order: vertex, tess control, tess eval, geometry, fragment
    static constexpr std::array<bool, 5> enabled_stages{true, false, false, false, true};
    static constexpr bool has_storage_buffers = false;
    static constexpr bool has_texture_buffers = false;
    static constexpr bool has_image_buffers = false;
    static constexpr bool has_images = false;
};
|
|
|
|
|
|
|
|
// Specialization for vertex-only pipelines (e.g. rasterizer-discard paths)
// with no storage buffers, texture buffers, image buffers or storage images.
struct SimpleVertexSpec {
    // Stage order: vertex, tess control, tess eval, geometry, fragment
    static constexpr std::array<bool, 5> enabled_stages{true, false, false, false, false};
    static constexpr bool has_storage_buffers = false;
    static constexpr bool has_texture_buffers = false;
    static constexpr bool has_image_buffers = false;
    static constexpr bool has_images = false;
};
|
|
|
|
|
|
|
|
// Catch-all specialization: every stage and every descriptor class enabled.
// Always passes, so it must be listed last when searching for a spec.
struct DefaultSpec {
    // Stage order: vertex, tess control, tess eval, geometry, fragment
    static constexpr std::array<bool, 5> enabled_stages{true, true, true, true, true};
    static constexpr bool has_storage_buffers = true;
    static constexpr bool has_texture_buffers = true;
    static constexpr bool has_image_buffers = true;
    static constexpr bool has_images = true;
};
|
|
|
|
|
|
|
|
// Picks the most specialized Configure implementation matching the shader
// modules and stage infos. Specs are tried in order from most to least
// restrictive; DefaultSpec at the end accepts anything.
ConfigureFuncPtr ConfigureFunc(const std::array<vk::ShaderModule, NUM_STAGES>& modules,
                               const std::array<Shader::Info, NUM_STAGES>& infos) {
    return FindSpec<SimpleVertexSpec, SimpleVertexFragmentSpec, DefaultSpec>(modules, infos);
}
|
2021-03-19 23:28:31 +01:00
|
|
|
} // Anonymous namespace
|
|
|
|
|
2021-06-06 05:11:36 +02:00
|
|
|
// Constructs a graphics pipeline wrapper and kicks off the (possibly
// asynchronous) Vulkan pipeline build. Stage infos and uniform buffer state
// are captured synchronously; layout/template/pipeline creation runs inside
// 'func', on 'worker_thread' when one is provided, otherwise inline.
GraphicsPipeline::GraphicsPipeline(
    Tegra::Engines::Maxwell3D& maxwell3d_, Tegra::MemoryManager& gpu_memory_,
    VKScheduler& scheduler_, BufferCache& buffer_cache_, TextureCache& texture_cache_,
    VideoCore::ShaderNotify* shader_notify, const Device& device_, DescriptorPool& descriptor_pool,
    VKUpdateDescriptorQueue& update_descriptor_queue_, Common::ThreadWorker* worker_thread,
    RenderPassCache& render_pass_cache, const GraphicsPipelineCacheKey& key_,
    std::array<vk::ShaderModule, NUM_STAGES> stages,
    const std::array<const Shader::Info*, NUM_STAGES>& infos)
    : key{key_}, maxwell3d{maxwell3d_}, gpu_memory{gpu_memory_}, device{device_},
      texture_cache{texture_cache_}, buffer_cache{buffer_cache_}, scheduler{scheduler_},
      update_descriptor_queue{update_descriptor_queue_}, spv_modules{std::move(stages)} {
    // shader_notify is optional; when present it tracks in-flight builds
    if (shader_notify) {
        shader_notify->MarkShaderBuilding();
    }
    // Copy per-stage shader info and uniform buffer usage for stages that exist
    for (size_t stage = 0; stage < NUM_STAGES; ++stage) {
        const Shader::Info* const info{infos[stage]};
        if (!info) {
            continue;
        }
        stage_infos[stage] = *info;
        enabled_uniform_buffer_masks[stage] = info->constant_buffer_mask;
        std::ranges::copy(info->constant_buffer_used_sizes, uniform_buffer_sizes[stage].begin());
    }
    // Deferred build: creates descriptor layout, pipeline layout, update
    // template and the VkPipeline itself, then signals waiters.
    auto func{[this, shader_notify, &render_pass_cache, &descriptor_pool] {
        DescriptorLayoutBuilder builder{MakeBuilder(device, stage_infos)};
        uses_push_descriptor = builder.CanUsePushDescriptor();
        descriptor_set_layout = builder.CreateDescriptorSetLayout(uses_push_descriptor);
        // Push descriptors bypass the pool; only allocate when not using them
        if (!uses_push_descriptor) {
            descriptor_allocator = descriptor_pool.Allocator(*descriptor_set_layout, stage_infos);
        }
        const VkDescriptorSetLayout set_layout{*descriptor_set_layout};
        pipeline_layout = builder.CreatePipelineLayout(set_layout);
        descriptor_update_template =
            builder.CreateTemplate(set_layout, *pipeline_layout, uses_push_descriptor);

        const VkRenderPass render_pass{render_pass_cache.Get(MakeRenderPassKey(key.state))};
        Validate();
        MakePipeline(render_pass);

        // Publish completion under the mutex so ConfigureDraw's condvar wait
        // cannot miss the notification
        std::lock_guard lock{build_mutex};
        is_built = true;
        build_condvar.notify_one();
        if (shader_notify) {
            shader_notify->MarkShaderComplete();
        }
    }};
    if (worker_thread) {
        worker_thread->QueueWork(std::move(func));
    } else {
        func();
    }
    // Selecting the configure function does not depend on the build finishing
    configure_func = ConfigureFunc(spv_modules, stage_infos);
}
|
|
|
|
|
2021-04-24 23:27:25 +02:00
|
|
|
// Registers a pipeline this one is known to transition to. The key and the
// pipeline pointer are stored in parallel vectors at matching indices.
void GraphicsPipeline::AddTransition(GraphicsPipeline* transition) {
    transition_keys.push_back(transition->key);
    transitions.push_back(transition);
}
|
|
|
|
|
2021-04-24 23:28:02 +02:00
|
|
|
// Configures all draw state for this pipeline, specialized at compile time by
// 'Spec' so that disabled stages and unused descriptor classes are dead code.
// Three passes over the stages: gather image handles, bind texture/image
// buffers, then bind host buffers and push descriptors.
template <typename Spec>
void GraphicsPipeline::ConfigureImpl(bool is_indexed) {
    // Flat scratch arrays shared across all stages; 'image_index' and
    // 'sampler_index' are running write cursors into them
    std::array<ImageId, MAX_IMAGE_ELEMENTS> image_view_ids;
    std::array<u32, MAX_IMAGE_ELEMENTS> image_view_indices;
    std::array<VkSampler, MAX_IMAGE_ELEMENTS> samplers;
    size_t sampler_index{};
    size_t image_index{};

    texture_cache.SynchronizeGraphicsDescriptors();

    buffer_cache.SetUniformBuffersState(enabled_uniform_buffer_masks, &uniform_buffer_sizes);

    const auto& regs{maxwell3d.regs};
    const bool via_header_index{regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex};
    // Pass 1: per-stage gather of storage buffer bindings and raw texture
    // handles read from constant buffer memory
    const auto config_stage{[&](size_t stage) LAMBDA_FORCEINLINE {
        const Shader::Info& info{stage_infos[stage]};
        buffer_cache.UnbindGraphicsStorageBuffers(stage);
        if constexpr (Spec::has_storage_buffers) {
            size_t ssbo_index{};
            for (const auto& desc : info.storage_buffers_descriptors) {
                ASSERT(desc.count == 1);
                buffer_cache.BindGraphicsStorageBuffer(stage, ssbo_index, desc.cbuf_index,
                                                       desc.cbuf_offset, desc.is_written);
                ++ssbo_index;
            }
        }
        const auto& cbufs{maxwell3d.state.shader_stages[stage].const_buffers};
        // Reads a texture handle from the descriptor's constant buffer slot.
        // Texture(-buffer) descriptors may combine a secondary handle by
        // OR-ing two constant buffer reads.
        const auto read_handle{[&](const auto& desc, u32 index) {
            ASSERT(cbufs[desc.cbuf_index].enabled);
            const u32 index_offset{index << desc.size_shift};
            const u32 offset{desc.cbuf_offset + index_offset};
            const GPUVAddr addr{cbufs[desc.cbuf_index].address + offset};
            if constexpr (std::is_same_v<decltype(desc), const Shader::TextureDescriptor&> ||
                          std::is_same_v<decltype(desc), const Shader::TextureBufferDescriptor&>) {
                if (desc.has_secondary) {
                    ASSERT(cbufs[desc.secondary_cbuf_index].enabled);
                    const u32 second_offset{desc.secondary_cbuf_offset + index_offset};
                    const GPUVAddr separate_addr{cbufs[desc.secondary_cbuf_index].address +
                                                 second_offset};
                    const u32 lhs_raw{gpu_memory.Read<u32>(addr)};
                    const u32 rhs_raw{gpu_memory.Read<u32>(separate_addr)};
                    const u32 raw{lhs_raw | rhs_raw};
                    return TexturePair(raw, via_header_index);
                }
            }
            return TexturePair(gpu_memory.Read<u32>(addr), via_header_index);
        }};
        // Records the image view index of each element of a descriptor array
        const auto add_image{[&](const auto& desc) {
            for (u32 index = 0; index < desc.count; ++index) {
                const auto handle{read_handle(desc, index)};
                image_view_indices[image_index++] = handle.first;
            }
        }};
        if constexpr (Spec::has_texture_buffers) {
            for (const auto& desc : info.texture_buffer_descriptors) {
                add_image(desc);
            }
        }
        if constexpr (Spec::has_image_buffers) {
            for (const auto& desc : info.image_buffer_descriptors) {
                add_image(desc);
            }
        }
        // Textures additionally resolve a sampler from the handle's high part
        for (const auto& desc : info.texture_descriptors) {
            for (u32 index = 0; index < desc.count; ++index) {
                const auto handle{read_handle(desc, index)};
                image_view_indices[image_index++] = handle.first;

                Sampler* const sampler{texture_cache.GetGraphicsSampler(handle.second)};
                samplers[sampler_index++] = sampler->Handle();
            }
        }
        if constexpr (Spec::has_images) {
            for (const auto& desc : info.image_descriptors) {
                add_image(desc);
            }
        }
    }};
    if constexpr (Spec::enabled_stages[0]) {
        config_stage(0);
    }
    if constexpr (Spec::enabled_stages[1]) {
        config_stage(1);
    }
    if constexpr (Spec::enabled_stages[2]) {
        config_stage(2);
    }
    if constexpr (Spec::enabled_stages[3]) {
        config_stage(3);
    }
    if constexpr (Spec::enabled_stages[4]) {
        config_stage(4);
    }
    // Resolve all gathered handle indices into image view ids in one batch
    const std::span indices_span(image_view_indices.data(), image_index);
    texture_cache.FillGraphicsImageViews(indices_span, image_view_ids);

    // Pass 2: bind texture/image buffers. 'texture_buffer_index' walks
    // image_view_ids in exactly the same order pass 1 filled it, so texture
    // and image descriptor counts must be skipped over between stages.
    ImageId* texture_buffer_index{image_view_ids.data()};
    const auto bind_stage_info{[&](size_t stage) LAMBDA_FORCEINLINE {
        size_t index{};
        const auto add_buffer{[&](const auto& desc) {
            constexpr bool is_image = std::is_same_v<decltype(desc), const ImageBufferDescriptor&>;
            for (u32 i = 0; i < desc.count; ++i) {
                bool is_written{false};
                if constexpr (is_image) {
                    is_written = desc.is_written;
                }
                ImageView& image_view{texture_cache.GetImageView(*texture_buffer_index)};
                buffer_cache.BindGraphicsTextureBuffer(stage, index, image_view.GpuAddr(),
                                                       image_view.BufferSize(), image_view.format,
                                                       is_written, is_image);
                ++index;
                ++texture_buffer_index;
            }
        }};
        buffer_cache.UnbindGraphicsTextureBuffers(stage);

        const Shader::Info& info{stage_infos[stage]};
        if constexpr (Spec::has_texture_buffers) {
            for (const auto& desc : info.texture_buffer_descriptors) {
                add_buffer(desc);
            }
        }
        if constexpr (Spec::has_image_buffers) {
            for (const auto& desc : info.image_buffer_descriptors) {
                add_buffer(desc);
            }
        }
        // Skip the ids consumed by textures and images so the cursor lines up
        // with the next stage's texture buffer entries
        for (const auto& desc : info.texture_descriptors) {
            texture_buffer_index += desc.count;
        }
        if constexpr (Spec::has_images) {
            for (const auto& desc : info.image_descriptors) {
                texture_buffer_index += desc.count;
            }
        }
    }};
    if constexpr (Spec::enabled_stages[0]) {
        bind_stage_info(0);
    }
    if constexpr (Spec::enabled_stages[1]) {
        bind_stage_info(1);
    }
    if constexpr (Spec::enabled_stages[2]) {
        bind_stage_info(2);
    }
    if constexpr (Spec::enabled_stages[3]) {
        bind_stage_info(3);
    }
    if constexpr (Spec::enabled_stages[4]) {
        bind_stage_info(4);
    }

    buffer_cache.UpdateGraphicsBuffers(is_indexed);
    buffer_cache.BindHostGeometryBuffers(is_indexed);

    update_descriptor_queue.Acquire();

    // Pass 3: bind host stage buffers and push image/sampler descriptors.
    // 'samplers_it'/'views_it' advance through the scratch arrays in order.
    const VkSampler* samplers_it{samplers.data()};
    const ImageId* views_it{image_view_ids.data()};
    const auto prepare_stage{[&](size_t stage) LAMBDA_FORCEINLINE {
        buffer_cache.BindHostStageBuffers(stage);
        PushImageDescriptors(stage_infos[stage], samplers_it, views_it, texture_cache,
                             update_descriptor_queue);
    }};
    if constexpr (Spec::enabled_stages[0]) {
        prepare_stage(0);
    }
    if constexpr (Spec::enabled_stages[1]) {
        prepare_stage(1);
    }
    if constexpr (Spec::enabled_stages[2]) {
        prepare_stage(2);
    }
    if constexpr (Spec::enabled_stages[3]) {
        prepare_stage(3);
    }
    if constexpr (Spec::enabled_stages[4]) {
        prepare_stage(4);
    }
    ConfigureDraw();
}
|
|
|
|
|
|
|
|
// Finalizes draw configuration: ensures render targets and the render pass
// are current, waits (on the scheduler thread) for an in-flight async build,
// and records pipeline bind plus descriptor set updates.
void GraphicsPipeline::ConfigureDraw() {
    texture_cache.UpdateRenderTargets(false);
    scheduler.RequestRenderpass(texture_cache.GetFramebuffer());

    if (!is_built.load(std::memory_order::relaxed)) {
        // Wait for the pipeline to be built
        scheduler.Record([this](vk::CommandBuffer) {
            std::unique_lock lock{build_mutex};
            build_condvar.wait(lock, [this] { return is_built.load(std::memory_order::relaxed); });
        });
    }
    // Skip re-binding when the scheduler already has this pipeline bound
    const bool bind_pipeline{scheduler.UpdateGraphicsPipeline(this)};
    const void* const descriptor_data{update_descriptor_queue.UpdateData()};
    scheduler.Record([this, descriptor_data, bind_pipeline](vk::CommandBuffer cmdbuf) {
        if (bind_pipeline) {
            cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
        }
        // No descriptor set layout means there is nothing to bind
        if (!descriptor_set_layout) {
            return;
        }
        if (uses_push_descriptor) {
            // Push descriptors write directly into the command buffer
            cmdbuf.PushDescriptorSetWithTemplateKHR(*descriptor_update_template, *pipeline_layout,
                                                    0, descriptor_data);
        } else {
            // Classic path: commit a set from the allocator, fill it via the
            // update template, then bind it
            const VkDescriptorSet descriptor_set{descriptor_allocator.Commit()};
            const vk::Device& dev{device.GetLogical()};
            dev.UpdateDescriptorSet(descriptor_set, *descriptor_update_template, descriptor_data);
            cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline_layout, 0,
                                      descriptor_set, nullptr);
        }
    });
}
|
|
|
|
|
2021-04-25 06:04:49 +02:00
|
|
|
void GraphicsPipeline::MakePipeline(VkRenderPass render_pass) {
|
2021-03-19 23:28:31 +01:00
|
|
|
FixedPipelineState::DynamicState dynamic{};
|
2021-06-15 03:02:42 +02:00
|
|
|
if (!key.state.extended_dynamic_state) {
|
2021-04-24 23:27:25 +02:00
|
|
|
dynamic = key.state.dynamic_state;
|
2021-03-19 23:28:31 +01:00
|
|
|
}
|
|
|
|
static_vector<VkVertexInputBindingDescription, 32> vertex_bindings;
|
|
|
|
static_vector<VkVertexInputBindingDivisorDescriptionEXT, 32> vertex_binding_divisors;
|
2021-06-12 10:07:52 +02:00
|
|
|
static_vector<VkVertexInputAttributeDescription, 32> vertex_attributes;
|
|
|
|
if (key.state.dynamic_vertex_input) {
|
|
|
|
const auto& input_attributes = stage_infos[0].input_generics;
|
|
|
|
for (size_t index = 0; index < key.state.attributes.size(); ++index) {
|
|
|
|
const u32 type = key.state.DynamicAttributeType(index);
|
|
|
|
if (!input_attributes[index].used || type == 0) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
vertex_attributes.push_back({
|
|
|
|
.location = static_cast<u32>(index),
|
|
|
|
.binding = 0,
|
|
|
|
.format = type == 1 ? VK_FORMAT_R32_SFLOAT
|
|
|
|
: type == 2 ? VK_FORMAT_R32_SINT
|
|
|
|
: VK_FORMAT_R32_UINT,
|
|
|
|
.offset = 0,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
if (!vertex_attributes.empty()) {
|
|
|
|
vertex_bindings.push_back({
|
|
|
|
.binding = 0,
|
|
|
|
.stride = 4,
|
|
|
|
.inputRate = VK_VERTEX_INPUT_RATE_VERTEX,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
for (size_t index = 0; index < Maxwell::NumVertexArrays; ++index) {
|
|
|
|
const bool instanced = key.state.binding_divisors[index] != 0;
|
|
|
|
const auto rate =
|
|
|
|
instanced ? VK_VERTEX_INPUT_RATE_INSTANCE : VK_VERTEX_INPUT_RATE_VERTEX;
|
|
|
|
vertex_bindings.push_back({
|
2021-03-19 23:28:31 +01:00
|
|
|
.binding = static_cast<u32>(index),
|
2021-06-12 10:07:52 +02:00
|
|
|
.stride = dynamic.vertex_strides[index],
|
|
|
|
.inputRate = rate,
|
2021-03-19 23:28:31 +01:00
|
|
|
});
|
2021-06-12 10:07:52 +02:00
|
|
|
if (instanced) {
|
|
|
|
vertex_binding_divisors.push_back({
|
|
|
|
.binding = static_cast<u32>(index),
|
|
|
|
.divisor = key.state.binding_divisors[index],
|
|
|
|
});
|
|
|
|
}
|
2021-03-19 23:28:31 +01:00
|
|
|
}
|
2021-06-12 10:07:52 +02:00
|
|
|
const auto& input_attributes = stage_infos[0].input_generics;
|
|
|
|
for (size_t index = 0; index < key.state.attributes.size(); ++index) {
|
|
|
|
const auto& attribute = key.state.attributes[index];
|
|
|
|
if (!attribute.enabled || !input_attributes[index].used) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
vertex_attributes.push_back({
|
|
|
|
.location = static_cast<u32>(index),
|
|
|
|
.binding = attribute.buffer,
|
|
|
|
.format = MaxwellToVK::VertexFormat(attribute.Type(), attribute.Size()),
|
|
|
|
.offset = attribute.offset,
|
|
|
|
});
|
2021-03-19 23:28:31 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
VkPipelineVertexInputStateCreateInfo vertex_input_ci{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
|
|
|
.vertexBindingDescriptionCount = static_cast<u32>(vertex_bindings.size()),
|
|
|
|
.pVertexBindingDescriptions = vertex_bindings.data(),
|
|
|
|
.vertexAttributeDescriptionCount = static_cast<u32>(vertex_attributes.size()),
|
|
|
|
.pVertexAttributeDescriptions = vertex_attributes.data(),
|
|
|
|
};
|
|
|
|
const VkPipelineVertexInputDivisorStateCreateInfoEXT input_divisor_ci{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.vertexBindingDivisorCount = static_cast<u32>(vertex_binding_divisors.size()),
|
|
|
|
.pVertexBindingDivisors = vertex_binding_divisors.data(),
|
|
|
|
};
|
|
|
|
if (!vertex_binding_divisors.empty()) {
|
|
|
|
vertex_input_ci.pNext = &input_divisor_ci;
|
|
|
|
}
|
2021-04-24 23:27:25 +02:00
|
|
|
auto input_assembly_topology = MaxwellToVK::PrimitiveTopology(device, key.state.topology);
|
2021-04-23 12:33:21 +02:00
|
|
|
if (input_assembly_topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST) {
|
|
|
|
if (!spv_modules[1] && !spv_modules[2]) {
|
|
|
|
LOG_WARNING(Render_Vulkan, "Patch topology used without tessellation, using points");
|
|
|
|
input_assembly_topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
|
|
|
|
}
|
|
|
|
}
|
2021-03-19 23:28:31 +01:00
|
|
|
const VkPipelineInputAssemblyStateCreateInfo input_assembly_ci{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
2021-04-23 12:33:21 +02:00
|
|
|
.topology = input_assembly_topology,
|
2021-04-24 23:27:25 +02:00
|
|
|
.primitiveRestartEnable = key.state.primitive_restart_enable != 0 &&
|
2021-03-19 23:28:31 +01:00
|
|
|
SupportsPrimitiveRestart(input_assembly_topology),
|
|
|
|
};
|
|
|
|
const VkPipelineTessellationStateCreateInfo tessellation_ci{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
2021-04-24 23:27:25 +02:00
|
|
|
.patchControlPoints = key.state.patch_control_points_minus_one.Value() + 1,
|
2021-03-19 23:28:31 +01:00
|
|
|
};
|
2021-06-12 10:07:52 +02:00
|
|
|
|
2021-03-19 23:28:31 +01:00
|
|
|
std::array<VkViewportSwizzleNV, Maxwell::NumViewports> swizzles;
|
2021-04-24 23:27:25 +02:00
|
|
|
std::ranges::transform(key.state.viewport_swizzles, swizzles.begin(), UnpackViewportSwizzle);
|
2021-06-12 10:07:52 +02:00
|
|
|
const VkPipelineViewportSwizzleStateCreateInfoNV swizzle_ci{
|
2021-03-19 23:28:31 +01:00
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
|
|
|
.viewportCount = Maxwell::NumViewports,
|
|
|
|
.pViewportSwizzles = swizzles.data(),
|
|
|
|
};
|
2021-06-12 10:07:52 +02:00
|
|
|
const VkPipelineViewportStateCreateInfo viewport_ci{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
|
|
|
|
.pNext = device.IsNvViewportSwizzleSupported() ? &swizzle_ci : nullptr,
|
|
|
|
.flags = 0,
|
|
|
|
.viewportCount = Maxwell::NumViewports,
|
|
|
|
.pViewports = nullptr,
|
|
|
|
.scissorCount = Maxwell::NumViewports,
|
|
|
|
.pScissors = nullptr,
|
|
|
|
};
|
2021-03-19 23:28:31 +01:00
|
|
|
|
2021-06-23 08:32:41 +02:00
|
|
|
VkPipelineRasterizationStateCreateInfo rasterization_ci{
|
2021-03-19 23:28:31 +01:00
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
|
2021-06-23 08:32:41 +02:00
|
|
|
.pNext = nullptr,
|
2021-03-19 23:28:31 +01:00
|
|
|
.flags = 0,
|
|
|
|
.depthClampEnable =
|
2021-04-24 23:27:25 +02:00
|
|
|
static_cast<VkBool32>(key.state.depth_clamp_disabled == 0 ? VK_TRUE : VK_FALSE),
|
2021-03-19 23:28:31 +01:00
|
|
|
.rasterizerDiscardEnable =
|
2021-04-24 23:27:25 +02:00
|
|
|
static_cast<VkBool32>(key.state.rasterize_enable == 0 ? VK_TRUE : VK_FALSE),
|
2021-04-16 03:46:11 +02:00
|
|
|
.polygonMode =
|
2021-04-24 23:27:25 +02:00
|
|
|
MaxwellToVK::PolygonMode(FixedPipelineState::UnpackPolygonMode(key.state.polygon_mode)),
|
2021-03-19 23:28:31 +01:00
|
|
|
.cullMode = static_cast<VkCullModeFlags>(
|
|
|
|
dynamic.cull_enable ? MaxwellToVK::CullFace(dynamic.CullFace()) : VK_CULL_MODE_NONE),
|
|
|
|
.frontFace = MaxwellToVK::FrontFace(dynamic.FrontFace()),
|
2021-04-24 23:27:25 +02:00
|
|
|
.depthBiasEnable = key.state.depth_bias_enable,
|
2021-03-19 23:28:31 +01:00
|
|
|
.depthBiasConstantFactor = 0.0f,
|
|
|
|
.depthBiasClamp = 0.0f,
|
|
|
|
.depthBiasSlopeFactor = 0.0f,
|
|
|
|
.lineWidth = 1.0f,
|
|
|
|
};
|
2021-06-23 08:32:41 +02:00
|
|
|
VkPipelineRasterizationConservativeStateCreateInfoEXT conservative_raster{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
|
|
|
.conservativeRasterizationMode = key.state.conservative_raster_enable != 0
|
|
|
|
? VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT
|
|
|
|
: VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
|
|
|
|
.extraPrimitiveOverestimationSize = 0.0f,
|
|
|
|
};
|
|
|
|
VkPipelineRasterizationProvokingVertexStateCreateInfoEXT provoking_vertex{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.provokingVertexMode = key.state.provoking_vertex_last != 0
|
|
|
|
? VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT
|
|
|
|
: VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT,
|
|
|
|
};
|
|
|
|
if (device.IsExtConservativeRasterizationSupported()) {
|
|
|
|
conservative_raster.pNext = std::exchange(rasterization_ci.pNext, &conservative_raster);
|
|
|
|
}
|
|
|
|
if (device.IsExtProvokingVertexSupported()) {
|
|
|
|
provoking_vertex.pNext = std::exchange(rasterization_ci.pNext, &provoking_vertex);
|
|
|
|
}
|
2021-06-12 02:52:04 +02:00
|
|
|
|
2021-03-19 23:28:31 +01:00
|
|
|
const VkPipelineMultisampleStateCreateInfo multisample_ci{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
2021-04-24 23:27:25 +02:00
|
|
|
.rasterizationSamples = MaxwellToVK::MsaaMode(key.state.msaa_mode),
|
2021-03-19 23:28:31 +01:00
|
|
|
.sampleShadingEnable = VK_FALSE,
|
|
|
|
.minSampleShading = 0.0f,
|
|
|
|
.pSampleMask = nullptr,
|
|
|
|
.alphaToCoverageEnable = VK_FALSE,
|
|
|
|
.alphaToOneEnable = VK_FALSE,
|
|
|
|
};
|
|
|
|
const VkPipelineDepthStencilStateCreateInfo depth_stencil_ci{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
|
|
|
.depthTestEnable = dynamic.depth_test_enable,
|
|
|
|
.depthWriteEnable = dynamic.depth_write_enable,
|
|
|
|
.depthCompareOp = dynamic.depth_test_enable
|
|
|
|
? MaxwellToVK::ComparisonOp(dynamic.DepthTestFunc())
|
|
|
|
: VK_COMPARE_OP_ALWAYS,
|
2021-05-31 01:43:47 +02:00
|
|
|
.depthBoundsTestEnable = dynamic.depth_bounds_enable && device.IsDepthBoundsSupported(),
|
2021-03-19 23:28:31 +01:00
|
|
|
.stencilTestEnable = dynamic.stencil_enable,
|
|
|
|
.front = GetStencilFaceState(dynamic.front),
|
|
|
|
.back = GetStencilFaceState(dynamic.back),
|
|
|
|
.minDepthBounds = 0.0f,
|
|
|
|
.maxDepthBounds = 0.0f,
|
|
|
|
};
|
2021-05-31 01:43:47 +02:00
|
|
|
if (dynamic.depth_bounds_enable && !device.IsDepthBoundsSupported()) {
|
|
|
|
LOG_WARNING(Render_Vulkan, "Depth bounds is enabled but not supported");
|
|
|
|
}
|
2021-03-19 23:28:31 +01:00
|
|
|
static_vector<VkPipelineColorBlendAttachmentState, Maxwell::NumRenderTargets> cb_attachments;
|
2021-04-24 23:27:25 +02:00
|
|
|
const size_t num_attachments{NumAttachments(key.state)};
|
2021-04-23 02:05:10 +02:00
|
|
|
for (size_t index = 0; index < num_attachments; ++index) {
|
2021-03-19 23:28:31 +01:00
|
|
|
static constexpr std::array mask_table{
|
|
|
|
VK_COLOR_COMPONENT_R_BIT,
|
|
|
|
VK_COLOR_COMPONENT_G_BIT,
|
|
|
|
VK_COLOR_COMPONENT_B_BIT,
|
|
|
|
VK_COLOR_COMPONENT_A_BIT,
|
|
|
|
};
|
2021-04-24 23:27:25 +02:00
|
|
|
const auto& blend{key.state.attachments[index]};
|
2021-03-19 23:28:31 +01:00
|
|
|
const std::array mask{blend.Mask()};
|
|
|
|
VkColorComponentFlags write_mask{};
|
|
|
|
for (size_t i = 0; i < mask_table.size(); ++i) {
|
|
|
|
write_mask |= mask[i] ? mask_table[i] : 0;
|
|
|
|
}
|
|
|
|
cb_attachments.push_back({
|
|
|
|
.blendEnable = blend.enable != 0,
|
|
|
|
.srcColorBlendFactor = MaxwellToVK::BlendFactor(blend.SourceRGBFactor()),
|
|
|
|
.dstColorBlendFactor = MaxwellToVK::BlendFactor(blend.DestRGBFactor()),
|
|
|
|
.colorBlendOp = MaxwellToVK::BlendEquation(blend.EquationRGB()),
|
|
|
|
.srcAlphaBlendFactor = MaxwellToVK::BlendFactor(blend.SourceAlphaFactor()),
|
|
|
|
.dstAlphaBlendFactor = MaxwellToVK::BlendFactor(blend.DestAlphaFactor()),
|
|
|
|
.alphaBlendOp = MaxwellToVK::BlendEquation(blend.EquationAlpha()),
|
|
|
|
.colorWriteMask = write_mask,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
const VkPipelineColorBlendStateCreateInfo color_blend_ci{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
|
|
|
.logicOpEnable = VK_FALSE,
|
|
|
|
.logicOp = VK_LOGIC_OP_COPY,
|
|
|
|
.attachmentCount = static_cast<u32>(cb_attachments.size()),
|
|
|
|
.pAttachments = cb_attachments.data(),
|
|
|
|
.blendConstants = {},
|
|
|
|
};
|
2021-06-12 10:07:52 +02:00
|
|
|
static_vector<VkDynamicState, 18> dynamic_states{
|
2021-03-19 23:28:31 +01:00
|
|
|
VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR,
|
|
|
|
VK_DYNAMIC_STATE_DEPTH_BIAS, VK_DYNAMIC_STATE_BLEND_CONSTANTS,
|
|
|
|
VK_DYNAMIC_STATE_DEPTH_BOUNDS, VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
|
|
|
|
VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, VK_DYNAMIC_STATE_STENCIL_REFERENCE,
|
|
|
|
};
|
2021-06-12 10:07:52 +02:00
|
|
|
if (key.state.extended_dynamic_state) {
|
2021-03-19 23:28:31 +01:00
|
|
|
static constexpr std::array extended{
|
|
|
|
VK_DYNAMIC_STATE_CULL_MODE_EXT,
|
|
|
|
VK_DYNAMIC_STATE_FRONT_FACE_EXT,
|
|
|
|
VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
|
|
|
|
VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
|
|
|
|
VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
|
|
|
|
VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
|
|
|
|
VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
|
|
|
|
VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
|
|
|
|
VK_DYNAMIC_STATE_STENCIL_OP_EXT,
|
|
|
|
};
|
2021-06-12 10:07:52 +02:00
|
|
|
if (key.state.dynamic_vertex_input) {
|
|
|
|
dynamic_states.push_back(VK_DYNAMIC_STATE_VERTEX_INPUT_EXT);
|
|
|
|
}
|
2021-03-19 23:28:31 +01:00
|
|
|
dynamic_states.insert(dynamic_states.end(), extended.begin(), extended.end());
|
|
|
|
}
|
|
|
|
const VkPipelineDynamicStateCreateInfo dynamic_state_ci{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
|
|
|
.dynamicStateCount = static_cast<u32>(dynamic_states.size()),
|
|
|
|
.pDynamicStates = dynamic_states.data(),
|
|
|
|
};
|
2021-04-06 04:25:22 +02:00
|
|
|
[[maybe_unused]] const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT subgroup_size_ci{
|
2021-03-19 23:28:31 +01:00
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.requiredSubgroupSize = GuestWarpSize,
|
|
|
|
};
|
|
|
|
static_vector<VkPipelineShaderStageCreateInfo, 5> shader_stages;
|
|
|
|
for (size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) {
|
|
|
|
if (!spv_modules[stage]) {
|
|
|
|
continue;
|
|
|
|
}
|
2021-04-06 04:25:22 +02:00
|
|
|
[[maybe_unused]] auto& stage_ci =
|
|
|
|
shader_stages.emplace_back(VkPipelineShaderStageCreateInfo{
|
|
|
|
.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
2021-06-23 07:41:00 +02:00
|
|
|
.stage = MaxwellToVK::ShaderStage(Shader::StageFromIndex(stage)),
|
2021-04-06 04:25:22 +02:00
|
|
|
.module = *spv_modules[stage],
|
|
|
|
.pName = "main",
|
|
|
|
.pSpecializationInfo = nullptr,
|
|
|
|
});
|
2021-03-19 23:28:31 +01:00
|
|
|
/*
|
|
|
|
if (program[stage]->entries.uses_warps && device.IsGuestWarpSizeSupported(stage_ci.stage)) {
|
|
|
|
stage_ci.pNext = &subgroup_size_ci;
|
|
|
|
}
|
|
|
|
*/
|
|
|
|
}
|
|
|
|
pipeline = device.GetLogical().CreateGraphicsPipeline({
|
|
|
|
.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
|
|
|
|
.pNext = nullptr,
|
|
|
|
.flags = 0,
|
|
|
|
.stageCount = static_cast<u32>(shader_stages.size()),
|
|
|
|
.pStages = shader_stages.data(),
|
|
|
|
.pVertexInputState = &vertex_input_ci,
|
|
|
|
.pInputAssemblyState = &input_assembly_ci,
|
|
|
|
.pTessellationState = &tessellation_ci,
|
|
|
|
.pViewportState = &viewport_ci,
|
|
|
|
.pRasterizationState = &rasterization_ci,
|
|
|
|
.pMultisampleState = &multisample_ci,
|
|
|
|
.pDepthStencilState = &depth_stencil_ci,
|
|
|
|
.pColorBlendState = &color_blend_ci,
|
|
|
|
.pDynamicState = &dynamic_state_ci,
|
|
|
|
.layout = *pipeline_layout,
|
|
|
|
.renderPass = render_pass,
|
|
|
|
.subpass = 0,
|
|
|
|
.basePipelineHandle = nullptr,
|
|
|
|
.basePipelineIndex = 0,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
2021-04-24 23:28:02 +02:00
|
|
|
void GraphicsPipeline::Validate() {
|
|
|
|
size_t num_images{};
|
|
|
|
for (const auto& info : stage_infos) {
|
|
|
|
for (const auto& desc : info.texture_buffer_descriptors) {
|
|
|
|
num_images += desc.count;
|
|
|
|
}
|
|
|
|
for (const auto& desc : info.image_buffer_descriptors) {
|
|
|
|
num_images += desc.count;
|
|
|
|
}
|
|
|
|
for (const auto& desc : info.texture_descriptors) {
|
|
|
|
num_images += desc.count;
|
|
|
|
}
|
|
|
|
for (const auto& desc : info.image_descriptors) {
|
|
|
|
num_images += desc.count;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
ASSERT(num_images <= MAX_IMAGE_ELEMENTS);
|
|
|
|
}
|
|
|
|
|
2021-03-19 23:28:31 +01:00
|
|
|
} // namespace Vulkan
|