2021-03-19 23:28:31 +01:00
|
|
|
// Copyright 2021 yuzu Emulator Project
|
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
|
|
|
#include <algorithm>
|
|
|
|
#include <span>
|
|
|
|
|
|
|
|
#include <boost/container/small_vector.hpp>
|
|
|
|
#include <boost/container/static_vector.hpp>
|
|
|
|
|
|
|
|
#include "common/bit_field.h"
|
|
|
|
#include "video_core/renderer_vulkan/maxwell_to_vk.h"
|
|
|
|
#include "video_core/renderer_vulkan/pipeline_helper.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_buffer_cache.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_graphics_pipeline.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_render_pass_cache.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_scheduler.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_texture_cache.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_update_descriptor.h"
|
|
|
|
#include "video_core/vulkan_common/vulkan_device.h"
|
|
|
|
|
|
|
|
namespace Vulkan {
|
|
|
|
namespace {
|
|
|
|
using boost::container::small_vector;
|
|
|
|
using boost::container::static_vector;
|
|
|
|
using VideoCore::Surface::PixelFormat;
|
|
|
|
using VideoCore::Surface::PixelFormatFromDepthFormat;
|
|
|
|
using VideoCore::Surface::PixelFormatFromRenderTargetFormat;
|
|
|
|
|
2021-04-01 08:15:28 +02:00
|
|
|
/// Builds a descriptor layout description covering all graphics shader stages.
/// Element N of @p infos is bound to the Vulkan stage bit with the same index
/// (vertex, tess control, tess eval, geometry, fragment).
DescriptorLayoutBuilder MakeBuilder(const Device& device, std::span<const Shader::Info> infos) {
    static constexpr std::array stage_bits{
        VK_SHADER_STAGE_VERTEX_BIT,
        VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
        VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
        VK_SHADER_STAGE_GEOMETRY_BIT,
        VK_SHADER_STAGE_FRAGMENT_BIT,
    };
    DescriptorLayoutBuilder builder{device.GetLogical()};
    size_t stage{};
    for (const Shader::Info& info : infos) {
        // .at() deliberately throws if more stages than graphics supports are passed
        builder.Add(info, stage_bits.at(stage));
        ++stage;
    }
    return builder;
}
|
|
|
|
|
|
|
|
template <class StencilFace>
|
|
|
|
VkStencilOpState GetStencilFaceState(const StencilFace& face) {
|
|
|
|
return {
|
|
|
|
.failOp = MaxwellToVK::StencilOp(face.ActionStencilFail()),
|
|
|
|
.passOp = MaxwellToVK::StencilOp(face.ActionDepthPass()),
|
|
|
|
.depthFailOp = MaxwellToVK::StencilOp(face.ActionDepthFail()),
|
|
|
|
.compareOp = MaxwellToVK::ComparisonOp(face.TestFunc()),
|
|
|
|
.compareMask = 0,
|
|
|
|
.writeMask = 0,
|
|
|
|
.reference = 0,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns true when @p topology accepts primitiveRestartEnable.
/// List topologies and patch lists do not support primitive restart in Vulkan.
bool SupportsPrimitiveRestart(VkPrimitiveTopology topology) {
    switch (topology) {
    case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
    case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
    case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
    case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
    case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
    case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
        // VK_PRIMITIVE_TOPOLOGY_QUAD_LIST_EXT would also belong here when used
        return false;
    default:
        return true;
    }
}
|
|
|
|
|
|
|
|
/// Decodes a packed 16-bit viewport swizzle (3 bits per component, 4-bit
/// strides) into the NV viewport swizzle structure.
VkViewportSwizzleNV UnpackViewportSwizzle(u16 swizzle) {
    union Swizzle {
        u32 raw;
        BitField<0, 3, Maxwell::ViewportSwizzle> x;
        BitField<4, 3, Maxwell::ViewportSwizzle> y;
        BitField<8, 3, Maxwell::ViewportSwizzle> z;
        BitField<12, 3, Maxwell::ViewportSwizzle> w;
    };
    const Swizzle fields{swizzle};
    VkViewportSwizzleNV result{};
    result.x = MaxwellToVK::ViewportSwizzle(fields.x);
    result.y = MaxwellToVK::ViewportSwizzle(fields.y);
    result.z = MaxwellToVK::ViewportSwizzle(fields.z);
    result.w = MaxwellToVK::ViewportSwizzle(fields.w);
    return result;
}
|
|
|
|
|
|
|
|
/// Decodes a packed render target format byte into a PixelFormat.
/// The NONE sentinel maps to PixelFormat::Invalid.
PixelFormat DecodeFormat(u8 encoded_format) {
    const auto format{static_cast<Tegra::RenderTargetFormat>(encoded_format)};
    return format == Tegra::RenderTargetFormat::NONE
               ? PixelFormat::Invalid
               : PixelFormatFromRenderTargetFormat(format);
}
|
|
|
|
|
|
|
|
/// Builds the render pass cache key (color formats, depth format, sample
/// count) from the fixed pipeline state.
RenderPassKey MakeRenderPassKey(const FixedPipelineState& state) {
    RenderPassKey key;
    std::ranges::transform(state.color_formats, key.color_formats.begin(), DecodeFormat);
    if (state.depth_enabled == 0) {
        key.depth_format = PixelFormat::Invalid;
    } else {
        key.depth_format =
            PixelFormatFromDepthFormat(static_cast<Tegra::DepthFormat>(state.depth_format.Value()));
    }
    key.samples = MaxwellToVK::MsaaMode(state.msaa_mode);
    return key;
}
|
2021-04-23 02:05:10 +02:00
|
|
|
|
|
|
|
/// Returns the number of color attachments in use: one past the highest
/// render target index whose format is not NONE, or zero when none are bound.
size_t NumAttachments(const FixedPipelineState& state) {
    // Scan from the top so the first live target immediately gives the count
    for (size_t index = Maxwell::NumRenderTargets; index > 0; --index) {
        const auto format{static_cast<Tegra::RenderTargetFormat>(state.color_formats[index - 1])};
        if (format != Tegra::RenderTargetFormat::NONE) {
            return index;
        }
    }
    return 0;
}
|
2021-03-19 23:28:31 +01:00
|
|
|
} // Anonymous namespace
|
|
|
|
|
|
|
|
// Constructs the pipeline wrapper and kicks off (possibly asynchronous)
// creation of the Vulkan pipeline object. The descriptor set layout and
// allocator are created synchronously; layout, update template and the
// VkPipeline itself are built in `func`, which runs either on the given
// worker thread or inline when no worker is provided.
GraphicsPipeline::GraphicsPipeline(Tegra::Engines::Maxwell3D& maxwell3d_,
                                   Tegra::MemoryManager& gpu_memory_, VKScheduler& scheduler_,
                                   BufferCache& buffer_cache_, TextureCache& texture_cache_,
                                   const Device& device, VKDescriptorPool& descriptor_pool,
                                   VKUpdateDescriptorQueue& update_descriptor_queue_,
                                   Common::ThreadWorker* worker_thread,
                                   RenderPassCache& render_pass_cache,
                                   const GraphicsPipelineCacheKey& key_,
                                   std::array<vk::ShaderModule, NUM_STAGES> stages,
                                   const std::array<const Shader::Info*, NUM_STAGES>& infos)
    : key{key_}, maxwell3d{maxwell3d_}, gpu_memory{gpu_memory_}, texture_cache{texture_cache_},
      buffer_cache{buffer_cache_}, scheduler{scheduler_},
      update_descriptor_queue{update_descriptor_queue_}, spv_modules{std::move(stages)} {
    // Null info pointers (unused stages) become default-constructed infos
    std::ranges::transform(infos, stage_infos.begin(),
                           [](const Shader::Info* info) { return info ? *info : Shader::Info{}; });

    DescriptorLayoutBuilder builder{MakeBuilder(device, stage_infos)};
    descriptor_set_layout = builder.CreateDescriptorSetLayout();
    descriptor_allocator = DescriptorAllocator(descriptor_pool, *descriptor_set_layout);

    // NOTE: `builder` is captured by copy because this lambda may outlive the
    // constructor when queued on the worker thread.
    auto func{[this, &device, &render_pass_cache, builder] {
        const VkDescriptorSetLayout set_layout{*descriptor_set_layout};
        pipeline_layout = builder.CreatePipelineLayout(set_layout);
        descriptor_update_template = builder.CreateTemplate(set_layout, *pipeline_layout);

        const VkRenderPass render_pass{render_pass_cache.Get(MakeRenderPassKey(key.state))};
        MakePipeline(device, render_pass);

        // Signal Configure() waiters that the pipeline object now exists
        std::lock_guard lock{build_mutex};
        is_built = true;
        build_condvar.notify_one();
    }};
    if (worker_thread) {
        worker_thread->QueueWork(std::move(func));
    } else {
        func();
    }
}
|
|
|
|
|
2021-04-24 23:27:25 +02:00
|
|
|
/// Registers a pipeline this one is known to transition to, recording its
/// cache key alongside the pointer so lookups can compare keys directly.
void GraphicsPipeline::AddTransition(GraphicsPipeline* transition) {
    transition_keys.emplace_back(transition->key);
    transitions.emplace_back(transition);
}
|
|
|
|
|
2021-03-19 23:28:31 +01:00
|
|
|
// Binds all state required to draw with this pipeline: uniform/storage
// buffers, texture/image descriptors, render targets and the descriptor set.
// Runs in two passes over the shader stage infos: the first collects image
// view indices (and samplers) from guest constant buffers, the second binds
// the resolved image views and texture buffers.
void GraphicsPipeline::Configure(bool is_indexed) {
    static constexpr size_t max_images_elements = 64;
    std::array<ImageId, max_images_elements> image_view_ids;
    static_vector<u32, max_images_elements> image_view_indices;
    static_vector<VkSampler, max_images_elements> samplers;

    texture_cache.SynchronizeGraphicsDescriptors();

    const auto& regs{maxwell3d.regs};
    const bool via_header_index{regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex};
    // Pass 1: gather image view indices and samplers from guest memory
    for (size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) {
        const Shader::Info& info{stage_infos[stage]};
        buffer_cache.SetEnabledUniformBuffers(stage, info.constant_buffer_mask);
        buffer_cache.UnbindGraphicsStorageBuffers(stage);
        size_t ssbo_index{};
        for (const auto& desc : info.storage_buffers_descriptors) {
            ASSERT(desc.count == 1);
            buffer_cache.BindGraphicsStorageBuffer(stage, ssbo_index, desc.cbuf_index,
                                                   desc.cbuf_offset, desc.is_written);
            ++ssbo_index;
        }
        const auto& cbufs{maxwell3d.state.shader_stages[stage].const_buffers};
        // Reads the raw texture handle for element `index` of `desc` from the
        // stage's constant buffers. Texture descriptors may combine a second
        // constant buffer word (has_secondary) by OR-ing both reads.
        const auto read_handle{[&](const auto& desc, u32 index) {
            ASSERT(cbufs[desc.cbuf_index].enabled);
            const u32 index_offset{index << desc.size_shift};
            const u32 offset{desc.cbuf_offset + index_offset};
            const GPUVAddr addr{cbufs[desc.cbuf_index].address + offset};
            if constexpr (std::is_same_v<decltype(desc), const Shader::TextureDescriptor&> ||
                          std::is_same_v<decltype(desc), const Shader::TextureBufferDescriptor&>) {
                if (desc.has_secondary) {
                    ASSERT(cbufs[desc.secondary_cbuf_index].enabled);
                    const u32 second_offset{desc.secondary_cbuf_offset + index_offset};
                    const GPUVAddr separate_addr{cbufs[desc.secondary_cbuf_index].address +
                                                 second_offset};
                    const u32 lhs_raw{gpu_memory.Read<u32>(addr)};
                    const u32 rhs_raw{gpu_memory.Read<u32>(separate_addr)};
                    const u32 raw{lhs_raw | rhs_raw};
                    return TextureHandle{raw, via_header_index};
                }
            }
            return TextureHandle{gpu_memory.Read<u32>(addr), via_header_index};
        }};
        const auto add_image{[&](const auto& desc) {
            for (u32 index = 0; index < desc.count; ++index) {
                const TextureHandle handle{read_handle(desc, index)};
                image_view_indices.push_back(handle.image);
            }
        }};
        std::ranges::for_each(info.texture_buffer_descriptors, add_image);
        std::ranges::for_each(info.image_buffer_descriptors, add_image);
        // Texture descriptors additionally need their sampler resolved
        for (const auto& desc : info.texture_descriptors) {
            for (u32 index = 0; index < desc.count; ++index) {
                const TextureHandle handle{read_handle(desc, index)};
                image_view_indices.push_back(handle.image);

                Sampler* const sampler{texture_cache.GetGraphicsSampler(handle.sampler)};
                samplers.push_back(sampler->Handle());
            }
        }
        std::ranges::for_each(info.image_descriptors, add_image);
    }
    const std::span indices_span(image_view_indices.data(), image_view_indices.size());
    // Resolve all gathered indices to image view IDs in one batch
    texture_cache.FillGraphicsImageViews(indices_span, image_view_ids);

    // Pass 2: bind texture buffers, walking image_view_ids in the same order
    // the first pass pushed indices (buffers first, then textures/images)
    ImageId* texture_buffer_index{image_view_ids.data()};
    for (size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) {
        size_t index{};
        const auto add_buffer{[&](const auto& desc) {
            for (u32 i = 0; i < desc.count; ++i) {
                bool is_written{false};
                if constexpr (std::is_same_v<decltype(desc),
                                             const Shader::ImageBufferDescriptor&>) {
                    is_written = desc.is_written;
                }
                ImageView& image_view{texture_cache.GetImageView(*texture_buffer_index)};
                buffer_cache.BindGraphicsTextureBuffer(stage, index, image_view.GpuAddr(),
                                                       image_view.BufferSize(), image_view.format,
                                                       is_written);
                ++index;
                ++texture_buffer_index;
            }
        }};
        const Shader::Info& info{stage_infos[stage]};
        buffer_cache.UnbindGraphicsTextureBuffers(stage);
        std::ranges::for_each(info.texture_buffer_descriptors, add_buffer);
        std::ranges::for_each(info.image_buffer_descriptors, add_buffer);
        // Skip over this stage's texture/image view IDs; they are consumed by
        // PushImageDescriptors below, not bound as buffers
        for (const auto& desc : info.texture_descriptors) {
            texture_buffer_index += desc.count;
        }
        for (const auto& desc : info.image_descriptors) {
            texture_buffer_index += desc.count;
        }
    }
    buffer_cache.UpdateGraphicsBuffers(is_indexed);

    buffer_cache.BindHostGeometryBuffers(is_indexed);

    update_descriptor_queue.Acquire();

    const VkSampler* samplers_it{samplers.data()};
    const ImageId* views_it{image_view_ids.data()};
    for (size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) {
        buffer_cache.BindHostStageBuffers(stage);
        PushImageDescriptors(stage_infos[stage], samplers_it, views_it, texture_cache,
                             update_descriptor_queue);
    }
    texture_cache.UpdateRenderTargets(false);
    scheduler.RequestRenderpass(texture_cache.GetFramebuffer());

    if (!is_built.load(std::memory_order::relaxed)) {
        // Wait for the pipeline to be built
        scheduler.Record([this](vk::CommandBuffer) {
            std::unique_lock lock{build_mutex};
            build_condvar.wait(lock, [this] { return is_built.load(std::memory_order::relaxed); });
        });
    }
    if (scheduler.UpdateGraphicsPipeline(this)) {
        scheduler.Record([this](vk::CommandBuffer cmdbuf) {
            cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
        });
    }
    // No descriptors to bind when the layout is empty
    if (!descriptor_set_layout) {
        return;
    }
    const VkDescriptorSet descriptor_set{descriptor_allocator.Commit()};
    update_descriptor_queue.Send(descriptor_update_template.address(), descriptor_set);

    scheduler.Record([this, descriptor_set](vk::CommandBuffer cmdbuf) {
        cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline_layout, 0,
                                  descriptor_set, nullptr);
    });
}
|
|
|
|
|
2021-04-01 06:36:22 +02:00
|
|
|
// Assembles the full VkGraphicsPipelineCreateInfo from the cached fixed
// pipeline state (key.state) plus dynamic state, and creates the pipeline.
// When VK_EXT_extended_dynamic_state is available most dynamic state is set
// at draw time, so `dynamic` is left default-constructed here.
void GraphicsPipeline::MakePipeline(const Device& device, VkRenderPass render_pass) {
    FixedPipelineState::DynamicState dynamic{};
    if (!device.IsExtExtendedDynamicStateSupported()) {
        dynamic = key.state.dynamic_state;
    }
    static_vector<VkVertexInputBindingDescription, 32> vertex_bindings;
    static_vector<VkVertexInputBindingDivisorDescriptionEXT, 32> vertex_binding_divisors;
    for (size_t index = 0; index < Maxwell::NumVertexArrays; ++index) {
        // A non-zero divisor means this binding advances per instance
        const bool instanced = key.state.binding_divisors[index] != 0;
        const auto rate = instanced ? VK_VERTEX_INPUT_RATE_INSTANCE : VK_VERTEX_INPUT_RATE_VERTEX;
        vertex_bindings.push_back({
            .binding = static_cast<u32>(index),
            .stride = dynamic.vertex_strides[index],
            .inputRate = rate,
        });
        if (instanced) {
            vertex_binding_divisors.push_back({
                .binding = static_cast<u32>(index),
                .divisor = key.state.binding_divisors[index],
            });
        }
    }
    static_vector<VkVertexInputAttributeDescription, 32> vertex_attributes;
    // Only emit attributes that are enabled AND consumed by the vertex shader
    const auto& input_attributes = stage_infos[0].input_generics;
    for (size_t index = 0; index < key.state.attributes.size(); ++index) {
        const auto& attribute = key.state.attributes[index];
        if (!attribute.enabled || !input_attributes[index].used) {
            continue;
        }
        vertex_attributes.push_back({
            .location = static_cast<u32>(index),
            .binding = attribute.buffer,
            .format = MaxwellToVK::VertexFormat(attribute.Type(), attribute.Size()),
            .offset = attribute.offset,
        });
    }
    VkPipelineVertexInputStateCreateInfo vertex_input_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .vertexBindingDescriptionCount = static_cast<u32>(vertex_bindings.size()),
        .pVertexBindingDescriptions = vertex_bindings.data(),
        .vertexAttributeDescriptionCount = static_cast<u32>(vertex_attributes.size()),
        .pVertexAttributeDescriptions = vertex_attributes.data(),
    };
    const VkPipelineVertexInputDivisorStateCreateInfoEXT input_divisor_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
        .pNext = nullptr,
        .vertexBindingDivisorCount = static_cast<u32>(vertex_binding_divisors.size()),
        .pVertexBindingDivisors = vertex_binding_divisors.data(),
    };
    // Chain the divisor struct only when instanced bindings exist
    if (!vertex_binding_divisors.empty()) {
        vertex_input_ci.pNext = &input_divisor_ci;
    }
    auto input_assembly_topology = MaxwellToVK::PrimitiveTopology(device, key.state.topology);
    if (input_assembly_topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST) {
        // Patch lists require tessellation shaders (indices 1/2 in spv_modules)
        if (!spv_modules[1] && !spv_modules[2]) {
            LOG_WARNING(Render_Vulkan, "Patch topology used without tessellation, using points");
            input_assembly_topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
        }
    }
    const VkPipelineInputAssemblyStateCreateInfo input_assembly_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .topology = input_assembly_topology,
        // Restart is requested by the guest but gated on Vulkan topology support
        .primitiveRestartEnable = key.state.primitive_restart_enable != 0 &&
                                  SupportsPrimitiveRestart(input_assembly_topology),
    };
    const VkPipelineTessellationStateCreateInfo tessellation_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        // Stored biased by one so zero is representable in the packed state
        .patchControlPoints = key.state.patch_control_points_minus_one.Value() + 1,
    };
    // Viewports and scissors are always dynamic state; only counts matter here
    VkPipelineViewportStateCreateInfo viewport_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .viewportCount = Maxwell::NumViewports,
        .pViewports = nullptr,
        .scissorCount = Maxwell::NumViewports,
        .pScissors = nullptr,
    };
    std::array<VkViewportSwizzleNV, Maxwell::NumViewports> swizzles;
    std::ranges::transform(key.state.viewport_swizzles, swizzles.begin(), UnpackViewportSwizzle);
    VkPipelineViewportSwizzleStateCreateInfoNV swizzle_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
        .pNext = nullptr,
        .flags = 0,
        .viewportCount = Maxwell::NumViewports,
        .pViewportSwizzles = swizzles.data(),
    };
    if (device.IsNvViewportSwizzleSupported()) {
        viewport_ci.pNext = &swizzle_ci;
    }

    const VkPipelineRasterizationStateCreateInfo rasterization_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        // Guest flags are inverted relative to Vulkan's enable semantics
        .depthClampEnable =
            static_cast<VkBool32>(key.state.depth_clamp_disabled == 0 ? VK_TRUE : VK_FALSE),
        .rasterizerDiscardEnable =
            static_cast<VkBool32>(key.state.rasterize_enable == 0 ? VK_TRUE : VK_FALSE),
        .polygonMode =
            MaxwellToVK::PolygonMode(FixedPipelineState::UnpackPolygonMode(key.state.polygon_mode)),
        .cullMode = static_cast<VkCullModeFlags>(
            dynamic.cull_enable ? MaxwellToVK::CullFace(dynamic.CullFace()) : VK_CULL_MODE_NONE),
        .frontFace = MaxwellToVK::FrontFace(dynamic.FrontFace()),
        .depthBiasEnable = key.state.depth_bias_enable,
        // Bias factors/clamp and line width are dynamic state
        .depthBiasConstantFactor = 0.0f,
        .depthBiasClamp = 0.0f,
        .depthBiasSlopeFactor = 0.0f,
        .lineWidth = 1.0f,
    };
    const VkPipelineMultisampleStateCreateInfo multisample_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .rasterizationSamples = MaxwellToVK::MsaaMode(key.state.msaa_mode),
        .sampleShadingEnable = VK_FALSE,
        .minSampleShading = 0.0f,
        .pSampleMask = nullptr,
        .alphaToCoverageEnable = VK_FALSE,
        .alphaToOneEnable = VK_FALSE,
    };
    const VkPipelineDepthStencilStateCreateInfo depth_stencil_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .depthTestEnable = dynamic.depth_test_enable,
        .depthWriteEnable = dynamic.depth_write_enable,
        .depthCompareOp = dynamic.depth_test_enable
                              ? MaxwellToVK::ComparisonOp(dynamic.DepthTestFunc())
                              : VK_COMPARE_OP_ALWAYS,
        .depthBoundsTestEnable = dynamic.depth_bounds_enable,
        .stencilTestEnable = dynamic.stencil_enable,
        .front = GetStencilFaceState(dynamic.front),
        .back = GetStencilFaceState(dynamic.back),
        // Depth bounds values are dynamic state
        .minDepthBounds = 0.0f,
        .maxDepthBounds = 0.0f,
    };
    static_vector<VkPipelineColorBlendAttachmentState, Maxwell::NumRenderTargets> cb_attachments;
    const size_t num_attachments{NumAttachments(key.state)};
    for (size_t index = 0; index < num_attachments; ++index) {
        static constexpr std::array mask_table{
            VK_COLOR_COMPONENT_R_BIT,
            VK_COLOR_COMPONENT_G_BIT,
            VK_COLOR_COMPONENT_B_BIT,
            VK_COLOR_COMPONENT_A_BIT,
        };
        const auto& blend{key.state.attachments[index]};
        // Translate per-component write booleans into the Vulkan flag mask
        const std::array mask{blend.Mask()};
        VkColorComponentFlags write_mask{};
        for (size_t i = 0; i < mask_table.size(); ++i) {
            write_mask |= mask[i] ? mask_table[i] : 0;
        }
        cb_attachments.push_back({
            .blendEnable = blend.enable != 0,
            .srcColorBlendFactor = MaxwellToVK::BlendFactor(blend.SourceRGBFactor()),
            .dstColorBlendFactor = MaxwellToVK::BlendFactor(blend.DestRGBFactor()),
            .colorBlendOp = MaxwellToVK::BlendEquation(blend.EquationRGB()),
            .srcAlphaBlendFactor = MaxwellToVK::BlendFactor(blend.SourceAlphaFactor()),
            .dstAlphaBlendFactor = MaxwellToVK::BlendFactor(blend.DestAlphaFactor()),
            .alphaBlendOp = MaxwellToVK::BlendEquation(blend.EquationAlpha()),
            .colorWriteMask = write_mask,
        });
    }
    const VkPipelineColorBlendStateCreateInfo color_blend_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .logicOpEnable = VK_FALSE,
        .logicOp = VK_LOGIC_OP_COPY,
        .attachmentCount = static_cast<u32>(cb_attachments.size()),
        .pAttachments = cb_attachments.data(),
        .blendConstants = {},
    };
    // Base dynamic state, extended below when the device supports
    // VK_EXT_extended_dynamic_state (capacity 17 = 8 base + 9 extended)
    static_vector<VkDynamicState, 17> dynamic_states{
        VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR,
        VK_DYNAMIC_STATE_DEPTH_BIAS, VK_DYNAMIC_STATE_BLEND_CONSTANTS,
        VK_DYNAMIC_STATE_DEPTH_BOUNDS, VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
        VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, VK_DYNAMIC_STATE_STENCIL_REFERENCE,
    };
    if (device.IsExtExtendedDynamicStateSupported()) {
        static constexpr std::array extended{
            VK_DYNAMIC_STATE_CULL_MODE_EXT,
            VK_DYNAMIC_STATE_FRONT_FACE_EXT,
            VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
            VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
            VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
            VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
            VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
            VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
            VK_DYNAMIC_STATE_STENCIL_OP_EXT,
        };
        dynamic_states.insert(dynamic_states.end(), extended.begin(), extended.end());
    }
    const VkPipelineDynamicStateCreateInfo dynamic_state_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .dynamicStateCount = static_cast<u32>(dynamic_states.size()),
        .pDynamicStates = dynamic_states.data(),
    };
    // Currently unused; kept for the disabled subgroup-size chaining below
    [[maybe_unused]] const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT subgroup_size_ci{
        .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
        .pNext = nullptr,
        .requiredSubgroupSize = GuestWarpSize,
    };
    static_vector<VkPipelineShaderStageCreateInfo, 5> shader_stages;
    for (size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) {
        // Skip stages with no compiled SPIR-V module
        if (!spv_modules[stage]) {
            continue;
        }
        [[maybe_unused]] auto& stage_ci =
            shader_stages.emplace_back(VkPipelineShaderStageCreateInfo{
                .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
                .pNext = nullptr,
                .flags = 0,
                .stage = MaxwellToVK::ShaderStage(static_cast<Tegra::Engines::ShaderType>(stage)),
                .module = *spv_modules[stage],
                .pName = "main",
                .pSpecializationInfo = nullptr,
            });
        /*
        if (program[stage]->entries.uses_warps && device.IsGuestWarpSizeSupported(stage_ci.stage)) {
            stage_ci.pNext = &subgroup_size_ci;
        }
        */
    }
    pipeline = device.GetLogical().CreateGraphicsPipeline({
        .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .stageCount = static_cast<u32>(shader_stages.size()),
        .pStages = shader_stages.data(),
        .pVertexInputState = &vertex_input_ci,
        .pInputAssemblyState = &input_assembly_ci,
        .pTessellationState = &tessellation_ci,
        .pViewportState = &viewport_ci,
        .pRasterizationState = &rasterization_ci,
        .pMultisampleState = &multisample_ci,
        .pDepthStencilState = &depth_stencil_ci,
        .pColorBlendState = &color_blend_ci,
        .pDynamicState = &dynamic_state_ci,
        .layout = *pipeline_layout,
        .renderPass = render_pass,
        .subpass = 0,
        .basePipelineHandle = nullptr,
        .basePipelineIndex = 0,
    });
}
|
|
|
|
|
|
|
|
} // namespace Vulkan
|