2018-03-20 04:00:59 +01:00
|
|
|
// Copyright 2015 Citra Emulator Project
|
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
2018-04-20 05:01:50 +02:00
|
|
|
#include <algorithm>
|
2018-09-13 02:27:43 +02:00
|
|
|
#include <array>
|
2019-07-15 03:25:13 +02:00
|
|
|
#include <bitset>
|
2018-03-20 04:00:59 +01:00
|
|
|
#include <memory>
|
|
|
|
#include <string>
|
2018-07-24 18:10:35 +02:00
|
|
|
#include <string_view>
|
2018-03-20 04:00:59 +01:00
|
|
|
#include <tuple>
|
|
|
|
#include <utility>
|
|
|
|
#include <glad/glad.h>
|
|
|
|
#include "common/alignment.h"
|
|
|
|
#include "common/assert.h"
|
|
|
|
#include "common/logging/log.h"
|
|
|
|
#include "common/math_util.h"
|
|
|
|
#include "common/microprofile.h"
|
2018-08-22 06:33:03 +02:00
|
|
|
#include "common/scope_exit.h"
|
2021-04-15 01:07:40 +02:00
|
|
|
#include "common/settings.h"
|
2018-03-24 07:01:03 +01:00
|
|
|
#include "core/core.h"
|
2021-04-24 07:04:28 +02:00
|
|
|
#include "core/hle/kernel/k_process.h"
|
2019-11-26 21:19:15 +01:00
|
|
|
#include "core/memory.h"
|
2019-07-15 03:25:13 +02:00
|
|
|
#include "video_core/engines/kepler_compute.h"
|
2018-03-24 07:01:03 +01:00
|
|
|
#include "video_core/engines/maxwell_3d.h"
|
2019-11-18 22:35:21 +01:00
|
|
|
#include "video_core/engines/shader_type.h"
|
2019-05-29 23:15:28 +02:00
|
|
|
#include "video_core/memory_manager.h"
|
2020-12-30 06:25:23 +01:00
|
|
|
#include "video_core/renderer_opengl/gl_device.h"
|
2019-11-26 22:52:15 +01:00
|
|
|
#include "video_core/renderer_opengl/gl_query_cache.h"
|
2018-03-20 04:00:59 +01:00
|
|
|
#include "video_core/renderer_opengl/gl_rasterizer.h"
|
2019-01-14 02:05:53 +01:00
|
|
|
#include "video_core/renderer_opengl/gl_shader_cache.h"
|
2020-12-30 06:25:23 +01:00
|
|
|
#include "video_core/renderer_opengl/gl_texture_cache.h"
|
2018-03-25 04:38:08 +02:00
|
|
|
#include "video_core/renderer_opengl/maxwell_to_gl.h"
|
2018-03-20 04:00:59 +01:00
|
|
|
#include "video_core/renderer_opengl/renderer_opengl.h"
|
2020-05-23 01:55:38 +02:00
|
|
|
#include "video_core/shader_cache.h"
|
2020-12-30 06:25:23 +01:00
|
|
|
#include "video_core/texture_cache/texture_cache.h"
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2018-08-21 10:18:27 +02:00
|
|
|
namespace OpenGL {
|
|
|
|
|
2018-03-24 09:06:26 +01:00
|
|
|
using Maxwell = Tegra::Engines::Maxwell3D::Regs;
|
2021-02-13 05:01:09 +01:00
|
|
|
using GLvec4 = std::array<GLfloat, 4>;
|
2019-04-29 02:08:31 +02:00
|
|
|
|
2020-02-22 23:40:26 +01:00
|
|
|
using Tegra::Engines::ShaderType;
|
2019-04-29 02:08:31 +02:00
|
|
|
using VideoCore::Surface::PixelFormat;
|
|
|
|
using VideoCore::Surface::SurfaceTarget;
|
|
|
|
using VideoCore::Surface::SurfaceType;
|
2018-03-20 04:00:59 +01:00
|
|
|
|
|
|
|
MICROPROFILE_DEFINE(OpenGL_Drawing, "OpenGL", "Drawing", MP_RGB(128, 128, 192));
|
2021-01-17 00:48:58 +01:00
|
|
|
MICROPROFILE_DEFINE(OpenGL_Clears, "OpenGL", "Clears", MP_RGB(128, 128, 192));
|
2018-09-04 11:02:59 +02:00
|
|
|
MICROPROFILE_DEFINE(OpenGL_Blits, "OpenGL", "Blits", MP_RGB(128, 128, 192));
|
2021-01-17 00:48:58 +01:00
|
|
|
MICROPROFILE_DEFINE(OpenGL_CacheManagement, "OpenGL", "Cache Management", MP_RGB(100, 255, 100));
|
2018-10-02 19:47:26 +02:00
|
|
|
|
2019-11-13 04:27:12 +01:00
|
|
|
namespace {
|
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
constexpr size_t NUM_SUPPORTED_VERTEX_ATTRIBUTES = 16;
|
|
|
|
|
|
|
|
/// Decodes a packed Maxwell texture handle into its image (TIC) and sampler (TSC) indices.
struct TextureHandle {
    /// @param data             Raw 32-bit handle word read from a constant buffer
    /// @param via_header_index When true, samplers are indexed through the texture header
    ///                         table, so the TSC index mirrors the TIC index
    constexpr TextureHandle(u32 data, bool via_header_index) {
        const Tegra::Texture::TextureHandle handle{data};
        image = handle.tic_id;
        sampler = via_header_index ? image : handle.tsc_id.Value();
    }

    u32 image;   ///< Index into the texture image control (TIC) table
    u32 sampler; ///< Index into the texture sampler control (TSC) table
};
|
2020-01-13 21:20:02 +01:00
|
|
|
|
2019-11-13 04:27:12 +01:00
|
|
|
/// Reads the texture handle for a shader resource entry from GPU constant buffer memory.
///
/// @param engine           Engine (3D or compute) used to read constant buffers
/// @param via_header_index Whether sampler indices resolve through the TIC header table
/// @param entry            Shader entry describing where the handle is stored
/// @param shader_type      Shader stage the entry belongs to
/// @param index            Array element for indexed accesses (defaults to 0)
/// @return Decoded image/sampler handle pair
template <typename Engine, typename Entry>
TextureHandle GetTextureInfo(const Engine& engine, bool via_header_index, const Entry& entry,
                             ShaderType shader_type, size_t index = 0) {
    if constexpr (std::is_same_v<Entry, SamplerEntry>) {
        if (entry.is_separated) {
            // Separated samplers split the handle across two constant buffer locations;
            // OR-ing the two words reassembles the complete handle.
            const u32 buffer_1 = entry.buffer;
            const u32 buffer_2 = entry.secondary_buffer;
            const u32 offset_1 = entry.offset;
            const u32 offset_2 = entry.secondary_offset;
            const u32 handle_1 = engine.AccessConstBuffer32(shader_type, buffer_1, offset_1);
            const u32 handle_2 = engine.AccessConstBuffer32(shader_type, buffer_2, offset_2);
            return TextureHandle(handle_1 | handle_2, via_header_index);
        }
    }
    if (entry.is_bindless) {
        // Bindless entries carry their own constant buffer location.
        const u32 raw = engine.AccessConstBuffer32(shader_type, entry.buffer, entry.offset);
        return TextureHandle(raw, via_header_index);
    }
    // Regular entries read from the engine's bound buffer, offset by the array index in words.
    const u32 buffer = engine.GetBoundBuffer();
    const u64 offset = (entry.offset + index) * sizeof(u32);
    return TextureHandle(engine.AccessConstBuffer32(shader_type, buffer, offset), via_header_index);
}
|
|
|
|
|
2020-06-03 07:48:41 +02:00
|
|
|
/// Translates hardware transform feedback indices
|
|
|
|
/// @param location Hardware location
|
|
|
|
/// @return Pair of ARB_transform_feedback3 token stream first and third arguments
|
|
|
|
/// @note Read https://www.khronos.org/registry/OpenGL/extensions/ARB/ARB_transform_feedback3.txt
|
|
|
|
std::pair<GLint, GLint> TransformFeedbackEnum(u8 location) {
|
|
|
|
const u8 index = location / 4;
|
|
|
|
if (index >= 8 && index <= 39) {
|
|
|
|
return {GL_GENERIC_ATTRIB_NV, index - 8};
|
|
|
|
}
|
|
|
|
if (index >= 48 && index <= 55) {
|
|
|
|
return {GL_TEXTURE_COORD_NV, index - 48};
|
|
|
|
}
|
|
|
|
switch (index) {
|
|
|
|
case 7:
|
|
|
|
return {GL_POSITION, 0};
|
|
|
|
case 40:
|
|
|
|
return {GL_PRIMARY_COLOR_NV, 0};
|
|
|
|
case 41:
|
|
|
|
return {GL_SECONDARY_COLOR_NV, 0};
|
|
|
|
case 42:
|
|
|
|
return {GL_BACK_PRIMARY_COLOR_NV, 0};
|
|
|
|
case 43:
|
|
|
|
return {GL_BACK_SECONDARY_COLOR_NV, 0};
|
|
|
|
}
|
2020-12-07 06:41:47 +01:00
|
|
|
UNIMPLEMENTED_MSG("index={}", index);
|
2020-06-03 07:48:41 +02:00
|
|
|
return {GL_POSITION, 0};
|
|
|
|
}
|
|
|
|
|
2019-12-26 04:28:46 +01:00
|
|
|
/// Enables or disables an OpenGL capability based on a boolean flag.
void oglEnable(GLenum cap, bool state) {
    if (state) {
        glEnable(cap);
    } else {
        glDisable(cap);
    }
}
|
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
/// Maps a shader sampler entry onto the image view type the texture cache expects.
ImageViewType ImageViewTypeFromEntry(const SamplerEntry& entry) {
    // Buffer samplers take priority over the declared texture type.
    if (entry.is_buffer) {
        return ImageViewType::Buffer;
    }
    using TextureType = Tegra::Shader::TextureType;
    switch (entry.type) {
    case TextureType::Texture1D:
        return entry.is_array ? ImageViewType::e1DArray : ImageViewType::e1D;
    case TextureType::Texture2D:
        return entry.is_array ? ImageViewType::e2DArray : ImageViewType::e2D;
    case TextureType::Texture3D:
        return ImageViewType::e3D;
    case TextureType::TextureCube:
        return entry.is_array ? ImageViewType::CubeArray : ImageViewType::Cube;
    }
    UNREACHABLE();
    return ImageViewType::e2D;
}
|
|
|
|
|
|
|
|
/// Maps a shader image entry onto the image view type the texture cache expects.
ImageViewType ImageViewTypeFromEntry(const ImageEntry& entry) {
    using ImageType = Tegra::Shader::ImageType;
    switch (entry.type) {
    case ImageType::Texture1D:
        return ImageViewType::e1D;
    case ImageType::Texture1DArray:
        return ImageViewType::e1DArray;
    case ImageType::Texture2D:
        return ImageViewType::e2D;
    case ImageType::Texture2DArray:
        return ImageViewType::e2DArray;
    case ImageType::Texture3D:
        return ImageViewType::e3D;
    case ImageType::TextureBuffer:
        return ImageViewType::Buffer;
    }
    UNREACHABLE();
    return ImageViewType::e2D;
}
|
|
|
|
|
2019-11-13 04:27:12 +01:00
|
|
|
} // Anonymous namespace
|
|
|
|
|
2020-12-05 17:40:14 +01:00
|
|
|
// Constructs the OpenGL rasterizer and wires every cache/runtime together.
// NOTE: the initializer list order matters — the engine references (maxwell3d,
// kepler_compute, gpu_memory) must be initialized before the caches that consume them.
RasterizerOpenGL::RasterizerOpenGL(Core::Frontend::EmuWindow& emu_window_, Tegra::GPU& gpu_,
                                   Core::Memory::Memory& cpu_memory_, const Device& device_,
                                   ScreenInfo& screen_info_, ProgramManager& program_manager_,
                                   StateTracker& state_tracker_)
    : RasterizerAccelerated(cpu_memory_), gpu(gpu_), maxwell3d(gpu.Maxwell3D()),
      kepler_compute(gpu.KeplerCompute()), gpu_memory(gpu.MemoryManager()), device(device_),
      screen_info(screen_info_), program_manager(program_manager_), state_tracker(state_tracker_),
      texture_cache_runtime(device, program_manager, state_tracker),
      texture_cache(texture_cache_runtime, *this, maxwell3d, kepler_compute, gpu_memory),
      buffer_cache_runtime(device),
      buffer_cache(*this, maxwell3d, kepler_compute, gpu_memory, cpu_memory_, buffer_cache_runtime),
      shader_cache(*this, emu_window_, gpu, maxwell3d, kepler_compute, gpu_memory, device),
      query_cache(*this, maxwell3d, gpu_memory),
      fence_manager(*this, gpu, texture_cache, buffer_cache, query_cache),
      async_shaders(emu_window_) {
    // Spawn background shader-compilation workers when the device opts into async shaders.
    if (device.UseAsynchronousShaders()) {
        async_shaders.AllocateWorkers();
    }
}
|
|
|
|
|
2021-01-17 00:48:58 +01:00
|
|
|
// Out-of-line defaulted destructor; member caches clean themselves up via RAII.
RasterizerOpenGL::~RasterizerOpenGL() = default;
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2021-01-17 00:48:58 +01:00
|
|
|
// Re-uploads vertex attribute formats to GL when the corresponding Maxwell registers
// are flagged dirty. Only attributes whose per-slot dirty flag is set are touched.
void RasterizerOpenGL::SyncVertexFormats() {
    auto& flags = maxwell3d.dirty.flags;
    if (!flags[Dirty::VertexFormats]) {
        return;
    }
    flags[Dirty::VertexFormats] = false;

    // Use the vertex array as-is, assumes that the data is formatted correctly for OpenGL. Enables
    // the first 16 vertex attributes always, as we don't know which ones are actually used until
    // shader time. Note, Tegra technically supports 32, but we're capping this to 16 for now to
    // avoid OpenGL errors.
    // TODO(Subv): Analyze the shader to identify which attributes are actually used and don't
    // assume every shader uses them all.
    for (std::size_t index = 0; index < NUM_SUPPORTED_VERTEX_ATTRIBUTES; ++index) {
        if (!flags[Dirty::VertexFormat0 + index]) {
            continue;
        }
        flags[Dirty::VertexFormat0 + index] = false;

        const auto attrib = maxwell3d.regs.vertex_attrib_format[index];
        const auto gl_index = static_cast<GLuint>(index);

        // Disable constant attributes.
        if (attrib.IsConstant()) {
            glDisableVertexAttribArray(gl_index);
            continue;
        }
        glEnableVertexAttribArray(gl_index);

        // Integer attributes must use the "I" format variant; floats take a normalize flag.
        if (attrib.type == Maxwell::VertexAttribute::Type::SignedInt ||
            attrib.type == Maxwell::VertexAttribute::Type::UnsignedInt) {
            glVertexAttribIFormat(gl_index, attrib.ComponentCount(),
                                  MaxwellToGL::VertexFormat(attrib), attrib.offset);
        } else {
            glVertexAttribFormat(gl_index, attrib.ComponentCount(),
                                 MaxwellToGL::VertexFormat(attrib),
                                 attrib.IsNormalized() ? GL_TRUE : GL_FALSE, attrib.offset);
        }
        // Associate the attribute with its vertex buffer binding point.
        glVertexAttribBinding(gl_index, attrib.buffer);
    }
}
|
|
|
|
|
2021-01-17 00:48:58 +01:00
|
|
|
void RasterizerOpenGL::SyncVertexInstances() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-29 05:28:53 +01:00
|
|
|
if (!flags[Dirty::VertexInstances]) {
|
2019-07-10 21:38:31 +02:00
|
|
|
return;
|
2019-12-29 05:28:53 +01:00
|
|
|
}
|
|
|
|
flags[Dirty::VertexInstances] = false;
|
2019-07-10 21:38:31 +02:00
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
gl_rasterizer: Use NV_vertex_buffer_unified_memory for vertex buffer robustness
Switch games are allowed to bind less data than what they use in a
vertex buffer, the expected behavior here is that these values are read
as zero. At the moment of writing this only D3D12, OpenGL and NVN through
NV_vertex_buffer_unified_memory support vertex buffer with a size limit.
In theory this could be emulated on Vulkan creating a new VkBuffer for
each (handle, offset, length) tuple and binding the expected data to it.
This is likely going to be slow and memory expensive when used on the
vertex buffer and we have to do it on all draws because we can't know
without analyzing indices when a game is going to read vertex data out
of bounds.
This is not a problem on OpenGL's BufferAddressRangeNV because it takes
a length parameter, unlike Vulkan's CmdBindVertexBuffers that only takes
buffers and offsets (the length is implicit in VkBuffer). It isn't a
problem on D3D12 either, because D3D12_VERTEX_BUFFER_VIEW on
IASetVertexBuffers takes SizeInBytes as a parameter (although I am not
familiar with robustness on D3D12).
Currently this only implements buffer ranges for vertex buffers,
although indices can also be affected. A KHR_robustness profile is not
created, but Nvidia's driver reads out of bound vertex data as zero
anyway, this might have to be changed in the future.
- Fixes SMO random triangles when capturing an enemy, getting hit, or
looking at the environment on certain maps.
2020-06-18 08:56:31 +02:00
|
|
|
for (std::size_t index = 0; index < NUM_SUPPORTED_VERTEX_ATTRIBUTES; ++index) {
|
2019-12-29 05:28:53 +01:00
|
|
|
if (!flags[Dirty::VertexInstance0 + index]) {
|
2019-07-10 21:38:31 +02:00
|
|
|
continue;
|
|
|
|
}
|
2019-12-29 05:28:53 +01:00
|
|
|
flags[Dirty::VertexInstance0 + index] = false;
|
|
|
|
|
|
|
|
const auto gl_index = static_cast<GLuint>(index);
|
|
|
|
const bool instancing_enabled = regs.instanced_arrays.IsInstancingEnabled(gl_index);
|
|
|
|
const GLuint divisor = instancing_enabled ? regs.vertex_array[index].divisor : 0;
|
|
|
|
glVertexBindingDivisor(gl_index, divisor);
|
2019-07-10 21:38:31 +02:00
|
|
|
}
|
2018-03-20 04:00:59 +01:00
|
|
|
}
|
|
|
|
|
2021-01-17 00:48:58 +01:00
|
|
|
// Binds the shader programs for every enabled graphics stage and sets up the resources
// (uniform buffers, storage buffers, textures, images) each stage consumes.
// @param is_indexed Whether the upcoming draw uses an index buffer (forwarded to buffer cache)
void RasterizerOpenGL::SetupShaders(bool is_indexed) {
    u32 clip_distances = 0;

    std::array<Shader*, Maxwell::MaxShaderStage> shaders{};
    image_view_indices.clear();
    sampler_handles.clear();

    texture_cache.SynchronizeGraphicsDescriptors();

    for (std::size_t index = 0; index < Maxwell::MaxShaderProgram; ++index) {
        const auto& shader_config = maxwell3d.regs.shader_config[index];
        const auto program{static_cast<Maxwell::ShaderProgram>(index)};

        // Skip stages that are not enabled
        if (!maxwell3d.regs.IsShaderConfigEnabled(index)) {
            // Unbind the stage so a previously bound program does not leak into this draw.
            switch (program) {
            case Maxwell::ShaderProgram::Geometry:
                program_manager.UseGeometryShader(0);
                break;
            case Maxwell::ShaderProgram::Fragment:
                program_manager.UseFragmentShader(0);
                break;
            default:
                break;
            }
            continue;
        }
        // Currently this stages are not supported in the OpenGL backend.
        // TODO(Blinkhawk): Port tesselation shaders from Vulkan to OpenGL
        if (program == Maxwell::ShaderProgram::TesselationControl ||
            program == Maxwell::ShaderProgram::TesselationEval) {
            continue;
        }

        Shader* const shader = shader_cache.GetStageProgram(program, async_shaders);
        // Async shaders may still be compiling; bind handle 0 until the program is built.
        const GLuint program_handle = shader->IsBuilt() ? shader->GetHandle() : 0;
        switch (program) {
        case Maxwell::ShaderProgram::VertexA:
        case Maxwell::ShaderProgram::VertexB:
            program_manager.UseVertexShader(program_handle);
            break;
        case Maxwell::ShaderProgram::Geometry:
            program_manager.UseGeometryShader(program_handle);
            break;
        case Maxwell::ShaderProgram::Fragment:
            program_manager.UseFragmentShader(program_handle);
            break;
        default:
            UNIMPLEMENTED_MSG("Unimplemented shader index={}, enable={}, offset=0x{:08X}", index,
                              shader_config.enable.Value(), shader_config.offset);
            break;
        }

        // Stage indices are 0 - 5 (VertexA and VertexB share stage 0).
        const size_t stage = index == 0 ? 0 : index - 1;
        shaders[stage] = shader;

        SetupDrawTextures(shader, stage);
        SetupDrawImages(shader, stage);

        buffer_cache.SetEnabledUniformBuffers(stage, shader->GetEntries().enabled_uniform_buffers);

        // Rebind storage buffers from scratch for this stage.
        buffer_cache.UnbindGraphicsStorageBuffers(stage);
        u32 ssbo_index = 0;
        for (const auto& buffer : shader->GetEntries().global_memory_entries) {
            buffer_cache.BindGraphicsStorageBuffer(stage, ssbo_index, buffer.cbuf_index,
                                                   buffer.cbuf_offset, buffer.is_written);
            ++ssbo_index;
        }

        // Workaround for Intel drivers.
        // When a clip distance is enabled but not set in the shader it crops parts of the screen
        // (sometimes it's half the screen, sometimes three quarters). To avoid this, enable the
        // clip distances only when it's written by a shader stage.
        clip_distances |= shader->GetEntries().clip_distances;

        // When VertexA is enabled, we have dual vertex shaders
        if (program == Maxwell::ShaderProgram::VertexA) {
            // VertexB was combined with VertexA, so we skip the VertexB iteration
            ++index;
        }
    }
    SyncClipEnabled(clip_distances);
    maxwell3d.dirty.flags[Dirty::Shaders] = false;

    buffer_cache.UpdateGraphicsBuffers(is_indexed);

    // Resolve all texture handles gathered above into concrete image view ids.
    const std::span indices_span(image_view_indices.data(), image_view_indices.size());
    texture_cache.FillGraphicsImageViews(indices_span, image_view_ids);

    buffer_cache.BindHostGeometryBuffers(is_indexed);

    // Second pass: bind host buffers and textures/images for each populated stage.
    size_t image_view_index = 0;
    size_t texture_index = 0;
    size_t image_index = 0;
    for (size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) {
        const Shader* const shader = shaders[stage];
        if (!shader) {
            continue;
        }
        buffer_cache.BindHostStageBuffers(stage);
        const auto& base = device.GetBaseBindings(stage);
        BindTextures(shader->GetEntries(), base.sampler, base.image, image_view_index,
                     texture_index, image_index);
    }
}
|
|
|
|
|
2021-06-22 05:04:55 +02:00
|
|
|
// Forwards disk-cache loading to the shader cache, which owns the on-disk shader store.
// @param title_id     Title whose precompiled shaders should be loaded
// @param stop_loading Token that cancels loading (e.g. on emulator shutdown)
// @param callback     Progress callback reported back to the frontend
void RasterizerOpenGL::LoadDiskResources(u64 title_id, std::stop_token stop_loading,
                                         const VideoCore::DiskResourceLoadCallback& callback) {
    shader_cache.LoadDiskCache(title_id, stop_loading, callback);
}
|
|
|
|
|
2018-07-03 23:55:44 +02:00
|
|
|
void RasterizerOpenGL::Clear() {
|
2021-01-17 00:48:58 +01:00
|
|
|
MICROPROFILE_SCOPE(OpenGL_Clears);
|
2020-06-12 02:24:45 +02:00
|
|
|
if (!maxwell3d.ShouldExecute()) {
|
2019-07-01 04:21:28 +02:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
2018-09-10 06:36:13 +02:00
|
|
|
bool use_color{};
|
|
|
|
bool use_depth{};
|
|
|
|
bool use_stencil{};
|
2018-07-04 05:32:59 +02:00
|
|
|
|
2018-08-22 06:33:03 +02:00
|
|
|
if (regs.clear_buffers.R || regs.clear_buffers.G || regs.clear_buffers.B ||
|
2018-07-03 23:55:44 +02:00
|
|
|
regs.clear_buffers.A) {
|
2018-09-10 06:36:13 +02:00
|
|
|
use_color = true;
|
2020-04-27 00:53:02 +02:00
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
const GLuint index = regs.clear_buffers.RT;
|
|
|
|
state_tracker.NotifyColorMask(index);
|
|
|
|
glColorMaski(index, regs.clear_buffers.R != 0, regs.clear_buffers.G != 0,
|
2019-12-29 02:51:04 +01:00
|
|
|
regs.clear_buffers.B != 0, regs.clear_buffers.A != 0);
|
2019-12-26 05:19:15 +01:00
|
|
|
|
2019-12-26 05:07:34 +01:00
|
|
|
// TODO(Rodrigo): Determine if clamping is used on clears
|
|
|
|
SyncFragmentColorClampState();
|
2020-01-03 02:30:41 +01:00
|
|
|
SyncFramebufferSRGB();
|
2018-11-08 02:27:47 +01:00
|
|
|
}
|
2018-07-04 05:32:59 +02:00
|
|
|
if (regs.clear_buffers.Z) {
|
2018-08-22 06:33:03 +02:00
|
|
|
ASSERT_MSG(regs.zeta_enable != 0, "Tried to clear Z but buffer is not enabled!");
|
2018-09-10 06:36:13 +02:00
|
|
|
use_depth = true;
|
2018-07-14 07:52:23 +02:00
|
|
|
|
2020-01-03 02:31:04 +01:00
|
|
|
state_tracker.NotifyDepthMask();
|
2019-12-26 01:52:39 +01:00
|
|
|
glDepthMask(GL_TRUE);
|
2018-08-22 06:33:03 +02:00
|
|
|
}
|
|
|
|
if (regs.clear_buffers.S) {
|
2020-01-03 02:30:41 +01:00
|
|
|
ASSERT_MSG(regs.zeta_enable, "Tried to clear stencil but buffer is not enabled!");
|
2018-09-10 06:36:13 +02:00
|
|
|
use_stencil = true;
|
2018-07-04 05:32:59 +02:00
|
|
|
}
|
2018-07-03 23:55:44 +02:00
|
|
|
|
2018-09-10 06:36:13 +02:00
|
|
|
if (!use_color && !use_depth && !use_stencil) {
|
2018-08-22 06:33:03 +02:00
|
|
|
// No color surface nor depth/stencil surface are enabled
|
2018-07-03 23:55:44 +02:00
|
|
|
return;
|
2018-08-22 06:33:03 +02:00
|
|
|
}
|
|
|
|
|
2019-12-26 06:11:01 +01:00
|
|
|
SyncRasterizeEnable();
|
2020-03-26 04:51:47 +01:00
|
|
|
SyncStencilTestState();
|
2019-07-14 21:00:37 +02:00
|
|
|
|
2018-11-21 04:40:32 +01:00
|
|
|
if (regs.clear_flags.scissor) {
|
2019-12-26 05:28:17 +01:00
|
|
|
SyncScissorTest();
|
2020-01-03 02:31:33 +01:00
|
|
|
} else {
|
|
|
|
state_tracker.NotifyScissor0();
|
|
|
|
glDisablei(GL_SCISSOR_TEST, 0);
|
2018-11-21 04:40:32 +01:00
|
|
|
}
|
2019-12-26 05:28:17 +01:00
|
|
|
UNIMPLEMENTED_IF(regs.clear_flags.viewport);
|
2018-11-21 04:40:32 +01:00
|
|
|
|
2021-01-17 00:48:58 +01:00
|
|
|
std::scoped_lock lock{texture_cache.mutex};
|
|
|
|
texture_cache.UpdateRenderTargets(true);
|
|
|
|
state_tracker.BindFramebuffer(texture_cache.GetFramebuffer()->Handle());
|
2018-08-22 06:33:03 +02:00
|
|
|
|
2018-09-10 06:36:13 +02:00
|
|
|
if (use_color) {
|
2020-12-30 06:25:23 +01:00
|
|
|
glClearBufferfv(GL_COLOR, regs.clear_buffers.RT, regs.clear_color);
|
2018-09-10 06:36:13 +02:00
|
|
|
}
|
2019-07-14 14:14:27 +02:00
|
|
|
if (use_depth && use_stencil) {
|
2018-09-10 06:36:13 +02:00
|
|
|
glClearBufferfi(GL_DEPTH_STENCIL, 0, regs.clear_depth, regs.clear_stencil);
|
2019-07-14 14:14:27 +02:00
|
|
|
} else if (use_depth) {
|
2018-09-10 06:36:13 +02:00
|
|
|
glClearBufferfv(GL_DEPTH, 0, ®s.clear_depth);
|
2019-07-14 14:14:27 +02:00
|
|
|
} else if (use_stencil) {
|
2018-09-10 06:36:13 +02:00
|
|
|
glClearBufferiv(GL_STENCIL, 0, ®s.clear_stencil);
|
|
|
|
}
|
2019-11-26 22:30:21 +01:00
|
|
|
++num_queued_commands;
|
2018-06-07 06:54:25 +02:00
|
|
|
}
|
|
|
|
|
2020-01-30 06:08:46 +01:00
|
|
|
// Executes a single draw call: synchronizes GL state, binds shaders/resources/render
// targets, then dispatches the narrowest glDraw* variant that covers the draw parameters.
// @param is_indexed   Whether an index buffer drives the draw
// @param is_instanced Whether the macro engine requested instanced rendering
void RasterizerOpenGL::Draw(bool is_indexed, bool is_instanced) {
    MICROPROFILE_SCOPE(OpenGL_Drawing);

    query_cache.UpdateCounters();

    SyncState();

    // Setup shaders and their used resources.
    std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
    SetupShaders(is_indexed);

    texture_cache.UpdateRenderTargets(false);
    state_tracker.BindFramebuffer(texture_cache.GetFramebuffer()->Handle());
    program_manager.BindGraphicsPipeline();

    const GLenum primitive_mode = MaxwellToGL::PrimitiveTopology(maxwell3d.regs.draw.topology);
    BeginTransformFeedback(primitive_mode);

    const GLuint base_instance = static_cast<GLuint>(maxwell3d.regs.vb_base_instance);
    const GLsizei num_instances =
        static_cast<GLsizei>(is_instanced ? maxwell3d.mme_draw.instance_count : 1);
    if (is_indexed) {
        const GLint base_vertex = static_cast<GLint>(maxwell3d.regs.vb_element_base);
        const GLsizei num_vertices = static_cast<GLsizei>(maxwell3d.regs.index_array.count);
        const GLvoid* const offset = buffer_cache_runtime.IndexOffset();
        const GLenum format = MaxwellToGL::IndexFormat(maxwell3d.regs.index_array.format);
        // Pick the cheapest glDrawElements* variant for the given base vertex/instance.
        if (num_instances == 1 && base_instance == 0 && base_vertex == 0) {
            glDrawElements(primitive_mode, num_vertices, format, offset);
        } else if (num_instances == 1 && base_instance == 0) {
            glDrawElementsBaseVertex(primitive_mode, num_vertices, format, offset, base_vertex);
        } else if (base_vertex == 0 && base_instance == 0) {
            glDrawElementsInstanced(primitive_mode, num_vertices, format, offset, num_instances);
        } else if (base_vertex == 0) {
            glDrawElementsInstancedBaseInstance(primitive_mode, num_vertices, format, offset,
                                                num_instances, base_instance);
        } else if (base_instance == 0) {
            glDrawElementsInstancedBaseVertex(primitive_mode, num_vertices, format, offset,
                                              num_instances, base_vertex);
        } else {
            glDrawElementsInstancedBaseVertexBaseInstance(primitive_mode, num_vertices, format,
                                                          offset, num_instances, base_vertex,
                                                          base_instance);
        }
    } else {
        const GLint base_vertex = static_cast<GLint>(maxwell3d.regs.vertex_buffer.first);
        const GLsizei num_vertices = static_cast<GLsizei>(maxwell3d.regs.vertex_buffer.count);
        if (num_instances == 1 && base_instance == 0) {
            glDrawArrays(primitive_mode, base_vertex, num_vertices);
        } else if (base_instance == 0) {
            glDrawArraysInstanced(primitive_mode, base_vertex, num_vertices, num_instances);
        } else {
            glDrawArraysInstancedBaseInstance(primitive_mode, base_vertex, num_vertices,
                                              num_instances, base_instance);
        }
    }

    EndTransformFeedback();

    ++num_queued_commands;

    gpu.TickWork();
}
|
2019-06-15 19:22:57 +02:00
|
|
|
|
2019-07-15 03:25:13 +02:00
|
|
|
// Dispatches a compute kernel located at the given GPU virtual address, binding its
// textures, uniform buffers, and storage buffers first.
// @param code_addr GPU virtual address of the compute shader code
void RasterizerOpenGL::DispatchCompute(GPUVAddr code_addr) {
    Shader* const kernel = shader_cache.GetComputeKernel(code_addr);

    std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
    BindComputeTextures(kernel);

    const auto& entries = kernel->GetEntries();
    buffer_cache.SetEnabledComputeUniformBuffers(entries.enabled_uniform_buffers);
    // Rebind storage buffers from scratch for this dispatch.
    buffer_cache.UnbindComputeStorageBuffers();
    u32 ssbo_index = 0;
    for (const auto& buffer : entries.global_memory_entries) {
        buffer_cache.BindComputeStorageBuffer(ssbo_index, buffer.cbuf_index, buffer.cbuf_offset,
                                              buffer.is_written);
        ++ssbo_index;
    }
    buffer_cache.UpdateComputeBuffers();
    buffer_cache.BindHostComputeBuffers();

    // Grid dimensions come from the Kepler compute engine's launch descriptor.
    const auto& launch_desc = kepler_compute.launch_description;
    glDispatchCompute(launch_desc.grid_dim_x, launch_desc.grid_dim_y, launch_desc.grid_dim_z);
    ++num_queued_commands;
}
|
|
|
|
|
2019-07-28 00:40:10 +02:00
|
|
|
// Forwards a counter reset request to the query cache.
void RasterizerOpenGL::ResetCounter(VideoCore::QueryType type) {
    query_cache.ResetCounter(type);
}
|
|
|
|
|
2019-11-28 06:15:34 +01:00
|
|
|
// Records a GPU query of the given type at gpu_addr; an optional timestamp is
// forwarded untouched to the query cache.
void RasterizerOpenGL::Query(GPUVAddr gpu_addr, VideoCore::QueryType type,
                             std::optional<u64> timestamp) {
    query_cache.Query(gpu_addr, type, timestamp);
}
|
|
|
|
|
2021-01-17 00:48:58 +01:00
|
|
|
// Binds a guest uniform buffer for a graphics stage, serialized against other
// buffer cache users.
void RasterizerOpenGL::BindGraphicsUniformBuffer(size_t stage, u32 index, GPUVAddr gpu_addr,
                                                 u32 size) {
    std::scoped_lock lock{buffer_cache.mutex};
    buffer_cache.BindGraphicsUniformBuffer(stage, index, gpu_addr, size);
}
|
|
|
|
|
2021-06-01 19:26:43 +02:00
|
|
|
// Disables a previously bound graphics uniform buffer slot.
// NOTE(review): unlike BindGraphicsUniformBuffer, no buffer_cache.mutex is
// taken here — confirm callers already hold it or that the call is safe.
void RasterizerOpenGL::DisableGraphicsUniformBuffer(size_t stage, u32 index) {
    buffer_cache.DisableGraphicsUniformBuffer(stage, index);
}
|
|
|
|
|
2018-09-08 10:05:56 +02:00
|
|
|
// Intentionally a no-op for the OpenGL backend.
void RasterizerOpenGL::FlushAll() {}
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2020-04-05 18:58:23 +02:00
|
|
|
// Downloads GPU-modified data in [addr, addr + size) back to guest memory
// from the texture and buffer caches, then flushes matching queries.
void RasterizerOpenGL::FlushRegion(VAddr addr, u64 size) {
    MICROPROFILE_SCOPE(OpenGL_CacheManagement);
    if (addr == 0 || size == 0) {
        return; // Nothing to flush
    }
    {
        std::scoped_lock lock{texture_cache.mutex};
        texture_cache.DownloadMemory(addr, size);
    }
    {
        std::scoped_lock lock{buffer_cache.mutex};
        buffer_cache.DownloadMemory(addr, size);
    }
    query_cache.FlushRegion(addr, size);
}
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2020-02-18 03:29:04 +01:00
|
|
|
// Returns true when the region holds GPU-side modifications that the guest
// would observe. On lower GPU accuracy levels only buffers are considered.
bool RasterizerOpenGL::MustFlushRegion(VAddr addr, u64 size) {
    std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
    if (!Settings::IsGPULevelHigh()) {
        return buffer_cache.IsRegionGpuModified(addr, size);
    }
    return texture_cache.IsRegionGpuModified(addr, size) ||
           buffer_cache.IsRegionGpuModified(addr, size);
}
|
|
|
|
|
2020-04-05 18:58:23 +02:00
|
|
|
// Marks [addr, addr + size) as written by the CPU, invalidating cached
// textures, buffers, shaders and queries that overlap it.
void RasterizerOpenGL::InvalidateRegion(VAddr addr, u64 size) {
    MICROPROFILE_SCOPE(OpenGL_CacheManagement);
    if (addr == 0 || size == 0) {
        return; // Nothing to invalidate
    }
    {
        std::scoped_lock lock{texture_cache.mutex};
        texture_cache.WriteMemory(addr, size);
    }
    {
        std::scoped_lock lock{buffer_cache.mutex};
        buffer_cache.WriteMemory(addr, size);
    }
    shader_cache.InvalidateRegion(addr, size);
    query_cache.InvalidateRegion(addr, size);
}
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2020-02-16 14:51:37 +01:00
|
|
|
// Handles a CPU write notification. Texture data is invalidated immediately;
// buffer writes are deferred via CachedWriteMemory until SyncGuestHost.
void RasterizerOpenGL::OnCPUWrite(VAddr addr, u64 size) {
    MICROPROFILE_SCOPE(OpenGL_CacheManagement);
    if (addr == 0 || size == 0) {
        return;
    }
    shader_cache.OnCPUWrite(addr, size);
    {
        std::scoped_lock lock{texture_cache.mutex};
        texture_cache.WriteMemory(addr, size);
    }
    {
        std::scoped_lock lock{buffer_cache.mutex};
        buffer_cache.CachedWriteMemory(addr, size);
    }
}
|
|
|
|
|
|
|
|
void RasterizerOpenGL::SyncGuestHost() {
|
|
|
|
MICROPROFILE_SCOPE(OpenGL_CacheManagement);
|
2020-05-10 01:25:29 +02:00
|
|
|
shader_cache.SyncGuestHost();
|
2021-01-17 00:48:58 +01:00
|
|
|
{
|
|
|
|
std::scoped_lock lock{buffer_cache.mutex};
|
|
|
|
buffer_cache.FlushCachedWrites();
|
|
|
|
}
|
2020-02-16 14:51:37 +01:00
|
|
|
}
|
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
// Evicts all cached resources backed by the CPU range being unmapped.
void RasterizerOpenGL::UnmapMemory(VAddr addr, u64 size) {
    {
        std::scoped_lock lock{texture_cache.mutex};
        texture_cache.UnmapMemory(addr, size);
    }
    {
        std::scoped_lock lock{buffer_cache.mutex};
        buffer_cache.WriteMemory(addr, size);
    }
    shader_cache.OnCPUWrite(addr, size);
}
|
|
|
|
|
2021-06-12 15:52:27 +02:00
|
|
|
// Notifies the texture cache that a GPU virtual range was remapped.
void RasterizerOpenGL::ModifyGPUMemory(GPUVAddr addr, u64 size) {
    {
        std::scoped_lock lock{texture_cache.mutex};
        texture_cache.UnmapGPUMemory(addr, size);
    }
}
|
|
|
|
|
2020-02-19 18:40:37 +01:00
|
|
|
// Signals a GPU semaphore. Without async GPU emulation the value is written
// to guest memory immediately; otherwise the fence manager handles it.
void RasterizerOpenGL::SignalSemaphore(GPUVAddr addr, u32 value) {
    if (!gpu.IsAsync()) {
        gpu_memory.Write<u32>(addr, value);
        return;
    }
    fence_manager.SignalSemaphore(addr, value);
}
|
|
|
|
|
|
|
|
// Signals a syncpoint, immediately when not running asynchronously.
void RasterizerOpenGL::SignalSyncPoint(u32 value) {
    if (!gpu.IsAsync()) {
        gpu.IncrementSyncPoint(value);
        return;
    }
    fence_manager.SignalSyncPoint(value);
}
|
|
|
|
|
2021-07-06 22:23:10 +02:00
|
|
|
void RasterizerOpenGL::SignalReference() {
|
|
|
|
if (!gpu.IsAsync()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
fence_manager.SignalReference();
|
|
|
|
}
|
|
|
|
|
2020-02-17 23:10:23 +01:00
|
|
|
void RasterizerOpenGL::ReleaseFences() {
|
2020-02-18 16:26:31 +01:00
|
|
|
if (!gpu.IsAsync()) {
|
|
|
|
return;
|
|
|
|
}
|
2020-02-18 01:19:26 +01:00
|
|
|
fence_manager.WaitPendingFences();
|
2020-02-17 23:10:23 +01:00
|
|
|
}
|
|
|
|
|
2020-04-05 18:58:23 +02:00
|
|
|
// Invalidates a region, first flushing it back to the guest when the GPU
// accuracy setting is Extreme.
void RasterizerOpenGL::FlushAndInvalidateRegion(VAddr addr, u64 size) {
    if (Settings::IsGPULevelExtreme()) {
        FlushRegion(addr, size);
    }
    InvalidateRegion(addr, size);
}
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2020-04-28 07:14:11 +02:00
|
|
|
void RasterizerOpenGL::WaitForIdle() {
|
2021-01-17 00:48:58 +01:00
|
|
|
glMemoryBarrier(GL_ALL_BARRIER_BITS);
|
2021-07-07 16:42:26 +02:00
|
|
|
if (!gpu.IsAsync()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
fence_manager.SignalOrdering();
|
2020-04-28 07:14:11 +02:00
|
|
|
}
|
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
// Orders framebuffer writes against subsequent reads.
void RasterizerOpenGL::FragmentBarrier() {
    glMemoryBarrier(GL_FRAMEBUFFER_BARRIER_BIT);
}
|
|
|
|
|
|
|
|
// Inserts a texture barrier for render-target feedback loops.
void RasterizerOpenGL::TiledCacheBarrier() {
    glTextureBarrier();
}
|
|
|
|
|
2019-07-26 20:20:43 +02:00
|
|
|
void RasterizerOpenGL::FlushCommands() {
|
2019-11-26 22:30:21 +01:00
|
|
|
// Only flush when we have commands queued to OpenGL.
|
|
|
|
if (num_queued_commands == 0) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
num_queued_commands = 0;
|
2019-07-26 20:20:43 +02:00
|
|
|
glFlush();
|
|
|
|
}
|
|
|
|
|
2019-06-20 08:22:25 +02:00
|
|
|
void RasterizerOpenGL::TickFrame() {
|
2019-11-26 22:30:21 +01:00
|
|
|
// Ticking a frame means that buffers will be swapped, calling glFlush implicitly.
|
|
|
|
num_queued_commands = 0;
|
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
fence_manager.TickFrame();
|
|
|
|
{
|
2021-01-17 00:48:58 +01:00
|
|
|
std::scoped_lock lock{texture_cache.mutex};
|
2020-12-30 06:25:23 +01:00
|
|
|
texture_cache.TickFrame();
|
|
|
|
}
|
2021-01-17 00:48:58 +01:00
|
|
|
{
|
|
|
|
std::scoped_lock lock{buffer_cache.mutex};
|
|
|
|
buffer_cache.TickFrame();
|
|
|
|
}
|
2019-06-20 08:22:25 +02:00
|
|
|
}
|
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
// Performs a Fermi 2D surface copy on the host via the texture cache blitter.
// Always reports the copy as accelerated.
bool RasterizerOpenGL::AccelerateSurfaceCopy(const Tegra::Engines::Fermi2D::Surface& src,
                                             const Tegra::Engines::Fermi2D::Surface& dst,
                                             const Tegra::Engines::Fermi2D::Config& copy_config) {
    MICROPROFILE_SCOPE(OpenGL_Blits);
    std::scoped_lock lock{texture_cache.mutex};
    texture_cache.BlitImage(dst, src, copy_config);
    return true;
}
|
|
|
|
|
2018-06-24 23:42:29 +02:00
|
|
|
// Tries to present directly from a cached image. Returns false when no cached
// image view backs the framebuffer address, letting the caller fall back to a
// software path.
bool RasterizerOpenGL::AccelerateDisplay(const Tegra::FramebufferConfig& config,
                                         VAddr framebuffer_addr, u32 pixel_stride) {
    if (framebuffer_addr == 0) {
        return false;
    }
    MICROPROFILE_SCOPE(OpenGL_CacheManagement);

    std::scoped_lock lock{texture_cache.mutex};
    ImageView* const image_view{texture_cache.TryFindFramebufferImageView(framebuffer_addr)};
    if (!image_view) {
        return false;
    }
    // Verify that the cached surface is the same size and format as the requested framebuffer
    // ASSERT_MSG(image_view->size.width == config.width, "Framebuffer width is different");
    // ASSERT_MSG(image_view->size.height == config.height, "Framebuffer height is different");

    screen_info.display_texture = image_view->Handle(ImageViewType::e2D);
    screen_info.display_srgb = VideoCore::Surface::IsPixelFormatSRGB(image_view->format);
    return true;
}
|
2018-06-24 23:42:29 +02:00
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
// Gathers and binds every texture and image used by a compute kernel, then
// binds the kernel's program.
void RasterizerOpenGL::BindComputeTextures(Shader* kernel) {
    image_view_indices.clear();
    sampler_handles.clear();

    texture_cache.SynchronizeComputeDescriptors();

    SetupComputeTextures(kernel);
    SetupComputeImages(kernel);

    const std::span indices_span(image_view_indices.data(), image_view_indices.size());
    texture_cache.FillComputeImageViews(indices_span, image_view_ids);

    program_manager.BindCompute(kernel->GetHandle());
    size_t image_view_index = 0;
    size_t texture_index = 0;
    size_t image_index = 0;
    BindTextures(kernel->GetEntries(), 0, 0, image_view_index, texture_index, image_index);
}
|
|
|
|
|
|
|
|
void RasterizerOpenGL::BindTextures(const ShaderEntries& entries, GLuint base_texture,
|
|
|
|
GLuint base_image, size_t& image_view_index,
|
|
|
|
size_t& texture_index, size_t& image_index) {
|
|
|
|
const GLuint* const samplers = sampler_handles.data() + texture_index;
|
|
|
|
const GLuint* const textures = texture_handles.data() + texture_index;
|
|
|
|
const GLuint* const images = image_handles.data() + image_index;
|
|
|
|
|
|
|
|
const size_t num_samplers = entries.samplers.size();
|
|
|
|
for (const auto& sampler : entries.samplers) {
|
|
|
|
for (size_t i = 0; i < sampler.size; ++i) {
|
|
|
|
const ImageViewId image_view_id = image_view_ids[image_view_index++];
|
|
|
|
const ImageView& image_view = texture_cache.GetImageView(image_view_id);
|
|
|
|
const GLuint handle = image_view.Handle(ImageViewTypeFromEntry(sampler));
|
|
|
|
texture_handles[texture_index++] = handle;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
const size_t num_images = entries.images.size();
|
|
|
|
for (size_t unit = 0; unit < num_images; ++unit) {
|
|
|
|
// TODO: Mark as modified
|
|
|
|
const ImageViewId image_view_id = image_view_ids[image_view_index++];
|
|
|
|
const ImageView& image_view = texture_cache.GetImageView(image_view_id);
|
|
|
|
const GLuint handle = image_view.Handle(ImageViewTypeFromEntry(entries.images[unit]));
|
|
|
|
image_handles[image_index] = handle;
|
|
|
|
++image_index;
|
|
|
|
}
|
|
|
|
if (num_samplers > 0) {
|
|
|
|
glBindSamplers(base_texture, static_cast<GLsizei>(num_samplers), samplers);
|
|
|
|
glBindTextures(base_texture, static_cast<GLsizei>(num_samplers), textures);
|
|
|
|
}
|
|
|
|
if (num_images > 0) {
|
|
|
|
glBindImageTextures(base_image, static_cast<GLsizei>(num_images), images);
|
|
|
|
}
|
2018-03-20 04:00:59 +01:00
|
|
|
}
|
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
void RasterizerOpenGL::SetupDrawTextures(const Shader* shader, size_t stage_index) {
|
|
|
|
const bool via_header_index =
|
|
|
|
maxwell3d.regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex;
|
2020-02-26 20:13:47 +01:00
|
|
|
for (const auto& entry : shader->GetEntries().samplers) {
|
2020-02-22 23:40:26 +01:00
|
|
|
const auto shader_type = static_cast<ShaderType>(stage_index);
|
2020-12-30 06:25:23 +01:00
|
|
|
for (size_t index = 0; index < entry.size; ++index) {
|
|
|
|
const auto handle =
|
|
|
|
GetTextureInfo(maxwell3d, via_header_index, entry, shader_type, index);
|
|
|
|
const Sampler* const sampler = texture_cache.GetGraphicsSampler(handle.sampler);
|
|
|
|
sampler_handles.push_back(sampler->Handle());
|
|
|
|
image_view_indices.push_back(handle.image);
|
2020-01-06 16:43:13 +01:00
|
|
|
}
|
2018-06-06 19:58:16 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
void RasterizerOpenGL::SetupComputeTextures(const Shader* kernel) {
|
|
|
|
const bool via_header_index = kepler_compute.launch_description.linked_tsc;
|
2020-02-26 20:13:47 +01:00
|
|
|
for (const auto& entry : kernel->GetEntries().samplers) {
|
2020-12-30 06:25:23 +01:00
|
|
|
for (size_t i = 0; i < entry.size; ++i) {
|
|
|
|
const auto handle =
|
|
|
|
GetTextureInfo(kepler_compute, via_header_index, entry, ShaderType::Compute, i);
|
|
|
|
const Sampler* const sampler = texture_cache.GetComputeSampler(handle.sampler);
|
|
|
|
sampler_handles.push_back(sampler->Handle());
|
|
|
|
image_view_indices.push_back(handle.image);
|
2020-01-06 16:43:13 +01:00
|
|
|
}
|
2019-07-18 02:50:21 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
void RasterizerOpenGL::SetupDrawImages(const Shader* shader, size_t stage_index) {
|
|
|
|
const bool via_header_index =
|
|
|
|
maxwell3d.regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex;
|
2020-02-26 20:13:47 +01:00
|
|
|
for (const auto& entry : shader->GetEntries().images) {
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto shader_type = static_cast<ShaderType>(stage_index);
|
2020-12-30 06:25:23 +01:00
|
|
|
const auto handle = GetTextureInfo(maxwell3d, via_header_index, entry, shader_type);
|
|
|
|
image_view_indices.push_back(handle.image);
|
2019-11-13 04:27:12 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-12-30 06:25:23 +01:00
|
|
|
void RasterizerOpenGL::SetupComputeImages(const Shader* shader) {
|
|
|
|
const bool via_header_index = kepler_compute.launch_description.linked_tsc;
|
2020-02-26 20:13:47 +01:00
|
|
|
for (const auto& entry : shader->GetEntries().images) {
|
2020-12-30 06:25:23 +01:00
|
|
|
const auto handle =
|
|
|
|
GetTextureInfo(kepler_compute, via_header_index, entry, ShaderType::Compute);
|
|
|
|
image_view_indices.push_back(handle.image);
|
2019-07-12 07:17:18 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-17 00:48:58 +01:00
|
|
|
void RasterizerOpenGL::SyncState() {
|
|
|
|
SyncViewport();
|
|
|
|
SyncRasterizeEnable();
|
|
|
|
SyncPolygonModes();
|
|
|
|
SyncColorMask();
|
|
|
|
SyncFragmentColorClampState();
|
|
|
|
SyncMultiSampleState();
|
|
|
|
SyncDepthTestState();
|
|
|
|
SyncDepthClamp();
|
|
|
|
SyncStencilTestState();
|
|
|
|
SyncBlendState();
|
|
|
|
SyncLogicOpState();
|
|
|
|
SyncCullMode();
|
|
|
|
SyncPrimitiveRestart();
|
|
|
|
SyncScissorTest();
|
|
|
|
SyncPointState();
|
|
|
|
SyncLineState();
|
|
|
|
SyncPolygonOffset();
|
|
|
|
SyncAlphaTest();
|
|
|
|
SyncFramebufferSRGB();
|
|
|
|
SyncVertexFormats();
|
|
|
|
SyncVertexInstances();
|
|
|
|
}
|
|
|
|
|
2019-12-26 05:50:38 +01:00
|
|
|
void RasterizerOpenGL::SyncViewport() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
|
|
|
const auto& regs = maxwell3d.regs;
|
2019-12-29 02:12:12 +01:00
|
|
|
|
2019-12-30 05:40:27 +01:00
|
|
|
const bool dirty_viewport = flags[Dirty::Viewports];
|
2020-05-26 05:57:38 +02:00
|
|
|
const bool dirty_clip_control = flags[Dirty::ClipControl];
|
|
|
|
|
|
|
|
if (dirty_clip_control || flags[Dirty::FrontFace]) {
|
|
|
|
flags[Dirty::FrontFace] = false;
|
|
|
|
|
|
|
|
GLenum mode = MaxwellToGL::FrontFace(regs.front_face);
|
|
|
|
if (regs.screen_y_control.triangle_rast_flip != 0 &&
|
|
|
|
regs.viewport_transform[0].scale_y < 0.0f) {
|
|
|
|
switch (mode) {
|
|
|
|
case GL_CW:
|
|
|
|
mode = GL_CCW;
|
|
|
|
break;
|
|
|
|
case GL_CCW:
|
|
|
|
mode = GL_CW;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
glFrontFace(mode);
|
|
|
|
}
|
|
|
|
|
2019-12-30 05:40:27 +01:00
|
|
|
if (dirty_viewport || flags[Dirty::ClipControl]) {
|
|
|
|
flags[Dirty::ClipControl] = false;
|
|
|
|
|
|
|
|
bool flip_y = false;
|
2020-11-20 10:24:30 +01:00
|
|
|
if (regs.viewport_transform[0].scale_y < 0.0f) {
|
2019-12-30 05:40:27 +01:00
|
|
|
flip_y = !flip_y;
|
|
|
|
}
|
|
|
|
if (regs.screen_y_control.y_negate != 0) {
|
|
|
|
flip_y = !flip_y;
|
|
|
|
}
|
2021-01-17 00:48:58 +01:00
|
|
|
const bool is_zero_to_one = regs.depth_mode == Maxwell::DepthMode::ZeroToOne;
|
|
|
|
const GLenum origin = flip_y ? GL_UPPER_LEFT : GL_LOWER_LEFT;
|
|
|
|
const GLenum depth = is_zero_to_one ? GL_ZERO_TO_ONE : GL_NEGATIVE_ONE_TO_ONE;
|
|
|
|
state_tracker.ClipControl(origin, depth);
|
|
|
|
state_tracker.SetYNegate(regs.screen_y_control.y_negate != 0);
|
2019-12-30 05:40:27 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
if (dirty_viewport) {
|
2019-12-29 02:12:12 +01:00
|
|
|
flags[Dirty::Viewports] = false;
|
|
|
|
|
|
|
|
const bool force = flags[Dirty::ViewportTransform];
|
|
|
|
flags[Dirty::ViewportTransform] = false;
|
|
|
|
|
|
|
|
for (std::size_t i = 0; i < Maxwell::NumViewports; ++i) {
|
|
|
|
if (!force && !flags[Dirty::Viewport0 + i]) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
flags[Dirty::Viewport0 + i] = false;
|
|
|
|
|
2020-03-22 07:26:07 +01:00
|
|
|
const auto& src = regs.viewport_transform[i];
|
|
|
|
const Common::Rectangle<f32> rect{src.GetRect()};
|
2019-12-29 02:12:12 +01:00
|
|
|
glViewportIndexedf(static_cast<GLuint>(i), rect.left, rect.bottom, rect.GetWidth(),
|
|
|
|
rect.GetHeight());
|
|
|
|
|
2020-03-22 07:26:07 +01:00
|
|
|
const GLdouble reduce_z = regs.depth_mode == Maxwell::DepthMode::MinusOneToOne;
|
|
|
|
const GLdouble near_depth = src.translate_z - src.scale_z * reduce_z;
|
|
|
|
const GLdouble far_depth = src.translate_z + src.scale_z;
|
2021-02-24 23:04:51 +01:00
|
|
|
if (device.HasDepthBufferFloat()) {
|
|
|
|
glDepthRangeIndexeddNV(static_cast<GLuint>(i), near_depth, far_depth);
|
|
|
|
} else {
|
|
|
|
glDepthRangeIndexed(static_cast<GLuint>(i), near_depth, far_depth);
|
|
|
|
}
|
2020-05-04 22:51:30 +02:00
|
|
|
|
|
|
|
if (!GLAD_GL_NV_viewport_swizzle) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
glViewportSwizzleNV(static_cast<GLuint>(i), MaxwellToGL::ViewportSwizzle(src.swizzle.x),
|
|
|
|
MaxwellToGL::ViewportSwizzle(src.swizzle.y),
|
|
|
|
MaxwellToGL::ViewportSwizzle(src.swizzle.z),
|
|
|
|
MaxwellToGL::ViewportSwizzle(src.swizzle.w));
|
2019-12-29 02:12:12 +01:00
|
|
|
}
|
2018-11-02 04:21:25 +01:00
|
|
|
}
|
2018-03-27 02:45:10 +02:00
|
|
|
}
|
|
|
|
|
2019-12-26 01:57:10 +01:00
|
|
|
void RasterizerOpenGL::SyncDepthClamp() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2020-01-03 02:41:20 +01:00
|
|
|
if (!flags[Dirty::DepthClampEnabled]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::DepthClampEnabled] = false;
|
2018-11-29 20:13:13 +01:00
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
oglEnable(GL_DEPTH_CLAMP, maxwell3d.regs.view_volume_clip_control.depth_clamp_disabled == 0);
|
2019-12-26 01:57:10 +01:00
|
|
|
}
|
|
|
|
|
2019-12-29 06:03:05 +01:00
|
|
|
// Synchronizes the enabled user clip distances. clip_mask carries the planes
// written by the active shaders; it is intersected with the register mask.
void RasterizerOpenGL::SyncClipEnabled(u32 clip_mask) {
    auto& flags = maxwell3d.dirty.flags;
    if (!flags[Dirty::ClipDistances] && !flags[Dirty::Shaders]) {
        return;
    }
    flags[Dirty::ClipDistances] = false;

    clip_mask &= maxwell3d.regs.clip_distance_enabled;
    if (clip_mask == last_clip_distance_mask) {
        return; // Already in sync
    }
    last_clip_distance_mask = clip_mask;

    for (std::size_t plane = 0; plane < Maxwell::Regs::NumClipDistances; ++plane) {
        oglEnable(static_cast<GLenum>(GL_CLIP_DISTANCE0 + plane), (clip_mask >> plane) & 1);
    }
}
|
|
|
|
|
|
|
|
void RasterizerOpenGL::SyncClipCoef() {
|
2018-11-29 20:13:13 +01:00
|
|
|
UNIMPLEMENTED();
|
2018-03-20 04:00:59 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
void RasterizerOpenGL::SyncCullMode() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
|
|
|
const auto& regs = maxwell3d.regs;
|
2018-07-03 04:22:25 +02:00
|
|
|
|
2019-12-29 23:23:40 +01:00
|
|
|
if (flags[Dirty::CullTest]) {
|
|
|
|
flags[Dirty::CullTest] = false;
|
2019-07-18 01:37:01 +02:00
|
|
|
|
2019-12-29 23:23:40 +01:00
|
|
|
if (regs.cull_test_enabled) {
|
|
|
|
glEnable(GL_CULL_FACE);
|
|
|
|
glCullFace(MaxwellToGL::CullFace(regs.cull_face));
|
|
|
|
} else {
|
|
|
|
glDisable(GL_CULL_FACE);
|
|
|
|
}
|
2018-07-03 04:22:25 +02:00
|
|
|
}
|
2018-03-20 04:00:59 +01:00
|
|
|
}
|
|
|
|
|
2018-10-26 01:04:13 +02:00
|
|
|
void RasterizerOpenGL::SyncPrimitiveRestart() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-30 03:25:53 +01:00
|
|
|
if (!flags[Dirty::PrimitiveRestart]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::PrimitiveRestart] = false;
|
2018-10-26 01:04:13 +02:00
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
if (maxwell3d.regs.primitive_restart.enabled) {
|
2019-12-30 03:25:53 +01:00
|
|
|
glEnable(GL_PRIMITIVE_RESTART);
|
2020-06-12 02:24:45 +02:00
|
|
|
glPrimitiveRestartIndex(maxwell3d.regs.primitive_restart.index);
|
2019-12-30 03:25:53 +01:00
|
|
|
} else {
|
|
|
|
glDisable(GL_PRIMITIVE_RESTART);
|
|
|
|
}
|
2018-10-26 01:04:13 +02:00
|
|
|
}
|
|
|
|
|
2018-07-02 20:33:06 +02:00
|
|
|
void RasterizerOpenGL::SyncDepthTestState() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
|
|
|
const auto& regs = maxwell3d.regs;
|
2019-07-18 01:37:01 +02:00
|
|
|
|
2019-12-30 02:56:21 +01:00
|
|
|
if (flags[Dirty::DepthMask]) {
|
|
|
|
flags[Dirty::DepthMask] = false;
|
|
|
|
glDepthMask(regs.depth_write_enabled ? GL_TRUE : GL_FALSE);
|
2019-07-18 01:37:01 +02:00
|
|
|
}
|
|
|
|
|
2019-12-30 02:56:21 +01:00
|
|
|
if (flags[Dirty::DepthTest]) {
|
|
|
|
flags[Dirty::DepthTest] = false;
|
|
|
|
if (regs.depth_test_enable) {
|
|
|
|
glEnable(GL_DEPTH_TEST);
|
|
|
|
glDepthFunc(MaxwellToGL::ComparisonOp(regs.depth_test_func));
|
|
|
|
} else {
|
|
|
|
glDisable(GL_DEPTH_TEST);
|
|
|
|
}
|
2019-07-18 01:37:01 +02:00
|
|
|
}
|
2018-07-02 20:33:06 +02:00
|
|
|
}
|
|
|
|
|
2018-08-22 06:35:31 +02:00
|
|
|
void RasterizerOpenGL::SyncStencilTestState() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-30 03:08:32 +01:00
|
|
|
if (!flags[Dirty::StencilTest]) {
|
2018-08-22 06:35:31 +02:00
|
|
|
return;
|
|
|
|
}
|
2019-12-30 03:08:32 +01:00
|
|
|
flags[Dirty::StencilTest] = false;
|
2019-09-04 04:19:31 +02:00
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
2020-03-26 05:08:11 +01:00
|
|
|
oglEnable(GL_STENCIL_TEST, regs.stencil_enable);
|
2019-07-18 01:37:01 +02:00
|
|
|
|
2019-12-26 07:34:29 +01:00
|
|
|
glStencilFuncSeparate(GL_FRONT, MaxwellToGL::ComparisonOp(regs.stencil_front_func_func),
|
|
|
|
regs.stencil_front_func_ref, regs.stencil_front_func_mask);
|
|
|
|
glStencilOpSeparate(GL_FRONT, MaxwellToGL::StencilOp(regs.stencil_front_op_fail),
|
|
|
|
MaxwellToGL::StencilOp(regs.stencil_front_op_zfail),
|
|
|
|
MaxwellToGL::StencilOp(regs.stencil_front_op_zpass));
|
|
|
|
glStencilMaskSeparate(GL_FRONT, regs.stencil_front_mask);
|
|
|
|
|
2018-11-07 04:27:12 +01:00
|
|
|
if (regs.stencil_two_side_enable) {
|
2019-12-26 07:34:29 +01:00
|
|
|
glStencilFuncSeparate(GL_BACK, MaxwellToGL::ComparisonOp(regs.stencil_back_func_func),
|
|
|
|
regs.stencil_back_func_ref, regs.stencil_back_func_mask);
|
|
|
|
glStencilOpSeparate(GL_BACK, MaxwellToGL::StencilOp(regs.stencil_back_op_fail),
|
|
|
|
MaxwellToGL::StencilOp(regs.stencil_back_op_zfail),
|
|
|
|
MaxwellToGL::StencilOp(regs.stencil_back_op_zpass));
|
|
|
|
glStencilMaskSeparate(GL_BACK, regs.stencil_back_mask);
|
2018-11-07 04:27:12 +01:00
|
|
|
} else {
|
2019-12-26 07:34:29 +01:00
|
|
|
glStencilFuncSeparate(GL_BACK, GL_ALWAYS, 0, 0xFFFFFFFF);
|
|
|
|
glStencilOpSeparate(GL_BACK, GL_KEEP, GL_KEEP, GL_KEEP);
|
|
|
|
glStencilMaskSeparate(GL_BACK, 0xFFFFFFFF);
|
2018-11-07 04:27:12 +01:00
|
|
|
}
|
2018-08-22 06:35:31 +02:00
|
|
|
}
|
|
|
|
|
2019-12-26 06:11:01 +01:00
|
|
|
void RasterizerOpenGL::SyncRasterizeEnable() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-30 04:49:19 +01:00
|
|
|
if (!flags[Dirty::RasterizeEnable]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::RasterizeEnable] = false;
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
oglEnable(GL_RASTERIZER_DISCARD, maxwell3d.regs.rasterize_enable == 0);
|
2019-12-18 23:26:52 +01:00
|
|
|
}
|
|
|
|
|
2020-02-24 23:43:57 +01:00
|
|
|
void RasterizerOpenGL::SyncPolygonModes() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2020-02-24 23:43:57 +01:00
|
|
|
if (!flags[Dirty::PolygonModes]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::PolygonModes] = false;
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
|
|
|
if (regs.fill_rectangle) {
|
2020-02-24 23:43:57 +01:00
|
|
|
if (!GLAD_GL_NV_fill_rectangle) {
|
|
|
|
LOG_ERROR(Render_OpenGL, "GL_NV_fill_rectangle used and not supported");
|
|
|
|
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
flags[Dirty::PolygonModeFront] = true;
|
|
|
|
flags[Dirty::PolygonModeBack] = true;
|
|
|
|
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL_RECTANGLE_NV);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
if (regs.polygon_mode_front == regs.polygon_mode_back) {
|
2020-02-24 23:43:57 +01:00
|
|
|
flags[Dirty::PolygonModeFront] = false;
|
|
|
|
flags[Dirty::PolygonModeBack] = false;
|
2020-06-12 02:24:45 +02:00
|
|
|
glPolygonMode(GL_FRONT_AND_BACK, MaxwellToGL::PolygonMode(regs.polygon_mode_front));
|
2020-02-24 23:43:57 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (flags[Dirty::PolygonModeFront]) {
|
|
|
|
flags[Dirty::PolygonModeFront] = false;
|
2020-06-12 02:24:45 +02:00
|
|
|
glPolygonMode(GL_FRONT, MaxwellToGL::PolygonMode(regs.polygon_mode_front));
|
2020-02-24 23:43:57 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
if (flags[Dirty::PolygonModeBack]) {
|
|
|
|
flags[Dirty::PolygonModeBack] = false;
|
2020-06-12 02:24:45 +02:00
|
|
|
glPolygonMode(GL_BACK, MaxwellToGL::PolygonMode(regs.polygon_mode_back));
|
2020-02-24 23:43:57 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-11-05 03:46:06 +01:00
|
|
|
void RasterizerOpenGL::SyncColorMask() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-29 02:51:04 +01:00
|
|
|
if (!flags[Dirty::ColorMasks]) {
|
2019-07-13 22:52:32 +02:00
|
|
|
return;
|
|
|
|
}
|
2019-12-29 02:51:04 +01:00
|
|
|
flags[Dirty::ColorMasks] = false;
|
2019-07-13 22:52:32 +02:00
|
|
|
|
2019-12-29 02:51:04 +01:00
|
|
|
const bool force = flags[Dirty::ColorMaskCommon];
|
|
|
|
flags[Dirty::ColorMaskCommon] = false;
|
2019-07-13 22:52:32 +02:00
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
2019-12-26 05:19:15 +01:00
|
|
|
if (regs.color_mask_common) {
|
2019-12-29 02:51:04 +01:00
|
|
|
if (!force && !flags[Dirty::ColorMask0]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::ColorMask0] = false;
|
|
|
|
|
2019-12-26 05:19:15 +01:00
|
|
|
auto& mask = regs.color_mask[0];
|
2019-12-29 02:51:04 +01:00
|
|
|
glColorMask(mask.R != 0, mask.B != 0, mask.G != 0, mask.A != 0);
|
|
|
|
return;
|
2018-11-05 03:46:06 +01:00
|
|
|
}
|
2019-07-13 22:52:32 +02:00
|
|
|
|
2019-12-29 02:51:04 +01:00
|
|
|
// Path without color_mask_common set
|
|
|
|
for (std::size_t i = 0; i < Maxwell::NumRenderTargets; ++i) {
|
|
|
|
if (!force && !flags[Dirty::ColorMask0 + i]) {
|
|
|
|
continue;
|
2019-12-26 05:19:15 +01:00
|
|
|
}
|
2019-12-29 02:51:04 +01:00
|
|
|
flags[Dirty::ColorMask0 + i] = false;
|
|
|
|
|
|
|
|
const auto& mask = regs.color_mask[i];
|
|
|
|
glColorMaski(static_cast<GLuint>(i), mask.R != 0, mask.G != 0, mask.B != 0, mask.A != 0);
|
2018-11-05 03:46:06 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-11-14 04:02:54 +01:00
|
|
|
void RasterizerOpenGL::SyncMultiSampleState() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-30 04:43:15 +01:00
|
|
|
if (!flags[Dirty::MultisampleControl]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::MultisampleControl] = false;
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
2019-12-26 05:04:36 +01:00
|
|
|
oglEnable(GL_SAMPLE_ALPHA_TO_COVERAGE, regs.multisample_control.alpha_to_coverage);
|
|
|
|
oglEnable(GL_SAMPLE_ALPHA_TO_ONE, regs.multisample_control.alpha_to_one);
|
2018-11-14 04:02:54 +01:00
|
|
|
}
|
|
|
|
|
2018-11-14 02:09:01 +01:00
|
|
|
void RasterizerOpenGL::SyncFragmentColorClampState() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-30 05:20:08 +01:00
|
|
|
if (!flags[Dirty::FragmentClampColor]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::FragmentClampColor] = false;
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
glClampColor(GL_CLAMP_FRAGMENT_COLOR, maxwell3d.regs.frag_color_clamp ? GL_TRUE : GL_FALSE);
|
2018-11-14 02:09:01 +01:00
|
|
|
}
|
|
|
|
|
2018-06-09 00:05:52 +02:00
|
|
|
void RasterizerOpenGL::SyncBlendState() {
    // Mirrors the guest's blend registers into the GL context, but only for the
    // pieces marked dirty since the last sync.
    auto& flags = maxwell3d.dirty.flags;
    const auto& regs = maxwell3d.regs;

    // Blend constant color is tracked independently of the per-target blend state.
    if (flags[Dirty::BlendColor]) {
        flags[Dirty::BlendColor] = false;
        glBlendColor(regs.blend_color.r, regs.blend_color.g, regs.blend_color.b,
                     regs.blend_color.a);
    }

    // TODO(Rodrigo): Revisit blending, there are several registers we are not reading

    if (!flags[Dirty::BlendStates]) {
        return;
    }
    flags[Dirty::BlendStates] = false;

    if (!regs.independent_blend_enable) {
        // Common blend state: render target 0's registers apply to all targets,
        // so program the non-indexed GL blend state and stop.
        if (!regs.blend.enable[0]) {
            glDisable(GL_BLEND);
            return;
        }
        glEnable(GL_BLEND);
        glBlendFuncSeparate(MaxwellToGL::BlendFunc(regs.blend.factor_source_rgb),
                            MaxwellToGL::BlendFunc(regs.blend.factor_dest_rgb),
                            MaxwellToGL::BlendFunc(regs.blend.factor_source_a),
                            MaxwellToGL::BlendFunc(regs.blend.factor_dest_a));
        glBlendEquationSeparate(MaxwellToGL::BlendEquation(regs.blend.equation_rgb),
                                MaxwellToGL::BlendEquation(regs.blend.equation_a));
        return;
    }

    // If the independent-blend toggle itself changed, every target must be
    // reprogrammed regardless of its individual dirty bit.
    const bool force = flags[Dirty::BlendIndependentEnabled];
    flags[Dirty::BlendIndependentEnabled] = false;

    for (std::size_t i = 0; i < Maxwell::NumRenderTargets; ++i) {
        if (!force && !flags[Dirty::BlendState0 + i]) {
            continue;
        }
        flags[Dirty::BlendState0 + i] = false;

        if (!regs.blend.enable[i]) {
            glDisablei(GL_BLEND, static_cast<GLuint>(i));
            continue;
        }
        glEnablei(GL_BLEND, static_cast<GLuint>(i));

        // Per-target blend factors/equations come from the independent_blend array.
        const auto& src = regs.independent_blend[i];
        glBlendFuncSeparatei(static_cast<GLuint>(i), MaxwellToGL::BlendFunc(src.factor_source_rgb),
                             MaxwellToGL::BlendFunc(src.factor_dest_rgb),
                             MaxwellToGL::BlendFunc(src.factor_source_a),
                             MaxwellToGL::BlendFunc(src.factor_dest_a));
        glBlendEquationSeparatei(static_cast<GLuint>(i),
                                 MaxwellToGL::BlendEquation(src.equation_rgb),
                                 MaxwellToGL::BlendEquation(src.equation_a));
    }
}
|
2018-08-21 01:44:47 +02:00
|
|
|
|
|
|
|
void RasterizerOpenGL::SyncLogicOpState() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-30 04:57:50 +01:00
|
|
|
if (!flags[Dirty::LogicOp]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::LogicOp] = false;
|
2018-08-21 01:44:47 +02:00
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
2019-12-26 00:21:53 +01:00
|
|
|
if (regs.logic_op.enable) {
|
2019-12-30 04:57:50 +01:00
|
|
|
glEnable(GL_COLOR_LOGIC_OP);
|
2019-12-26 00:21:53 +01:00
|
|
|
glLogicOp(MaxwellToGL::LogicOp(regs.logic_op.operation));
|
2019-12-30 04:57:50 +01:00
|
|
|
} else {
|
|
|
|
glDisable(GL_COLOR_LOGIC_OP);
|
2019-12-26 00:21:53 +01:00
|
|
|
}
|
2018-08-21 01:44:47 +02:00
|
|
|
}
|
|
|
|
|
2019-12-26 05:28:17 +01:00
|
|
|
void RasterizerOpenGL::SyncScissorTest() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-29 02:31:00 +01:00
|
|
|
if (!flags[Dirty::Scissors]) {
|
2018-08-21 01:44:47 +02:00
|
|
|
return;
|
2019-12-29 02:31:00 +01:00
|
|
|
}
|
|
|
|
flags[Dirty::Scissors] = false;
|
2018-08-21 01:44:47 +02:00
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
2019-12-26 05:28:17 +01:00
|
|
|
for (std::size_t index = 0; index < Maxwell::NumViewports; ++index) {
|
2019-12-29 02:31:00 +01:00
|
|
|
if (!flags[Dirty::Scissor0 + index]) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
flags[Dirty::Scissor0 + index] = false;
|
2018-08-21 10:18:27 +02:00
|
|
|
|
2019-12-26 05:28:17 +01:00
|
|
|
const auto& src = regs.scissor_test[index];
|
2019-12-29 02:31:00 +01:00
|
|
|
if (src.enable) {
|
|
|
|
glEnablei(GL_SCISSOR_TEST, static_cast<GLuint>(index));
|
|
|
|
glScissorIndexed(static_cast<GLuint>(index), src.min_x, src.min_y,
|
|
|
|
src.max_x - src.min_x, src.max_y - src.min_y);
|
|
|
|
} else {
|
|
|
|
glDisablei(GL_SCISSOR_TEST, static_cast<GLuint>(index));
|
2018-11-14 00:13:16 +01:00
|
|
|
}
|
2018-10-09 02:49:36 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-09-28 06:31:01 +02:00
|
|
|
void RasterizerOpenGL::SyncPointState() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-30 05:27:42 +01:00
|
|
|
if (!flags[Dirty::PointSize]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::PointSize] = false;
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
oglEnable(GL_POINT_SPRITE, maxwell3d.regs.point_sprite_enable);
|
2020-12-30 06:25:23 +01:00
|
|
|
oglEnable(GL_PROGRAM_POINT_SIZE, maxwell3d.regs.vp_point_size.enable);
|
2019-12-30 05:27:42 +01:00
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
glPointSize(std::max(1.0f, maxwell3d.regs.point_size));
|
2018-09-28 06:31:01 +02:00
|
|
|
}
|
|
|
|
|
2020-02-25 01:02:32 +01:00
|
|
|
void RasterizerOpenGL::SyncLineState() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2020-02-25 01:02:32 +01:00
|
|
|
if (!flags[Dirty::LineWidth]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::LineWidth] = false;
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
2020-02-25 01:02:32 +01:00
|
|
|
oglEnable(GL_LINE_SMOOTH, regs.line_smooth_enable);
|
|
|
|
glLineWidth(regs.line_smooth_enable ? regs.line_width_smooth : regs.line_width_aliased);
|
|
|
|
}
|
|
|
|
|
2018-11-27 00:31:44 +01:00
|
|
|
void RasterizerOpenGL::SyncPolygonOffset() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-30 04:22:43 +01:00
|
|
|
if (!flags[Dirty::PolygonOffset]) {
|
2019-07-13 22:52:32 +02:00
|
|
|
return;
|
|
|
|
}
|
2019-12-30 04:22:43 +01:00
|
|
|
flags[Dirty::PolygonOffset] = false;
|
2019-10-01 07:00:23 +02:00
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
2019-12-26 04:25:53 +01:00
|
|
|
oglEnable(GL_POLYGON_OFFSET_FILL, regs.polygon_offset_fill_enable);
|
|
|
|
oglEnable(GL_POLYGON_OFFSET_LINE, regs.polygon_offset_line_enable);
|
|
|
|
oglEnable(GL_POLYGON_OFFSET_POINT, regs.polygon_offset_point_enable);
|
2019-07-13 22:52:32 +02:00
|
|
|
|
2020-01-03 02:42:56 +01:00
|
|
|
if (regs.polygon_offset_fill_enable || regs.polygon_offset_line_enable ||
|
|
|
|
regs.polygon_offset_point_enable) {
|
|
|
|
// Hardware divides polygon offset units by two
|
|
|
|
glPolygonOffsetClamp(regs.polygon_offset_factor, regs.polygon_offset_units / 2.0f,
|
|
|
|
regs.polygon_offset_clamp);
|
|
|
|
}
|
2018-11-27 00:31:44 +01:00
|
|
|
}
|
|
|
|
|
2019-05-22 01:28:09 +02:00
|
|
|
void RasterizerOpenGL::SyncAlphaTest() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-30 04:37:35 +01:00
|
|
|
if (!flags[Dirty::AlphaTest]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::AlphaTest] = false;
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
2019-12-26 00:03:40 +01:00
|
|
|
if (regs.alpha_test_enabled) {
|
2019-12-30 04:37:35 +01:00
|
|
|
glEnable(GL_ALPHA_TEST);
|
2019-12-26 00:03:40 +01:00
|
|
|
glAlphaFunc(MaxwellToGL::ComparisonOp(regs.alpha_test_func), regs.alpha_test_ref);
|
2019-12-30 04:37:35 +01:00
|
|
|
} else {
|
|
|
|
glDisable(GL_ALPHA_TEST);
|
2019-05-22 01:28:09 +02:00
|
|
|
}
|
2018-10-12 02:29:11 +02:00
|
|
|
}
|
|
|
|
|
2019-12-26 05:01:41 +01:00
|
|
|
void RasterizerOpenGL::SyncFramebufferSRGB() {
|
2020-06-12 02:24:45 +02:00
|
|
|
auto& flags = maxwell3d.dirty.flags;
|
2019-12-30 04:53:53 +01:00
|
|
|
if (!flags[Dirty::FramebufferSRGB]) {
|
2019-05-22 01:28:09 +02:00
|
|
|
return;
|
|
|
|
}
|
2019-12-30 04:53:53 +01:00
|
|
|
flags[Dirty::FramebufferSRGB] = false;
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
oglEnable(GL_FRAMEBUFFER_SRGB, maxwell3d.regs.framebuffer_srgb);
|
2018-10-12 02:29:11 +02:00
|
|
|
}
|
|
|
|
|
2020-06-03 07:48:41 +02:00
|
|
|
void RasterizerOpenGL::SyncTransformFeedback() {
    // Translates the guest's transform feedback layout registers into the
    // attrib/stream arrays consumed by glTransformFeedbackStreamAttribsNV.
    // TODO(Rodrigo): Inject SKIP_COMPONENTS*_NV when required. An unimplemented message will signal
    // when this is required.
    const auto& regs = maxwell3d.regs;

    // Each attrib entry is a triplet: {attrib enum, component count, index}.
    static constexpr std::size_t STRIDE = 3;
    std::array<GLint, 128 * STRIDE * Maxwell::NumTransformFeedbackBuffers> attribs;
    std::array<GLint, Maxwell::NumTransformFeedbackBuffers> streams;

    // Write cursors into the two output arrays; only the consumed prefix of
    // each array is handed to GL at the end.
    GLint* cursor = attribs.data();
    GLint* current_stream = streams.data();

    for (std::size_t feedback = 0; feedback < Maxwell::NumTransformFeedbackBuffers; ++feedback) {
        const auto& layout = regs.tfb_layouts[feedback];
        // Stride padding would require SKIP_COMPONENTS tokens — not implemented yet.
        UNIMPLEMENTED_IF_MSG(layout.stride != layout.varying_count * 4, "Stride padding");
        if (layout.varying_count == 0) {
            continue;
        }

        *current_stream = static_cast<GLint>(feedback);
        if (current_stream != streams.data()) {
            // When stepping one stream, push the expected token
            cursor[0] = GL_NEXT_BUFFER_NV;
            cursor[1] = 0;
            cursor[2] = 0;
            cursor += STRIDE;
        }
        ++current_stream;

        const auto& locations = regs.tfb_varying_locs[feedback];
        // Tracks the attribute the previous triplet referred to, so consecutive
        // components of the same attribute are merged into one entry.
        std::optional<u8> current_index;
        for (u32 offset = 0; offset < layout.varying_count; ++offset) {
            const u8 location = locations[offset];
            // Locations address individual components; four components per attribute.
            const u8 index = location / 4;

            if (current_index == index) {
                // Increase number of components of the previous attachment
                ++cursor[-2];
                continue;
            }
            current_index = index;

            // New attribute: emit {enum, 1 component so far, index}.
            std::tie(cursor[0], cursor[2]) = TransformFeedbackEnum(location);
            cursor[1] = 1;
            cursor += STRIDE;
        }
    }

    const GLsizei num_attribs = static_cast<GLsizei>((cursor - attribs.data()) / STRIDE);
    const GLsizei num_strides = static_cast<GLsizei>(current_stream - streams.data());
    glTransformFeedbackStreamAttribsNV(num_attribs, attribs.data(), num_strides, streams.data(),
                                       GL_INTERLEAVED_ATTRIBS);
}
|
|
|
|
|
2020-03-02 23:31:26 +01:00
|
|
|
void RasterizerOpenGL::BeginTransformFeedback(GLenum primitive_mode) {
    // Starts a transform feedback capture when the guest has it enabled.
    const auto& regs = maxwell3d.regs;
    if (regs.tfb_enabled == 0) {
        return;
    }
    // Assembly shaders need the NV-style attrib layout programmed first.
    if (device.UseAssemblyShaders()) {
        SyncTransformFeedback();
    }
    const bool uses_tess_or_geometry =
        regs.IsShaderConfigEnabled(Maxwell::ShaderProgram::TesselationControl) ||
        regs.IsShaderConfigEnabled(Maxwell::ShaderProgram::TesselationEval) ||
        regs.IsShaderConfigEnabled(Maxwell::ShaderProgram::Geometry);
    UNIMPLEMENTED_IF(uses_tess_or_geometry);
    UNIMPLEMENTED_IF(primitive_mode != GL_POINTS);

    // We may have to call BeginTransformFeedbackNV here since they seem to call different
    // implementations on Nvidia's driver (the pointer is different) but we are using
    // ARB_transform_feedback3 features with NV_transform_feedback interactions and the ARB
    // extension doesn't define BeginTransformFeedback (without NV) interactions. It just works.
    glBeginTransformFeedback(GL_POINTS);
}
|
|
|
|
|
|
|
|
void RasterizerOpenGL::EndTransformFeedback() {
|
2020-06-12 02:24:45 +02:00
|
|
|
const auto& regs = maxwell3d.regs;
|
2020-03-02 23:31:26 +01:00
|
|
|
if (regs.tfb_enabled == 0) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
glEndTransformFeedback();
|
|
|
|
}
|
|
|
|
|
2018-08-21 10:18:27 +02:00
|
|
|
} // namespace OpenGL
|