2018-03-20 04:00:59 +01:00
|
|
|
// Copyright 2015 Citra Emulator Project
|
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
2018-04-20 05:01:50 +02:00
|
|
|
#include <algorithm>
|
2018-09-13 02:27:43 +02:00
|
|
|
#include <array>
|
2019-07-15 03:25:13 +02:00
|
|
|
#include <bitset>
|
2018-03-20 04:00:59 +01:00
|
|
|
#include <memory>
|
|
|
|
#include <string>
|
2018-07-24 18:10:35 +02:00
|
|
|
#include <string_view>
|
2018-03-20 04:00:59 +01:00
|
|
|
#include <tuple>
|
|
|
|
#include <utility>
|
|
|
|
#include <glad/glad.h>
|
|
|
|
#include "common/alignment.h"
|
|
|
|
#include "common/assert.h"
|
|
|
|
#include "common/logging/log.h"
|
|
|
|
#include "common/math_util.h"
|
|
|
|
#include "common/microprofile.h"
|
2018-08-22 06:33:03 +02:00
|
|
|
#include "common/scope_exit.h"
|
2018-03-24 07:01:03 +01:00
|
|
|
#include "core/core.h"
|
|
|
|
#include "core/hle/kernel/process.h"
|
2019-11-26 21:19:15 +01:00
|
|
|
#include "core/memory.h"
|
2018-03-20 04:00:59 +01:00
|
|
|
#include "core/settings.h"
|
2019-07-15 03:25:13 +02:00
|
|
|
#include "video_core/engines/kepler_compute.h"
|
2018-03-24 07:01:03 +01:00
|
|
|
#include "video_core/engines/maxwell_3d.h"
|
2019-11-18 22:35:21 +01:00
|
|
|
#include "video_core/engines/shader_type.h"
|
2019-05-29 23:15:28 +02:00
|
|
|
#include "video_core/memory_manager.h"
|
2019-11-26 22:52:15 +01:00
|
|
|
#include "video_core/renderer_opengl/gl_query_cache.h"
|
2018-03-20 04:00:59 +01:00
|
|
|
#include "video_core/renderer_opengl/gl_rasterizer.h"
|
2019-01-14 02:05:53 +01:00
|
|
|
#include "video_core/renderer_opengl/gl_shader_cache.h"
|
2018-03-25 04:38:08 +02:00
|
|
|
#include "video_core/renderer_opengl/maxwell_to_gl.h"
|
2018-03-20 04:00:59 +01:00
|
|
|
#include "video_core/renderer_opengl/renderer_opengl.h"
|
|
|
|
|
2018-08-21 10:18:27 +02:00
|
|
|
namespace OpenGL {
|
|
|
|
|
2018-03-24 09:06:26 +01:00
|
|
|
using Maxwell = Tegra::Engines::Maxwell3D::Regs;

using Tegra::Engines::ShaderType;
using VideoCore::Surface::PixelFormat;
using VideoCore::Surface::SurfaceTarget;
using VideoCore::Surface::SurfaceType;

// Microprofile timer categories for each stage of the OpenGL rasterizer. These show up in the
// built-in profiler overlay and let us attribute frame time to individual setup steps.
MICROPROFILE_DEFINE(OpenGL_VAO, "OpenGL", "Vertex Format Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_VB, "OpenGL", "Vertex Buffer Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Shader, "OpenGL", "Shader Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_UBO, "OpenGL", "Const Buffer Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Index, "OpenGL", "Index Buffer Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Texture, "OpenGL", "Texture Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Framebuffer, "OpenGL", "Framebuffer Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Drawing, "OpenGL", "Drawing", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Blits, "OpenGL", "Blits", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_CacheManagement, "OpenGL", "Cache Mgmt", MP_RGB(100, 255, 100));
MICROPROFILE_DEFINE(OpenGL_PrimitiveAssembly, "OpenGL", "Prim Asmbl", MP_RGB(255, 100, 100));
|
|
|
|
|
2019-11-13 04:27:12 +01:00
|
|
|
namespace {
|
|
|
|
|
2020-01-13 21:20:02 +01:00
|
|
|
constexpr std::size_t NumSupportedVertexAttributes = 16;
|
|
|
|
|
2019-11-13 04:27:12 +01:00
|
|
|
template <typename Engine, typename Entry>
|
|
|
|
Tegra::Texture::FullTextureInfo GetTextureInfo(const Engine& engine, const Entry& entry,
|
2020-02-22 23:40:26 +01:00
|
|
|
ShaderType shader_type, std::size_t index = 0) {
|
2019-11-13 04:27:12 +01:00
|
|
|
if (entry.IsBindless()) {
|
|
|
|
const Tegra::Texture::TextureHandle tex_handle =
|
|
|
|
engine.AccessConstBuffer32(shader_type, entry.GetBuffer(), entry.GetOffset());
|
|
|
|
return engine.GetTextureInfo(tex_handle);
|
|
|
|
}
|
2020-01-06 16:43:13 +01:00
|
|
|
const auto& gpu_profile = engine.AccessGuestDriverProfile();
|
|
|
|
const u32 offset =
|
|
|
|
entry.GetOffset() + static_cast<u32>(index * gpu_profile.GetTextureHandlerSize());
|
2019-11-13 04:27:12 +01:00
|
|
|
if constexpr (std::is_same_v<Engine, Tegra::Engines::Maxwell3D>) {
|
2020-01-06 16:43:13 +01:00
|
|
|
return engine.GetStageTexture(shader_type, offset);
|
2019-11-13 04:27:12 +01:00
|
|
|
} else {
|
2020-01-06 16:43:13 +01:00
|
|
|
return engine.GetTexture(offset);
|
2019-11-13 04:27:12 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
std::size_t GetConstBufferSize(const Tegra::Engines::ConstBufferInfo& buffer,
|
2020-02-26 20:13:47 +01:00
|
|
|
const ConstBufferEntry& entry) {
|
2019-07-06 04:11:58 +02:00
|
|
|
if (!entry.IsIndirect()) {
|
|
|
|
return entry.GetSize();
|
|
|
|
}
|
|
|
|
|
|
|
|
if (buffer.size > Maxwell::MaxConstBufferSize) {
|
|
|
|
LOG_WARNING(Render_OpenGL, "Indirect constbuffer size {} exceeds maximum {}", buffer.size,
|
|
|
|
Maxwell::MaxConstBufferSize);
|
|
|
|
return Maxwell::MaxConstBufferSize;
|
|
|
|
}
|
|
|
|
|
|
|
|
return buffer.size;
|
|
|
|
}
|
|
|
|
|
2019-12-26 04:28:46 +01:00
|
|
|
void oglEnable(GLenum cap, bool state) {
|
|
|
|
(state ? glEnable : glDisable)(cap);
|
|
|
|
}
|
|
|
|
|
2019-12-26 05:27:43 +01:00
|
|
|
void oglEnablei(GLenum cap, bool state, GLuint index) {
|
|
|
|
(state ? glEnablei : glDisablei)(cap, index);
|
|
|
|
}
|
|
|
|
|
2019-11-13 04:27:12 +01:00
|
|
|
} // Anonymous namespace
|
|
|
|
|
2019-04-06 22:59:56 +02:00
|
|
|
// Constructs the OpenGL rasterizer. The caches are wired to this rasterizer instance so they
// can invalidate it; `device` is a data member referenced by several initializers here —
// NOTE(review): this presumes `device` is declared before the caches in the class, confirm in
// the header, as member initialization follows declaration order.
RasterizerOpenGL::RasterizerOpenGL(Core::System& system, Core::Frontend::EmuWindow& emu_window,
                                   ScreenInfo& info, GLShader::ProgramManager& program_manager,
                                   StateTracker& state_tracker)
    : RasterizerAccelerated{system.Memory()}, texture_cache{system, *this, device, state_tracker},
      shader_cache{*this, system, emu_window, device}, query_cache{system, *this}, system{system},
      screen_info{info}, program_manager{program_manager}, state_tracker{state_tracker},
      buffer_cache{*this, system, device, STREAM_BUFFER_SIZE} {
    // Warn early about missing optional GL extensions.
    CheckExtensions();
}
|
|
|
|
|
2018-08-09 21:31:46 +02:00
|
|
|
RasterizerOpenGL::~RasterizerOpenGL() {}
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2018-11-23 16:11:21 +01:00
|
|
|
void RasterizerOpenGL::CheckExtensions() {
|
|
|
|
if (!GLAD_GL_ARB_texture_filter_anisotropic && !GLAD_GL_EXT_texture_filter_anisotropic) {
|
|
|
|
LOG_WARNING(
|
|
|
|
Render_OpenGL,
|
|
|
|
"Anisotropic filter is not supported! This can cause graphical issues in some games.");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-26 04:16:52 +01:00
|
|
|
void RasterizerOpenGL::SetupVertexFormat() {
|
2019-03-09 07:25:11 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
2019-12-29 03:08:40 +01:00
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::VertexFormats]) {
|
|
|
|
return;
|
2019-01-06 05:53:27 +01:00
|
|
|
}
|
2019-12-29 03:08:40 +01:00
|
|
|
flags[Dirty::VertexFormats] = false;
|
2018-11-06 19:15:44 +01:00
|
|
|
|
|
|
|
MICROPROFILE_SCOPE(OpenGL_VAO);
|
|
|
|
|
2019-12-26 04:16:52 +01:00
|
|
|
// Use the vertex array as-is, assumes that the data is formatted correctly for OpenGL. Enables
|
|
|
|
// the first 16 vertex attributes always, as we don't know which ones are actually used until
|
|
|
|
// shader time. Note, Tegra technically supports 32, but we're capping this to 16 for now to
|
|
|
|
// avoid OpenGL errors.
|
|
|
|
// TODO(Subv): Analyze the shader to identify which attributes are actually used and don't
|
|
|
|
// assume every shader uses them all.
|
2020-01-13 21:20:02 +01:00
|
|
|
for (std::size_t index = 0; index < NumSupportedVertexAttributes; ++index) {
|
2019-12-29 03:08:40 +01:00
|
|
|
if (!flags[Dirty::VertexFormat0 + index]) {
|
|
|
|
continue;
|
2018-09-05 11:36:50 +02:00
|
|
|
}
|
2019-12-29 03:08:40 +01:00
|
|
|
flags[Dirty::VertexFormat0 + index] = false;
|
|
|
|
|
|
|
|
const auto attrib = gpu.regs.vertex_attrib_format[index];
|
|
|
|
const auto gl_index = static_cast<GLuint>(index);
|
2018-11-06 21:26:27 +01:00
|
|
|
|
2019-12-26 04:16:52 +01:00
|
|
|
// Ignore invalid attributes.
|
|
|
|
if (!attrib.IsValid()) {
|
2019-12-29 03:08:40 +01:00
|
|
|
glDisableVertexAttribArray(gl_index);
|
2019-12-26 04:16:52 +01:00
|
|
|
continue;
|
2018-09-05 11:36:50 +02:00
|
|
|
}
|
2019-12-29 03:08:40 +01:00
|
|
|
glEnableVertexAttribArray(gl_index);
|
2019-01-06 05:53:27 +01:00
|
|
|
|
2019-12-26 04:16:52 +01:00
|
|
|
if (attrib.type == Maxwell::VertexAttribute::Type::SignedInt ||
|
|
|
|
attrib.type == Maxwell::VertexAttribute::Type::UnsignedInt) {
|
2019-12-29 03:08:40 +01:00
|
|
|
glVertexAttribIFormat(gl_index, attrib.ComponentCount(),
|
|
|
|
MaxwellToGL::VertexType(attrib), attrib.offset);
|
2019-12-26 04:16:52 +01:00
|
|
|
} else {
|
2019-12-29 03:08:40 +01:00
|
|
|
glVertexAttribFormat(gl_index, attrib.ComponentCount(), MaxwellToGL::VertexType(attrib),
|
2019-12-26 04:16:52 +01:00
|
|
|
attrib.IsNormalized() ? GL_TRUE : GL_FALSE, attrib.offset);
|
|
|
|
}
|
2019-12-29 03:08:40 +01:00
|
|
|
glVertexAttribBinding(gl_index, attrib.buffer);
|
2019-12-26 04:16:52 +01:00
|
|
|
}
|
2018-11-06 19:37:10 +01:00
|
|
|
}
|
|
|
|
|
2019-12-26 04:16:52 +01:00
|
|
|
void RasterizerOpenGL::SetupVertexBuffer() {
|
2019-03-09 07:25:11 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
2019-12-29 05:28:53 +01:00
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::VertexBuffers]) {
|
2018-11-06 21:26:27 +01:00
|
|
|
return;
|
2019-12-29 05:28:53 +01:00
|
|
|
}
|
|
|
|
flags[Dirty::VertexBuffers] = false;
|
2018-11-06 21:26:27 +01:00
|
|
|
|
|
|
|
MICROPROFILE_SCOPE(OpenGL_VB);
|
|
|
|
|
2018-04-22 02:19:33 +02:00
|
|
|
// Upload all guest vertex arrays sequentially to our buffer
|
2019-12-29 05:28:53 +01:00
|
|
|
const auto& regs = gpu.regs;
|
|
|
|
for (std::size_t index = 0; index < Maxwell::NumVertexArrays; ++index) {
|
|
|
|
if (!flags[Dirty::VertexBuffer0 + index]) {
|
2018-11-06 21:26:27 +01:00
|
|
|
continue;
|
2019-12-29 05:28:53 +01:00
|
|
|
}
|
|
|
|
flags[Dirty::VertexBuffer0 + index] = false;
|
2018-11-06 21:26:27 +01:00
|
|
|
|
2018-04-22 02:19:33 +02:00
|
|
|
const auto& vertex_array = regs.vertex_array[index];
|
2019-12-26 04:16:52 +01:00
|
|
|
if (!vertex_array.IsEnabled()) {
|
2018-04-22 02:19:33 +02:00
|
|
|
continue;
|
2019-12-26 04:16:52 +01:00
|
|
|
}
|
2018-04-22 02:19:33 +02:00
|
|
|
|
2019-03-04 05:17:35 +01:00
|
|
|
const GPUVAddr start = vertex_array.StartAddress();
|
|
|
|
const GPUVAddr end = regs.vertex_array_limit[index].LimitAddress();
|
2018-04-22 02:19:33 +02:00
|
|
|
|
|
|
|
ASSERT(end > start);
|
2018-09-08 08:59:59 +02:00
|
|
|
const u64 size = end - start + 1;
|
2019-05-28 00:55:44 +02:00
|
|
|
const auto [vertex_buffer, vertex_buffer_offset] = buffer_cache.UploadMemory(start, size);
|
2018-04-22 02:19:33 +02:00
|
|
|
|
|
|
|
// Bind the vertex array to the buffer at the current offset.
|
2019-12-29 05:28:53 +01:00
|
|
|
vertex_array_pushbuffer.SetVertexBuffer(static_cast<GLuint>(index), vertex_buffer,
|
|
|
|
vertex_buffer_offset, vertex_array.stride);
|
2018-03-25 03:29:47 +02:00
|
|
|
}
|
2019-07-10 21:38:31 +02:00
|
|
|
}
|
2018-11-06 19:37:10 +01:00
|
|
|
|
2019-12-26 04:16:52 +01:00
|
|
|
void RasterizerOpenGL::SetupVertexInstances() {
|
2019-07-10 21:38:31 +02:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
2019-12-29 05:28:53 +01:00
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::VertexInstances]) {
|
2019-07-10 21:38:31 +02:00
|
|
|
return;
|
2019-12-29 05:28:53 +01:00
|
|
|
}
|
|
|
|
flags[Dirty::VertexInstances] = false;
|
2019-07-10 21:38:31 +02:00
|
|
|
|
|
|
|
const auto& regs = gpu.regs;
|
2020-01-13 21:20:02 +01:00
|
|
|
for (std::size_t index = 0; index < NumSupportedVertexAttributes; ++index) {
|
2019-12-29 05:28:53 +01:00
|
|
|
if (!flags[Dirty::VertexInstance0 + index]) {
|
2019-07-10 21:38:31 +02:00
|
|
|
continue;
|
|
|
|
}
|
2019-12-29 05:28:53 +01:00
|
|
|
flags[Dirty::VertexInstance0 + index] = false;
|
|
|
|
|
|
|
|
const auto gl_index = static_cast<GLuint>(index);
|
|
|
|
const bool instancing_enabled = regs.instanced_arrays.IsInstancingEnabled(gl_index);
|
|
|
|
const GLuint divisor = instancing_enabled ? regs.vertex_array[index].divisor : 0;
|
|
|
|
glVertexBindingDivisor(gl_index, divisor);
|
2019-07-10 21:38:31 +02:00
|
|
|
}
|
2018-03-20 04:00:59 +01:00
|
|
|
}
|
|
|
|
|
2019-06-20 08:44:06 +02:00
|
|
|
// Uploads the guest index buffer into the buffer cache, binds it, and returns the byte offset
// inside the bound buffer where the indices start.
GLintptr RasterizerOpenGL::SetupIndexBuffer() {
    MICROPROFILE_SCOPE(OpenGL_Index);
    const auto& regs = system.GPU().Maxwell3D().regs;
    const std::size_t upload_size = CalculateIndexBufferSize();
    const auto [handle, upload_offset] =
        buffer_cache.UploadMemory(regs.index_array.IndexStart(), upload_size);
    vertex_array_pushbuffer.SetIndexBuffer(handle);
    return upload_offset;
}
|
|
|
|
|
2018-10-07 04:17:31 +02:00
|
|
|
// Binds the shader program for every enabled Maxwell shader stage and sets up each stage's
// resources (const buffers, global memory, textures, images). Also accumulates the clip
// distance mask across stages and syncs it at the end.
void RasterizerOpenGL::SetupShaders(GLenum primitive_mode) {
    MICROPROFILE_SCOPE(OpenGL_Shader);
    auto& gpu = system.GPU().Maxwell3D();
    // Bitmask of clip distances written by any active stage.
    u32 clip_distances = 0;

    for (std::size_t index = 0; index < Maxwell::MaxShaderProgram; ++index) {
        const auto& shader_config = gpu.regs.shader_config[index];
        const auto program{static_cast<Maxwell::ShaderProgram>(index)};

        // Skip stages that are not enabled
        if (!gpu.regs.IsShaderConfigEnabled(index)) {
            // Unbind optional stages so stale programs don't leak into this draw.
            switch (program) {
            case Maxwell::ShaderProgram::Geometry:
                program_manager.UseGeometryShader(0);
                break;
            case Maxwell::ShaderProgram::Fragment:
                program_manager.UseFragmentShader(0);
                break;
            default:
                break;
            }
            continue;
        }

        // These stages are currently not supported in the OpenGL backend.
        // Todo(Blinkhawk): Port tesselation shaders from Vulkan to OpenGL
        if (program == Maxwell::ShaderProgram::TesselationControl) {
            continue;
        } else if (program == Maxwell::ShaderProgram::TesselationEval) {
            continue;
        }

        Shader shader{shader_cache.GetStageProgram(program)};

        // Stage indices are 0 - 5 (VertexA and VertexB share stage 0).
        const std::size_t stage = index == 0 ? 0 : index - 1;
        SetupDrawConstBuffers(stage, shader);
        SetupDrawGlobalMemory(stage, shader);
        SetupDrawTextures(stage, shader);
        SetupDrawImages(stage, shader);

        const GLuint program_handle = shader->GetHandle();
        switch (program) {
        case Maxwell::ShaderProgram::VertexA:
        case Maxwell::ShaderProgram::VertexB:
            program_manager.UseVertexShader(program_handle);
            break;
        case Maxwell::ShaderProgram::Geometry:
            program_manager.UseGeometryShader(program_handle);
            break;
        case Maxwell::ShaderProgram::Fragment:
            program_manager.UseFragmentShader(program_handle);
            break;
        default:
            UNIMPLEMENTED_MSG("Unimplemented shader index={}, enable={}, offset=0x{:08X}", index,
                              shader_config.enable.Value(), shader_config.offset);
        }

        // Workaround for Intel drivers.
        // When a clip distance is enabled but not set in the shader it crops parts of the screen
        // (sometimes it's half the screen, sometimes three quarters). To avoid this, enable the
        // clip distances only when it's written by a shader stage.
        clip_distances |= shader->GetEntries().clip_distances;

        // When VertexA is enabled, we have dual vertex shaders
        if (program == Maxwell::ShaderProgram::VertexA) {
            // VertexB was combined with VertexA, so we skip the VertexB iteration
            ++index;
        }
    }

    SyncClipEnabled(clip_distances);
    gpu.dirty.flags[Dirty::Shaders] = false;
}
|
|
|
|
|
2018-09-15 15:21:06 +02:00
|
|
|
std::size_t RasterizerOpenGL::CalculateVertexArraysSize() const {
|
2019-03-09 07:25:11 +01:00
|
|
|
const auto& regs = system.GPU().Maxwell3D().regs;
|
2018-04-22 02:19:33 +02:00
|
|
|
|
2018-09-15 15:21:06 +02:00
|
|
|
std::size_t size = 0;
|
2018-04-22 02:19:33 +02:00
|
|
|
for (u32 index = 0; index < Maxwell::NumVertexArrays; ++index) {
|
|
|
|
if (!regs.vertex_array[index].IsEnabled())
|
|
|
|
continue;
|
|
|
|
|
2019-03-04 05:17:35 +01:00
|
|
|
const GPUVAddr start = regs.vertex_array[index].StartAddress();
|
|
|
|
const GPUVAddr end = regs.vertex_array_limit[index].LimitAddress();
|
2018-04-22 02:19:33 +02:00
|
|
|
|
|
|
|
ASSERT(end > start);
|
|
|
|
size += end - start + 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
return size;
|
|
|
|
}
|
|
|
|
|
2018-10-02 19:47:26 +02:00
|
|
|
std::size_t RasterizerOpenGL::CalculateIndexBufferSize() const {
|
2019-03-09 07:25:11 +01:00
|
|
|
const auto& regs = system.GPU().Maxwell3D().regs;
|
2018-10-02 19:47:26 +02:00
|
|
|
|
|
|
|
return static_cast<std::size_t>(regs.index_array.count) *
|
|
|
|
static_cast<std::size_t>(regs.index_array.FormatSizeInBytes());
|
|
|
|
}
|
|
|
|
|
2019-01-21 20:38:23 +01:00
|
|
|
// Loads the disk shader cache; `callback` reports progress to the frontend and `stop_loading`
// allows the load to be aborted early.
void RasterizerOpenGL::LoadDiskResources(const std::atomic_bool& stop_loading,
                                         const VideoCore::DiskResourceLoadCallback& callback) {
    shader_cache.LoadDiskCache(stop_loading, callback);
}
|
|
|
|
|
2019-12-29 01:45:56 +01:00
|
|
|
// Initializes the dirty-state tracker used to skip redundant OpenGL state changes.
void RasterizerOpenGL::SetupDirtyFlags() {
    state_tracker.Initialize();
}
|
|
|
|
|
2019-09-17 07:36:47 +02:00
|
|
|
// Rebuilds and binds the draw framebuffer from the guest render target registers. Only runs
// when the RenderTargets dirty flag is set. Surface lookups are bracketed with
// GuardRenderTargets so the texture cache does not evict the surfaces mid-configuration.
void RasterizerOpenGL::ConfigureFramebuffers() {
    MICROPROFILE_SCOPE(OpenGL_Framebuffer);
    auto& gpu = system.GPU().Maxwell3D();
    if (!gpu.dirty.flags[VideoCommon::Dirty::RenderTargets]) {
        return;
    }
    gpu.dirty.flags[VideoCommon::Dirty::RenderTargets] = false;

    texture_cache.GuardRenderTargets(true);

    View depth_surface = texture_cache.GetDepthBufferSurface(true);

    const auto& regs = gpu.regs;
    UNIMPLEMENTED_IF(regs.rt_separate_frag_data == 0);

    // Bind the framebuffer surfaces
    FramebufferCacheKey key;
    const auto colors_count = static_cast<std::size_t>(regs.rt_control.count);
    for (std::size_t index = 0; index < colors_count; ++index) {
        View color_surface{texture_cache.GetColorBufferSurface(index, true)};
        if (!color_surface) {
            continue;
        }
        // Assume that a surface will be written to if it is used as a framebuffer, even
        // if the shader doesn't actually write to it.
        texture_cache.MarkColorBufferInUse(index);

        key.SetAttachment(index, regs.rt_control.GetMap(index));
        key.colors[index] = std::move(color_surface);
    }

    if (depth_surface) {
        // Assume that a surface will be written to if it is used as a framebuffer, even if
        // the shader doesn't actually write to it.
        texture_cache.MarkDepthBufferInUse();
        key.zeta = std::move(depth_surface);
    }

    texture_cache.GuardRenderTargets(false);

    // Fetch (or create) the cached framebuffer object matching this attachment set and bind it.
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, framebuffer_cache.GetFramebuffer(key));
}
|
|
|
|
|
2019-12-26 20:15:24 +01:00
|
|
|
// Builds and binds a minimal framebuffer containing only the surfaces a clear operation
// targets: color attachment 0 (the RT selected by clear_buffers.RT) and/or the depth/stencil
// surface. Surface lookups are bracketed with GuardRenderTargets to protect them from eviction.
void RasterizerOpenGL::ConfigureClearFramebuffer(bool using_color_fb, bool using_depth_fb,
                                                 bool using_stencil_fb) {
    auto& gpu = system.GPU().Maxwell3D();
    const auto& regs = gpu.regs;

    texture_cache.GuardRenderTargets(true);
    View color_surface;
    if (using_color_fb) {
        color_surface = texture_cache.GetColorBufferSurface(regs.clear_buffers.RT, false);
    }
    View depth_surface;
    if (using_depth_fb || using_stencil_fb) {
        // Depth and stencil share a single surface.
        depth_surface = texture_cache.GetDepthBufferSurface(false);
    }
    texture_cache.GuardRenderTargets(false);

    FramebufferCacheKey key;
    key.colors[0] = color_surface;
    key.zeta = depth_surface;

    // The tracker must be told the framebuffer binding changed behind its back.
    state_tracker.NotifyFramebuffer();
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, framebuffer_cache.GetFramebuffer(key));
}
|
|
|
|
|
2018-07-03 23:55:44 +02:00
|
|
|
// Executes a guest clear command: determines which aspects (color/depth/stencil) are being
// cleared from the clear_buffers register, syncs the GL state the clear depends on, binds a
// clear-only framebuffer, and issues the matching glClearBuffer* calls.
void RasterizerOpenGL::Clear() {
    const auto& gpu = system.GPU().Maxwell3D();
    if (!gpu.ShouldExecute()) {
        return;
    }

    const auto& regs = gpu.regs;
    bool use_color{};
    bool use_depth{};
    bool use_stencil{};

    // Any enabled color channel implies a color clear.
    if (regs.clear_buffers.R || regs.clear_buffers.G || regs.clear_buffers.B ||
        regs.clear_buffers.A) {
        use_color = true;
    }
    if (use_color) {
        // The color write mask affects glClearBuffer, so set it per-channel first.
        state_tracker.NotifyColorMask0();
        glColorMaski(0, regs.clear_buffers.R != 0, regs.clear_buffers.G != 0,
                     regs.clear_buffers.B != 0, regs.clear_buffers.A != 0);

        // TODO(Rodrigo): Determine if clamping is used on clears
        SyncFragmentColorClampState();
        SyncFramebufferSRGB();
    }
    if (regs.clear_buffers.Z) {
        ASSERT_MSG(regs.zeta_enable != 0, "Tried to clear Z but buffer is not enabled!");
        use_depth = true;

        // Depth writes must be enabled for the depth clear to take effect.
        state_tracker.NotifyDepthMask();
        glDepthMask(GL_TRUE);
    }
    if (regs.clear_buffers.S) {
        ASSERT_MSG(regs.zeta_enable, "Tried to clear stencil but buffer is not enabled!");
        use_stencil = true;
    }

    if (!use_color && !use_depth && !use_stencil) {
        // No color surface nor depth/stencil surface are enabled
        return;
    }

    SyncRasterizeEnable();

    // The clear may optionally be restricted by the scissor rectangle.
    if (regs.clear_flags.scissor) {
        SyncScissorTest();
    } else {
        state_tracker.NotifyScissor0();
        glDisablei(GL_SCISSOR_TEST, 0);
    }

    UNIMPLEMENTED_IF(regs.clear_flags.viewport);

    ConfigureClearFramebuffer(use_color, use_depth, use_stencil);

    if (use_color) {
        glClearBufferfv(GL_COLOR, 0, regs.clear_color);
    }

    // Prefer the combined depth-stencil clear when both aspects are requested.
    if (use_depth && use_stencil) {
        glClearBufferfi(GL_DEPTH_STENCIL, 0, regs.clear_depth, regs.clear_stencil);
    } else if (use_depth) {
        glClearBufferfv(GL_DEPTH, 0, &regs.clear_depth);
    } else if (use_stencil) {
        glClearBufferiv(GL_STENCIL, 0, &regs.clear_stencil);
    }

    // Track queued GL work so the backend knows when a flush is worthwhile.
    ++num_queued_commands;
}
|
|
|
|
|
2020-01-30 06:08:46 +01:00
|
|
|
void RasterizerOpenGL::Draw(bool is_indexed, bool is_instanced) {
|
|
|
|
MICROPROFILE_SCOPE(OpenGL_Drawing);
|
2019-03-09 07:25:11 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
2019-07-28 00:40:10 +02:00
|
|
|
const auto& regs = gpu.regs;
|
2019-11-26 22:52:15 +01:00
|
|
|
|
|
|
|
query_cache.UpdateCounters();
|
2019-07-28 00:40:10 +02:00
|
|
|
|
2019-12-26 05:50:38 +01:00
|
|
|
SyncViewport();
|
2019-12-26 06:11:01 +01:00
|
|
|
SyncRasterizeEnable();
|
2018-11-05 03:46:06 +01:00
|
|
|
SyncColorMask();
|
2018-11-14 02:09:01 +01:00
|
|
|
SyncFragmentColorClampState();
|
2018-11-14 04:02:54 +01:00
|
|
|
SyncMultiSampleState();
|
2018-07-14 07:52:23 +02:00
|
|
|
SyncDepthTestState();
|
2020-01-03 02:41:20 +01:00
|
|
|
SyncDepthClamp();
|
2018-08-22 06:35:31 +02:00
|
|
|
SyncStencilTestState();
|
2018-06-09 00:05:52 +02:00
|
|
|
SyncBlendState();
|
2018-08-21 01:44:47 +02:00
|
|
|
SyncLogicOpState();
|
2018-07-02 20:33:41 +02:00
|
|
|
SyncCullMode();
|
2018-10-26 01:04:13 +02:00
|
|
|
SyncPrimitiveRestart();
|
2019-12-26 05:28:17 +01:00
|
|
|
SyncScissorTest();
|
2018-09-26 00:41:21 +02:00
|
|
|
SyncTransformFeedback();
|
2018-09-28 06:31:01 +02:00
|
|
|
SyncPointState();
|
2018-11-27 00:31:44 +01:00
|
|
|
SyncPolygonOffset();
|
2019-05-22 01:28:09 +02:00
|
|
|
SyncAlphaTest();
|
2019-12-26 05:01:41 +01:00
|
|
|
SyncFramebufferSRGB();
|
2018-03-27 04:54:16 +02:00
|
|
|
|
2019-11-02 08:08:31 +01:00
|
|
|
buffer_cache.Acquire();
|
|
|
|
|
2018-09-15 15:21:06 +02:00
|
|
|
std::size_t buffer_size = CalculateVertexArraysSize();
|
2018-04-22 02:19:33 +02:00
|
|
|
|
2019-05-22 00:21:57 +02:00
|
|
|
// Add space for index buffer
|
|
|
|
if (is_indexed) {
|
|
|
|
buffer_size = Common::AlignUp(buffer_size, 4) + CalculateIndexBufferSize();
|
2018-03-24 08:59:51 +01:00
|
|
|
}
|
2018-04-08 06:00:11 +02:00
|
|
|
|
|
|
|
// Uniform space for the 5 shader stages
|
2019-04-10 20:56:12 +02:00
|
|
|
buffer_size = Common::AlignUp<std::size_t>(buffer_size, 4) +
|
|
|
|
(sizeof(GLShader::MaxwellUniformData) + device.GetUniformBufferAlignment()) *
|
|
|
|
Maxwell::MaxShaderStage;
|
2018-03-24 08:59:51 +01:00
|
|
|
|
2018-08-10 10:29:37 +02:00
|
|
|
// Add space for at least 18 constant buffers
|
2019-07-06 04:11:58 +02:00
|
|
|
buffer_size += Maxwell::MaxConstBuffers *
|
|
|
|
(Maxwell::MaxConstBufferSize + device.GetUniformBufferAlignment());
|
2018-08-10 10:29:37 +02:00
|
|
|
|
2019-06-20 08:22:25 +02:00
|
|
|
// Prepare the vertex array.
|
|
|
|
buffer_cache.Map(buffer_size);
|
2018-03-24 08:59:51 +01:00
|
|
|
|
2019-05-28 00:41:19 +02:00
|
|
|
// Prepare vertex array format.
|
2019-12-26 04:16:52 +01:00
|
|
|
SetupVertexFormat();
|
|
|
|
vertex_array_pushbuffer.Setup();
|
2019-05-28 00:37:46 +02:00
|
|
|
|
2019-05-28 00:41:19 +02:00
|
|
|
// Upload vertex and index data.
|
2019-12-26 04:16:52 +01:00
|
|
|
SetupVertexBuffer();
|
|
|
|
SetupVertexInstances();
|
2020-01-30 06:08:46 +01:00
|
|
|
GLintptr index_buffer_offset;
|
|
|
|
if (is_indexed) {
|
|
|
|
index_buffer_offset = SetupIndexBuffer();
|
|
|
|
}
|
2019-05-28 00:41:19 +02:00
|
|
|
|
2019-06-20 08:22:25 +02:00
|
|
|
// Prepare packed bindings.
|
2019-11-19 01:38:15 +01:00
|
|
|
bind_ubo_pushbuffer.Setup();
|
|
|
|
bind_ssbo_pushbuffer.Setup();
|
|
|
|
|
|
|
|
// Setup emulation uniform buffer.
|
|
|
|
GLShader::MaxwellUniformData ubo;
|
|
|
|
ubo.SetFromRegs(gpu);
|
|
|
|
const auto [buffer, offset] =
|
|
|
|
buffer_cache.UploadHostMemory(&ubo, sizeof(ubo), device.GetUniformBufferAlignment());
|
|
|
|
bind_ubo_pushbuffer.Push(EmulationUniformBlockBinding, buffer, offset,
|
|
|
|
static_cast<GLsizeiptr>(sizeof(ubo)));
|
2019-06-20 08:22:25 +02:00
|
|
|
|
2019-05-28 00:41:19 +02:00
|
|
|
// Setup shaders and their used resources.
|
2019-06-15 19:22:57 +02:00
|
|
|
texture_cache.GuardSamplers(true);
|
2020-02-14 01:55:21 +01:00
|
|
|
const GLenum primitive_mode = MaxwellToGL::PrimitiveTopology(gpu.regs.draw.topology);
|
2019-09-15 17:48:54 +02:00
|
|
|
SetupShaders(primitive_mode);
|
2019-06-15 19:22:57 +02:00
|
|
|
texture_cache.GuardSamplers(false);
|
2018-03-24 08:59:51 +01:00
|
|
|
|
2019-09-17 07:36:47 +02:00
|
|
|
ConfigureFramebuffers();
|
2019-05-08 23:45:59 +02:00
|
|
|
|
2019-06-20 08:22:25 +02:00
|
|
|
// Signal the buffer cache that we are not going to upload more things.
|
|
|
|
const bool invalidate = buffer_cache.Unmap();
|
|
|
|
|
|
|
|
// Now that we are no longer uploading data, we can safely bind the buffers to OpenGL.
|
2019-06-20 08:44:06 +02:00
|
|
|
vertex_array_pushbuffer.Bind();
|
2019-06-20 08:22:25 +02:00
|
|
|
bind_ubo_pushbuffer.Bind();
|
|
|
|
bind_ssbo_pushbuffer.Bind();
|
|
|
|
|
2019-12-26 20:04:41 +01:00
|
|
|
program_manager.Update();
|
2018-03-24 08:59:51 +01:00
|
|
|
|
2019-06-15 19:22:57 +02:00
|
|
|
if (texture_cache.TextureBarrier()) {
|
|
|
|
glTextureBarrier();
|
|
|
|
}
|
2019-09-20 21:44:28 +02:00
|
|
|
|
2019-11-26 22:30:21 +01:00
|
|
|
++num_queued_commands;
|
|
|
|
|
2020-01-30 06:08:46 +01:00
|
|
|
const GLuint base_instance = static_cast<GLuint>(gpu.regs.vb_base_instance);
|
|
|
|
const GLsizei num_instances =
|
|
|
|
static_cast<GLsizei>(is_instanced ? gpu.mme_draw.instance_count : 1);
|
|
|
|
if (is_indexed) {
|
|
|
|
const GLint base_vertex = static_cast<GLint>(gpu.regs.vb_element_base);
|
|
|
|
const GLsizei num_vertices = static_cast<GLsizei>(gpu.regs.index_array.count);
|
2020-02-14 01:55:21 +01:00
|
|
|
const GLvoid* offset = reinterpret_cast<const GLvoid*>(index_buffer_offset);
|
|
|
|
const GLenum format = MaxwellToGL::IndexFormat(gpu.regs.index_array.format);
|
|
|
|
if (num_instances == 1 && base_instance == 0 && base_vertex == 0) {
|
|
|
|
glDrawElements(primitive_mode, num_vertices, format, offset);
|
|
|
|
} else if (num_instances == 1 && base_instance == 0) {
|
|
|
|
glDrawElementsBaseVertex(primitive_mode, num_vertices, format, offset, base_vertex);
|
|
|
|
} else if (base_vertex == 0 && base_instance == 0) {
|
|
|
|
glDrawElementsInstanced(primitive_mode, num_vertices, format, offset, num_instances);
|
|
|
|
} else if (base_vertex == 0) {
|
|
|
|
glDrawElementsInstancedBaseInstance(primitive_mode, num_vertices, format, offset,
|
|
|
|
num_instances, base_instance);
|
|
|
|
} else if (base_instance == 0) {
|
|
|
|
glDrawElementsInstancedBaseVertex(primitive_mode, num_vertices, format, offset,
|
|
|
|
num_instances, base_vertex);
|
|
|
|
} else {
|
|
|
|
glDrawElementsInstancedBaseVertexBaseInstance(primitive_mode, num_vertices, format,
|
|
|
|
offset, num_instances, base_vertex,
|
|
|
|
base_instance);
|
|
|
|
}
|
2019-09-15 17:48:54 +02:00
|
|
|
} else {
|
2020-01-30 06:08:46 +01:00
|
|
|
const GLint base_vertex = static_cast<GLint>(gpu.regs.vertex_buffer.first);
|
|
|
|
const GLsizei num_vertices = static_cast<GLsizei>(gpu.regs.vertex_buffer.count);
|
2020-02-14 01:55:21 +01:00
|
|
|
if (num_instances == 1 && base_instance == 0) {
|
|
|
|
glDrawArrays(primitive_mode, base_vertex, num_vertices);
|
|
|
|
} else if (base_instance == 0) {
|
|
|
|
glDrawArraysInstanced(primitive_mode, base_vertex, num_vertices, num_instances);
|
|
|
|
} else {
|
|
|
|
glDrawArraysInstancedBaseInstance(primitive_mode, base_vertex, num_vertices,
|
|
|
|
num_instances, base_instance);
|
|
|
|
}
|
2019-09-15 17:48:54 +02:00
|
|
|
}
|
2020-01-30 06:08:46 +01:00
|
|
|
}
|
2019-06-15 19:22:57 +02:00
|
|
|
|
2019-07-15 03:25:13 +02:00
|
|
|
// Executes a compute dispatch for the kernel located at code_addr.
// Sets up textures/images first, then maps the buffer cache, stages const and
// global memory bindings, unmaps, binds, and finally issues glDispatchCompute.
void RasterizerOpenGL::DispatchCompute(GPUVAddr code_addr) {
    // Bail out entirely on drivers with known-broken compute support.
    if (device.HasBrokenCompute()) {
        return;
    }

    buffer_cache.Acquire();

    auto kernel = shader_cache.GetComputeKernel(code_addr);
    SetupComputeTextures(kernel);
    SetupComputeImages(kernel);
    glUseProgramStages(program_manager.GetHandle(), GL_COMPUTE_SHADER_BIT, kernel->GetHandle());

    // Worst-case size: every const buffer at maximum size, each padded to the
    // device's uniform buffer alignment.
    const std::size_t buffer_size =
        Tegra::Engines::KeplerCompute::NumConstBuffers *
        (Maxwell::MaxConstBufferSize + device.GetUniformBufferAlignment());
    buffer_cache.Map(buffer_size);

    bind_ubo_pushbuffer.Setup();
    bind_ssbo_pushbuffer.Setup();

    // Uploads happen while the buffer cache is mapped...
    SetupComputeConstBuffers(kernel);
    SetupComputeGlobalMemory(kernel);

    buffer_cache.Unmap();

    // ...and the GL bindings are only flushed after unmapping.
    bind_ubo_pushbuffer.Bind();
    bind_ssbo_pushbuffer.Bind();

    const auto& launch_desc = system.GPU().KeplerCompute().launch_description;
    glDispatchCompute(launch_desc.grid_dim_x, launch_desc.grid_dim_y, launch_desc.grid_dim_z);
    ++num_queued_commands;
}
|
|
|
|
|
2019-07-28 00:40:10 +02:00
|
|
|
// Resets the GPU counter of the given query type; forwarded to the query cache.
void RasterizerOpenGL::ResetCounter(VideoCore::QueryType type) {
    query_cache.ResetCounter(type);
}
|
|
|
|
|
2019-11-28 06:15:34 +01:00
|
|
|
// Records a query of the given type at gpu_addr, optionally tagged with a
// timestamp; forwarded to the query cache.
void RasterizerOpenGL::Query(GPUVAddr gpu_addr, VideoCore::QueryType type,
                             std::optional<u64> timestamp) {
    query_cache.Query(gpu_addr, type, timestamp);
}
|
|
|
|
|
2018-09-08 10:05:56 +02:00
|
|
|
// Intentionally a no-op: full flushes are handled through the region-based paths.
void RasterizerOpenGL::FlushAll() {}
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2019-02-19 02:58:32 +01:00
|
|
|
// Writes back any cached GPU-modified data in [addr, addr + size) to guest
// memory, across the texture, buffer and query caches.
void RasterizerOpenGL::FlushRegion(CacheAddr addr, u64 size) {
    MICROPROFILE_SCOPE(OpenGL_CacheManagement);
    // Nothing to flush for a null address or an empty range.
    if (addr == 0 || size == 0) {
        return;
    }
    texture_cache.FlushRegion(addr, size);
    buffer_cache.FlushRegion(addr, size);
    query_cache.FlushRegion(addr, size);
}
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2019-02-19 02:58:32 +01:00
|
|
|
// Drops cached entries overlapping [addr, addr + size) so future accesses
// re-read guest memory; covers texture, shader, buffer and query caches.
void RasterizerOpenGL::InvalidateRegion(CacheAddr addr, u64 size) {
    MICROPROFILE_SCOPE(OpenGL_CacheManagement);
    // A null address or empty range invalidates nothing.
    if (addr == 0 || size == 0) {
        return;
    }
    texture_cache.InvalidateRegion(addr, size);
    shader_cache.InvalidateRegion(addr, size);
    buffer_cache.InvalidateRegion(addr, size);
    query_cache.InvalidateRegion(addr, size);
}
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2019-02-19 02:58:32 +01:00
|
|
|
// Flushes then invalidates the region. The flush is skipped unless accurate
// GPU emulation is enabled, trading correctness for speed otherwise.
void RasterizerOpenGL::FlushAndInvalidateRegion(CacheAddr addr, u64 size) {
    if (Settings::values.use_accurate_gpu_emulation) {
        FlushRegion(addr, size);
    }
    InvalidateRegion(addr, size);
}
|
2018-03-20 04:00:59 +01:00
|
|
|
|
2019-07-26 20:20:43 +02:00
|
|
|
void RasterizerOpenGL::FlushCommands() {
|
2019-11-26 22:30:21 +01:00
|
|
|
// Only flush when we have commands queued to OpenGL.
|
|
|
|
if (num_queued_commands == 0) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
num_queued_commands = 0;
|
2019-07-26 20:20:43 +02:00
|
|
|
glFlush();
|
|
|
|
}
|
|
|
|
|
2019-06-20 08:22:25 +02:00
|
|
|
void RasterizerOpenGL::TickFrame() {
|
2019-11-26 22:30:21 +01:00
|
|
|
// Ticking a frame means that buffers will be swapped, calling glFlush implicitly.
|
|
|
|
num_queued_commands = 0;
|
|
|
|
|
2019-06-20 08:22:25 +02:00
|
|
|
buffer_cache.TickFrame();
|
|
|
|
}
|
|
|
|
|
2018-10-06 05:39:03 +02:00
|
|
|
// Performs a Fermi2D surface-to-surface copy on the GPU via the texture cache.
// Always reports the copy as accelerated.
bool RasterizerOpenGL::AccelerateSurfaceCopy(const Tegra::Engines::Fermi2D::Regs::Surface& src,
                                             const Tegra::Engines::Fermi2D::Regs::Surface& dst,
                                             const Tegra::Engines::Fermi2D::Config& copy_config) {
    MICROPROFILE_SCOPE(OpenGL_Blits);
    texture_cache.DoFermiCopy(src, dst, copy_config);
    return true;
}
|
|
|
|
|
2018-06-24 23:42:29 +02:00
|
|
|
// Tries to present the framebuffer directly from a cached surface, avoiding a
// CPU readback. Returns true and fills screen_info on success; false means the
// caller must fall back to a software path. pixel_stride is unused here.
bool RasterizerOpenGL::AccelerateDisplay(const Tegra::FramebufferConfig& config,
                                         VAddr framebuffer_addr, u32 pixel_stride) {
    if (!framebuffer_addr) {
        return {};
    }

    MICROPROFILE_SCOPE(OpenGL_CacheManagement);

    const auto surface{
        texture_cache.TryFindFramebufferSurface(system.Memory().GetPointer(framebuffer_addr))};
    if (!surface) {
        return {};
    }

    // Verify that the cached surface is the same size and format as the requested framebuffer
    const auto& params{surface->GetSurfaceParams()};
    const auto& pixel_format{
        VideoCore::Surface::PixelFormatFromGPUPixelFormat(config.pixel_format)};
    ASSERT_MSG(params.width == config.width, "Framebuffer width is different");
    ASSERT_MSG(params.height == config.height, "Framebuffer height is different");

    // A format mismatch is tolerated (logged only); presumably close enough to
    // display — TODO(review): confirm this is intentional rather than an assert.
    if (params.pixel_format != pixel_format) {
        LOG_DEBUG(Render_OpenGL, "Framebuffer pixel_format is different");
    }

    screen_info.display_texture = surface->GetTexture();
    screen_info.display_srgb = surface->GetSurfaceParams().srgb_conversion;

    return true;
}
|
|
|
|
|
2019-11-18 22:35:21 +01:00
|
|
|
// Stages the uniform (const) buffer bindings used by a graphics shader stage.
// Bindings start at the device's base uniform-buffer slot for this stage and
// are assigned consecutively per shader entry.
void RasterizerOpenGL::SetupDrawConstBuffers(std::size_t stage_index, const Shader& shader) {
    MICROPROFILE_SCOPE(OpenGL_UBO);
    const auto& stages = system.GPU().Maxwell3D().state.shader_stages;
    const auto& shader_stage = stages[stage_index];

    u32 binding = device.GetBaseBindings(stage_index).uniform_buffer;
    for (const auto& entry : shader->GetEntries().const_buffers) {
        const auto& buffer = shader_stage.const_buffers[entry.GetIndex()];
        SetupConstBuffer(binding++, buffer, entry);
    }
}
|
|
|
|
|
|
|
|
void RasterizerOpenGL::SetupComputeConstBuffers(const Shader& kernel) {
|
|
|
|
MICROPROFILE_SCOPE(OpenGL_UBO);
|
|
|
|
const auto& launch_desc = system.GPU().KeplerCompute().launch_description;
|
2019-11-19 01:38:15 +01:00
|
|
|
|
|
|
|
u32 binding = 0;
|
2020-02-26 20:13:47 +01:00
|
|
|
for (const auto& entry : kernel->GetEntries().const_buffers) {
|
2019-07-15 03:25:13 +02:00
|
|
|
const auto& config = launch_desc.const_buffer_config[entry.GetIndex()];
|
2019-07-12 02:54:07 +02:00
|
|
|
const std::bitset<8> mask = launch_desc.const_buffer_enable_mask.Value();
|
2019-07-15 03:25:13 +02:00
|
|
|
Tegra::Engines::ConstBufferInfo buffer;
|
|
|
|
buffer.address = config.Address();
|
|
|
|
buffer.size = config.size;
|
|
|
|
buffer.enabled = mask[entry.GetIndex()];
|
2019-11-19 01:38:15 +01:00
|
|
|
SetupConstBuffer(binding++, buffer, entry);
|
2019-05-31 22:33:21 +02:00
|
|
|
}
|
|
|
|
}
|
2018-08-08 08:07:44 +02:00
|
|
|
|
2019-11-19 01:38:15 +01:00
|
|
|
// Uploads a single const buffer and queues its UBO binding. Disabled buffers
// are bound to a minimal empty buffer so the slot is never left stale.
void RasterizerOpenGL::SetupConstBuffer(u32 binding, const Tegra::Engines::ConstBufferInfo& buffer,
                                        const ConstBufferEntry& entry) {
    if (!buffer.enabled) {
        // Set values to zero to unbind buffers
        bind_ubo_pushbuffer.Push(binding, buffer_cache.GetEmptyBuffer(sizeof(float)), 0,
                                 sizeof(float));
        return;
    }

    // Align the actual size so it ends up being a multiple of vec4 to meet the OpenGL std140
    // UBO alignment requirements.
    const std::size_t size = Common::AlignUp(GetConstBufferSize(buffer, entry), sizeof(GLvec4));

    const auto alignment = device.GetUniformBufferAlignment();
    const auto [cbuf, offset] = buffer_cache.UploadMemory(buffer.address, size, alignment, false,
                                                          device.HasFastBufferSubData());
    bind_ubo_pushbuffer.Push(binding, cbuf, offset, size);
}
|
|
|
|
|
2019-11-18 22:35:21 +01:00
|
|
|
void RasterizerOpenGL::SetupDrawGlobalMemory(std::size_t stage_index, const Shader& shader) {
|
2019-05-29 23:15:28 +02:00
|
|
|
auto& gpu{system.GPU()};
|
|
|
|
auto& memory_manager{gpu.MemoryManager()};
|
2019-11-18 22:35:21 +01:00
|
|
|
const auto cbufs{gpu.Maxwell3D().state.shader_stages[stage_index]};
|
2019-11-19 01:38:15 +01:00
|
|
|
|
|
|
|
u32 binding = device.GetBaseBindings(stage_index).shader_storage_buffer;
|
2020-02-26 20:13:47 +01:00
|
|
|
for (const auto& entry : shader->GetEntries().global_memory_entries) {
|
2019-05-29 23:15:28 +02:00
|
|
|
const auto addr{cbufs.const_buffers[entry.GetCbufIndex()].address + entry.GetCbufOffset()};
|
2019-07-15 03:25:13 +02:00
|
|
|
const auto gpu_addr{memory_manager.Read<u64>(addr)};
|
2019-05-29 23:15:28 +02:00
|
|
|
const auto size{memory_manager.Read<u32>(addr + 8)};
|
2019-11-19 01:38:15 +01:00
|
|
|
SetupGlobalMemory(binding++, entry, gpu_addr, size);
|
2019-07-15 03:25:13 +02:00
|
|
|
}
|
|
|
|
}
|
2019-05-29 23:15:28 +02:00
|
|
|
|
2019-07-15 03:25:13 +02:00
|
|
|
void RasterizerOpenGL::SetupComputeGlobalMemory(const Shader& kernel) {
|
|
|
|
auto& gpu{system.GPU()};
|
|
|
|
auto& memory_manager{gpu.MemoryManager()};
|
|
|
|
const auto cbufs{gpu.KeplerCompute().launch_description.const_buffer_config};
|
2019-11-19 01:38:15 +01:00
|
|
|
|
|
|
|
u32 binding = 0;
|
2020-02-26 20:13:47 +01:00
|
|
|
for (const auto& entry : kernel->GetEntries().global_memory_entries) {
|
2019-07-15 03:25:13 +02:00
|
|
|
const auto addr{cbufs[entry.GetCbufIndex()].Address() + entry.GetCbufOffset()};
|
|
|
|
const auto gpu_addr{memory_manager.Read<u64>(addr)};
|
|
|
|
const auto size{memory_manager.Read<u32>(addr + 8)};
|
2019-11-19 01:38:15 +01:00
|
|
|
SetupGlobalMemory(binding++, entry, gpu_addr, size);
|
2019-01-05 05:01:38 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-26 20:13:47 +01:00
|
|
|
// Uploads one global memory region through the buffer cache and queues its
// SSBO binding. Written entries request a writable upload.
void RasterizerOpenGL::SetupGlobalMemory(u32 binding, const GlobalMemoryEntry& entry,
                                         GPUVAddr gpu_addr, std::size_t size) {
    const auto alignment{device.GetShaderStorageBufferAlignment()};
    const auto [ssbo, buffer_offset] =
        buffer_cache.UploadMemory(gpu_addr, size, alignment, entry.IsWritten());
    bind_ssbo_pushbuffer.Push(binding, ssbo, buffer_offset, static_cast<GLsizeiptr>(size));
}
|
|
|
|
|
2019-11-19 01:38:15 +01:00
|
|
|
void RasterizerOpenGL::SetupDrawTextures(std::size_t stage_index, const Shader& shader) {
|
2018-09-04 11:02:59 +02:00
|
|
|
MICROPROFILE_SCOPE(OpenGL_Texture);
|
2019-11-22 08:34:14 +01:00
|
|
|
const auto& maxwell3d = system.GPU().Maxwell3D();
|
2019-11-19 01:38:15 +01:00
|
|
|
u32 binding = device.GetBaseBindings(stage_index).sampler;
|
2020-02-26 20:13:47 +01:00
|
|
|
for (const auto& entry : shader->GetEntries().samplers) {
|
2020-02-22 23:40:26 +01:00
|
|
|
const auto shader_type = static_cast<ShaderType>(stage_index);
|
|
|
|
for (std::size_t i = 0; i < entry.Size(); ++i) {
|
|
|
|
const auto texture = GetTextureInfo(maxwell3d, entry, shader_type, i);
|
2020-01-06 16:43:13 +01:00
|
|
|
SetupTexture(binding++, texture, entry);
|
|
|
|
}
|
2018-06-06 19:58:16 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-06 08:32:43 +01:00
|
|
|
// Binds the sampled textures used by a compute kernel. Mirrors
// SetupDrawTextures but reads descriptors from the Kepler compute engine and
// starts bindings at slot 0.
void RasterizerOpenGL::SetupComputeTextures(const Shader& kernel) {
    MICROPROFILE_SCOPE(OpenGL_Texture);
    const auto& compute = system.GPU().KeplerCompute();
    u32 binding = 0;
    for (const auto& entry : kernel->GetEntries().samplers) {
        // Array samplers take one binding per element.
        for (std::size_t i = 0; i < entry.Size(); ++i) {
            const auto texture = GetTextureInfo(compute, entry, ShaderType::Compute, i);
            SetupTexture(binding++, texture, entry);
        }
    }
}
|
|
|
|
|
2019-11-06 08:32:43 +01:00
|
|
|
// Binds one texture + sampler pair at the given unit. Unresolvable textures
// unbind the unit; buffer textures get no swizzle and no sampler.
void RasterizerOpenGL::SetupTexture(u32 binding, const Tegra::Texture::FullTextureInfo& texture,
                                    const SamplerEntry& entry) {
    const auto view = texture_cache.GetTextureSurface(texture.tic, entry);
    if (!view) {
        // Can occur when texture addr is null or its memory is unmapped/invalid
        glBindSampler(binding, 0);
        glBindTextureUnit(binding, 0);
        return;
    }
    glBindTextureUnit(binding, view->GetTexture());

    // Buffer textures take neither swizzles nor a sampler object.
    if (view->GetSurfaceParams().IsBuffer()) {
        return;
    }
    // Apply swizzle to textures that are not buffers.
    view->ApplySwizzle(texture.tic.x_source, texture.tic.y_source, texture.tic.z_source,
                       texture.tic.w_source);

    glBindSampler(binding, sampler_cache.GetSampler(texture.tsc));
}
|
|
|
|
|
2019-11-19 01:38:15 +01:00
|
|
|
// Binds the storage images used by a graphics shader stage, starting at the
// device's base image slot for this stage.
void RasterizerOpenGL::SetupDrawImages(std::size_t stage_index, const Shader& shader) {
    const auto& maxwell3d = system.GPU().Maxwell3D();
    u32 binding = device.GetBaseBindings(stage_index).image;
    for (const auto& entry : shader->GetEntries().images) {
        const auto shader_type = static_cast<Tegra::Engines::ShaderType>(stage_index);
        const auto tic = GetTextureInfo(maxwell3d, entry, shader_type).tic;
        SetupImage(binding++, tic, entry);
    }
}
|
|
|
|
|
2019-07-12 02:54:07 +02:00
|
|
|
// Binds the storage images used by a compute kernel; bindings start at 0.
void RasterizerOpenGL::SetupComputeImages(const Shader& shader) {
    const auto& compute = system.GPU().KeplerCompute();
    u32 binding = 0;
    for (const auto& entry : shader->GetEntries().images) {
        const auto tic = GetTextureInfo(compute, entry, Tegra::Engines::ShaderType::Compute).tic;
        SetupImage(binding++, tic, entry);
    }
}
|
|
|
|
|
|
|
|
// Binds a single storage image at the given unit. Missing surfaces unbind the
// unit; written images are marked modified so the cache flushes them later.
void RasterizerOpenGL::SetupImage(u32 binding, const Tegra::Texture::TICEntry& tic,
                                  const ImageEntry& entry) {
    const auto view = texture_cache.GetImageSurface(tic, entry);
    if (!view) {
        // Unbind with a dummy format; the handle 0 makes the other args moot.
        glBindImageTexture(binding, 0, 0, GL_FALSE, 0, GL_READ_ONLY, GL_R8);
        return;
    }
    if (!tic.IsBuffer()) {
        view->ApplySwizzle(tic.x_source, tic.y_source, tic.z_source, tic.w_source);
    }
    if (entry.IsWritten()) {
        view->MarkAsModified(texture_cache.Tick());
    }
    // Bind all layers (layered=GL_TRUE) with read/write access.
    glBindImageTexture(binding, view->GetTexture(), 0, GL_TRUE, 0, GL_READ_WRITE,
                       view->GetFormat());
}
|
|
|
|
|
2019-12-26 05:50:38 +01:00
|
|
|
// Synchronizes viewport, depth range and clip-control state with the guest
// registers, driven by the Maxwell3D dirty-flag system.
void RasterizerOpenGL::SyncViewport() {
    auto& gpu = system.GPU().Maxwell3D();
    auto& flags = gpu.dirty.flags;
    const auto& regs = gpu.regs;

    // Clip control depends on viewport state too, hence the combined check.
    const bool dirty_viewport = flags[Dirty::Viewports];
    if (dirty_viewport || flags[Dirty::ClipControl]) {
        flags[Dirty::ClipControl] = false;

        // Y is flipped when exactly one of {negative viewport scale,
        // screen_y_control negate} applies; both together cancel out.
        bool flip_y = false;
        if (regs.viewport_transform[0].scale_y < 0.0) {
            flip_y = !flip_y;
        }
        if (regs.screen_y_control.y_negate != 0) {
            flip_y = !flip_y;
        }
        glClipControl(flip_y ? GL_UPPER_LEFT : GL_LOWER_LEFT,
                      regs.depth_mode == Maxwell::DepthMode::ZeroToOne ? GL_ZERO_TO_ONE
                                                                      : GL_NEGATIVE_ONE_TO_ONE);
    }

    if (dirty_viewport) {
        flags[Dirty::Viewports] = false;

        // ViewportTransform forces every individual viewport to refresh.
        const bool force = flags[Dirty::ViewportTransform];
        flags[Dirty::ViewportTransform] = false;

        for (std::size_t i = 0; i < Maxwell::NumViewports; ++i) {
            if (!force && !flags[Dirty::Viewport0 + i]) {
                continue;
            }
            flags[Dirty::Viewport0 + i] = false;

            const Common::Rectangle<f32> rect{regs.viewport_transform[i].GetRect()};
            glViewportIndexedf(static_cast<GLuint>(i), rect.left, rect.bottom, rect.GetWidth(),
                               rect.GetHeight());

            const auto& src = regs.viewports[i];
            glDepthRangeIndexed(static_cast<GLuint>(i), static_cast<GLdouble>(src.depth_range_near),
                                static_cast<GLdouble>(src.depth_range_far));
        }
    }
}
|
|
|
|
|
2019-12-26 01:57:10 +01:00
|
|
|
// Synchronizes GL_DEPTH_CLAMP with the guest's view volume clip control.
void RasterizerOpenGL::SyncDepthClamp() {
    auto& gpu = system.GPU().Maxwell3D();
    auto& flags = gpu.dirty.flags;
    if (!flags[Dirty::DepthClampEnabled]) {
        return;
    }
    flags[Dirty::DepthClampEnabled] = false;

    const auto& state = gpu.regs.view_volume_clip_control;
    // OpenGL has a single clamp toggle; the hardware can clamp near/far
    // independently, which we cannot express.
    UNIMPLEMENTED_IF_MSG(state.depth_clamp_far != state.depth_clamp_near,
                         "Unimplemented depth clamp separation!");

    oglEnable(GL_DEPTH_CLAMP, state.depth_clamp_far || state.depth_clamp_near);
}
|
|
|
|
|
2019-12-29 06:03:05 +01:00
|
|
|
// Enables/disables each GL clip distance. clip_mask carries the distances the
// current shaders actually write; it is ANDed with the register enable mask.
void RasterizerOpenGL::SyncClipEnabled(u32 clip_mask) {
    auto& gpu = system.GPU().Maxwell3D();
    auto& flags = gpu.dirty.flags;
    // Shader changes can alter the written-distance mask, so react to both.
    if (!flags[Dirty::ClipDistances] && !flags[Dirty::Shaders]) {
        return;
    }
    flags[Dirty::ClipDistances] = false;

    clip_mask &= gpu.regs.clip_distance_enabled;
    // Avoid redundant GL calls when the effective mask is unchanged.
    if (clip_mask == last_clip_distance_mask) {
        return;
    }
    last_clip_distance_mask = clip_mask;

    for (std::size_t i = 0; i < Maxwell::Regs::NumClipDistances; ++i) {
        oglEnable(static_cast<GLenum>(GL_CLIP_DISTANCE0 + i), (clip_mask >> i) & 1);
    }
}
|
|
|
|
|
|
|
|
// Clip coefficient synchronization is not implemented yet.
void RasterizerOpenGL::SyncClipCoef() {
    UNIMPLEMENTED();
}
|
|
|
|
|
|
|
|
// Synchronizes face culling and front-face winding with the guest registers.
// The two pieces of state are tracked by independent dirty flags.
void RasterizerOpenGL::SyncCullMode() {
    auto& gpu = system.GPU().Maxwell3D();
    auto& flags = gpu.dirty.flags;
    const auto& regs = gpu.regs;

    if (flags[Dirty::CullTest]) {
        flags[Dirty::CullTest] = false;

        if (regs.cull_test_enabled) {
            glEnable(GL_CULL_FACE);
            glCullFace(MaxwellToGL::CullFace(regs.cull_face));
        } else {
            glDisable(GL_CULL_FACE);
        }
    }

    if (flags[Dirty::FrontFace]) {
        flags[Dirty::FrontFace] = false;
        glFrontFace(MaxwellToGL::FrontFace(regs.front_face));
    }
}
|
|
|
|
|
2018-10-26 01:04:13 +02:00
|
|
|
// Synchronizes primitive restart enable state and restart index.
void RasterizerOpenGL::SyncPrimitiveRestart() {
    auto& gpu = system.GPU().Maxwell3D();
    auto& flags = gpu.dirty.flags;
    if (!flags[Dirty::PrimitiveRestart]) {
        return;
    }
    flags[Dirty::PrimitiveRestart] = false;

    if (gpu.regs.primitive_restart.enabled) {
        glEnable(GL_PRIMITIVE_RESTART);
        glPrimitiveRestartIndex(gpu.regs.primitive_restart.index);
    } else {
        glDisable(GL_PRIMITIVE_RESTART);
    }
}
|
|
|
|
|
2018-07-02 20:33:06 +02:00
|
|
|
// Synchronizes depth writes and the depth test. The write mask and the test
// enable/function have separate dirty flags.
void RasterizerOpenGL::SyncDepthTestState() {
    auto& gpu = system.GPU().Maxwell3D();
    auto& flags = gpu.dirty.flags;

    const auto& regs = gpu.regs;
    if (flags[Dirty::DepthMask]) {
        flags[Dirty::DepthMask] = false;
        glDepthMask(regs.depth_write_enabled ? GL_TRUE : GL_FALSE);
    }

    if (flags[Dirty::DepthTest]) {
        flags[Dirty::DepthTest] = false;
        if (regs.depth_test_enable) {
            glEnable(GL_DEPTH_TEST);
            glDepthFunc(MaxwellToGL::ComparisonOp(regs.depth_test_func));
        } else {
            glDisable(GL_DEPTH_TEST);
        }
    }
}
|
|
|
|
|
2018-08-22 06:35:31 +02:00
|
|
|
// Synchronizes the full stencil pipeline: enable, per-face functions, ops and
// write masks. When two-sided stencil is off, the back face is forced to a
// pass-through configuration.
void RasterizerOpenGL::SyncStencilTestState() {
    auto& gpu = system.GPU().Maxwell3D();
    auto& flags = gpu.dirty.flags;
    if (!flags[Dirty::StencilTest]) {
        return;
    }
    flags[Dirty::StencilTest] = false;

    const auto& regs = gpu.regs;
    if (!regs.stencil_enable) {
        glDisable(GL_STENCIL_TEST);
        return;
    }

    glEnable(GL_STENCIL_TEST);
    // Front face always comes from the front-face registers.
    glStencilFuncSeparate(GL_FRONT, MaxwellToGL::ComparisonOp(regs.stencil_front_func_func),
                          regs.stencil_front_func_ref, regs.stencil_front_func_mask);
    glStencilOpSeparate(GL_FRONT, MaxwellToGL::StencilOp(regs.stencil_front_op_fail),
                        MaxwellToGL::StencilOp(regs.stencil_front_op_zfail),
                        MaxwellToGL::StencilOp(regs.stencil_front_op_zpass));
    glStencilMaskSeparate(GL_FRONT, regs.stencil_front_mask);

    if (regs.stencil_two_side_enable) {
        glStencilFuncSeparate(GL_BACK, MaxwellToGL::ComparisonOp(regs.stencil_back_func_func),
                              regs.stencil_back_func_ref, regs.stencil_back_func_mask);
        glStencilOpSeparate(GL_BACK, MaxwellToGL::StencilOp(regs.stencil_back_op_fail),
                            MaxwellToGL::StencilOp(regs.stencil_back_op_zfail),
                            MaxwellToGL::StencilOp(regs.stencil_back_op_zpass));
        glStencilMaskSeparate(GL_BACK, regs.stencil_back_mask);
    } else {
        // One-sided: back face never fails and never writes-restricts.
        glStencilFuncSeparate(GL_BACK, GL_ALWAYS, 0, 0xFFFFFFFF);
        glStencilOpSeparate(GL_BACK, GL_KEEP, GL_KEEP, GL_KEEP);
        glStencilMaskSeparate(GL_BACK, 0xFFFFFFFF);
    }
}
|
|
|
|
|
2019-12-26 06:11:01 +01:00
|
|
|
// Synchronizes rasterizer discard (inverted sense of the guest's
// rasterize_enable register).
void RasterizerOpenGL::SyncRasterizeEnable() {
    auto& gpu = system.GPU().Maxwell3D();
    auto& flags = gpu.dirty.flags;
    if (!flags[Dirty::RasterizeEnable]) {
        return;
    }
    flags[Dirty::RasterizeEnable] = false;

    oglEnable(GL_RASTERIZER_DISCARD, gpu.regs.rasterize_enable == 0);
}
|
|
|
|
|
2018-11-05 03:46:06 +01:00
|
|
|
void RasterizerOpenGL::SyncColorMask() {
|
2019-12-29 02:51:04 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::ColorMasks]) {
|
2019-07-13 22:52:32 +02:00
|
|
|
return;
|
|
|
|
}
|
2019-12-29 02:51:04 +01:00
|
|
|
flags[Dirty::ColorMasks] = false;
|
2019-07-13 22:52:32 +02:00
|
|
|
|
2019-12-29 02:51:04 +01:00
|
|
|
const bool force = flags[Dirty::ColorMaskCommon];
|
|
|
|
flags[Dirty::ColorMaskCommon] = false;
|
2019-07-13 22:52:32 +02:00
|
|
|
|
2019-12-29 02:51:04 +01:00
|
|
|
const auto& regs = gpu.regs;
|
2019-12-26 05:19:15 +01:00
|
|
|
if (regs.color_mask_common) {
|
2019-12-29 02:51:04 +01:00
|
|
|
if (!force && !flags[Dirty::ColorMask0]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::ColorMask0] = false;
|
|
|
|
|
2019-12-26 05:19:15 +01:00
|
|
|
auto& mask = regs.color_mask[0];
|
2019-12-29 02:51:04 +01:00
|
|
|
glColorMask(mask.R != 0, mask.B != 0, mask.G != 0, mask.A != 0);
|
|
|
|
return;
|
2018-11-05 03:46:06 +01:00
|
|
|
}
|
2019-07-13 22:52:32 +02:00
|
|
|
|
2019-12-29 02:51:04 +01:00
|
|
|
// Path without color_mask_common set
|
|
|
|
for (std::size_t i = 0; i < Maxwell::NumRenderTargets; ++i) {
|
|
|
|
if (!force && !flags[Dirty::ColorMask0 + i]) {
|
|
|
|
continue;
|
2019-12-26 05:19:15 +01:00
|
|
|
}
|
2019-12-29 02:51:04 +01:00
|
|
|
flags[Dirty::ColorMask0 + i] = false;
|
|
|
|
|
|
|
|
const auto& mask = regs.color_mask[i];
|
|
|
|
glColorMaski(static_cast<GLuint>(i), mask.R != 0, mask.G != 0, mask.B != 0, mask.A != 0);
|
2018-11-05 03:46:06 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-11-14 04:02:54 +01:00
|
|
|
void RasterizerOpenGL::SyncMultiSampleState() {
|
2019-12-30 04:43:15 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::MultisampleControl]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::MultisampleControl] = false;
|
|
|
|
|
2019-03-09 07:25:11 +01:00
|
|
|
const auto& regs = system.GPU().Maxwell3D().regs;
|
2019-12-26 05:04:36 +01:00
|
|
|
oglEnable(GL_SAMPLE_ALPHA_TO_COVERAGE, regs.multisample_control.alpha_to_coverage);
|
|
|
|
oglEnable(GL_SAMPLE_ALPHA_TO_ONE, regs.multisample_control.alpha_to_one);
|
2018-11-14 04:02:54 +01:00
|
|
|
}
|
|
|
|
|
2018-11-14 02:09:01 +01:00
|
|
|
// Synchronizes fragment color clamping with the guest's frag_color_clamp bit.
void RasterizerOpenGL::SyncFragmentColorClampState() {
    auto& gpu = system.GPU().Maxwell3D();
    auto& flags = gpu.dirty.flags;
    if (!flags[Dirty::FragmentClampColor]) {
        return;
    }
    flags[Dirty::FragmentClampColor] = false;

    glClampColor(GL_CLAMP_FRAGMENT_COLOR, gpu.regs.frag_color_clamp ? GL_TRUE : GL_FALSE);
}
|
|
|
|
|
2018-06-09 00:05:52 +02:00
|
|
|
// Synchronizes blend state: blend constant color, then either the single
// global blend configuration or per-render-target independent blending.
void RasterizerOpenGL::SyncBlendState() {
    auto& gpu = system.GPU().Maxwell3D();
    auto& flags = gpu.dirty.flags;
    const auto& regs = gpu.regs;

    if (flags[Dirty::BlendColor]) {
        flags[Dirty::BlendColor] = false;
        glBlendColor(regs.blend_color.r, regs.blend_color.g, regs.blend_color.b,
                     regs.blend_color.a);
    }

    // TODO(Rodrigo): Revisit blending, there are several registers we are not reading

    if (!flags[Dirty::BlendStates]) {
        return;
    }
    flags[Dirty::BlendStates] = false;

    if (!regs.independent_blend_enable) {
        // Global blending: target 0's enable/factors apply to all targets.
        if (!regs.blend.enable[0]) {
            glDisable(GL_BLEND);
            return;
        }
        glEnable(GL_BLEND);
        glBlendFuncSeparate(MaxwellToGL::BlendFunc(regs.blend.factor_source_rgb),
                            MaxwellToGL::BlendFunc(regs.blend.factor_dest_rgb),
                            MaxwellToGL::BlendFunc(regs.blend.factor_source_a),
                            MaxwellToGL::BlendFunc(regs.blend.factor_dest_a));
        glBlendEquationSeparate(MaxwellToGL::BlendEquation(regs.blend.equation_rgb),
                                MaxwellToGL::BlendEquation(regs.blend.equation_a));
        return;
    }

    // Toggling independent blending forces every per-target state to refresh.
    const bool force = flags[Dirty::BlendIndependentEnabled];
    flags[Dirty::BlendIndependentEnabled] = false;

    for (std::size_t i = 0; i < Maxwell::NumRenderTargets; ++i) {
        if (!force && !flags[Dirty::BlendState0 + i]) {
            continue;
        }
        flags[Dirty::BlendState0 + i] = false;

        if (!regs.blend.enable[i]) {
            glDisablei(GL_BLEND, static_cast<GLuint>(i));
            continue;
        }
        glEnablei(GL_BLEND, static_cast<GLuint>(i));

        const auto& src = regs.independent_blend[i];
        glBlendFuncSeparatei(static_cast<GLuint>(i), MaxwellToGL::BlendFunc(src.factor_source_rgb),
                             MaxwellToGL::BlendFunc(src.factor_dest_rgb),
                             MaxwellToGL::BlendFunc(src.factor_source_a),
                             MaxwellToGL::BlendFunc(src.factor_dest_a));
        glBlendEquationSeparatei(static_cast<GLuint>(i),
                                 MaxwellToGL::BlendEquation(src.equation_rgb),
                                 MaxwellToGL::BlendEquation(src.equation_a));
    }
}
|
2018-08-21 01:44:47 +02:00
|
|
|
|
|
|
|
void RasterizerOpenGL::SyncLogicOpState() {
|
2019-12-30 04:57:50 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::LogicOp]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::LogicOp] = false;
|
2018-08-21 01:44:47 +02:00
|
|
|
|
2019-12-30 04:57:50 +01:00
|
|
|
const auto& regs = gpu.regs;
|
2019-12-26 00:21:53 +01:00
|
|
|
if (regs.logic_op.enable) {
|
2019-12-30 04:57:50 +01:00
|
|
|
glEnable(GL_COLOR_LOGIC_OP);
|
2019-12-26 00:21:53 +01:00
|
|
|
glLogicOp(MaxwellToGL::LogicOp(regs.logic_op.operation));
|
2019-12-30 04:57:50 +01:00
|
|
|
} else {
|
|
|
|
glDisable(GL_COLOR_LOGIC_OP);
|
2019-12-26 00:21:53 +01:00
|
|
|
}
|
2018-08-21 01:44:47 +02:00
|
|
|
}
|
|
|
|
|
2019-12-26 05:28:17 +01:00
|
|
|
void RasterizerOpenGL::SyncScissorTest() {
|
2019-12-29 02:31:00 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::Scissors]) {
|
2018-08-21 01:44:47 +02:00
|
|
|
return;
|
2019-12-29 02:31:00 +01:00
|
|
|
}
|
|
|
|
flags[Dirty::Scissors] = false;
|
2018-08-21 01:44:47 +02:00
|
|
|
|
2019-12-29 02:31:00 +01:00
|
|
|
const auto& regs = gpu.regs;
|
2019-12-26 05:28:17 +01:00
|
|
|
for (std::size_t index = 0; index < Maxwell::NumViewports; ++index) {
|
2019-12-29 02:31:00 +01:00
|
|
|
if (!flags[Dirty::Scissor0 + index]) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
flags[Dirty::Scissor0 + index] = false;
|
2018-08-21 10:18:27 +02:00
|
|
|
|
2019-12-26 05:28:17 +01:00
|
|
|
const auto& src = regs.scissor_test[index];
|
2019-12-29 02:31:00 +01:00
|
|
|
if (src.enable) {
|
|
|
|
glEnablei(GL_SCISSOR_TEST, static_cast<GLuint>(index));
|
|
|
|
glScissorIndexed(static_cast<GLuint>(index), src.min_x, src.min_y,
|
|
|
|
src.max_x - src.min_x, src.max_y - src.min_y);
|
|
|
|
} else {
|
|
|
|
glDisablei(GL_SCISSOR_TEST, static_cast<GLuint>(index));
|
2018-11-14 00:13:16 +01:00
|
|
|
}
|
2018-10-09 02:49:36 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-09-26 00:41:21 +02:00
|
|
|
void RasterizerOpenGL::SyncTransformFeedback() {
|
2019-03-09 07:25:11 +01:00
|
|
|
const auto& regs = system.GPU().Maxwell3D().regs;
|
2019-03-09 07:33:56 +01:00
|
|
|
UNIMPLEMENTED_IF_MSG(regs.tfb_enabled != 0, "Transform feedbacks are not implemented");
|
2018-09-26 00:41:21 +02:00
|
|
|
}
|
|
|
|
|
2018-09-28 06:31:01 +02:00
|
|
|
void RasterizerOpenGL::SyncPointState() {
|
2019-12-30 05:27:42 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::PointSize]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::PointSize] = false;
|
|
|
|
|
|
|
|
oglEnable(GL_POINT_SPRITE, gpu.regs.point_sprite_enable);
|
|
|
|
|
|
|
|
if (gpu.regs.vp_point_size.enable) {
|
|
|
|
// By definition of GL_POINT_SIZE, it only matters if GL_PROGRAM_POINT_SIZE is disabled.
|
|
|
|
glEnable(GL_PROGRAM_POINT_SIZE);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2019-05-18 08:02:34 +02:00
|
|
|
// Limit the point size to 1 since nouveau sometimes sets a point size of 0 (and that's invalid
|
|
|
|
// in OpenGL).
|
2019-12-30 05:27:42 +01:00
|
|
|
glPointSize(std::max(1.0f, gpu.regs.point_size));
|
|
|
|
glDisable(GL_PROGRAM_POINT_SIZE);
|
2018-09-28 06:31:01 +02:00
|
|
|
}
|
|
|
|
|
2018-11-27 00:31:44 +01:00
|
|
|
void RasterizerOpenGL::SyncPolygonOffset() {
|
2019-12-30 04:22:43 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::PolygonOffset]) {
|
2019-07-13 22:52:32 +02:00
|
|
|
return;
|
|
|
|
}
|
2019-12-30 04:22:43 +01:00
|
|
|
flags[Dirty::PolygonOffset] = false;
|
2019-10-01 07:00:23 +02:00
|
|
|
|
2019-12-30 04:22:43 +01:00
|
|
|
const auto& regs = gpu.regs;
|
2019-12-26 04:25:53 +01:00
|
|
|
oglEnable(GL_POLYGON_OFFSET_FILL, regs.polygon_offset_fill_enable);
|
|
|
|
oglEnable(GL_POLYGON_OFFSET_LINE, regs.polygon_offset_line_enable);
|
|
|
|
oglEnable(GL_POLYGON_OFFSET_POINT, regs.polygon_offset_point_enable);
|
2019-07-13 22:52:32 +02:00
|
|
|
|
2020-01-03 02:42:56 +01:00
|
|
|
if (regs.polygon_offset_fill_enable || regs.polygon_offset_line_enable ||
|
|
|
|
regs.polygon_offset_point_enable) {
|
|
|
|
// Hardware divides polygon offset units by two
|
|
|
|
glPolygonOffsetClamp(regs.polygon_offset_factor, regs.polygon_offset_units / 2.0f,
|
|
|
|
regs.polygon_offset_clamp);
|
|
|
|
}
|
2018-11-27 00:31:44 +01:00
|
|
|
}
|
|
|
|
|
2019-05-22 01:28:09 +02:00
|
|
|
void RasterizerOpenGL::SyncAlphaTest() {
|
2019-12-30 04:37:35 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::AlphaTest]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
flags[Dirty::AlphaTest] = false;
|
|
|
|
|
|
|
|
const auto& regs = gpu.regs;
|
|
|
|
if (regs.alpha_test_enabled && regs.rt_control.count > 1) {
|
|
|
|
LOG_WARNING(Render_OpenGL, "Alpha testing with more than one render target is not tested");
|
|
|
|
}
|
2019-05-22 01:28:09 +02:00
|
|
|
|
2019-12-26 00:03:40 +01:00
|
|
|
if (regs.alpha_test_enabled) {
|
2019-12-30 04:37:35 +01:00
|
|
|
glEnable(GL_ALPHA_TEST);
|
2019-12-26 00:03:40 +01:00
|
|
|
glAlphaFunc(MaxwellToGL::ComparisonOp(regs.alpha_test_func), regs.alpha_test_ref);
|
2019-12-30 04:37:35 +01:00
|
|
|
} else {
|
|
|
|
glDisable(GL_ALPHA_TEST);
|
2019-05-22 01:28:09 +02:00
|
|
|
}
|
2018-10-12 02:29:11 +02:00
|
|
|
}
|
|
|
|
|
2019-12-26 05:01:41 +01:00
|
|
|
void RasterizerOpenGL::SyncFramebufferSRGB() {
|
2019-12-30 04:53:53 +01:00
|
|
|
auto& gpu = system.GPU().Maxwell3D();
|
|
|
|
auto& flags = gpu.dirty.flags;
|
|
|
|
if (!flags[Dirty::FramebufferSRGB]) {
|
2019-05-22 01:28:09 +02:00
|
|
|
return;
|
|
|
|
}
|
2019-12-30 04:53:53 +01:00
|
|
|
flags[Dirty::FramebufferSRGB] = false;
|
|
|
|
|
|
|
|
oglEnable(GL_FRAMEBUFFER_SRGB, gpu.regs.framebuffer_srgb);
|
2018-10-12 02:29:11 +02:00
|
|
|
}
|
|
|
|
|
2018-08-21 10:18:27 +02:00
|
|
|
} // namespace OpenGL
|