// Copyright 2015 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <algorithm>
#include <array>
#include <bitset>
#include <memory>
#include <string>
#include <string_view>
#include <tuple>
#include <utility>

#include <glad/glad.h>

#include "common/alignment.h"
#include "common/assert.h"
#include "common/logging/log.h"
#include "common/math_util.h"
#include "common/microprofile.h"
#include "common/scope_exit.h"
#include "core/core.h"
#include "core/hle/kernel/process.h"
#include "core/settings.h"
#include "video_core/engines/kepler_compute.h"
#include "video_core/engines/maxwell_3d.h"
#include "video_core/memory_manager.h"
#include "video_core/renderer_opengl/gl_rasterizer.h"
#include "video_core/renderer_opengl/gl_shader_cache.h"
#include "video_core/renderer_opengl/gl_shader_gen.h"
#include "video_core/renderer_opengl/maxwell_to_gl.h"
#include "video_core/renderer_opengl/renderer_opengl.h"

namespace OpenGL {

using Maxwell = Tegra::Engines::Maxwell3D::Regs;

using VideoCore::Surface::PixelFormat;
using VideoCore::Surface::SurfaceTarget;
using VideoCore::Surface::SurfaceType;

MICROPROFILE_DEFINE(OpenGL_VAO, "OpenGL", "Vertex Format Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_VB, "OpenGL", "Vertex Buffer Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Shader, "OpenGL", "Shader Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_UBO, "OpenGL", "Const Buffer Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Index, "OpenGL", "Index Buffer Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Texture, "OpenGL", "Texture Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Framebuffer, "OpenGL", "Framebuffer Setup", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Drawing, "OpenGL", "Drawing", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_Blits, "OpenGL", "Blits", MP_RGB(128, 128, 192));
MICROPROFILE_DEFINE(OpenGL_CacheManagement, "OpenGL", "Cache Mgmt", MP_RGB(100, 255, 100));
MICROPROFILE_DEFINE(OpenGL_PrimitiveAssembly, "OpenGL", "Prim Asmbl", MP_RGB(255, 100, 100));
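
// Returns the size in bytes to upload for a const buffer. Direct entries use the size declared
// by the shader; indirect entries fall back to the size the guest programmed, clamped to the
// hardware maximum.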
static std::size_t GetConstBufferSize(const Tegra::Engines::ConstBufferInfo& buffer,
                                      const GLShader::ConstBufferEntry& entry) {
    if (!entry.IsIndirect()) {
        return entry.GetSize();
    }

    if (buffer.size > Maxwell::MaxConstBufferSize) {
        LOG_WARNING(Render_OpenGL, "Indirect constbuffer size {} exceeds maximum {}", buffer.size,
                    Maxwell::MaxConstBufferSize);
        return Maxwell::MaxConstBufferSize;
    }

    return buffer.size;
}

RasterizerOpenGL::RasterizerOpenGL(Core::System& system, Core::Frontend::EmuWindow& emu_window,
                                   ScreenInfo& info)
    : texture_cache{system, *this, device}, shader_cache{*this, system, emu_window, device},
      system{system}, screen_info{info}, buffer_cache{*this, system, STREAM_BUFFER_SIZE} {
    OpenGLState::ApplyDefaultState();

    shader_program_manager = std::make_unique<GLShader::ProgramManager>();
    state.draw.shader_program = 0;
    state.Apply();
    clear_framebuffer.Create();

    LOG_DEBUG(Render_OpenGL, "Sync fixed function OpenGL state here");
    CheckExtensions();
}

RasterizerOpenGL::~RasterizerOpenGL() {}

void RasterizerOpenGL::CheckExtensions() {
    if (!GLAD_GL_ARB_texture_filter_anisotropic && !GLAD_GL_EXT_texture_filter_anisotropic) {
        LOG_WARNING(
            Render_OpenGL,
            "Anisotropic filter is not supported! This can cause graphical issues in some games.");
    }
}
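
// Builds (or fetches from the cache) a vertex array object whose attribute layout matches the
// guest's current vertex_attrib_format registers. The VAO is only rebuilt when the corresponding
// dirty flag is set; otherwise the currently bound handle is reused.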
GLuint RasterizerOpenGL::SetupVertexFormat() {
    auto& gpu = system.GPU().Maxwell3D();
    const auto& regs = gpu.regs;

    if (!gpu.dirty.vertex_attrib_format) {
        return state.draw.vertex_array;
    }
    gpu.dirty.vertex_attrib_format = false;

    MICROPROFILE_SCOPE(OpenGL_VAO);

    auto [iter, is_cache_miss] = vertex_array_cache.try_emplace(regs.vertex_attrib_format);
    auto& vao_entry = iter->second;

    if (is_cache_miss) {
        vao_entry.Create();
        const GLuint vao = vao_entry.handle;

        // Even though we are using DSA to create this vertex array, there is a bug on Intel's blob
        // that fails to properly create the vertex array if it's not bound even after creating it
        // with glCreateVertexArrays
        state.draw.vertex_array = vao;
        state.ApplyVertexArrayState();

        // Use the vertex array as-is, assumes that the data is formatted correctly for OpenGL.
        // Enables the first 16 vertex attributes always, as we don't know which ones are actually
        // used until shader time. Note, Tegra technically supports 32, but we're capping this to 16
        // for now to avoid OpenGL errors.
        // TODO(Subv): Analyze the shader to identify which attributes are actually used and don't
        // assume every shader uses them all.
        for (u32 index = 0; index < 16; ++index) {
            const auto& attrib = regs.vertex_attrib_format[index];

            // Ignore invalid attributes.
            if (!attrib.IsValid())
                continue;

            const auto& buffer = regs.vertex_array[attrib.buffer];
            LOG_TRACE(Render_OpenGL,
                      "vertex attrib {}, count={}, size={}, type={}, offset={}, normalize={}",
                      index, attrib.ComponentCount(), attrib.SizeString(), attrib.TypeString(),
                      attrib.offset.Value(), attrib.IsNormalized());

            ASSERT(buffer.IsEnabled());

            glEnableVertexArrayAttrib(vao, index);
            if (attrib.type == Tegra::Engines::Maxwell3D::Regs::VertexAttribute::Type::SignedInt ||
                attrib.type ==
                    Tegra::Engines::Maxwell3D::Regs::VertexAttribute::Type::UnsignedInt) {
                glVertexArrayAttribIFormat(vao, index, attrib.ComponentCount(),
                                           MaxwellToGL::VertexType(attrib), attrib.offset);
            } else {
                glVertexArrayAttribFormat(
                    vao, index, attrib.ComponentCount(), MaxwellToGL::VertexType(attrib),
                    attrib.IsNormalized() ? GL_TRUE : GL_FALSE, attrib.offset);
            }
            glVertexArrayAttribBinding(vao, index, attrib.buffer);
        }
    }

    // Rebinding the VAO invalidates the vertex buffer bindings.
    gpu.dirty.ResetVertexArrays();

    state.draw.vertex_array = vao_entry.handle;
    return vao_entry.handle;
}
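
// Uploads the enabled guest vertex buffers through the stream buffer and records the bindings in
// vertex_array_pushbuffer. Only arrays whose per-index dirty flag is set are touched.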
void RasterizerOpenGL::SetupVertexBuffer(GLuint vao) {
    auto& gpu = system.GPU().Maxwell3D();
    if (!gpu.dirty.vertex_array_buffers)
        return;
    gpu.dirty.vertex_array_buffers = false;

    const auto& regs = gpu.regs;

    MICROPROFILE_SCOPE(OpenGL_VB);

    // Upload all guest vertex arrays sequentially to our buffer
    for (u32 index = 0; index < Maxwell::NumVertexArrays; ++index) {
        if (!gpu.dirty.vertex_array[index])
            continue;
        gpu.dirty.vertex_array[index] = false;
        gpu.dirty.vertex_instance[index] = false;

        const auto& vertex_array = regs.vertex_array[index];
        if (!vertex_array.IsEnabled())
            continue;

        const GPUVAddr start = vertex_array.StartAddress();
        const GPUVAddr end = regs.vertex_array_limit[index].LimitAddress();

        ASSERT(end > start);
        const u64 size = end - start + 1;
        const auto [vertex_buffer, vertex_buffer_offset] = buffer_cache.UploadMemory(start, size);

        // Bind the vertex array to the buffer at the current offset.
        vertex_array_pushbuffer.SetVertexBuffer(index, vertex_buffer, vertex_buffer_offset,
                                                vertex_array.stride);

        if (regs.instanced_arrays.IsInstancingEnabled(index) && vertex_array.divisor != 0) {
            // Enable vertex buffer instancing with the specified divisor.
            glVertexArrayBindingDivisor(vao, index, vertex_array.divisor);
        } else {
            // Disable the vertex buffer instancing.
            glVertexArrayBindingDivisor(vao, index, 0);
        }
    }
}

void RasterizerOpenGL::SetupVertexInstances(GLuint vao) {
    auto& gpu = system.GPU().Maxwell3D();

    if (!gpu.dirty.vertex_instances)
        return;
    gpu.dirty.vertex_instances = false;

    const auto& regs = gpu.regs;
    // Update the instancing divisor of every dirty guest vertex array
    for (u32 index = 0; index < Maxwell::NumVertexArrays; ++index) {
        if (!gpu.dirty.vertex_instance[index])
            continue;

        gpu.dirty.vertex_instance[index] = false;

        if (regs.instanced_arrays.IsInstancingEnabled(index) &&
            regs.vertex_array[index].divisor != 0) {
            // Enable vertex buffer instancing with the specified divisor.
            glVertexArrayBindingDivisor(vao, index, regs.vertex_array[index].divisor);
        } else {
            // Disable the vertex buffer instancing.
            glVertexArrayBindingDivisor(vao, index, 0);
        }
    }
}
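
// Uploads the guest index buffer for an indexed draw and returns the offset of the data inside
// the stream buffer, so the draw call can pass it as the indices pointer.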
GLintptr RasterizerOpenGL::SetupIndexBuffer() {
    if (accelerate_draw != AccelDraw::Indexed) {
        return 0;
    }
    MICROPROFILE_SCOPE(OpenGL_Index);
    const auto& regs = system.GPU().Maxwell3D().regs;
    const std::size_t size = CalculateIndexBufferSize();
    const auto [buffer, offset] = buffer_cache.UploadMemory(regs.index_array.IndexStart(), size);
    vertex_array_pushbuffer.SetIndexBuffer(buffer);
    return offset;
}
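
// Binds every enabled shader stage for the next draw. Each iteration uploads the stage's
// emulation uniform block, sets up its const buffers, global memory regions and textures, and
// accumulates the binding points consumed so the next stage starts where the previous one ended.
// VertexA and VertexB are compiled as a single vertex program, so when VertexA is active the
// VertexB slot is skipped.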
void RasterizerOpenGL::SetupShaders(GLenum primitive_mode) {
    MICROPROFILE_SCOPE(OpenGL_Shader);
    auto& gpu = system.GPU().Maxwell3D();

    BaseBindings base_bindings;
    std::array<bool, Maxwell::NumClipDistances> clip_distances{};

    for (std::size_t index = 0; index < Maxwell::MaxShaderProgram; ++index) {
        const auto& shader_config = gpu.regs.shader_config[index];
        const Maxwell::ShaderProgram program{static_cast<Maxwell::ShaderProgram>(index)};

        // Skip stages that are not enabled
        if (!gpu.regs.IsShaderConfigEnabled(index)) {
            switch (program) {
            case Maxwell::ShaderProgram::Geometry:
                shader_program_manager->UseTrivialGeometryShader();
                break;
            default:
                break;
            }
            continue;
        }

        const std::size_t stage{index == 0 ? 0 : index - 1}; // Stage indices are 0 - 5

        GLShader::MaxwellUniformData ubo{};
        ubo.SetFromRegs(gpu, stage);
        const auto [buffer, offset] =
            buffer_cache.UploadHostMemory(&ubo, sizeof(ubo), device.GetUniformBufferAlignment());

        // Bind the emulation info buffer
        bind_ubo_pushbuffer.Push(buffer, offset, static_cast<GLsizeiptr>(sizeof(ubo)));

        Shader shader{shader_cache.GetStageProgram(program)};

        const auto stage_enum = static_cast<Maxwell::ShaderStage>(stage);
        SetupDrawConstBuffers(stage_enum, shader);
        SetupDrawGlobalMemory(stage_enum, shader);
        const auto texture_buffer_usage{SetupDrawTextures(stage_enum, shader, base_bindings)};

        const ProgramVariant variant{base_bindings, primitive_mode, texture_buffer_usage};
        const auto [program_handle, next_bindings] = shader->GetProgramHandle(variant);

        switch (program) {
        case Maxwell::ShaderProgram::VertexA:
        case Maxwell::ShaderProgram::VertexB:
            shader_program_manager->UseProgrammableVertexShader(program_handle);
            break;
        case Maxwell::ShaderProgram::Geometry:
            shader_program_manager->UseProgrammableGeometryShader(program_handle);
            break;
        case Maxwell::ShaderProgram::Fragment:
            shader_program_manager->UseProgrammableFragmentShader(program_handle);
            break;
        default:
            UNIMPLEMENTED_MSG("Unimplemented shader index={}, enable={}, offset=0x{:08X}", index,
                              shader_config.enable.Value(), shader_config.offset);
        }

        // Workaround for Intel drivers.
        // When a clip distance is enabled but not set in the shader it crops parts of the screen
        // (sometimes it's half the screen, sometimes three quarters). To avoid this, enable the
        // clip distances only when it's written by a shader stage.
        for (std::size_t i = 0; i < Maxwell::NumClipDistances; ++i) {
            clip_distances[i] = clip_distances[i] || shader->GetShaderEntries().clip_distances[i];
        }

        // When VertexA is enabled, we have dual vertex shaders
        if (program == Maxwell::ShaderProgram::VertexA) {
            // VertexB was combined with VertexA, so we skip the VertexB iteration
            index++;
        }

        base_bindings = next_bindings;
    }

    SyncClipEnabled(clip_distances);

    gpu.dirty.shaders = false;
}

std::size_t RasterizerOpenGL::CalculateVertexArraysSize() const {
    const auto& regs = system.GPU().Maxwell3D().regs;

    std::size_t size = 0;
    for (u32 index = 0; index < Maxwell::NumVertexArrays; ++index) {
        if (!regs.vertex_array[index].IsEnabled())
            continue;

        const GPUVAddr start = regs.vertex_array[index].StartAddress();
        const GPUVAddr end = regs.vertex_array_limit[index].LimitAddress();

        ASSERT(end > start);
        size += end - start + 1;
    }

    return size;
}

std::size_t RasterizerOpenGL::CalculateIndexBufferSize() const {
    const auto& regs = system.GPU().Maxwell3D().regs;

    return static_cast<std::size_t>(regs.index_array.count) *
           static_cast<std::size_t>(regs.index_array.FormatSizeInBytes());
}

template <typename Map, typename Interval>
static constexpr auto RangeFromInterval(Map& map, const Interval& interval) {
    return boost::make_iterator_range(map.equal_range(interval));
}
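
// Keeps a per-page reference count of how many cached objects overlap each guest page, stored in
// a boost::icl interval map. A page is marked as rasterizer-cached when its count rises from
// zero and unmarked when it drops back to zero.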
void RasterizerOpenGL::UpdatePagesCachedCount(VAddr addr, u64 size, int delta) {
    std::lock_guard lock{pages_mutex};
    const u64 page_start{addr >> Memory::PAGE_BITS};
    const u64 page_end{(addr + size + Memory::PAGE_SIZE - 1) >> Memory::PAGE_BITS};

    // Interval maps will erase segments if count reaches 0, so if delta is negative we have to
    // subtract after iterating
    const auto pages_interval = CachedPageMap::interval_type::right_open(page_start, page_end);
    if (delta > 0)
        cached_pages.add({pages_interval, delta});

    for (const auto& pair : RangeFromInterval(cached_pages, pages_interval)) {
        const auto interval = pair.first & pages_interval;
        const int count = pair.second;

        const VAddr interval_start_addr = boost::icl::first(interval) << Memory::PAGE_BITS;
        const VAddr interval_end_addr = boost::icl::last_next(interval) << Memory::PAGE_BITS;
        const u64 interval_size = interval_end_addr - interval_start_addr;

        if (delta > 0 && count == delta)
            Memory::RasterizerMarkRegionCached(interval_start_addr, interval_size, true);
        else if (delta < 0 && count == -delta)
            Memory::RasterizerMarkRegionCached(interval_start_addr, interval_size, false);
        else
            ASSERT(count >= 0);
    }

    if (delta < 0)
        cached_pages.add({pages_interval, delta});
}

void RasterizerOpenGL::LoadDiskResources(const std::atomic_bool& stop_loading,
                                         const VideoCore::DiskResourceLoadCallback& callback) {
    shader_cache.LoadDiskCache(stop_loading, callback);
}
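
// Rebuilds the draw framebuffer from the guest render target registers when they are dirty. The
// attachments are gathered into a FramebufferCacheKey so an existing FBO can be reused instead of
// re-attaching surfaces on every draw.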
void RasterizerOpenGL::ConfigureFramebuffers() {
    MICROPROFILE_SCOPE(OpenGL_Framebuffer);
    auto& gpu = system.GPU().Maxwell3D();
    if (!gpu.dirty.render_settings) {
        return;
    }
    gpu.dirty.render_settings = false;

    texture_cache.GuardRenderTargets(true);

    View depth_surface = texture_cache.GetDepthBufferSurface(true);

    const auto& regs = gpu.regs;
    state.framebuffer_srgb.enabled = regs.framebuffer_srgb != 0;
    UNIMPLEMENTED_IF(regs.rt_separate_frag_data == 0);

    // Bind the framebuffer surfaces
    FramebufferCacheKey fbkey;
    for (std::size_t index = 0; index < Maxwell::NumRenderTargets; ++index) {
        View color_surface{texture_cache.GetColorBufferSurface(index, true)};

        if (color_surface) {
            // Assume that a surface will be written to if it is used as a framebuffer, even
            // if the shader doesn't actually write to it.
            texture_cache.MarkColorBufferInUse(index);
        }

        fbkey.color_attachments[index] = GL_COLOR_ATTACHMENT0 + regs.rt_control.GetMap(index);
        fbkey.colors[index] = std::move(color_surface);
    }
    fbkey.colors_count = regs.rt_control.count;

    if (depth_surface) {
        // Assume that a surface will be written to if it is used as a framebuffer, even if
        // the shader doesn't actually write to it.
        texture_cache.MarkDepthBufferInUse();

        fbkey.stencil_enable = depth_surface->GetSurfaceParams().type == SurfaceType::DepthStencil;
        fbkey.zeta = std::move(depth_surface);
    }

    texture_cache.GuardRenderTargets(false);

    state.draw.draw_framebuffer = framebuffer_cache.GetFramebuffer(fbkey);
    SyncViewport(state);
}

void RasterizerOpenGL::ConfigureClearFramebuffer(OpenGLState& current_state, bool using_color_fb,
                                                 bool using_depth_fb, bool using_stencil_fb) {
    auto& gpu = system.GPU().Maxwell3D();
    const auto& regs = gpu.regs;

    texture_cache.GuardRenderTargets(true);
    View color_surface{};
    if (using_color_fb) {
        color_surface = texture_cache.GetColorBufferSurface(regs.clear_buffers.RT, false);
    }
    View depth_surface{};
    if (using_depth_fb || using_stencil_fb) {
        depth_surface = texture_cache.GetDepthBufferSurface(false);
    }
    texture_cache.GuardRenderTargets(false);

    current_state.draw.draw_framebuffer = clear_framebuffer.handle;
    current_state.ApplyFramebufferState();

    if (color_surface) {
        color_surface->Attach(GL_COLOR_ATTACHMENT0, GL_DRAW_FRAMEBUFFER);
    } else {
        glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
    }

    if (depth_surface) {
        const auto& params = depth_surface->GetSurfaceParams();
        switch (params.type) {
        case VideoCore::Surface::SurfaceType::Depth:
            depth_surface->Attach(GL_DEPTH_ATTACHMENT, GL_DRAW_FRAMEBUFFER);
            glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_STENCIL_ATTACHMENT, GL_TEXTURE_2D, 0, 0);
            break;
        case VideoCore::Surface::SurfaceType::DepthStencil:
            depth_surface->Attach(GL_DEPTH_STENCIL_ATTACHMENT, GL_DRAW_FRAMEBUFFER);
            break;
        default:
            UNIMPLEMENTED();
        }
    } else {
        glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_TEXTURE_2D, 0,
                               0);
    }
}
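
// Executes a register-driven clear. The clear runs against a scratch framebuffer with its own
// OpenGL state object so the tracked drawing state is not disturbed; the previous state is
// restored on scope exit.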
void RasterizerOpenGL::Clear() {
    const auto& maxwell3d = system.GPU().Maxwell3D();

    if (!maxwell3d.ShouldExecute()) {
        return;
    }

    const auto& regs = maxwell3d.regs;
    bool use_color{};
    bool use_depth{};
    bool use_stencil{};

    OpenGLState prev_state{OpenGLState::GetCurState()};
    SCOPE_EXIT({
        prev_state.AllDirty();
        prev_state.Apply();
    });

    OpenGLState clear_state{OpenGLState::GetCurState()};
    clear_state.SetDefaultViewports();
    if (regs.clear_buffers.R || regs.clear_buffers.G || regs.clear_buffers.B ||
        regs.clear_buffers.A) {
        use_color = true;
    }
    if (use_color) {
        clear_state.color_mask[0].red_enabled = regs.clear_buffers.R ? GL_TRUE : GL_FALSE;
        clear_state.color_mask[0].green_enabled = regs.clear_buffers.G ? GL_TRUE : GL_FALSE;
        clear_state.color_mask[0].blue_enabled = regs.clear_buffers.B ? GL_TRUE : GL_FALSE;
        clear_state.color_mask[0].alpha_enabled = regs.clear_buffers.A ? GL_TRUE : GL_FALSE;
    }
    if (regs.clear_buffers.Z) {
        ASSERT_MSG(regs.zeta_enable != 0, "Tried to clear Z but buffer is not enabled!");
        use_depth = true;

        // Always enable the depth write when clearing the depth buffer. The depth write mask is
        // ignored when clearing the buffer in the Switch, but OpenGL obeys it so we set it to
        // true.
        clear_state.depth.test_enabled = true;
        clear_state.depth.test_func = GL_ALWAYS;
        clear_state.depth.write_mask = GL_TRUE;
    }
    if (regs.clear_buffers.S) {
        ASSERT_MSG(regs.zeta_enable != 0, "Tried to clear stencil but buffer is not enabled!");
        use_stencil = true;
        clear_state.stencil.test_enabled = true;

        if (regs.clear_flags.stencil) {
            // Stencil affects the clear so fill it with the used masks
            clear_state.stencil.front.test_func = GL_ALWAYS;
            clear_state.stencil.front.test_mask = regs.stencil_front_func_mask;
            clear_state.stencil.front.action_stencil_fail = GL_KEEP;
            clear_state.stencil.front.action_depth_fail = GL_KEEP;
            clear_state.stencil.front.action_depth_pass = GL_KEEP;
            clear_state.stencil.front.write_mask = regs.stencil_front_mask;
            if (regs.stencil_two_side_enable) {
                clear_state.stencil.back.test_func = GL_ALWAYS;
                clear_state.stencil.back.test_mask = regs.stencil_back_func_mask;
                clear_state.stencil.back.action_stencil_fail = GL_KEEP;
                clear_state.stencil.back.action_depth_fail = GL_KEEP;
                clear_state.stencil.back.action_depth_pass = GL_KEEP;
                clear_state.stencil.back.write_mask = regs.stencil_back_mask;
            } else {
                clear_state.stencil.back.test_func = GL_ALWAYS;
                clear_state.stencil.back.test_mask = 0xFFFFFFFF;
                clear_state.stencil.back.write_mask = 0xFFFFFFFF;
                clear_state.stencil.back.action_stencil_fail = GL_KEEP;
                clear_state.stencil.back.action_depth_fail = GL_KEEP;
                clear_state.stencil.back.action_depth_pass = GL_KEEP;
            }
        }
    }

    if (!use_color && !use_depth && !use_stencil) {
        // No color surface nor depth/stencil surface are enabled
        return;
    }

    ConfigureClearFramebuffer(clear_state, use_color, use_depth, use_stencil);

    SyncViewport(clear_state);
    if (regs.clear_flags.scissor) {
        SyncScissorTest(clear_state);
    }

    if (regs.clear_flags.viewport) {
        clear_state.EmulateViewportWithScissor();
    }

    clear_state.AllDirty();
    clear_state.Apply();

    if (use_color) {
        glClearBufferfv(GL_COLOR, 0, regs.clear_color);
    }

    if (use_depth && use_stencil) {
        glClearBufferfi(GL_DEPTH_STENCIL, 0, regs.clear_depth, regs.clear_stencil);
    } else if (use_depth) {
        glClearBufferfv(GL_DEPTH, 0, &regs.clear_depth);
    } else if (use_stencil) {
        glClearBufferiv(GL_STENCIL, 0, &regs.clear_stencil);
    }
}

void RasterizerOpenGL::DrawPrelude() {
    auto& gpu = system.GPU().Maxwell3D();

    SyncColorMask();
    SyncFragmentColorClampState();
    SyncMultiSampleState();
    SyncDepthTestState();
    SyncStencilTestState();
    SyncBlendState();
    SyncLogicOpState();
    SyncCullMode();
    SyncPrimitiveRestart();
    SyncScissorTest(state);
    SyncTransformFeedback();
    SyncPointState();
    SyncPolygonOffset();
    SyncAlphaTest();

    // Draw the vertex batch
    const bool is_indexed = accelerate_draw == AccelDraw::Indexed;

    std::size_t buffer_size = CalculateVertexArraysSize();

    // Add space for index buffer
    if (is_indexed) {
        buffer_size = Common::AlignUp(buffer_size, 4) + CalculateIndexBufferSize();
    }

    // Uniform space for the 5 shader stages
    buffer_size = Common::AlignUp<std::size_t>(buffer_size, 4) +
                  (sizeof(GLShader::MaxwellUniformData) + device.GetUniformBufferAlignment()) *
                      Maxwell::MaxShaderStage;

    // Add space for at least 18 constant buffers
    buffer_size += Maxwell::MaxConstBuffers *
                   (Maxwell::MaxConstBufferSize + device.GetUniformBufferAlignment());

    // Prepare the vertex array.
    buffer_cache.Map(buffer_size);

    // Prepare vertex array format.
    const GLuint vao = SetupVertexFormat();
    vertex_array_pushbuffer.Setup(vao);

    // Upload vertex and index data.
    SetupVertexBuffer(vao);
    SetupVertexInstances(vao);
    index_buffer_offset = SetupIndexBuffer();

    // Prepare packed bindings.
    bind_ubo_pushbuffer.Setup(0);
    bind_ssbo_pushbuffer.Setup(0);

    // Setup shaders and their used resources.
    texture_cache.GuardSamplers(true);
    const auto primitive_mode = MaxwellToGL::PrimitiveTopology(gpu.regs.draw.topology);
    SetupShaders(primitive_mode);
    texture_cache.GuardSamplers(false);

    ConfigureFramebuffers();

    // Signal the buffer cache that we are not going to upload more things.
    const bool invalidate = buffer_cache.Unmap();

    // Now that we are no longer uploading data, we can safely bind the buffers to OpenGL.
    vertex_array_pushbuffer.Bind();
    bind_ubo_pushbuffer.Bind();
    bind_ssbo_pushbuffer.Bind();

    if (invalidate) {
        // As all cached buffers are invalidated, we need to recheck their state.
        gpu.dirty.ResetVertexArrays();
    }

    shader_program_manager->ApplyTo(state);
    state.Apply();

    if (texture_cache.TextureBarrier()) {
        glTextureBarrier();
    }
}
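
// Gathers everything glDraw* needs so the four OpenGL draw entry points (arrays/elements, with
// and without instancing) can be selected in one place.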
struct DrawParams {
    bool is_indexed{};
    bool is_instanced{};
    GLenum primitive_mode{};
    GLint count{};
    GLint base_vertex{};

    // Indexed settings
    GLenum index_format{};
    GLintptr index_buffer_offset{};

    // Instanced settings
    GLint num_instances{};
    GLint base_instance{};

    void DispatchDraw() {
        if (is_indexed) {
            const auto index_buffer_ptr = reinterpret_cast<const void*>(index_buffer_offset);
            if (is_instanced) {
                glDrawElementsInstancedBaseVertexBaseInstance(primitive_mode, count, index_format,
                                                              index_buffer_ptr, num_instances,
                                                              base_vertex, base_instance);
            } else {
                glDrawElementsBaseVertex(primitive_mode, count, index_format, index_buffer_ptr,
                                         base_vertex);
            }
        } else {
            if (is_instanced) {
                glDrawArraysInstancedBaseInstance(primitive_mode, base_vertex, count, num_instances,
                                                  base_instance);
            } else {
                glDrawArrays(primitive_mode, base_vertex, count);
            }
        }
    }
};
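
// DrawBatch services a single draw fired through the regular register interface, while
// DrawMultiBatch below services draws issued through the GPU's macro engine (mme_draw), which
// carries its own instance count.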
bool RasterizerOpenGL::DrawBatch(bool is_indexed) {
    accelerate_draw = is_indexed ? AccelDraw::Indexed : AccelDraw::Arrays;

    MICROPROFILE_SCOPE(OpenGL_Drawing);

    DrawPrelude();

    auto& maxwell3d = system.GPU().Maxwell3D();
    const auto& regs = maxwell3d.regs;
    const auto current_instance = maxwell3d.state.current_instance;
    DrawParams draw_call{};
    draw_call.is_indexed = is_indexed;
    draw_call.num_instances = static_cast<GLint>(1);
    draw_call.base_instance = static_cast<GLint>(current_instance);
    draw_call.is_instanced = current_instance > 0;
    draw_call.primitive_mode = MaxwellToGL::PrimitiveTopology(regs.draw.topology);
    if (draw_call.is_indexed) {
        draw_call.count = static_cast<GLint>(regs.index_array.count);
        draw_call.base_vertex = static_cast<GLint>(regs.vb_element_base);
        draw_call.index_format = MaxwellToGL::IndexFormat(regs.index_array.format);
        draw_call.index_buffer_offset = index_buffer_offset;
    } else {
        draw_call.count = static_cast<GLint>(regs.vertex_buffer.count);
        draw_call.base_vertex = static_cast<GLint>(regs.vertex_buffer.first);
    }
    draw_call.DispatchDraw();

    maxwell3d.dirty.memory_general = false;
    accelerate_draw = AccelDraw::Disabled;
    return true;
}

bool RasterizerOpenGL::DrawMultiBatch(bool is_indexed) {
    accelerate_draw = is_indexed ? AccelDraw::Indexed : AccelDraw::Arrays;

    MICROPROFILE_SCOPE(OpenGL_Drawing);

    DrawPrelude();

    auto& maxwell3d = system.GPU().Maxwell3D();
    const auto& regs = maxwell3d.regs;
    const auto& draw_setup = maxwell3d.mme_draw;
    DrawParams draw_call{};
    draw_call.is_indexed = is_indexed;
    draw_call.num_instances = static_cast<GLint>(draw_setup.instance_count);
    draw_call.base_instance = static_cast<GLint>(regs.vb_base_instance);
    draw_call.is_instanced = draw_setup.instance_count > 1;
    draw_call.primitive_mode = MaxwellToGL::PrimitiveTopology(regs.draw.topology);
    if (draw_call.is_indexed) {
        draw_call.count = static_cast<GLint>(regs.index_array.count);
        draw_call.base_vertex = static_cast<GLint>(regs.vb_element_base);
        draw_call.index_format = MaxwellToGL::IndexFormat(regs.index_array.format);
        draw_call.index_buffer_offset = index_buffer_offset;
    } else {
        draw_call.count = static_cast<GLint>(regs.vertex_buffer.count);
        draw_call.base_vertex = static_cast<GLint>(regs.vertex_buffer.first);
    }
    draw_call.DispatchDraw();

    maxwell3d.dirty.memory_general = false;
    accelerate_draw = AccelDraw::Disabled;
    return true;
}

void RasterizerOpenGL::DispatchCompute(GPUVAddr code_addr) {
    if (!GLAD_GL_ARB_compute_variable_group_size) {
        LOG_ERROR(Render_OpenGL, "Compute is currently not supported on this device due to the "
                                 "lack of GL_ARB_compute_variable_group_size");
        return;
    }

    auto kernel = shader_cache.GetComputeKernel(code_addr);
    ProgramVariant variant;
    variant.texture_buffer_usage = SetupComputeTextures(kernel);
    SetupComputeImages(kernel);

    const auto [program, next_bindings] = kernel->GetProgramHandle(variant);
    state.draw.shader_program = program;
    state.draw.program_pipeline = 0;

    const std::size_t buffer_size =
        Tegra::Engines::KeplerCompute::NumConstBuffers *
        (Maxwell::MaxConstBufferSize + device.GetUniformBufferAlignment());
    buffer_cache.Map(buffer_size);

    bind_ubo_pushbuffer.Setup(0);
    bind_ssbo_pushbuffer.Setup(0);

    SetupComputeConstBuffers(kernel);
    SetupComputeGlobalMemory(kernel);

    buffer_cache.Unmap();

    bind_ubo_pushbuffer.Bind();
    bind_ssbo_pushbuffer.Bind();

    state.ApplyTextures();
    state.ApplyImages();
    state.ApplyShaderProgram();
    state.ApplyProgramPipeline();

    const auto& launch_desc = system.GPU().KeplerCompute().launch_description;
    glDispatchComputeGroupSizeARB(launch_desc.grid_dim_x, launch_desc.grid_dim_y,
                                  launch_desc.grid_dim_z, launch_desc.block_dim_x,
                                  launch_desc.block_dim_y, launch_desc.block_dim_z);
}
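
// Flushing writes GPU-modified data in a range back to guest memory; invalidation drops cached
// objects that alias the range so they are rebuilt from guest memory on next use.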
void RasterizerOpenGL::FlushAll() {}

void RasterizerOpenGL::FlushRegion(CacheAddr addr, u64 size) {
    MICROPROFILE_SCOPE(OpenGL_CacheManagement);
    if (!addr || !size) {
        return;
    }
    texture_cache.FlushRegion(addr, size);
    buffer_cache.FlushRegion(addr, size);
}

void RasterizerOpenGL::InvalidateRegion(CacheAddr addr, u64 size) {
    MICROPROFILE_SCOPE(OpenGL_CacheManagement);
    if (!addr || !size) {
        return;
    }
    texture_cache.InvalidateRegion(addr, size);
    shader_cache.InvalidateRegion(addr, size);
    buffer_cache.InvalidateRegion(addr, size);
}

void RasterizerOpenGL::FlushAndInvalidateRegion(CacheAddr addr, u64 size) {
    if (Settings::values.use_accurate_gpu_emulation) {
        FlushRegion(addr, size);
    }
    InvalidateRegion(addr, size);
}

void RasterizerOpenGL::FlushCommands() {
    glFlush();
}

void RasterizerOpenGL::TickFrame() {
    buffer_cache.TickFrame();
}

bool RasterizerOpenGL::AccelerateSurfaceCopy(const Tegra::Engines::Fermi2D::Regs::Surface& src,
                                             const Tegra::Engines::Fermi2D::Regs::Surface& dst,
                                             const Tegra::Engines::Fermi2D::Config& copy_config) {
    MICROPROFILE_SCOPE(OpenGL_Blits);
    texture_cache.DoFermiCopy(src, dst, copy_config);
    return true;
}

bool RasterizerOpenGL::AccelerateDisplay(const Tegra::FramebufferConfig& config,
                                         VAddr framebuffer_addr, u32 pixel_stride) {
    if (!framebuffer_addr) {
        return {};
    }

    MICROPROFILE_SCOPE(OpenGL_CacheManagement);

    const auto surface{
        texture_cache.TryFindFramebufferSurface(Memory::GetPointer(framebuffer_addr))};
    if (!surface) {
        return {};
    }

    // Verify that the cached surface is the same size and format as the requested framebuffer
    const auto& params{surface->GetSurfaceParams()};
    const auto& pixel_format{
        VideoCore::Surface::PixelFormatFromGPUPixelFormat(config.pixel_format)};
    ASSERT_MSG(params.width == config.width, "Framebuffer width is different");
    ASSERT_MSG(params.height == config.height, "Framebuffer height is different");

    if (params.pixel_format != pixel_format) {
        LOG_WARNING(Render_OpenGL, "Framebuffer pixel_format is different");
    }

    screen_info.display_texture = surface->GetTexture();
    screen_info.display_srgb = surface->GetSurfaceParams().srgb_conversion;

    return true;
}

void RasterizerOpenGL::SetupDrawConstBuffers(Tegra::Engines::Maxwell3D::Regs::ShaderStage stage,
                                             const Shader& shader) {
    MICROPROFILE_SCOPE(OpenGL_UBO);
    const auto& stages = system.GPU().Maxwell3D().state.shader_stages;
    const auto& shader_stage = stages[static_cast<std::size_t>(stage)];
    for (const auto& entry : shader->GetShaderEntries().const_buffers) {
        const auto& buffer = shader_stage.const_buffers[entry.GetIndex()];
        SetupConstBuffer(buffer, entry);
    }
}

void RasterizerOpenGL::SetupComputeConstBuffers(const Shader& kernel) {
    MICROPROFILE_SCOPE(OpenGL_UBO);
    const auto& launch_desc = system.GPU().KeplerCompute().launch_description;
    for (const auto& entry : kernel->GetShaderEntries().const_buffers) {
        const auto& config = launch_desc.const_buffer_config[entry.GetIndex()];
        const std::bitset<8> mask = launch_desc.const_buffer_enable_mask.Value();
        Tegra::Engines::ConstBufferInfo buffer;
        buffer.address = config.Address();
        buffer.size = config.size;
        buffer.enabled = mask[entry.GetIndex()];
        SetupConstBuffer(buffer, entry);
    }
}
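
// Uploads one const buffer and records its binding. The upload size is rounded up to a multiple
// of vec4 because std140 uniform blocks are consumed in vec4-sized chunks.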
void RasterizerOpenGL::SetupConstBuffer(const Tegra::Engines::ConstBufferInfo& buffer,
                                        const GLShader::ConstBufferEntry& entry) {
    if (!buffer.enabled) {
        // Set values to zero to unbind buffers
        bind_ubo_pushbuffer.Push(buffer_cache.GetEmptyBuffer(sizeof(float)), 0, sizeof(float));
        return;
    }

    // Align the actual size so it ends up being a multiple of vec4 to meet the OpenGL std140
    // UBO alignment requirements.
    const std::size_t size = Common::AlignUp(GetConstBufferSize(buffer, entry), sizeof(GLvec4));

    const auto alignment = device.GetUniformBufferAlignment();
    const auto [cbuf, offset] = buffer_cache.UploadMemory(buffer.address, size, alignment);
    bind_ubo_pushbuffer.Push(cbuf, offset, size);
}

void RasterizerOpenGL::SetupDrawGlobalMemory(Tegra::Engines::Maxwell3D::Regs::ShaderStage stage,
                                             const Shader& shader) {
    auto& gpu{system.GPU()};
    auto& memory_manager{gpu.MemoryManager()};
    const auto cbufs{gpu.Maxwell3D().state.shader_stages[static_cast<std::size_t>(stage)]};
    for (const auto& entry : shader->GetShaderEntries().global_memory_entries) {
        const auto addr{cbufs.const_buffers[entry.GetCbufIndex()].address + entry.GetCbufOffset()};
        const auto gpu_addr{memory_manager.Read<u64>(addr)};
        const auto size{memory_manager.Read<u32>(addr + 8)};
        SetupGlobalMemory(entry, gpu_addr, size);
    }
}

void RasterizerOpenGL::SetupComputeGlobalMemory(const Shader& kernel) {
    auto& gpu{system.GPU()};
    auto& memory_manager{gpu.MemoryManager()};
    const auto cbufs{gpu.KeplerCompute().launch_description.const_buffer_config};
    for (const auto& entry : kernel->GetShaderEntries().global_memory_entries) {
        const auto addr{cbufs[entry.GetCbufIndex()].Address() + entry.GetCbufOffset()};
        const auto gpu_addr{memory_manager.Read<u64>(addr)};
        const auto size{memory_manager.Read<u32>(addr + 8)};
        SetupGlobalMemory(entry, gpu_addr, size);
    }
}
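
// Binds a global memory region as an SSBO. The region's GPU address and size are stored by the
// guest as a u64/u32 pair inside a const buffer, which the callers above read at addr and
// addr + 8 before reaching this function.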
void RasterizerOpenGL::SetupGlobalMemory(const GLShader::GlobalMemoryEntry& entry,
                                         GPUVAddr gpu_addr, std::size_t size) {
    const auto alignment{device.GetShaderStorageBufferAlignment()};
    const auto [ssbo, buffer_offset] =
        buffer_cache.UploadMemory(gpu_addr, size, alignment, entry.IsWritten());
    bind_ssbo_pushbuffer.Push(ssbo, buffer_offset, static_cast<GLsizeiptr>(size));
}

TextureBufferUsage RasterizerOpenGL::SetupDrawTextures(Maxwell::ShaderStage stage,
                                                       const Shader& shader,
                                                       BaseBindings base_bindings) {
    MICROPROFILE_SCOPE(OpenGL_Texture);
    const auto& gpu = system.GPU();
    const auto& maxwell3d = gpu.Maxwell3D();
    const auto& entries = shader->GetShaderEntries().samplers;

    ASSERT_MSG(base_bindings.sampler + entries.size() <= std::size(state.textures),
               "Exceeded the number of active textures.");

    TextureBufferUsage texture_buffer_usage{0};

    for (u32 bindpoint = 0; bindpoint < entries.size(); ++bindpoint) {
        const auto& entry = entries[bindpoint];
        const auto texture = [&] {
            if (!entry.IsBindless()) {
                return maxwell3d.GetStageTexture(stage, entry.GetOffset());
            }
            const auto cbuf = entry.GetBindlessCBuf();
            Tegra::Texture::TextureHandle tex_handle;
            Tegra::Engines::ShaderType shader_type = static_cast<Tegra::Engines::ShaderType>(stage);
            tex_handle.raw = maxwell3d.AccessConstBuffer32(shader_type, cbuf.first, cbuf.second);
            return maxwell3d.GetTextureInfo(tex_handle);
        }();

        if (SetupTexture(base_bindings.sampler + bindpoint, texture, entry)) {
            texture_buffer_usage.set(bindpoint);
        }
    }

    return texture_buffer_usage;
}

TextureBufferUsage RasterizerOpenGL::SetupComputeTextures(const Shader& kernel) {
    MICROPROFILE_SCOPE(OpenGL_Texture);
    const auto& compute = system.GPU().KeplerCompute();
    const auto& entries = kernel->GetShaderEntries().samplers;

    ASSERT_MSG(entries.size() <= std::size(state.textures),
               "Exceeded the number of active textures.");

    TextureBufferUsage texture_buffer_usage{0};

    for (u32 bindpoint = 0; bindpoint < entries.size(); ++bindpoint) {
        const auto& entry = entries[bindpoint];
        const auto texture = [&] {
            if (!entry.IsBindless()) {
                return compute.GetTexture(entry.GetOffset());
            }
            const auto cbuf = entry.GetBindlessCBuf();
            Tegra::Texture::TextureHandle tex_handle;
            tex_handle.raw = compute.AccessConstBuffer32(Tegra::Engines::ShaderType::Compute,
                                                         cbuf.first, cbuf.second);
            return compute.GetTextureInfo(tex_handle);
        }();

        if (SetupTexture(bindpoint, texture, entry)) {
            texture_buffer_usage.set(bindpoint);
        }
    }

    return texture_buffer_usage;
}
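
// Binds a single texture and its sampler. Returns true when the texture is a buffer texture so
// callers can flag it in the texture buffer usage bitset.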
|
|
|
|
|
2019-07-12 07:17:18 +02:00
|
|
|
bool RasterizerOpenGL::SetupTexture(u32 binding, const Tegra::Texture::FullTextureInfo& texture,
|
2019-07-12 01:09:53 +02:00
|
|
|
const GLShader::SamplerEntry& entry) {
|
2019-07-11 06:32:12 +02:00
|
|
|
state.samplers[binding] = sampler_cache.GetSampler(texture.tsc);
|
2019-07-12 01:09:53 +02:00
|
|
|
|
2019-07-12 07:01:27 +02:00
|
|
|
const auto view = texture_cache.GetTextureSurface(texture.tic, entry);
|
2019-07-12 01:09:53 +02:00
|
|
|
if (!view) {
|
|
|
|
// Can occur when texture addr is null or its memory is unmapped/invalid
|
2019-07-11 06:32:12 +02:00
|
|
|
state.textures[binding] = 0;
|
2019-07-12 01:09:53 +02:00
|
|
|
return false;
|
|
|
|
}
|
2019-07-11 06:32:12 +02:00
|
|
|
state.textures[binding] = view->GetTexture();
|
2019-07-12 01:09:53 +02:00
|
|
|
|
|
|
|
if (view->GetSurfaceParams().IsBuffer()) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Apply swizzle to textures that are not buffers.
|
|
|
|
view->ApplySwizzle(texture.tic.x_source, texture.tic.y_source, texture.tic.z_source,
|
|
|
|
texture.tic.w_source);
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2019-07-12 02:54:07 +02:00
|
|
|
void RasterizerOpenGL::SetupComputeImages(const Shader& shader) {
|
|
|
|
const auto& compute = system.GPU().KeplerCompute();
|
|
|
|
const auto& entries = shader->GetShaderEntries().images;
|
|
|
|
for (u32 bindpoint = 0; bindpoint < entries.size(); ++bindpoint) {
|
|
|
|
const auto& entry = entries[bindpoint];
|
2019-10-20 09:03:33 +02:00
|
|
|
const auto tic = [&] {
|
2019-07-12 02:54:07 +02:00
|
|
|
if (!entry.IsBindless()) {
|
2019-07-12 07:17:18 +02:00
|
|
|
return compute.GetTexture(entry.GetOffset()).tic;
|
2019-07-12 02:54:07 +02:00
|
|
|
}
|
|
|
|
            const auto cbuf = entry.GetBindlessCBuf();
            Tegra::Texture::TextureHandle tex_handle;
            tex_handle.raw = compute.AccessConstBuffer32(Tegra::Engines::ShaderType::Compute,
                                                         cbuf.first, cbuf.second);
            return compute.GetTextureInfo(tex_handle).tic;
        }();
        SetupImage(bindpoint, tic, entry);
    }
}

void RasterizerOpenGL::SetupImage(u32 binding, const Tegra::Texture::TICEntry& tic,
                                  const GLShader::ImageEntry& entry) {
    const auto view = texture_cache.GetImageSurface(tic, entry);
    if (!view) {
        state.images[binding] = 0;
        return;
    }
    if (!tic.IsBuffer()) {
        view->ApplySwizzle(tic.x_source, tic.y_source, tic.z_source, tic.w_source);
    }
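    // Flag images written by the shader as modified so the texture cache knows their contents
    // changed on this tick.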
    if (entry.IsWritten()) {
        view->MarkAsModified(texture_cache.Tick());
    }
    state.images[binding] = view->GetTexture();
}

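// Note: only the first viewport is synced when geometry shaders are disabled, presumably
// because no other stage is expected to select a viewport index.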
void RasterizerOpenGL::SyncViewport(OpenGLState& current_state) {
    const auto& regs = system.GPU().Maxwell3D().regs;
    const bool geometry_shaders_enabled =
        regs.IsShaderConfigEnabled(static_cast<size_t>(Maxwell::ShaderProgram::Geometry));
    const std::size_t viewport_count =
        geometry_shaders_enabled ? Tegra::Engines::Maxwell3D::Regs::NumViewports : 1;
    for (std::size_t i = 0; i < viewport_count; i++) {
        auto& viewport = current_state.viewports[i];
        const auto& src = regs.viewports[i];
        const Common::Rectangle<s32> viewport_rect{regs.viewport_transform[i].GetRect()};
        viewport.x = viewport_rect.left;
        viewport.y = viewport_rect.bottom;
        viewport.width = viewport_rect.GetWidth();
        viewport.height = viewport_rect.GetHeight();
        viewport.depth_range_far = src.depth_range_far;
        viewport.depth_range_near = src.depth_range_near;
    }
    state.depth_clamp.far_plane = regs.view_volume_clip_control.depth_clamp_far != 0;
    state.depth_clamp.near_plane = regs.view_volume_clip_control.depth_clamp_near != 0;
}

void RasterizerOpenGL::SyncClipEnabled(
    const std::array<bool, Maxwell::Regs::NumClipDistances>& clip_mask) {
    const auto& regs = system.GPU().Maxwell3D().regs;
    const std::array<bool, Maxwell::Regs::NumClipDistances> reg_state{
        regs.clip_distance_enabled.c0 != 0, regs.clip_distance_enabled.c1 != 0,
        regs.clip_distance_enabled.c2 != 0, regs.clip_distance_enabled.c3 != 0,
        regs.clip_distance_enabled.c4 != 0, regs.clip_distance_enabled.c5 != 0,
        regs.clip_distance_enabled.c6 != 0, regs.clip_distance_enabled.c7 != 0};

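    // A clip distance is active only when both the register and the shader's clip mask enable it.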
    for (std::size_t i = 0; i < Maxwell::Regs::NumClipDistances; ++i) {
        state.clip_distance[i] = reg_state[i] && clip_mask[i];
    }
}

void RasterizerOpenGL::SyncClipCoef() {
    UNIMPLEMENTED();
}

void RasterizerOpenGL::SyncCullMode() {
    auto& maxwell3d = system.GPU().Maxwell3D();
    const auto& regs = maxwell3d.regs;

    state.cull.enabled = regs.cull.enabled != 0;
    if (state.cull.enabled) {
        state.cull.front_face = MaxwellToGL::FrontFace(regs.cull.front_face);
        state.cull.mode = MaxwellToGL::CullFace(regs.cull.cull_face);

        const bool flip_triangles{regs.screen_y_control.triangle_rast_flip == 0 ||
                                  regs.viewport_transform[0].scale_y < 0.0f};

        // If the GPU is configured to flip the rasterized triangles, then we need to flip the
        // notion of front and back. Note: We flip the triangles when the value of the register
        // is 0 because OpenGL already does it for us.
        if (flip_triangles) {
            if (state.cull.front_face == GL_CCW)
                state.cull.front_face = GL_CW;
            else if (state.cull.front_face == GL_CW)
                state.cull.front_face = GL_CCW;
        }
    }
}

void RasterizerOpenGL::SyncPrimitiveRestart() {
    const auto& regs = system.GPU().Maxwell3D().regs;

    state.primitive_restart.enabled = regs.primitive_restart.enabled;
    state.primitive_restart.index = regs.primitive_restart.index;
}

void RasterizerOpenGL::SyncDepthTestState() {
    const auto& regs = system.GPU().Maxwell3D().regs;

    state.depth.test_enabled = regs.depth_test_enable != 0;
    state.depth.write_mask = regs.depth_write_enabled ? GL_TRUE : GL_FALSE;

    if (!state.depth.test_enabled) {
        return;
    }

    state.depth.test_func = MaxwellToGL::ComparisonOp(regs.depth_test_func);
}

void RasterizerOpenGL::SyncStencilTestState() {
    auto& maxwell3d = system.GPU().Maxwell3D();
    if (!maxwell3d.dirty.stencil_test) {
        return;
    }
    maxwell3d.dirty.stencil_test = false;

    const auto& regs = maxwell3d.regs;
    state.stencil.test_enabled = regs.stencil_enable != 0;
    state.MarkDirtyStencilState();

    if (!regs.stencil_enable) {
        return;
    }

    state.stencil.front.test_func = MaxwellToGL::ComparisonOp(regs.stencil_front_func_func);
    state.stencil.front.test_ref = regs.stencil_front_func_ref;
    state.stencil.front.test_mask = regs.stencil_front_func_mask;
    state.stencil.front.action_stencil_fail = MaxwellToGL::StencilOp(regs.stencil_front_op_fail);
    state.stencil.front.action_depth_fail = MaxwellToGL::StencilOp(regs.stencil_front_op_zfail);
    state.stencil.front.action_depth_pass = MaxwellToGL::StencilOp(regs.stencil_front_op_zpass);
    state.stencil.front.write_mask = regs.stencil_front_mask;
    if (regs.stencil_two_side_enable) {
        state.stencil.back.test_func = MaxwellToGL::ComparisonOp(regs.stencil_back_func_func);
        state.stencil.back.test_ref = regs.stencil_back_func_ref;
        state.stencil.back.test_mask = regs.stencil_back_func_mask;
        state.stencil.back.action_stencil_fail = MaxwellToGL::StencilOp(regs.stencil_back_op_fail);
        state.stencil.back.action_depth_fail = MaxwellToGL::StencilOp(regs.stencil_back_op_zfail);
        state.stencil.back.action_depth_pass = MaxwellToGL::StencilOp(regs.stencil_back_op_zpass);
        state.stencil.back.write_mask = regs.stencil_back_mask;
    } else {
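        // Two-sided stencil is disabled, so restore OpenGL's default state for the back face.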
        state.stencil.back.test_func = GL_ALWAYS;
        state.stencil.back.test_ref = 0;
        state.stencil.back.test_mask = 0xFFFFFFFF;
        state.stencil.back.write_mask = 0xFFFFFFFF;
        state.stencil.back.action_stencil_fail = GL_KEEP;
        state.stencil.back.action_depth_fail = GL_KEEP;
        state.stencil.back.action_depth_pass = GL_KEEP;
    }
}

void RasterizerOpenGL::SyncColorMask() {
    auto& maxwell3d = system.GPU().Maxwell3D();
    if (!maxwell3d.dirty.color_mask) {
        return;
    }
    const auto& regs = maxwell3d.regs;

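    // Only the first color mask is synced unless independent blending is enabled.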
    const std::size_t count =
        regs.independent_blend_enable ? Tegra::Engines::Maxwell3D::Regs::NumRenderTargets : 1;
    for (std::size_t i = 0; i < count; i++) {
        const auto& source = regs.color_mask[regs.color_mask_common ? 0 : i];
        auto& dest = state.color_mask[i];
        dest.red_enabled = (source.R == 0) ? GL_FALSE : GL_TRUE;
        dest.green_enabled = (source.G == 0) ? GL_FALSE : GL_TRUE;
        dest.blue_enabled = (source.B == 0) ? GL_FALSE : GL_TRUE;
        dest.alpha_enabled = (source.A == 0) ? GL_FALSE : GL_TRUE;
    }

    state.MarkDirtyColorMask();
    maxwell3d.dirty.color_mask = false;
}

void RasterizerOpenGL::SyncMultiSampleState() {
    const auto& regs = system.GPU().Maxwell3D().regs;
    state.multisample_control.alpha_to_coverage = regs.multisample_control.alpha_to_coverage != 0;
    state.multisample_control.alpha_to_one = regs.multisample_control.alpha_to_one != 0;
}

void RasterizerOpenGL::SyncFragmentColorClampState() {
    const auto& regs = system.GPU().Maxwell3D().regs;
    state.fragment_color_clamp.enabled = regs.frag_color_clamp != 0;
}

void RasterizerOpenGL::SyncBlendState() {
    auto& maxwell3d = system.GPU().Maxwell3D();
    if (!maxwell3d.dirty.blend_state) {
        return;
    }
    const auto& regs = maxwell3d.regs;

    state.blend_color.red = regs.blend_color.r;
    state.blend_color.green = regs.blend_color.g;
    state.blend_color.blue = regs.blend_color.b;
    state.blend_color.alpha = regs.blend_color.a;

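    // Without independent blending, only render target 0's blend configuration is honored.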
    state.independant_blend.enabled = regs.independent_blend_enable;
    if (!state.independant_blend.enabled) {
        auto& blend = state.blend[0];
        const auto& src = regs.blend;
        blend.enabled = src.enable[0] != 0;
        if (blend.enabled) {
            blend.rgb_equation = MaxwellToGL::BlendEquation(src.equation_rgb);
            blend.src_rgb_func = MaxwellToGL::BlendFunc(src.factor_source_rgb);
            blend.dst_rgb_func = MaxwellToGL::BlendFunc(src.factor_dest_rgb);
            blend.a_equation = MaxwellToGL::BlendEquation(src.equation_a);
            blend.src_a_func = MaxwellToGL::BlendFunc(src.factor_source_a);
            blend.dst_a_func = MaxwellToGL::BlendFunc(src.factor_dest_a);
        }
        for (std::size_t i = 1; i < Tegra::Engines::Maxwell3D::Regs::NumRenderTargets; i++) {
            state.blend[i].enabled = false;
        }
        maxwell3d.dirty.blend_state = false;
        state.MarkDirtyBlendState();
        return;
    }

    for (std::size_t i = 0; i < Tegra::Engines::Maxwell3D::Regs::NumRenderTargets; i++) {
        auto& blend = state.blend[i];
        const auto& src = regs.independent_blend[i];
        blend.enabled = regs.blend.enable[i] != 0;
        if (!blend.enabled)
            continue;
        blend.rgb_equation = MaxwellToGL::BlendEquation(src.equation_rgb);
        blend.src_rgb_func = MaxwellToGL::BlendFunc(src.factor_source_rgb);
        blend.dst_rgb_func = MaxwellToGL::BlendFunc(src.factor_dest_rgb);
        blend.a_equation = MaxwellToGL::BlendEquation(src.equation_a);
        blend.src_a_func = MaxwellToGL::BlendFunc(src.factor_source_a);
        blend.dst_a_func = MaxwellToGL::BlendFunc(src.factor_dest_a);
    }

    state.MarkDirtyBlendState();
    maxwell3d.dirty.blend_state = false;
}

void RasterizerOpenGL::SyncLogicOpState() {
    const auto& regs = system.GPU().Maxwell3D().regs;

    state.logic_op.enabled = regs.logic_op.enable != 0;

    if (!state.logic_op.enabled)
        return;

    ASSERT_MSG(regs.blend.enable[0] == 0,
               "Blending and logic op can't be enabled at the same time.");

    state.logic_op.operation = MaxwellToGL::LogicOp(regs.logic_op.operation);
}

void RasterizerOpenGL::SyncScissorTest(OpenGLState& current_state) {
    const auto& regs = system.GPU().Maxwell3D().regs;
    const bool geometry_shaders_enabled =
        regs.IsShaderConfigEnabled(static_cast<size_t>(Maxwell::ShaderProgram::Geometry));
    const std::size_t viewport_count =
        geometry_shaders_enabled ? Tegra::Engines::Maxwell3D::Regs::NumViewports : 1;
    for (std::size_t i = 0; i < viewport_count; i++) {
        const auto& src = regs.scissor_test[i];
        auto& dst = current_state.viewports[i].scissor;
        dst.enabled = (src.enable != 0);
        if (!dst.enabled) {
            return;
        }
        const u32 width = src.max_x - src.min_x;
        const u32 height = src.max_y - src.min_y;
        dst.x = src.min_x;
        dst.y = src.min_y;
        dst.width = width;
        dst.height = height;
    }
}

void RasterizerOpenGL::SyncTransformFeedback() {
    const auto& regs = system.GPU().Maxwell3D().regs;
    UNIMPLEMENTED_IF_MSG(regs.tfb_enabled != 0, "Transform feedbacks are not implemented");
}

void RasterizerOpenGL::SyncPointState() {
    const auto& regs = system.GPU().Maxwell3D().regs;
    // Limit the point size to 1 since nouveau sometimes sets a point size of 0 (and that's
    // invalid in OpenGL).
    state.point.size = std::max(1.0f, regs.point_size);
}

void RasterizerOpenGL::SyncPolygonOffset() {
    auto& maxwell3d = system.GPU().Maxwell3D();
    if (!maxwell3d.dirty.polygon_offset) {
        return;
    }
    const auto& regs = maxwell3d.regs;

    state.polygon_offset.fill_enable = regs.polygon_offset_fill_enable != 0;
    state.polygon_offset.line_enable = regs.polygon_offset_line_enable != 0;
    state.polygon_offset.point_enable = regs.polygon_offset_point_enable != 0;

    // Hardware divides polygon offset units by two.
    state.polygon_offset.units = regs.polygon_offset_units / 2.0f;
    state.polygon_offset.factor = regs.polygon_offset_factor;
    state.polygon_offset.clamp = regs.polygon_offset_clamp;

    state.MarkDirtyPolygonOffset();
    maxwell3d.dirty.polygon_offset = false;
}

void RasterizerOpenGL::SyncAlphaTest() {
    const auto& regs = system.GPU().Maxwell3D().regs;
    UNIMPLEMENTED_IF_MSG(regs.alpha_test_enabled != 0 && regs.rt_control.count > 1,
                         "Alpha testing is enabled with more than one render target");

    state.alpha_test.enabled = regs.alpha_test_enabled;
    if (!state.alpha_test.enabled) {
        return;
    }
    state.alpha_test.func = MaxwellToGL::ComparisonOp(regs.alpha_test_func);
    state.alpha_test.ref = regs.alpha_test_ref;
}

} // namespace OpenGL