3
0
Fork 0
forked from suyu/suyu

gl_shader_cache: Use CompileDepth::FullDecompile on GLSL

From my testing on a Splatoon 2 shader that takes 3800ms on average to
compile, changing to FullDecompile reduces it to 900ms on average.

The shader decoder will automatically fall back to a more naive method if
it can't use full decompile.
This commit is contained in:
ReinUsesLisp 2020-04-14 01:34:20 -03:00
parent 7e4a132a77
commit 453d7419d9

View file

@@ -34,6 +34,8 @@
 namespace OpenGL {

 using Tegra::Engines::ShaderType;
+using VideoCommon::Shader::CompileDepth;
+using VideoCommon::Shader::CompilerSettings;
 using VideoCommon::Shader::ProgramCode;
 using VideoCommon::Shader::Registry;
 using VideoCommon::Shader::ShaderIR;
@@ -43,7 +45,7 @@ namespace {
 constexpr u32 STAGE_MAIN_OFFSET = 10;
 constexpr u32 KERNEL_MAIN_OFFSET = 0;

-constexpr VideoCommon::Shader::CompilerSettings COMPILER_SETTINGS{};
+constexpr CompilerSettings COMPILER_SETTINGS{CompileDepth::FullDecompile};

 /// Gets the address for the specified shader stage program
 GPUVAddr GetShaderAddress(Core::System& system, Maxwell::ShaderProgram program) {