// SPDX-FileCopyrightText: Copyright 2021 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

#include <algorithm>
#include <filesystem>
#include <fstream>
#include <memory>
#include <optional>
#include <utility>

#include "common/assert.h"
#include "common/cityhash.h"
#include "common/common_types.h"
#include "common/div_ceil.h"
#include "common/fs/fs.h"
#include "common/fs/path_util.h"
#include "common/logging/log.h"
#include "common/polyfill_ranges.h"
#include "shader_recompiler/environment.h"
#include "video_core/engines/kepler_compute.h"
#include "video_core/memory_manager.h"
#include "video_core/shader_environment.h"
#include "video_core/texture_cache/format_lookup_table.h"
#include "video_core/textures/texture.h"

namespace VideoCommon {

constexpr std::array<char, 8> MAGIC_NUMBER{'y', 'u', 'z', 'u', 'c', 'a', 'c', 'h'};

constexpr size_t INST_SIZE = sizeof(u64);

using Maxwell = Tegra::Engines::Maxwell3D::Regs;

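// Packs a constant buffer index and byte offset into the single 64-bit key used by the
// cbuf_values cache: the buffer index in the upper 32 bits, the offset in the lower 32 bits.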
static u64 MakeCbufKey(u32 index, u32 offset) {
    return (static_cast<u64>(index) << 32) | offset;
}

static Shader::TextureType ConvertTextureType(const Tegra::Texture::TICEntry& entry) {
    switch (entry.texture_type) {
    case Tegra::Texture::TextureType::Texture1D:
        return Shader::TextureType::Color1D;
    case Tegra::Texture::TextureType::Texture2D:
    case Tegra::Texture::TextureType::Texture2DNoMipmap:
        return entry.normalized_coords ? Shader::TextureType::Color2D
                                       : Shader::TextureType::Color2DRect;
    case Tegra::Texture::TextureType::Texture3D:
        return Shader::TextureType::Color3D;
    case Tegra::Texture::TextureType::TextureCubemap:
        return Shader::TextureType::ColorCube;
    case Tegra::Texture::TextureType::Texture1DArray:
        return Shader::TextureType::ColorArray1D;
    case Tegra::Texture::TextureType::Texture2DArray:
        return Shader::TextureType::ColorArray2D;
    case Tegra::Texture::TextureType::Texture1DBuffer:
        return Shader::TextureType::Buffer;
    case Tegra::Texture::TextureType::TextureCubeArray:
        return Shader::TextureType::ColorArrayCube;
    default:
        UNIMPLEMENTED();
        return Shader::TextureType::Color2D;
    }
}

static Shader::TexturePixelFormat ConvertTexturePixelFormat(const Tegra::Texture::TICEntry& entry) {
    switch (PixelFormatFromTextureInfo(entry.format, entry.r_type, entry.g_type, entry.b_type,
                                       entry.a_type, entry.srgb_conversion)) {
    case VideoCore::Surface::PixelFormat::A8B8G8R8_SNORM:
        return Shader::TexturePixelFormat::A8B8G8R8_SNORM;
    case VideoCore::Surface::PixelFormat::R8_SNORM:
        return Shader::TexturePixelFormat::R8_SNORM;
    case VideoCore::Surface::PixelFormat::R8G8_SNORM:
        return Shader::TexturePixelFormat::R8G8_SNORM;
    case VideoCore::Surface::PixelFormat::R16G16B16A16_SNORM:
        return Shader::TexturePixelFormat::R16G16B16A16_SNORM;
    case VideoCore::Surface::PixelFormat::R16G16_SNORM:
        return Shader::TexturePixelFormat::R16G16_SNORM;
    case VideoCore::Surface::PixelFormat::R16_SNORM:
        return Shader::TexturePixelFormat::R16_SNORM;
    default:
        return Shader::TexturePixelFormat::OTHER;
    }
}

static std::string_view StageToPrefix(Shader::Stage stage) {
    switch (stage) {
    case Shader::Stage::VertexB:
        return "VB";
    case Shader::Stage::TessellationControl:
        return "TC";
    case Shader::Stage::TessellationEval:
        return "TE";
    case Shader::Stage::Geometry:
        return "GS";
    case Shader::Stage::Fragment:
        return "FS";
    case Shader::Stage::Compute:
        return "CS";
    case Shader::Stage::VertexA:
        return "VA";
    default:
        return "UK";
    }
}

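// Writes the raw shader code to <dump dir>/shaders/<pipeline>_<stage>_<shader>.ash, skipping the
// first initial_offset bytes (the stage header for graphics shaders), so the binary can be
// inspected offline, e.g. with nvdisasm.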
static void DumpImpl(u64 pipeline_hash, u64 shader_hash, std::span<const u64> code,
                     [[maybe_unused]] u32 read_highest, [[maybe_unused]] u32 read_lowest,
                     u32 initial_offset, Shader::Stage stage) {
    const auto shader_dir{Common::FS::GetYuzuPath(Common::FS::YuzuPath::DumpDir)};
    const auto base_dir{shader_dir / "shaders"};
    if (!Common::FS::CreateDir(shader_dir) || !Common::FS::CreateDir(base_dir)) {
        LOG_ERROR(Common_Filesystem, "Failed to create shader dump directories");
        return;
    }
    const auto prefix = StageToPrefix(stage);
    const auto name{base_dir /
                    fmt::format("{:016x}_{}_{:016x}.ash", pipeline_hash, prefix, shader_hash)};
    std::fstream shader_file(name, std::ios::out | std::ios::binary);
    ASSERT(initial_offset % sizeof(u64) == 0);
    const size_t jump_index = initial_offset / sizeof(u64);
    const size_t code_size = code.size_bytes() - initial_offset;
    shader_file.write(reinterpret_cast<const char*>(&code[jump_index]), code_size);

    // Pad with one extra instruction plus alignment: the final self-branch instruction is skipped
    // in the dumped code, but it still has to be accounted for, otherwise nvdisasm complains.
    const size_t padding_needed = (32 - ((code_size + INST_SIZE) % 32)) % 32;
    for (size_t i = 0; i < INST_SIZE + padding_needed; i++) {
        shader_file.put(0);
    }
}

GenericEnvironment::GenericEnvironment(Tegra::MemoryManager& gpu_memory_, GPUVAddr program_base_,
                                       u32 start_address_)
    : gpu_memory{&gpu_memory_}, program_base{program_base_} {
    start_address = start_address_;
}

GenericEnvironment::~GenericEnvironment() = default;

u32 GenericEnvironment::TextureBoundBuffer() const {
    return texture_bound;
}

u32 GenericEnvironment::LocalMemorySize() const {
    return local_memory_size;
}

u32 GenericEnvironment::SharedMemorySize() const {
    return shared_memory_size;
}

std::array<u32, 3> GenericEnvironment::WorkgroupSize() const {
    return workgroup_size;
}

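// Reads one instruction while tracking the lowest and highest addresses touched. Cached code is
// served from the local copy; anything outside the cached range falls back to GPU memory and
// marks the environment as not serializable.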
u64 GenericEnvironment::ReadInstruction(u32 address) {
    read_lowest = std::min(read_lowest, address);
    read_highest = std::max(read_highest, address);

    if (address >= cached_lowest && address < cached_highest) {
        return code[(address - cached_lowest) / INST_SIZE];
    }
    has_unbound_instructions = true;
    return gpu_memory->Read<u64>(program_base + address);
}

std::optional<u64> GenericEnvironment::Analyze() {
    const std::optional<u64> size{TryFindSize()};
    if (!size) {
        return std::nullopt;
    }
    cached_lowest = start_address;
    cached_highest = start_address + static_cast<u32>(*size);
    return Common::CityHash64(reinterpret_cast<const char*>(code.data()), *size);
}

void GenericEnvironment::SetCachedSize(size_t size_bytes) {
    cached_lowest = start_address;
    cached_highest = start_address + static_cast<u32>(size_bytes);
    code.resize(CachedSizeWords());
    gpu_memory->ReadBlock(program_base + cached_lowest, code.data(), code.size() * sizeof(u64));
}

size_t GenericEnvironment::CachedSizeWords() const noexcept {
    return CachedSizeBytes() / INST_SIZE;
}

size_t GenericEnvironment::CachedSizeBytes() const noexcept {
    return static_cast<size_t>(cached_highest) - cached_lowest + INST_SIZE;
}

size_t GenericEnvironment::ReadSizeBytes() const noexcept {
    return read_highest - read_lowest + INST_SIZE;
}

bool GenericEnvironment::CanBeSerialized() const noexcept {
    return !has_unbound_instructions;
}

u64 GenericEnvironment::CalculateHash() const {
    const size_t size{ReadSizeBytes()};
    const auto data{std::make_unique<char[]>(size)};
    gpu_memory->ReadBlock(program_base + read_lowest, data.get(), size);
    return Common::CityHash64(data.get(), size);
}

void GenericEnvironment::Dump(u64 pipeline_hash, u64 shader_hash) {
    DumpImpl(pipeline_hash, shader_hash, code, read_highest, read_lowest, initial_offset, stage);
}

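// Appends one environment record to the pipeline cache file: the fixed-size counts and state
// first, then the raw code, then the cached texture type, pixel format, cbuf value and cbuf
// replacement maps, and finally the stage-specific block (workgroup/shared memory for compute,
// SPH otherwise). FileEnvironment::Deserialize reads the same layout back.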
void GenericEnvironment::Serialize(std::ofstream& file) const {
    const u64 code_size{static_cast<u64>(CachedSizeBytes())};
    const u64 num_texture_types{static_cast<u64>(texture_types.size())};
    const u64 num_texture_pixel_formats{static_cast<u64>(texture_pixel_formats.size())};
    const u64 num_cbuf_values{static_cast<u64>(cbuf_values.size())};
    const u64 num_cbuf_replacement_values{static_cast<u64>(cbuf_replacements.size())};

    file.write(reinterpret_cast<const char*>(&code_size), sizeof(code_size))
        .write(reinterpret_cast<const char*>(&num_texture_types), sizeof(num_texture_types))
        .write(reinterpret_cast<const char*>(&num_texture_pixel_formats),
               sizeof(num_texture_pixel_formats))
        .write(reinterpret_cast<const char*>(&num_cbuf_values), sizeof(num_cbuf_values))
        .write(reinterpret_cast<const char*>(&num_cbuf_replacement_values),
               sizeof(num_cbuf_replacement_values))
        .write(reinterpret_cast<const char*>(&local_memory_size), sizeof(local_memory_size))
        .write(reinterpret_cast<const char*>(&texture_bound), sizeof(texture_bound))
        .write(reinterpret_cast<const char*>(&start_address), sizeof(start_address))
        .write(reinterpret_cast<const char*>(&cached_lowest), sizeof(cached_lowest))
        .write(reinterpret_cast<const char*>(&cached_highest), sizeof(cached_highest))
        .write(reinterpret_cast<const char*>(&viewport_transform_state),
               sizeof(viewport_transform_state))
        .write(reinterpret_cast<const char*>(&stage), sizeof(stage))
        .write(reinterpret_cast<const char*>(code.data()), code_size);
    for (const auto& [key, type] : texture_types) {
        file.write(reinterpret_cast<const char*>(&key), sizeof(key))
            .write(reinterpret_cast<const char*>(&type), sizeof(type));
    }
    for (const auto& [key, format] : texture_pixel_formats) {
        file.write(reinterpret_cast<const char*>(&key), sizeof(key))
            .write(reinterpret_cast<const char*>(&format), sizeof(format));
    }
    for (const auto& [key, type] : cbuf_values) {
        file.write(reinterpret_cast<const char*>(&key), sizeof(key))
            .write(reinterpret_cast<const char*>(&type), sizeof(type));
    }
    for (const auto& [key, type] : cbuf_replacements) {
        file.write(reinterpret_cast<const char*>(&key), sizeof(key))
            .write(reinterpret_cast<const char*>(&type), sizeof(type));
    }
    if (stage == Shader::Stage::Compute) {
        file.write(reinterpret_cast<const char*>(&workgroup_size), sizeof(workgroup_size))
            .write(reinterpret_cast<const char*>(&shared_memory_size), sizeof(shared_memory_size));
    } else {
        file.write(reinterpret_cast<const char*>(&sph), sizeof(sph));
        if (stage == Shader::Stage::Geometry) {
            file.write(reinterpret_cast<const char*>(&gp_passthrough_mask),
                       sizeof(gp_passthrough_mask));
        }
    }
}

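// Scans guest memory in 4 KiB blocks, up to 1 MiB, looking for one of the two self-branch
// encodings that terminate a Maxwell shader. Returns the program size in bytes on success.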
std::optional<u64> GenericEnvironment::TryFindSize() {
    static constexpr size_t BLOCK_SIZE = 0x1000;
    static constexpr size_t MAXIMUM_SIZE = 0x100000;

    static constexpr u64 SELF_BRANCH_A = 0xE2400FFFFF87000FULL;
    static constexpr u64 SELF_BRANCH_B = 0xE2400FFFFF07000FULL;

    GPUVAddr guest_addr{program_base + start_address};
    size_t offset{0};
    size_t size{BLOCK_SIZE};
    while (size <= MAXIMUM_SIZE) {
        code.resize(size / INST_SIZE);
        u64* const data = code.data() + offset / INST_SIZE;
        gpu_memory->ReadBlock(guest_addr, data, BLOCK_SIZE);
        for (size_t index = 0; index < BLOCK_SIZE; index += INST_SIZE) {
            const u64 inst = data[index / INST_SIZE];
            if (inst == SELF_BRANCH_A || inst == SELF_BRANCH_B) {
                return offset + index;
            }
        }
        guest_addr += BLOCK_SIZE;
        size += BLOCK_SIZE;
        offset += BLOCK_SIZE;
    }
    return std::nullopt;
}

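// Resolves a raw texture handle to its TIC entry: splits the handle, bounds-checks it against the
// descriptor pool limit and reads the descriptor from GPU memory.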
Tegra::Texture::TICEntry GenericEnvironment::ReadTextureInfo(GPUVAddr tic_addr, u32 tic_limit,
                                                             bool via_header_index, u32 raw) {
    const auto handle{Tegra::Texture::TexturePair(raw, via_header_index)};
    ASSERT(handle.first <= tic_limit);
    const GPUVAddr descriptor_addr{tic_addr + handle.first * sizeof(Tegra::Texture::TICEntry)};
    Tegra::Texture::TICEntry entry;
    gpu_memory->ReadBlock(descriptor_addr, &entry, sizeof(entry));
    return entry;
}

GraphicsEnvironment::GraphicsEnvironment(Tegra::Engines::Maxwell3D& maxwell3d_,
                                         Tegra::MemoryManager& gpu_memory_,
                                         Maxwell::ShaderType program, GPUVAddr program_base_,
                                         u32 start_address_)
    : GenericEnvironment{gpu_memory_, program_base_, start_address_}, maxwell3d{&maxwell3d_} {
    gpu_memory->ReadBlock(program_base + start_address, &sph, sizeof(sph));
    initial_offset = sizeof(sph);
    gp_passthrough_mask = maxwell3d->regs.post_vtg_shader_attrib_skip_mask;
    switch (program) {
    case Maxwell::ShaderType::VertexA:
        stage = Shader::Stage::VertexA;
        stage_index = 0;
        break;
    case Maxwell::ShaderType::VertexB:
        stage = Shader::Stage::VertexB;
        stage_index = 0;
        break;
    case Maxwell::ShaderType::TessellationInit:
        stage = Shader::Stage::TessellationControl;
        stage_index = 1;
        break;
    case Maxwell::ShaderType::Tessellation:
        stage = Shader::Stage::TessellationEval;
        stage_index = 2;
        break;
    case Maxwell::ShaderType::Geometry:
        stage = Shader::Stage::Geometry;
        stage_index = 3;
        break;
    case Maxwell::ShaderType::Pixel:
        stage = Shader::Stage::Fragment;
        stage_index = 4;
        break;
    default:
        ASSERT_MSG(false, "Invalid program={}", program);
        break;
    }
    const u64 local_size{sph.LocalMemorySize()};
    ASSERT(local_size <= std::numeric_limits<u32>::max());
    local_memory_size = static_cast<u32>(local_size) + sph.common3.shader_local_memory_crs_size;
    texture_bound = maxwell3d->regs.bindless_texture_const_buffer_slot;
    is_propietary_driver = texture_bound == 2;
    has_hle_engine_state =
        maxwell3d->engine_state == Tegra::Engines::Maxwell3D::EngineHint::OnHLEMacro;
}

u32 GraphicsEnvironment::ReadCbufValue(u32 cbuf_index, u32 cbuf_offset) {
    const auto& cbuf{maxwell3d->state.shader_stages[stage_index].const_buffers[cbuf_index]};
    ASSERT(cbuf.enabled);
    u32 value{};
    if (cbuf_offset < cbuf.size) {
        value = gpu_memory->Read<u32>(cbuf.address + cbuf_offset);
    }
    cbuf_values.emplace(MakeCbufKey(cbuf_index, cbuf_offset), value);
    return value;
}

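// When an HLE macro is active, maps a (bank, offset) constant buffer read to the attribute the
// macro replaces (base vertex, base instance or draw index) so the recompiler can substitute it.
// Returns std::nullopt when no replacement applies.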
std::optional<Shader::ReplaceConstant> GraphicsEnvironment::GetReplaceConstBuffer(u32 bank,
                                                                                  u32 offset) {
    if (!has_hle_engine_state) {
        return std::nullopt;
    }
    const u64 key = (static_cast<u64>(bank) << 32) | static_cast<u64>(offset);
    auto it = maxwell3d->replace_table.find(key);
    if (it == maxwell3d->replace_table.end()) {
        return std::nullopt;
    }
    const auto converted_value = [](Tegra::Engines::Maxwell3D::HLEReplacementAttributeType name) {
        switch (name) {
        case Tegra::Engines::Maxwell3D::HLEReplacementAttributeType::BaseVertex:
            return Shader::ReplaceConstant::BaseVertex;
        case Tegra::Engines::Maxwell3D::HLEReplacementAttributeType::BaseInstance:
            return Shader::ReplaceConstant::BaseInstance;
        case Tegra::Engines::Maxwell3D::HLEReplacementAttributeType::DrawID:
            return Shader::ReplaceConstant::DrawID;
        default:
            UNREACHABLE();
        }
    }(it->second);
    cbuf_replacements.emplace(key, converted_value);
    return converted_value;
}

Shader::TextureType GraphicsEnvironment::ReadTextureType(u32 handle) {
    const auto& regs{maxwell3d->regs};
    const bool via_header_index{regs.sampler_binding == Maxwell::SamplerBinding::ViaHeaderBinding};
    auto entry =
        ReadTextureInfo(regs.tex_header.Address(), regs.tex_header.limit, via_header_index, handle);
    const Shader::TextureType result{ConvertTextureType(entry)};
    texture_types.emplace(handle, result);
    return result;
}

Shader::TexturePixelFormat GraphicsEnvironment::ReadTexturePixelFormat(u32 handle) {
    const auto& regs{maxwell3d->regs};
    const bool via_header_index{regs.sampler_binding == Maxwell::SamplerBinding::ViaHeaderBinding};
    auto entry =
        ReadTextureInfo(regs.tex_header.Address(), regs.tex_header.limit, via_header_index, handle);
    const Shader::TexturePixelFormat result(ConvertTexturePixelFormat(entry));
    texture_pixel_formats.emplace(handle, result);
    return result;
}

u32 GraphicsEnvironment::ReadViewportTransformState() {
    const auto& regs{maxwell3d->regs};
    viewport_transform_state = regs.viewport_scale_offset_enabled;
    return viewport_transform_state;
}

ComputeEnvironment::ComputeEnvironment(Tegra::Engines::KeplerCompute& kepler_compute_,
                                       Tegra::MemoryManager& gpu_memory_, GPUVAddr program_base_,
                                       u32 start_address_)
    : GenericEnvironment{gpu_memory_, program_base_, start_address_}, kepler_compute{
                                                                          &kepler_compute_} {
    const auto& qmd{kepler_compute->launch_description};
    stage = Shader::Stage::Compute;
    local_memory_size = qmd.local_pos_alloc + qmd.local_crs_alloc;
    texture_bound = kepler_compute->regs.tex_cb_index;
    is_propietary_driver = texture_bound == 2;
    shared_memory_size = qmd.shared_alloc;
    workgroup_size = {qmd.block_dim_x, qmd.block_dim_y, qmd.block_dim_z};
}

u32 ComputeEnvironment::ReadCbufValue(u32 cbuf_index, u32 cbuf_offset) {
    const auto& qmd{kepler_compute->launch_description};
    ASSERT(((qmd.const_buffer_enable_mask.Value() >> cbuf_index) & 1) != 0);
    const auto& cbuf{qmd.const_buffer_config[cbuf_index]};
    u32 value{};
    if (cbuf_offset < cbuf.size) {
        value = gpu_memory->Read<u32>(cbuf.Address() + cbuf_offset);
    }
    cbuf_values.emplace(MakeCbufKey(cbuf_index, cbuf_offset), value);
    return value;
}

Shader::TextureType ComputeEnvironment::ReadTextureType(u32 handle) {
    const auto& regs{kepler_compute->regs};
    const auto& qmd{kepler_compute->launch_description};
    auto entry = ReadTextureInfo(regs.tic.Address(), regs.tic.limit, qmd.linked_tsc != 0, handle);
    const Shader::TextureType result{ConvertTextureType(entry)};
    texture_types.emplace(handle, result);
    return result;
}

Shader::TexturePixelFormat ComputeEnvironment::ReadTexturePixelFormat(u32 handle) {
    const auto& regs{kepler_compute->regs};
    const auto& qmd{kepler_compute->launch_description};
    auto entry = ReadTextureInfo(regs.tic.Address(), regs.tic.limit, qmd.linked_tsc != 0, handle);
    const Shader::TexturePixelFormat result(ConvertTexturePixelFormat(entry));
    texture_pixel_formats.emplace(handle, result);
    return result;
}

u32 ComputeEnvironment::ReadViewportTransformState() {
    return viewport_transform_state;
}

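// Reads back one environment record written by GenericEnvironment::Serialize, in the same order:
// counts, fixed state, raw code, the cached lookup maps, and the stage-specific tail.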
void FileEnvironment::Deserialize(std::ifstream& file) {
    u64 code_size{};
    u64 num_texture_types{};
    u64 num_texture_pixel_formats{};
    u64 num_cbuf_values{};
    u64 num_cbuf_replacement_values{};
    file.read(reinterpret_cast<char*>(&code_size), sizeof(code_size))
        .read(reinterpret_cast<char*>(&num_texture_types), sizeof(num_texture_types))
        .read(reinterpret_cast<char*>(&num_texture_pixel_formats),
              sizeof(num_texture_pixel_formats))
        .read(reinterpret_cast<char*>(&num_cbuf_values), sizeof(num_cbuf_values))
        .read(reinterpret_cast<char*>(&num_cbuf_replacement_values),
              sizeof(num_cbuf_replacement_values))
        .read(reinterpret_cast<char*>(&local_memory_size), sizeof(local_memory_size))
        .read(reinterpret_cast<char*>(&texture_bound), sizeof(texture_bound))
        .read(reinterpret_cast<char*>(&start_address), sizeof(start_address))
        .read(reinterpret_cast<char*>(&read_lowest), sizeof(read_lowest))
        .read(reinterpret_cast<char*>(&read_highest), sizeof(read_highest))
        .read(reinterpret_cast<char*>(&viewport_transform_state), sizeof(viewport_transform_state))
        .read(reinterpret_cast<char*>(&stage), sizeof(stage));
    code.resize(Common::DivCeil(code_size, sizeof(u64)));
    file.read(reinterpret_cast<char*>(code.data()), code_size);
    for (size_t i = 0; i < num_texture_types; ++i) {
        u32 key;
        Shader::TextureType type;
        file.read(reinterpret_cast<char*>(&key), sizeof(key))
            .read(reinterpret_cast<char*>(&type), sizeof(type));
        texture_types.emplace(key, type);
    }
    for (size_t i = 0; i < num_texture_pixel_formats; ++i) {
        u32 key;
        Shader::TexturePixelFormat format;
        file.read(reinterpret_cast<char*>(&key), sizeof(key))
            .read(reinterpret_cast<char*>(&format), sizeof(format));
        texture_pixel_formats.emplace(key, format);
    }
    for (size_t i = 0; i < num_cbuf_values; ++i) {
        u64 key;
        u32 value;
        file.read(reinterpret_cast<char*>(&key), sizeof(key))
            .read(reinterpret_cast<char*>(&value), sizeof(value));
        cbuf_values.emplace(key, value);
    }
    for (size_t i = 0; i < num_cbuf_replacement_values; ++i) {
        u64 key;
        Shader::ReplaceConstant value;
        file.read(reinterpret_cast<char*>(&key), sizeof(key))
            .read(reinterpret_cast<char*>(&value), sizeof(value));
        cbuf_replacements.emplace(key, value);
    }
    if (stage == Shader::Stage::Compute) {
        file.read(reinterpret_cast<char*>(&workgroup_size), sizeof(workgroup_size))
            .read(reinterpret_cast<char*>(&shared_memory_size), sizeof(shared_memory_size));
        initial_offset = 0;
    } else {
        file.read(reinterpret_cast<char*>(&sph), sizeof(sph));
        initial_offset = sizeof(sph);
        if (stage == Shader::Stage::Geometry) {
            file.read(reinterpret_cast<char*>(&gp_passthrough_mask), sizeof(gp_passthrough_mask));
        }
    }
    is_propietary_driver = texture_bound == 2;
}

void FileEnvironment::Dump(u64 pipeline_hash, u64 shader_hash) {
    DumpImpl(pipeline_hash, shader_hash, code, read_highest, read_lowest, initial_offset, stage);
}

u64 FileEnvironment::ReadInstruction(u32 address) {
    if (address < read_lowest || address > read_highest) {
        throw Shader::LogicError("Out of bounds address {}", address);
    }
    return code[(address - read_lowest) / sizeof(u64)];
}

u32 FileEnvironment::ReadCbufValue(u32 cbuf_index, u32 cbuf_offset) {
    const auto it{cbuf_values.find(MakeCbufKey(cbuf_index, cbuf_offset))};
    if (it == cbuf_values.end()) {
        throw Shader::LogicError("Uncached read constant buffer value");
    }
    return it->second;
}

Shader::TextureType FileEnvironment::ReadTextureType(u32 handle) {
    const auto it{texture_types.find(handle)};
    if (it == texture_types.end()) {
        throw Shader::LogicError("Uncached read texture type");
    }
    return it->second;
}

Shader::TexturePixelFormat FileEnvironment::ReadTexturePixelFormat(u32 handle) {
    const auto it{texture_pixel_formats.find(handle)};
    if (it == texture_pixel_formats.end()) {
        throw Shader::LogicError("Uncached read texture pixel format");
    }
    return it->second;
}

u32 FileEnvironment::ReadViewportTransformState() {
    return viewport_transform_state;
}

u32 FileEnvironment::LocalMemorySize() const {
    return local_memory_size;
}

u32 FileEnvironment::SharedMemorySize() const {
    return shared_memory_size;
}

u32 FileEnvironment::TextureBoundBuffer() const {
    return texture_bound;
}

std::array<u32, 3> FileEnvironment::WorkgroupSize() const {
    return workgroup_size;
}

std::optional<Shader::ReplaceConstant> FileEnvironment::GetReplaceConstBuffer(u32 bank,
                                                                              u32 offset) {
    const u64 key = (static_cast<u64>(bank) << 32) | static_cast<u64>(offset);
    auto it = cbuf_replacements.find(key);
    if (it == cbuf_replacements.end()) {
        return std::nullopt;
    }
    return it->second;
}

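// Appends one pipeline entry to the cache file: the magic number and cache version are written
// once when the file is empty, followed by the environment count, every environment and the
// pipeline key. Pipelines with environments that read unbound instructions are not written.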
void SerializePipeline(std::span<const char> key, std::span<const GenericEnvironment* const> envs,
                       const std::filesystem::path& filename, u32 cache_version) try {
    std::ofstream file(filename, std::ios::binary | std::ios::ate | std::ios::app);
    file.exceptions(std::ifstream::failbit);
    if (!file.is_open()) {
        LOG_ERROR(Common_Filesystem, "Failed to open pipeline cache file {}",
                  Common::FS::PathToUTF8String(filename));
        return;
    }
    if (file.tellp() == 0) {
        // Write header
        file.write(MAGIC_NUMBER.data(), MAGIC_NUMBER.size())
            .write(reinterpret_cast<const char*>(&cache_version), sizeof(cache_version));
    }
    if (!std::ranges::all_of(envs, &GenericEnvironment::CanBeSerialized)) {
        return;
    }
    const u32 num_envs{static_cast<u32>(envs.size())};
    file.write(reinterpret_cast<const char*>(&num_envs), sizeof(num_envs));
    for (const GenericEnvironment* const env : envs) {
        env->Serialize(file);
    }
    file.write(key.data(), key.size_bytes());

} catch (const std::ios_base::failure& e) {
    LOG_ERROR(Common_Filesystem, "{}", e.what());
    if (!Common::FS::RemoveFile(filename)) {
        LOG_ERROR(Common_Filesystem, "Failed to delete pipeline cache file {}",
                  Common::FS::PathToUTF8String(filename));
    }
}

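// Streams the pipeline cache back in: validates the magic number and cache version (deleting the
// file when either mismatches), then reads entries until the end of the file, handing each one to
// the compute or graphics callback. Loading stops early if stop_loading is requested.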
void LoadPipelines(
    std::stop_token stop_loading, const std::filesystem::path& filename, u32 expected_cache_version,
    Common::UniqueFunction<void, std::ifstream&, FileEnvironment> load_compute,
    Common::UniqueFunction<void, std::ifstream&, std::vector<FileEnvironment>> load_graphics) try {
    std::ifstream file(filename, std::ios::binary | std::ios::ate);
    if (!file.is_open()) {
        return;
    }
    file.exceptions(std::ifstream::failbit);
    const auto end{file.tellg()};
    file.seekg(0, std::ios::beg);

    std::array<char, 8> magic_number;
    u32 cache_version;
    file.read(magic_number.data(), magic_number.size())
        .read(reinterpret_cast<char*>(&cache_version), sizeof(cache_version));
    if (magic_number != MAGIC_NUMBER || cache_version != expected_cache_version) {
        file.close();
        if (Common::FS::RemoveFile(filename)) {
            if (magic_number != MAGIC_NUMBER) {
                LOG_ERROR(Common_Filesystem, "Invalid pipeline cache file");
            }
            if (cache_version != expected_cache_version) {
                LOG_INFO(Common_Filesystem, "Deleting old pipeline cache");
            }
        } else {
            LOG_ERROR(Common_Filesystem,
                      "Invalid pipeline cache file and failed to delete it in \"{}\"",
                      Common::FS::PathToUTF8String(filename));
        }
        return;
    }
    while (file.tellg() != end) {
        if (stop_loading.stop_requested()) {
            return;
        }
        u32 num_envs{};
        file.read(reinterpret_cast<char*>(&num_envs), sizeof(num_envs));
        std::vector<FileEnvironment> envs(num_envs);
        for (FileEnvironment& env : envs) {
            env.Deserialize(file);
        }
        if (envs.front().ShaderStage() == Shader::Stage::Compute) {
            load_compute(file, std::move(envs.front()));
        } else {
            load_graphics(file, std::move(envs));
        }
    }

} catch (const std::ios_base::failure& e) {
    LOG_ERROR(Common_Filesystem, "{}", e.what());
    if (!Common::FS::RemoveFile(filename)) {
        LOG_ERROR(Common_Filesystem, "Failed to delete pipeline cache file {}",
                  Common::FS::PathToUTF8String(filename));
    }
}

} // namespace VideoCommon