// Copyright 2019 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <algorithm>
#include <array>
#include <bitset>
#include <optional>
#include <tuple>
#include <utility>
#include <vector>

#include <fmt/format.h>

#include "common/assert.h"
|
2019-06-08 17:25:11 +02:00
|
|
|
#include "common/bit_field.h"
|
2019-04-27 07:07:18 +02:00
|
|
|
#include "common/common_types.h"
|
2019-06-08 17:25:11 +02:00
|
|
|
#include "common/logging/log.h"
|
2019-04-27 07:07:18 +02:00
|
|
|
#include "video_core/engines/shader_bytecode.h"
|
2019-06-08 17:25:11 +02:00
|
|
|
#include "video_core/shader/node_helper.h"
|
2019-04-27 07:07:18 +02:00
|
|
|
#include "video_core/shader/shader_ir.h"
|
2020-03-22 14:14:12 +01:00
|
|
|
#include "video_core/textures/texture.h"
|
2019-04-27 07:07:18 +02:00
|
|
|
|
|
|
|
namespace VideoCommon::Shader {

using Tegra::Shader::Instruction;
using Tegra::Shader::OpCode;
using Tegra::Shader::PredCondition;
using Tegra::Shader::StoreType;
using Tegra::Texture::ComponentType;
using Tegra::Texture::TextureFormat;
using Tegra::Texture::TICEntry;

namespace {
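
// Returns the component type (r/g/b/a) of the sampler descriptor that corresponds to the given
// component index, following the component ordering of the texture format.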
|
2020-04-05 07:54:48 +02:00
|
|
|
|
2020-04-05 07:57:50 +02:00
|
|
|
ComponentType GetComponentType(Tegra::Engines::SamplerDescriptor descriptor,
|
|
|
|
std::size_t component) {
|
2020-04-05 07:54:48 +02:00
|
|
|
const TextureFormat format{descriptor.format};
|
2020-03-22 14:29:46 +01:00
|
|
|
switch (format) {
|
|
|
|
case TextureFormat::R16_G16_B16_A16:
|
|
|
|
case TextureFormat::R32_G32_B32_A32:
|
|
|
|
case TextureFormat::R32_G32_B32:
|
|
|
|
case TextureFormat::R32_G32:
|
|
|
|
case TextureFormat::R16_G16:
|
|
|
|
case TextureFormat::R32:
|
|
|
|
case TextureFormat::R16:
|
|
|
|
case TextureFormat::R8:
|
|
|
|
case TextureFormat::R1:
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 0) {
|
|
|
|
return descriptor.r_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 1) {
|
|
|
|
return descriptor.g_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 2) {
|
|
|
|
return descriptor.b_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 3) {
|
|
|
|
return descriptor.a_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
|
|
|
break;
|
|
|
|
case TextureFormat::A8R8G8B8:
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 0) {
|
|
|
|
return descriptor.a_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 1) {
|
|
|
|
return descriptor.r_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 2) {
|
|
|
|
return descriptor.g_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 3) {
|
|
|
|
return descriptor.b_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
|
|
|
break;
|
|
|
|
case TextureFormat::A2B10G10R10:
|
|
|
|
case TextureFormat::A4B4G4R4:
|
|
|
|
case TextureFormat::A5B5G5R1:
|
|
|
|
case TextureFormat::A1B5G5R5:
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 0) {
|
|
|
|
return descriptor.a_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 1) {
|
|
|
|
return descriptor.b_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 2) {
|
|
|
|
return descriptor.g_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 3) {
|
|
|
|
return descriptor.r_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
|
|
|
break;
|
|
|
|
case TextureFormat::R32_B24G8:
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 0) {
|
|
|
|
return descriptor.r_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 1) {
|
|
|
|
return descriptor.b_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 2) {
|
|
|
|
return descriptor.g_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
|
|
|
break;
|
|
|
|
case TextureFormat::B5G6R5:
|
|
|
|
case TextureFormat::B6G5R5:
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 0) {
|
|
|
|
return descriptor.b_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 1) {
|
|
|
|
return descriptor.g_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 2) {
|
|
|
|
return descriptor.r_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
|
|
|
break;
|
|
|
|
case TextureFormat::G8R24:
|
|
|
|
case TextureFormat::G24R8:
|
|
|
|
case TextureFormat::G8R8:
|
|
|
|
case TextureFormat::G4R4:
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 0) {
|
|
|
|
return descriptor.g_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
2020-04-05 07:54:48 +02:00
|
|
|
if (component == 1) {
|
|
|
|
return descriptor.r_type;
|
2020-03-22 14:29:46 +01:00
|
|
|
}
|
|
|
|
break;
|
2020-03-22 14:14:12 +01:00
|
|
|
}
    UNIMPLEMENTED_MSG("Texture format not implemented={}", static_cast<u32>(format));
    return ComponentType::FLOAT;
}
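
// Returns true when the component at the given index (0 = R, 1 = G, 2 = B, 3 = A) is set in the
// component mask.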
bool IsComponentEnabled(std::size_t component_mask, std::size_t component) {
    constexpr u8 R = 0b0001;
    constexpr u8 G = 0b0010;
    constexpr u8 B = 0b0100;
    constexpr u8 A = 0b1000;
    constexpr std::array<u8, 16> mask = {
        0, (R), (G), (R | G), (B), (R | B), (G | B), (R | G | B),
        (A), (R | A), (G | A), (R | G | A), (B | A), (R | B | A), (G | B | A), (R | G | B | A)};
    return std::bitset<4>{mask.at(component_mask)}.test(component);
}
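
// Returns the size in bits of a texture format's component, or 0 when the component is not part
// of the format.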
u32 GetComponentSize(TextureFormat format, std::size_t component) {
    switch (format) {
    case TextureFormat::R32_G32_B32_A32:
        return 32;
    case TextureFormat::R16_G16_B16_A16:
        return 16;
    case TextureFormat::R32_G32_B32:
        return component <= 2 ? 32 : 0;
    case TextureFormat::R32_G32:
        return component <= 1 ? 32 : 0;
    case TextureFormat::R16_G16:
        return component <= 1 ? 16 : 0;
    case TextureFormat::R32:
        return component == 0 ? 32 : 0;
    case TextureFormat::R16:
        return component == 0 ? 16 : 0;
    case TextureFormat::R8:
        return component == 0 ? 8 : 0;
    case TextureFormat::R1:
        return component == 0 ? 1 : 0;
    case TextureFormat::A8R8G8B8:
        return 8;
    case TextureFormat::A2B10G10R10:
        return (component == 3 || component == 2 || component == 1) ? 10 : 2;
    case TextureFormat::A4B4G4R4:
        return 4;
    case TextureFormat::A5B5G5R1:
        return (component == 0 || component == 1 || component == 2) ? 5 : 1;
    case TextureFormat::A1B5G5R5:
        return (component == 1 || component == 2 || component == 3) ? 5 : 1;
    case TextureFormat::R32_B24G8:
        if (component == 0) {
            return 32;
        }
        if (component == 1) {
            return 24;
        }
        if (component == 2) {
            return 8;
        }
        return 0;
    case TextureFormat::B5G6R5:
        if (component == 0 || component == 2) {
            return 5;
        }
        if (component == 1) {
            return 6;
        }
        return 0;
    case TextureFormat::B6G5R5:
        if (component == 1 || component == 2) {
            return 5;
        }
        if (component == 0) {
            return 6;
        }
        return 0;
    case TextureFormat::G8R24:
        if (component == 0) {
            return 8;
        }
        if (component == 1) {
            return 24;
        }
        return 0;
    case TextureFormat::G24R8:
        if (component == 0) {
            return 8;
        }
        if (component == 1) {
            return 24;
        }
        return 0;
    case TextureFormat::G8R8:
        return (component == 0 || component == 1) ? 8 : 0;
    case TextureFormat::G4R4:
        return (component == 0 || component == 1) ? 4 : 0;
    default:
        UNIMPLEMENTED_MSG("Texture format not implemented={}", static_cast<u32>(format));
        return 0;
    }
}
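
// Returns a bitmask of the components present in a texture format (bit 0 = R, bit 1 = G,
// bit 2 = B, bit 3 = A).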
std::size_t GetImageComponentMask(TextureFormat format) {
    constexpr u8 R = 0b0001;
    constexpr u8 G = 0b0010;
    constexpr u8 B = 0b0100;
    constexpr u8 A = 0b1000;
    switch (format) {
    case TextureFormat::R32_G32_B32_A32:
    case TextureFormat::R16_G16_B16_A16:
    case TextureFormat::A8R8G8B8:
    case TextureFormat::A2B10G10R10:
    case TextureFormat::A4B4G4R4:
    case TextureFormat::A5B5G5R1:
    case TextureFormat::A1B5G5R5:
        return std::size_t{R | G | B | A};
    case TextureFormat::R32_G32_B32:
    case TextureFormat::R32_B24G8:
    case TextureFormat::B5G6R5:
    case TextureFormat::B6G5R5:
        return std::size_t{R | G | B};
    case TextureFormat::R32_G32:
    case TextureFormat::R16_G16:
    case TextureFormat::G8R24:
    case TextureFormat::G24R8:
    case TextureFormat::G8R8:
    case TextureFormat::G4R4:
        return std::size_t{R | G};
    case TextureFormat::R32:
    case TextureFormat::R16:
    case TextureFormat::R8:
    case TextureFormat::R1:
        return std::size_t{R};
    default:
        UNIMPLEMENTED_MSG("Texture format not implemented={}", static_cast<u32>(format));
        return std::size_t{R | G | B | A};
    }
}
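
// Returns the number of coordinate registers used to address an image of the given type.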
std::size_t GetImageTypeNumCoordinates(Tegra::Shader::ImageType image_type) {
    switch (image_type) {
    case Tegra::Shader::ImageType::Texture1D:
    case Tegra::Shader::ImageType::TextureBuffer:
        return 1;
    case Tegra::Shader::ImageType::Texture1DArray:
    case Tegra::Shader::ImageType::Texture2D:
        return 2;
    case Tegra::Shader::ImageType::Texture2DArray:
    case Tegra::Shader::ImageType::Texture3D:
        return 3;
    }
    UNREACHABLE();
    return 1;
}

} // Anonymous namespace
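
// Converts a loaded image component according to its component type and bit size, returning the
// converted value and whether it is to be treated as signed.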
std::pair<Node, bool> ShaderIR::GetComponentValue(ComponentType component_type, u32 component_size,
                                                  Node original_value) {
    switch (component_type) {
    case ComponentType::SNORM: {
        // range [-1.0, 1.0]
        auto cnv_value = Operation(OperationCode::FMul, original_value,
                                   Immediate(static_cast<float>(1 << component_size) / 2.f - 1.f));
        cnv_value = Operation(OperationCode::ICastFloat, std::move(cnv_value));
        return {BitfieldExtract(std::move(cnv_value), 0, component_size), true};
    }
    case ComponentType::SINT:
    case ComponentType::UNORM: {
        bool is_signed = component_type == ComponentType::SINT;
        // range [0.0, 1.0]
        auto cnv_value = Operation(OperationCode::FMul, original_value,
                                   Immediate(static_cast<float>(1 << component_size) - 1.f));
        return {SignedOperation(OperationCode::ICastFloat, is_signed, std::move(cnv_value)),
                is_signed};
    }
    case ComponentType::UINT: // range [0, (1 << component_size) - 1]
        return {std::move(original_value), false};
    case ComponentType::FLOAT:
        if (component_size == 16) {
            return {Operation(OperationCode::HCastFloat, original_value), true};
        } else {
            return {std::move(original_value), true};
        }
    default:
        UNIMPLEMENTED_MSG("Unimplemented component type={}", static_cast<u32>(component_type));
        return {std::move(original_value), true};
    }
}
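
// Decodes surface load, store and atomic instructions (SULD, SUST, SUATOM).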
u32 ShaderIR::DecodeImage(NodeBlock& bb, u32 pc) {
    const Instruction instr = {program_code[pc]};
    const auto opcode = OpCode::Decode(instr);

    const auto GetCoordinates = [this, instr](Tegra::Shader::ImageType image_type) {
        std::vector<Node> coords;
        const std::size_t num_coords{GetImageTypeNumCoordinates(image_type)};
        coords.reserve(num_coords);
        for (std::size_t i = 0; i < num_coords; ++i) {
            coords.push_back(GetRegister(instr.gpr8.Value() + i));
        }
        return coords;
    };

    switch (opcode->get().GetId()) {
    case OpCode::Id::SULD: {
        UNIMPLEMENTED_IF(instr.suldst.out_of_bounds_store !=
                         Tegra::Shader::OutOfBoundsStore::Ignore);

        const auto type{instr.suldst.image_type};
        auto& image{instr.suldst.is_immediate ? GetImage(instr.image, type)
                                              : GetBindlessImage(instr.gpr39, type)};
        image.MarkRead();

        if (instr.suldst.mode == Tegra::Shader::SurfaceDataMode::P) {
            u32 indexer = 0;
            for (u32 element = 0; element < 4; ++element) {
                if (!instr.suldst.IsComponentEnabled(element)) {
                    continue;
                }
                MetaImage meta{image, {}, element};
                Node value = Operation(OperationCode::ImageLoad, meta, GetCoordinates(type));
                SetTemporary(bb, indexer++, std::move(value));
            }
            for (u32 i = 0; i < indexer; ++i) {
                SetRegister(bb, instr.gpr0.Value() + i, GetTemporary(i));
            }
        } else if (instr.suldst.mode == Tegra::Shader::SurfaceDataMode::D_BA) {
            UNIMPLEMENTED_IF(instr.suldst.GetStoreDataLayout() != StoreType::Bits32 &&
                             instr.suldst.GetStoreDataLayout() != StoreType::Bits64);

            auto descriptor = [this, instr] {
                std::optional<Tegra::Engines::SamplerDescriptor> descriptor;
                if (instr.suldst.is_immediate) {
                    descriptor =
                        registry.ObtainBoundSampler(static_cast<u32>(instr.image.index.Value()));
                } else {
                    const Node image_register = GetRegister(instr.gpr39);
                    const auto result = TrackCbuf(image_register, global_code,
                                                  static_cast<s64>(global_code.size()));
                    const auto buffer = std::get<1>(result);
                    const auto offset = std::get<2>(result);
                    descriptor = registry.ObtainBindlessSampler(buffer, offset);
                }
                if (!descriptor) {
                    UNREACHABLE_MSG("Failed to obtain image descriptor");
                }
                return *descriptor;
            }();

            const auto comp_mask = GetImageComponentMask(descriptor.format);

            switch (instr.suldst.GetStoreDataLayout()) {
            case StoreType::Bits32:
            case StoreType::Bits64: {
                u32 indexer = 0;
                u32 shifted_counter = 0;
                Node value = Immediate(0);
                for (u32 element = 0; element < 4; ++element) {
                    if (!IsComponentEnabled(comp_mask, element)) {
                        continue;
                    }
                    const auto component_type = GetComponentType(descriptor, element);
                    const auto component_size = GetComponentSize(descriptor.format, element);
                    MetaImage meta{image, {}, element};

                    auto [converted_value, is_signed] = GetComponentValue(
                        component_type, component_size,
                        Operation(OperationCode::ImageLoad, meta, GetCoordinates(type)));

                    // Shift the component into its position within the packed word
                    const auto shifted = shifted_counter;
                    if (shifted > 0) {
                        converted_value =
                            SignedOperation(OperationCode::ILogicalShiftLeft, is_signed,
                                            std::move(converted_value), Immediate(shifted));
                    }
                    shifted_counter += component_size;

                    // Merge the component value into the packed result
                    value = Operation(OperationCode::UBitwiseOr, value, std::move(converted_value));

                    // Once a full 32-bit word has been packed, save it in a temporary
                    if (shifted_counter >= 32) {
                        SetTemporary(bb, indexer++, std::move(value));
                        // Reset the counter and value to start packing the next word
                        value = Immediate(0);
                        shifted_counter = 0;
                    }
                }
                for (u32 i = 0; i < indexer; ++i) {
                    SetRegister(bb, instr.gpr0.Value() + i, GetTemporary(i));
                }
                break;
            }
            default:
                UNREACHABLE();
                break;
            }
        }
        break;
    }
    case OpCode::Id::SUST: {
        UNIMPLEMENTED_IF(instr.suldst.mode != Tegra::Shader::SurfaceDataMode::P);
        UNIMPLEMENTED_IF(instr.suldst.out_of_bounds_store !=
                         Tegra::Shader::OutOfBoundsStore::Ignore);
        UNIMPLEMENTED_IF(instr.suldst.component_mask_selector != 0xf); // Ensure we have RGBA

        std::vector<Node> values;
        constexpr std::size_t hardcoded_size{4};
        for (std::size_t i = 0; i < hardcoded_size; ++i) {
            values.push_back(GetRegister(instr.gpr0.Value() + i));
        }

        const auto type{instr.suldst.image_type};
        auto& image{instr.suldst.is_immediate ? GetImage(instr.image, type)
                                              : GetBindlessImage(instr.gpr39, type)};
        image.MarkWrite();

        MetaImage meta{image, std::move(values)};
        bb.push_back(Operation(OperationCode::ImageStore, meta, GetCoordinates(type)));
        break;
    }
    case OpCode::Id::SUATOM: {
        UNIMPLEMENTED_IF(instr.suatom_d.is_ba != 0);

        const OperationCode operation_code = [instr] {
            switch (instr.suatom_d.operation_type) {
            case Tegra::Shader::ImageAtomicOperationType::S32:
            case Tegra::Shader::ImageAtomicOperationType::U32:
                switch (instr.suatom_d.operation) {
                case Tegra::Shader::ImageAtomicOperation::Add:
                    return OperationCode::AtomicImageAdd;
                case Tegra::Shader::ImageAtomicOperation::And:
                    return OperationCode::AtomicImageAnd;
                case Tegra::Shader::ImageAtomicOperation::Or:
                    return OperationCode::AtomicImageOr;
                case Tegra::Shader::ImageAtomicOperation::Xor:
                    return OperationCode::AtomicImageXor;
                case Tegra::Shader::ImageAtomicOperation::Exch:
                    return OperationCode::AtomicImageExchange;
                }
            default:
                break;
            }
            UNIMPLEMENTED_MSG("Unimplemented operation={} type={}",
                              static_cast<u64>(instr.suatom_d.operation.Value()),
                              static_cast<u64>(instr.suatom_d.operation_type.Value()));
            return OperationCode::AtomicImageAdd;
        }();

        Node value = GetRegister(instr.gpr0);

        const auto type = instr.suatom_d.image_type;
        auto& image = GetImage(instr.image, type);
        image.MarkAtomic();

        MetaImage meta{image, {std::move(value)}};
        SetRegister(bb, instr.gpr0, Operation(operation_code, meta, GetCoordinates(type)));
        break;
    }
    default:
        UNIMPLEMENTED_MSG("Unhandled image instruction: {}", opcode->get().GetName());
    }

    return pc;
}
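
// Returns the image entry for a bound (immediate) image, registering it on first use.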
Image& ShaderIR::GetImage(Tegra::Shader::Image image, Tegra::Shader::ImageType type) {
    const auto offset = static_cast<u32>(image.index.Value());

    const auto it = std::find_if(std::begin(used_images), std::end(used_images),
                                 [offset](const Image& entry) { return entry.offset == offset; });
    if (it != std::end(used_images)) {
        ASSERT(!it->is_bindless && it->type == type);
        return *it;
    }

    const auto next_index = static_cast<u32>(used_images.size());
    return used_images.emplace_back(next_index, offset, type);
}
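
// Tracks the constant buffer that holds a bindless image handle and returns the matching image
// entry, registering it on first use.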
Image& ShaderIR::GetBindlessImage(Tegra::Shader::Register reg, Tegra::Shader::ImageType type) {
    const Node image_register = GetRegister(reg);
    const auto result =
        TrackCbuf(image_register, global_code, static_cast<s64>(global_code.size()));

    const auto buffer = std::get<1>(result);
    const auto offset = std::get<2>(result);

    const auto it = std::find_if(std::begin(used_images), std::end(used_images),
                                 [buffer, offset](const Image& entry) {
                                     return entry.buffer == buffer && entry.offset == offset;
                                 });
    if (it != std::end(used_images)) {
        ASSERT(it->is_bindless && it->type == type);
        return *it;
    }

    const auto next_index = static_cast<u32>(used_images.size());
    return used_images.emplace_back(next_index, offset, buffer, type);
}

} // namespace VideoCommon::Shader