2019-12-25 22:12:17 +01:00
|
|
|
// Copyright 2019 yuzu Emulator Project
|
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
|
|
|
#include <algorithm>
#include <optional>
#include <span>
#include <utility>
#include <vector>

#include <fmt/format.h>

#include "common/alignment.h"
#include "common/assert.h"
#include "common/bit_util.h"
#include "common/common_types.h"
#include "video_core/renderer_vulkan/vk_scheduler.h"
#include "video_core/renderer_vulkan/vk_staging_buffer_pool.h"
#include "video_core/vulkan_common/vulkan_device.h"
#include "video_core/vulkan_common/vulkan_wrapper.h"
|
2019-12-25 22:12:17 +01:00
|
|
|
|
|
|
|
namespace Vulkan {
|
2021-01-16 20:20:18 +01:00
|
|
|
namespace {
// Maximum potential alignment of a Vulkan buffer
constexpr VkDeviceSize MAX_ALIGNMENT = 256;
// Maximum size to put elements in the stream buffer
constexpr VkDeviceSize MAX_STREAM_BUFFER_REQUEST_SIZE = 8 * 1024 * 1024;
// Stream buffer size in bytes
constexpr VkDeviceSize STREAM_BUFFER_SIZE = 128 * 1024 * 1024;
// Size of one synchronization region of the stream buffer; the buffer is split
// into NUM_SYNCS regions, each tracked by its own sync tick
constexpr VkDeviceSize REGION_SIZE = STREAM_BUFFER_SIZE / StagingBufferPool::NUM_SYNCS;

// Memory property flags required for CPU-written staging memory
constexpr VkMemoryPropertyFlags HOST_FLAGS =
    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
// Preferred flags for the stream buffer: host-visible memory that is also
// device-local (e.g. a resizable-BAR style heap)
constexpr VkMemoryPropertyFlags STREAM_FLAGS = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | HOST_FLAGS;
|
|
|
|
|
|
|
|
// Returns true when the heap is large enough to host the stream buffer:
// the buffer must occupy less than two thirds of the heap's total size.
bool IsStreamHeap(VkMemoryHeap heap) noexcept {
    const VkDeviceSize heap_budget = (heap.size * 2) / 3;
    return STREAM_BUFFER_SIZE < heap_budget;
}
|
|
|
|
|
|
|
|
std::optional<u32> FindMemoryTypeIndex(const VkPhysicalDeviceMemoryProperties& props, u32 type_mask,
|
|
|
|
VkMemoryPropertyFlags flags) noexcept {
|
|
|
|
for (u32 type_index = 0; type_index < props.memoryTypeCount; ++type_index) {
|
|
|
|
if (((type_mask >> type_index) & 1) == 0) {
|
|
|
|
// Memory type is incompatible
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
const VkMemoryType& memory_type = props.memoryTypes[type_index];
|
|
|
|
if ((memory_type.propertyFlags & flags) != flags) {
|
|
|
|
// Memory type doesn't have the flags we want
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
if (!IsStreamHeap(props.memoryHeaps[memory_type.heapIndex])) {
|
|
|
|
// Memory heap is not suitable for streaming
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
// Success!
|
|
|
|
return type_index;
|
|
|
|
}
|
|
|
|
return std::nullopt;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Picks the memory type index for the stream buffer, preferring
// DEVICE_LOCAL host-visible memory (Nvidia and AMD expose a dedicated heap
// for this) and falling back to plain host-visible memory.
// Throws VK_ERROR_OUT_OF_DEVICE_MEMORY when neither is available.
u32 FindMemoryTypeIndex(const VkPhysicalDeviceMemoryProperties& props, u32 type_mask) {
    for (const VkMemoryPropertyFlags flags : {STREAM_FLAGS, HOST_FLAGS}) {
        if (const std::optional<u32> type = FindMemoryTypeIndex(props, type_mask, flags)) {
            return *type;
        }
    }
    // This should never happen, and in case it does, signal it as an out of memory situation
    throw vk::Exception(VK_ERROR_OUT_OF_DEVICE_MEMORY);
}
|
|
|
|
|
|
|
|
size_t Region(size_t iterator) noexcept {
|
|
|
|
return iterator / REGION_SIZE;
|
|
|
|
}
|
|
|
|
} // Anonymous namespace
|
2019-12-25 22:12:17 +01:00
|
|
|
|
2021-01-03 22:11:01 +01:00
|
|
|
StagingBufferPool::StagingBufferPool(const Device& device_, MemoryAllocator& memory_allocator_,
                                     VKScheduler& scheduler_)
    : device{device_}, memory_allocator{memory_allocator_}, scheduler{scheduler_} {
    const vk::Device& dev = device.GetLogical();
    // Create the persistent stream buffer that backs small upload requests.
    // Usage is limited to transfer-source, uniform and index reads.
    stream_buffer = dev.CreateBuffer(VkBufferCreateInfo{
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .size = STREAM_BUFFER_SIZE,
        .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                 VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
        .queueFamilyIndexCount = 0,
        .pQueueFamilyIndices = nullptr,
    });
    if (device.HasDebuggingToolAttached()) {
        stream_buffer.SetObjectNameEXT("Stream Buffer");
    }
    // Ask the driver whether this buffer prefers or requires a dedicated
    // allocation; the query result is written into dedicated_reqs.
    VkMemoryDedicatedRequirements dedicated_reqs{
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
        .pNext = nullptr,
        .prefersDedicatedAllocation = VK_FALSE,
        .requiresDedicatedAllocation = VK_FALSE,
    };
    const auto requirements = dev.GetBufferMemoryRequirements(*stream_buffer, &dedicated_reqs);
    const bool make_dedicated = dedicated_reqs.prefersDedicatedAllocation == VK_TRUE ||
                                dedicated_reqs.requiresDedicatedAllocation == VK_TRUE;
    // Chained into the allocation below only when make_dedicated is true
    const VkMemoryDedicatedAllocateInfo dedicated_info{
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
        .pNext = nullptr,
        .image = nullptr,
        .buffer = *stream_buffer,
    };
    const auto memory_properties = device.GetPhysical().GetMemoryProperties();
    // FindMemoryTypeIndex throws on failure, signalling out-of-device-memory
    stream_memory = dev.AllocateMemory(VkMemoryAllocateInfo{
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = make_dedicated ? &dedicated_info : nullptr,
        .allocationSize = requirements.size,
        .memoryTypeIndex = FindMemoryTypeIndex(memory_properties, requirements.memoryTypeBits),
    });
    if (device.HasDebuggingToolAttached()) {
        stream_memory.SetObjectNameEXT("Stream Buffer Memory");
    }
    stream_buffer.BindMemory(*stream_memory, 0);
    // Persistently map the whole stream buffer for CPU writes; the chosen
    // memory type is host-coherent, so no explicit flushes are performed here
    stream_pointer = stream_memory.Map(0, STREAM_BUFFER_SIZE);
}
|
2019-12-25 22:12:17 +01:00
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
// Defaulted out of line; members clean themselves up via RAII.
StagingBufferPool::~StagingBufferPool() = default;
|
2019-12-25 22:12:17 +01:00
|
|
|
|
2021-01-03 22:38:15 +01:00
|
|
|
// Hands out a staging buffer reference of at least `size` bytes.
// Small uploads are served from the ring-style stream buffer; everything
// else (large requests, downloads, device-local) uses the cached pools.
StagingBufferRef StagingBufferPool::Request(size_t size, MemoryUsage usage) {
    const bool use_stream = usage == MemoryUsage::Upload && size <= MAX_STREAM_BUFFER_REQUEST_SIZE;
    if (!use_stream) {
        return GetStagingBuffer(size, usage);
    }
    return GetStreamBuffer(size);
}
|
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
void StagingBufferPool::TickFrame() {
|
|
|
|
current_delete_level = (current_delete_level + 1) % NUM_LEVELS;
|
2019-12-25 22:12:17 +01:00
|
|
|
|
2021-01-03 22:38:15 +01:00
|
|
|
ReleaseCache(MemoryUsage::DeviceLocal);
|
|
|
|
ReleaseCache(MemoryUsage::Upload);
|
|
|
|
ReleaseCache(MemoryUsage::Download);
|
2019-12-25 22:12:17 +01:00
|
|
|
}
|
|
|
|
|
2021-01-16 20:20:18 +01:00
|
|
|
// Sub-allocates `size` bytes from the ring-style stream buffer.
// Falls back to a pooled staging buffer whenever serving the request would
// require waiting on GPU work that has not finished yet.
StagingBufferRef StagingBufferPool::GetStreamBuffer(size_t size) {
    // If any region between the last freed offset and the end of this
    // request is still in flight on the GPU, do not stall: fall back.
    if (AreRegionsActive(Region(free_iterator) + 1,
                         std::min(Region(iterator + size) + 1, NUM_SYNCS))) {
        // Avoid waiting for the previous usages to be free
        return GetStagingBuffer(size, MemoryUsage::Upload);
    }
    // Stamp every fully consumed region with the current tick so later
    // requests know when the GPU is done with them.
    const u64 current_tick = scheduler.CurrentTick();
    std::fill(sync_ticks.begin() + Region(used_iterator), sync_ticks.begin() + Region(iterator),
              current_tick);
    used_iterator = iterator;
    free_iterator = std::max(free_iterator, iterator + size);

    // Wrap around when the request does not fit in the remaining space.
    if (iterator + size >= STREAM_BUFFER_SIZE) {
        // Stamp the tail regions before restarting from the beginning
        std::fill(sync_ticks.begin() + Region(used_iterator), sync_ticks.begin() + NUM_SYNCS,
                  current_tick);
        used_iterator = 0;
        iterator = 0;
        free_iterator = size;

        // The head of the buffer may still be in use after wrapping
        if (AreRegionsActive(0, Region(size) + 1)) {
            // Avoid waiting for the previous usages to be free
            return GetStagingBuffer(size, MemoryUsage::Upload);
        }
    }
    const size_t offset = iterator;
    // Keep the next allocation aligned to the maximum buffer alignment
    iterator = Common::AlignUp(iterator + size, MAX_ALIGNMENT);
    return StagingBufferRef{
        .buffer = *stream_buffer,
        .offset = static_cast<VkDeviceSize>(offset),
        .mapped_span = std::span<u8>(stream_pointer + offset, size),
    };
}
|
|
|
|
|
2021-01-23 21:59:32 +01:00
|
|
|
bool StagingBufferPool::AreRegionsActive(size_t region_begin, size_t region_end) const {
|
|
|
|
return std::any_of(sync_ticks.begin() + region_begin, sync_ticks.begin() + region_end,
|
|
|
|
[this](u64 sync_tick) { return !scheduler.IsFree(sync_tick); });
|
|
|
|
};
|
|
|
|
|
2021-01-21 05:07:11 +01:00
|
|
|
// Returns a pooled staging buffer: reuses an idle cached entry when one of
// a suitable size exists, otherwise allocates a fresh one.
StagingBufferRef StagingBufferPool::GetStagingBuffer(size_t size, MemoryUsage usage) {
    const std::optional<StagingBufferRef> reserved = TryGetReservedBuffer(size, usage);
    if (!reserved) {
        return CreateStagingBuffer(size, usage);
    }
    return *reserved;
}
|
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
std::optional<StagingBufferRef> StagingBufferPool::TryGetReservedBuffer(size_t size,
|
2021-01-03 22:38:15 +01:00
|
|
|
MemoryUsage usage) {
|
|
|
|
StagingBuffers& cache_level = GetCache(usage)[Common::Log2Ceil64(size)];
|
2020-12-31 02:58:05 +01:00
|
|
|
|
|
|
|
const auto is_free = [this](const StagingBuffer& entry) {
|
|
|
|
return scheduler.IsFree(entry.tick);
|
|
|
|
};
|
|
|
|
auto& entries = cache_level.entries;
|
|
|
|
const auto hint_it = entries.begin() + cache_level.iterate_index;
|
|
|
|
auto it = std::find_if(entries.begin() + cache_level.iterate_index, entries.end(), is_free);
|
|
|
|
if (it == entries.end()) {
|
|
|
|
it = std::find_if(entries.begin(), hint_it, is_free);
|
|
|
|
if (it == hint_it) {
|
|
|
|
return std::nullopt;
|
2019-12-25 22:12:17 +01:00
|
|
|
}
|
|
|
|
}
|
2020-12-31 02:58:05 +01:00
|
|
|
cache_level.iterate_index = std::distance(entries.begin(), it) + 1;
|
|
|
|
it->tick = scheduler.CurrentTick();
|
|
|
|
return it->Ref();
|
2019-12-25 22:12:17 +01:00
|
|
|
}
|
|
|
|
|
2021-01-03 22:38:15 +01:00
|
|
|
// Allocates a brand new staging buffer sized to the next power of two of
// `size`, commits memory for it, stores it in the matching cache level, and
// returns a reference to it.
StagingBufferRef StagingBufferPool::CreateStagingBuffer(size_t size, MemoryUsage usage) {
    const u32 log2 = Common::Log2Ceil64(size);
    // Round the size up to a power of two so cache levels bucket cleanly
    vk::Buffer buffer = device.GetLogical().CreateBuffer({
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .size = 1ULL << log2,
        // Broad usage set: these buffers serve many purposes across the pool
        .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                 VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT |
                 VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
        .queueFamilyIndexCount = 0,
        .pQueueFamilyIndices = nullptr,
    });
    if (device.HasDebuggingToolAttached()) {
        // Give each staging buffer a unique debug name
        ++buffer_index;
        buffer.SetObjectNameEXT(fmt::format("Staging Buffer {}", buffer_index).c_str());
    }
    MemoryCommit commit = memory_allocator.Commit(buffer, usage);
    // Only host-visible commits can be mapped; device-local ones get an empty span
    const std::span<u8> mapped_span = IsHostVisible(usage) ? commit.Map() : std::span<u8>{};

    // Insert into the cache level so future requests can reuse this buffer
    StagingBuffer& entry = GetCache(usage)[log2].entries.emplace_back(StagingBuffer{
        .buffer = std::move(buffer),
        .commit = std::move(commit),
        .mapped_span = mapped_span,
        .tick = scheduler.CurrentTick(),
    });
    return entry.Ref();
}
|
|
|
|
|
2021-01-03 22:38:15 +01:00
|
|
|
// Maps a memory usage to its backing cache.
StagingBufferPool::StagingBuffersCache& StagingBufferPool::GetCache(MemoryUsage usage) {
    if (usage == MemoryUsage::DeviceLocal) {
        return device_local_cache;
    }
    if (usage == MemoryUsage::Upload) {
        return upload_cache;
    }
    if (usage == MemoryUsage::Download) {
        return download_cache;
    }
    UNREACHABLE_MSG("Invalid memory usage={}", usage);
    return upload_cache;
}
|
|
|
|
|
2021-01-03 22:38:15 +01:00
|
|
|
// Trims the current round-robin delete level of the cache for `usage`.
void StagingBufferPool::ReleaseCache(MemoryUsage usage) {
    ReleaseLevel(GetCache(usage), current_delete_level);
}
|
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
// Erases up to deletions_per_tick idle entries from one cache level,
// scanning a sliding window so the whole level is eventually covered
// across successive frames without large per-frame spikes.
void StagingBufferPool::ReleaseLevel(StagingBuffersCache& cache, size_t log2) {
    constexpr size_t deletions_per_tick = 16;
    auto& staging = cache[log2];
    auto& entries = staging.entries;
    const size_t old_size = entries.size();

    // An entry can be destroyed once the GPU has passed its tick
    const auto is_deleteable = [this](const StagingBuffer& entry) {
        return scheduler.IsFree(entry.tick);
    };
    // Only examine a bounded window starting at the saved delete index
    const size_t begin_offset = staging.delete_index;
    const size_t end_offset = std::min(begin_offset + deletions_per_tick, old_size);
    const auto begin = entries.begin() + begin_offset;
    const auto end = entries.begin() + end_offset;
    entries.erase(std::remove_if(begin, end, is_deleteable), end);

    // Advance the window, wrapping when it runs past the shrunken vector
    const size_t new_size = entries.size();
    staging.delete_index += deletions_per_tick;
    if (staging.delete_index >= new_size) {
        staging.delete_index = 0;
    }
    // Keep the reuse hint valid after erasures shrank the vector
    if (staging.iterate_index > new_size) {
        staging.iterate_index = 0;
    }
}
|
|
|
|
|
|
|
|
} // namespace Vulkan
|