
Vulkan: Allow staging buffer deferrals.

Fernando Sahmkow 2022-11-19 23:16:07 +01:00
parent 8d694701bc
commit 2793304117
2 changed files with 56 additions and 21 deletions
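In short: Request() gains a deferred flag and the pool gains a FreeDeferred() method. A deferred request skips the stream-buffer fast path, and the cache entry it hands out is parked (its tick is set to the maximum u64 value and its deferred flag is set) so the free-entry scan in TryGetReservedBuffer() will not recycle it. StagingBufferRef now carries usage, log2_level and a unique index so that FreeDeferred() can locate the entry later, stamp it with the current scheduler tick and clear the flag; from then on it is reclaimed like any other entry once the GPU has passed that tick. A small self-contained sketch of this bookkeeping follows the two diffs below.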

src/video_core/renderer_vulkan/vk_staging_buffer_pool.cpp

@@ -1,5 +1,5 @@
-// SPDX-FileCopyrightText: Copyright 2019 yuzu Emulator Project
-// SPDX-License-Identifier: GPL-2.0-or-later
+// SPDX-FileCopyrightText: Copyright 2022 yuzu Emulator Project
+// SPDX-License-Identifier: GPL-3.0-or-later
 
 #include <algorithm>
 #include <utility>
@@ -94,7 +94,7 @@ StagingBufferPool::StagingBufferPool(const Device& device_, MemoryAllocator& mem
         .flags = 0,
         .size = STREAM_BUFFER_SIZE,
         .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
-                 VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+                 VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
         .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
         .queueFamilyIndexCount = 0,
         .pQueueFamilyIndices = nullptr,
@@ -142,11 +142,23 @@ StagingBufferPool::StagingBufferPool(const Device& device_, MemoryAllocator& mem
 
 StagingBufferPool::~StagingBufferPool() = default;
 
-StagingBufferRef StagingBufferPool::Request(size_t size, MemoryUsage usage) {
-    if (usage == MemoryUsage::Upload && size <= MAX_STREAM_BUFFER_REQUEST_SIZE) {
+StagingBufferRef StagingBufferPool::Request(size_t size, MemoryUsage usage, bool deferred) {
+    if (!deferred && usage == MemoryUsage::Upload && size <= MAX_STREAM_BUFFER_REQUEST_SIZE) {
         return GetStreamBuffer(size);
     }
-    return GetStagingBuffer(size, usage);
+    return GetStagingBuffer(size, usage, deferred);
+}
+
+void StagingBufferPool::FreeDeferred(StagingBufferRef& ref) {
+    auto& entries = GetCache(ref.usage)[ref.log2_level].entries;
+    const auto is_this_one = [&ref](const StagingBuffer& entry) {
+        return entry.index == ref.index;
+    };
+    auto it = std::find_if(entries.begin(), entries.end(), is_this_one);
+    ASSERT(it != entries.end());
+    ASSERT(it->deferred);
+    it->tick = scheduler.CurrentTick();
+    it->deferred = false;
 }
 
 void StagingBufferPool::TickFrame() {
@@ -196,19 +208,21 @@ bool StagingBufferPool::AreRegionsActive(size_t region_begin, size_t region_end)
                        [gpu_tick](u64 sync_tick) { return gpu_tick < sync_tick; });
 };
 
-StagingBufferRef StagingBufferPool::GetStagingBuffer(size_t size, MemoryUsage usage) {
-    if (const std::optional<StagingBufferRef> ref = TryGetReservedBuffer(size, usage)) {
+StagingBufferRef StagingBufferPool::GetStagingBuffer(size_t size, MemoryUsage usage,
+                                                     bool deferred) {
+    if (const std::optional<StagingBufferRef> ref = TryGetReservedBuffer(size, usage, deferred)) {
         return *ref;
     }
-    return CreateStagingBuffer(size, usage);
+    return CreateStagingBuffer(size, usage, deferred);
 }
 
 std::optional<StagingBufferRef> StagingBufferPool::TryGetReservedBuffer(size_t size,
-                                                                        MemoryUsage usage) {
+                                                                        MemoryUsage usage,
+                                                                        bool deferred) {
     StagingBuffers& cache_level = GetCache(usage)[Common::Log2Ceil64(size)];
 
     const auto is_free = [this](const StagingBuffer& entry) {
-        return scheduler.IsFree(entry.tick);
+        return !entry.deferred && scheduler.IsFree(entry.tick);
     };
     auto& entries = cache_level.entries;
     const auto hint_it = entries.begin() + cache_level.iterate_index;
@@ -220,11 +234,14 @@ std::optional<StagingBufferRef> StagingBufferPool::TryGetReservedBuffer(size_t s
         }
     }
     cache_level.iterate_index = std::distance(entries.begin(), it) + 1;
-    it->tick = scheduler.CurrentTick();
+    it->tick = deferred ? std::numeric_limits<u64>::max() : scheduler.CurrentTick();
+    ASSERT(!it->deferred);
+    it->deferred = deferred;
     return it->Ref();
 }
 
-StagingBufferRef StagingBufferPool::CreateStagingBuffer(size_t size, MemoryUsage usage) {
+StagingBufferRef StagingBufferPool::CreateStagingBuffer(size_t size, MemoryUsage usage,
+                                                        bool deferred) {
     const u32 log2 = Common::Log2Ceil64(size);
     vk::Buffer buffer = device.GetLogical().CreateBuffer({
         .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
@@ -233,7 +250,7 @@ StagingBufferRef StagingBufferPool::CreateStagingBuffer(size_t size, MemoryUsage
         .size = 1ULL << log2,
         .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                  VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT |
-                 VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+                 VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
         .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
         .queueFamilyIndexCount = 0,
         .pQueueFamilyIndices = nullptr,
@@ -249,7 +266,11 @@ StagingBufferRef StagingBufferPool::CreateStagingBuffer(size_t size, MemoryUsage
         .buffer = std::move(buffer),
         .commit = std::move(commit),
         .mapped_span = mapped_span,
-        .tick = scheduler.CurrentTick(),
+        .usage = usage,
+        .log2_level = log2,
+        .index = unique_ids++,
+        .tick = deferred ? std::numeric_limits<u64>::max() : scheduler.CurrentTick(),
+        .deferred = deferred,
     });
     return entry.Ref();
 }

src/video_core/renderer_vulkan/vk_staging_buffer_pool.h

@@ -1,5 +1,6 @@
-// SPDX-FileCopyrightText: Copyright 2019 yuzu Emulator Project
-// SPDX-License-Identifier: GPL-2.0-or-later
+// Copyright 2019 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
 
 #pragma once
@@ -20,6 +21,9 @@ struct StagingBufferRef {
     VkBuffer buffer;
     VkDeviceSize offset;
     std::span<u8> mapped_span;
+    MemoryUsage usage;
+    u32 log2_level;
+    u64 index;
 };
 
 class StagingBufferPool {
@@ -30,7 +34,8 @@ public:
                               Scheduler& scheduler);
     ~StagingBufferPool();
 
-    StagingBufferRef Request(size_t size, MemoryUsage usage);
+    StagingBufferRef Request(size_t size, MemoryUsage usage, bool deferred = false);
+    void FreeDeferred(StagingBufferRef& ref);
 
     void TickFrame();
@@ -44,13 +49,20 @@ private:
         vk::Buffer buffer;
         MemoryCommit commit;
         std::span<u8> mapped_span;
+        MemoryUsage usage;
+        u32 log2_level;
+        u64 index;
         u64 tick = 0;
+        bool deferred{};
 
         StagingBufferRef Ref() const noexcept {
             return {
                 .buffer = *buffer,
                 .offset = 0,
                 .mapped_span = mapped_span,
+                .usage = usage,
+                .log2_level = log2_level,
+                .index = index,
             };
         }
     };
@@ -68,11 +80,12 @@ private:
     bool AreRegionsActive(size_t region_begin, size_t region_end) const;
 
-    StagingBufferRef GetStagingBuffer(size_t size, MemoryUsage usage);
+    StagingBufferRef GetStagingBuffer(size_t size, MemoryUsage usage, bool deferred = false);
 
-    std::optional<StagingBufferRef> TryGetReservedBuffer(size_t size, MemoryUsage usage);
+    std::optional<StagingBufferRef> TryGetReservedBuffer(size_t size, MemoryUsage usage,
+                                                         bool deferred);
 
-    StagingBufferRef CreateStagingBuffer(size_t size, MemoryUsage usage);
+    StagingBufferRef CreateStagingBuffer(size_t size, MemoryUsage usage, bool deferred);
 
     StagingBuffersCache& GetCache(MemoryUsage usage);
@@ -99,6 +112,7 @@ private:
     size_t current_delete_level = 0;
     u64 buffer_index = 0;
+    u64 unique_ids{};
 };
 
 } // namespace Vulkan
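As referenced above, here is a minimal, self-contained model of the deferral bookkeeping. This is toy code, not yuzu/suyu code: ToyPool, Entry, current_tick, gpu_tick and next_index are invented stand-ins for the scheduler and the real StagingBuffer entries. It shows the two properties the diff relies on: an entry requested with deferred = true is invisible to the free-entry scan until FreeDeferred() is called, and once freed it becomes reusable exactly when the GPU tick reaches the value stamped at free time.

// Toy model of the deferral scheme; all names are invented for illustration.
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <limits>
#include <vector>

struct Entry {
    std::uint64_t index = 0;
    std::uint64_t tick = 0;
    bool deferred = false;
};

struct ToyPool {
    std::uint64_t current_tick = 1; // stands in for scheduler.CurrentTick()
    std::uint64_t gpu_tick = 0;     // last tick the GPU is known to have finished
    std::uint64_t next_index = 0;
    std::vector<Entry> entries;

    // Returns the slot of a reusable entry, creating one if none is free.
    std::size_t Request(bool deferred) {
        const auto is_free = [this](const Entry& e) {
            // Mirrors "!entry.deferred && scheduler.IsFree(entry.tick)".
            return !e.deferred && e.tick <= gpu_tick;
        };
        auto it = std::find_if(entries.begin(), entries.end(), is_free);
        if (it == entries.end()) {
            it = entries.insert(entries.end(), Entry{.index = next_index++});
        }
        // A deferred entry is parked with a sentinel tick so no later scan can
        // consider it free until FreeDeferred() is called.
        it->tick = deferred ? std::numeric_limits<std::uint64_t>::max() : current_tick;
        it->deferred = deferred;
        return static_cast<std::size_t>(std::distance(entries.begin(), it));
    }

    void FreeDeferred(std::size_t slot) {
        Entry& e = entries[slot];
        assert(e.deferred);
        e.tick = current_tick; // reclaimable once the GPU passes this tick
        e.deferred = false;
    }
};

int main() {
    ToyPool pool;
    const std::size_t held = pool.Request(/*deferred=*/true);

    // The deferred slot is never handed out again, no matter how far the GPU advances.
    pool.current_tick = 5;
    pool.gpu_tick = 5;
    assert(pool.Request(false) != held);

    // After FreeDeferred() the slot behaves like any other entry: it becomes
    // reusable once the GPU reaches the tick stamped at free time.
    pool.FreeDeferred(held);
    assert(pool.Request(false) == held);
    return 0;
}

The sentinel tick is presumably used in addition to the flag so that any code path that judges an entry only by its tick (for example a pass that deletes idle entries) also keeps treating a deferred entry as still in flight.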