2019-02-24 08:22:33 +01:00
|
|
|
// Copyright 2019 yuzu Emulator Project
|
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
|
|
|
#include <algorithm>
|
2020-04-17 10:13:35 +02:00
|
|
|
#include <limits>
|
2019-02-24 08:22:33 +01:00
|
|
|
#include <optional>
|
2020-01-06 21:59:20 +01:00
|
|
|
#include <tuple>
|
2019-02-24 08:22:33 +01:00
|
|
|
#include <vector>
|
|
|
|
|
2020-01-06 21:59:20 +01:00
|
|
|
#include "common/alignment.h"
|
2019-02-24 08:22:33 +01:00
|
|
|
#include "common/assert.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_device.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_scheduler.h"
|
|
|
|
#include "video_core/renderer_vulkan/vk_stream_buffer.h"
|
2020-03-27 05:33:21 +01:00
|
|
|
#include "video_core/renderer_vulkan/wrapper.h"
|
2019-02-24 08:22:33 +01:00
|
|
|
|
|
|
|
namespace Vulkan {
|
|
|
|
|
2020-01-06 21:59:20 +01:00
|
|
|
namespace {
|
|
|
|
|
2019-02-24 08:22:33 +01:00
|
|
|
// Number of watch slots reserved in each watch vector at construction time.
constexpr u64 WATCHES_INITIAL_RESERVE = 0x4000;
// Number of extra watch slots appended whenever the current vector runs out.
constexpr u64 WATCHES_RESERVE_CHUNK = 0x1000;

// Size requested for the stream buffer; clamped in CreateBuffers if the heap is smaller.
constexpr u64 PREFERRED_STREAM_BUFFER_SIZE = 256 * 1024 * 1024;
|
2020-01-06 21:59:20 +01:00
|
|
|
|
2020-04-17 10:13:35 +02:00
|
|
|
/// Find a memory type with the passed requirements
|
|
|
|
std::optional<u32> FindMemoryType(const VkPhysicalDeviceMemoryProperties& properties,
|
|
|
|
VkMemoryPropertyFlags wanted,
|
|
|
|
u32 filter = std::numeric_limits<u32>::max()) {
|
|
|
|
for (u32 i = 0; i < properties.memoryTypeCount; ++i) {
|
|
|
|
const auto flags = properties.memoryTypes[i].propertyFlags;
|
|
|
|
if ((flags & wanted) == wanted && (filter & (1U << i)) != 0) {
|
2020-01-06 21:59:20 +01:00
|
|
|
return i;
|
|
|
|
}
|
|
|
|
}
|
2020-03-27 05:33:21 +01:00
|
|
|
return std::nullopt;
|
2020-01-06 21:59:20 +01:00
|
|
|
}
|
|
|
|
|
2020-04-17 10:13:35 +02:00
|
|
|
/// Get the preferred host visible memory type.
|
|
|
|
u32 GetMemoryType(const VkPhysicalDeviceMemoryProperties& properties,
|
|
|
|
u32 filter = std::numeric_limits<u32>::max()) {
|
|
|
|
// Prefer device local host visible allocations. Both AMD and Nvidia now provide one.
|
|
|
|
// Otherwise search for a host visible allocation.
|
|
|
|
static constexpr auto HOST_MEMORY =
|
|
|
|
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
|
|
|
|
static constexpr auto DYNAMIC_MEMORY = HOST_MEMORY | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
|
|
|
|
|
|
|
|
std::optional preferred_type = FindMemoryType(properties, DYNAMIC_MEMORY);
|
|
|
|
if (!preferred_type) {
|
|
|
|
preferred_type = FindMemoryType(properties, HOST_MEMORY);
|
|
|
|
ASSERT_MSG(preferred_type, "No host visible and coherent memory type found");
|
|
|
|
}
|
|
|
|
return preferred_type.value_or(0);
|
|
|
|
}
|
|
|
|
|
2020-01-06 21:59:20 +01:00
|
|
|
} // Anonymous namespace
|
|
|
|
|
2020-06-12 02:24:45 +02:00
|
|
|
// Creates the stream buffer: allocates the Vulkan buffer/memory and pre-sizes both
// watch vectors so the common Unmap path does not reallocate.
VKStreamBuffer::VKStreamBuffer(const VKDevice& device_, VKScheduler& scheduler_,
                               VkBufferUsageFlags usage)
    : device{device_}, scheduler{scheduler_} {
    CreateBuffers(usage);
    ReserveWatches(current_watches, WATCHES_INITIAL_RESERVE);
    ReserveWatches(previous_watches, WATCHES_INITIAL_RESERVE);
}
|
|
|
|
|
|
|
|
// Buffer and device memory are released by their RAII wrapper destructors.
VKStreamBuffer::~VKStreamBuffer() = default;
|
|
|
|
|
2020-01-06 21:59:20 +01:00
|
|
|
// Maps a region of the stream buffer for CPU writes.
// @param size      Number of bytes to map; must not exceed the total buffer size.
// @param alignment Required alignment of the returned offset; 0 means no alignment.
// @returns {host pointer, buffer offset, invalidated} where invalidated is true when the
//          buffer wrapped around and previously returned offsets must not be reused.
std::tuple<u8*, u64, bool> VKStreamBuffer::Map(u64 size, u64 alignment) {
    ASSERT(size <= stream_buffer_size);
    // Remember the mapped size so Unmap can validate the committed amount.
    mapped_size = size;

    if (alignment > 0) {
        offset = Common::AlignUp(offset, alignment);
    }

    // Block until previous-cycle GPU work that guarded this region has finished.
    WaitPendingOperations(offset);

    bool invalidated = false;
    if (offset + size > stream_buffer_size) {
        // The buffer would overflow, save the amount of used watches and reset the state.
        invalidation_mark = current_watch_cursor;
        current_watch_cursor = 0;
        offset = 0;

        // Swap watches and reset waiting cursors.
        std::swap(previous_watches, current_watches);
        wait_cursor = 0;
        wait_bound = 0;

        // Ensure that we don't wait for uncommitted fences.
        scheduler.Flush();

        invalidated = true;
    }

    return {memory.Map(offset, size), offset, invalidated};
}
|
|
|
|
|
2020-01-06 21:59:20 +01:00
|
|
|
// Unmaps the region returned by the previous Map call and records a watch for it.
// @param size Number of bytes actually written; must not exceed the mapped size.
void VKStreamBuffer::Unmap(u64 size) {
    ASSERT_MSG(size <= mapped_size, "Reserved size is too small");

    memory.Unmap();

    // Advance past the committed bytes; the next Map starts here.
    offset += size;

    if (current_watch_cursor + 1 >= current_watches.size()) {
        // Ensure that there are enough watches.
        ReserveWatches(current_watches, WATCHES_RESERVE_CHUNK);
    }
    // Record the region's upper bound and the scheduler tick that must complete before
    // this region can be safely reused after the buffer wraps around.
    auto& watch = current_watches[current_watch_cursor++];
    watch.upper_bound = offset;
    watch.tick = scheduler.CurrentTick();
}
|
|
|
|
|
2020-03-27 05:33:21 +01:00
|
|
|
// Creates the Vulkan buffer, allocates host-visible backing memory for it and binds them.
// @param usage Buffer usage flags requested by the owner of this stream buffer.
void VKStreamBuffer::CreateBuffers(VkBufferUsageFlags usage) {
    const auto memory_properties = device.GetPhysical().GetMemoryProperties();
    const u32 preferred_type = GetMemoryType(memory_properties);
    const u32 preferred_heap = memory_properties.memoryTypes[preferred_type].heapIndex;

    // Subtract from the preferred heap size some bytes to avoid getting out of memory.
    const VkDeviceSize heap_size = memory_properties.memoryHeaps[preferred_heap].size;
    // 9 MiB of headroom left for the driver and other allocations on the same heap.
    const VkDeviceSize allocable_size = heap_size - 9 * 1024 * 1024;
    buffer = device.GetLogical().CreateBuffer({
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        // Clamp the preferred size to what the heap can actually provide.
        .size = std::min(PREFERRED_STREAM_BUFFER_SIZE, allocable_size),
        .usage = usage,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
        .queueFamilyIndexCount = 0,
        .pQueueFamilyIndices = nullptr,
    });

    const auto requirements = device.GetLogical().GetBufferMemoryRequirements(*buffer);
    const u32 required_flags = requirements.memoryTypeBits;
    // The driver may require more memory than the buffer size; track the real usable size.
    stream_buffer_size = static_cast<u64>(requirements.size);

    memory = device.GetLogical().AllocateMemory({
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = nullptr,
        .allocationSize = requirements.size,
        .memoryTypeIndex = GetMemoryType(memory_properties, required_flags),
    });
    buffer.BindMemory(*memory, 0);
}
|
|
|
|
|
2020-01-06 21:59:20 +01:00
|
|
|
// Grows a watch vector by grow_size default-constructed entries, keeping existing watches.
void VKStreamBuffer::ReserveWatches(std::vector<Watch>& watches, std::size_t grow_size) {
    const std::size_t new_size = watches.size() + grow_size;
    watches.resize(new_size);
}
|
|
|
|
|
|
|
|
// Waits on the previous-cycle watches guarding [0, requested_upper_bound) so the CPU does
// not overwrite buffer regions the GPU may still be reading after a wrap-around.
// @param requested_upper_bound Start offset of the region about to be mapped.
void VKStreamBuffer::WaitPendingOperations(u64 requested_upper_bound) {
    if (!invalidation_mark) {
        // The buffer has not wrapped around yet, there is nothing to wait for.
        return;
    }
    // Fix: the comparison was inverted (requested_upper_bound < wait_bound), which is always
    // false on entry because wait_bound is reset to zero at every invalidation — so no watch
    // was ever waited on. Wait while the already-synchronized bound is below the requested
    // offset; the last iteration intentionally overshoots, as that watch's region straddles it.
    while (wait_bound < requested_upper_bound && wait_cursor < *invalidation_mark) {
        auto& watch = previous_watches[wait_cursor];
        wait_bound = watch.upper_bound;
        scheduler.Wait(watch.tick);
        ++wait_cursor;
    }
}
|
|
|
|
|
|
|
|
} // namespace Vulkan
|