Fence Manager: implement async fence management in a separate thread.

Fernando Sahmkow 2023-04-15 00:03:48 +02:00
parent d600183583
commit fca72beb2d
5 changed files with 133 additions and 35 deletions
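In brief: the shared VideoCommon::FenceManager now takes a single Traits parameter (fence type, texture/buffer/query cache types, and a HAS_ASYNC_CHECK flag) instead of four template arguments, and a backend that opts in no longer releases fences inline. SignalFence hands each fence and its queued operations to a dedicated "GPUFencingThread" that is synchronised with a mutex and condition variable and torn down through std::jthread's stop_token, while the query cache gains locking (and a mutable mutex) so it can be driven from that thread.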

src/video_core/fence_manager.h

@@ -4,13 +4,20 @@
 #pragma once

 #include <algorithm>
+#include <condition_variable>
 #include <cstring>
 #include <deque>
 #include <functional>
 #include <memory>
+#include <mutex>
+#include <thread>
 #include <queue>

 #include "common/common_types.h"
+#include "common/microprofile.h"
+#include "common/scope_exit.h"
+#include "common/settings.h"
+#include "common/thread.h"
 #include "video_core/delayed_destruction_ring.h"
 #include "video_core/gpu.h"
 #include "video_core/host1x/host1x.h"
@@ -23,15 +30,26 @@ class FenceBase {
 public:
     explicit FenceBase(bool is_stubbed_) : is_stubbed{is_stubbed_} {}

+    bool IsStubbed() const {
+        return is_stubbed;
+    }
+
 protected:
     bool is_stubbed;
 };

-template <typename TFence, typename TTextureCache, typename TTBufferCache, typename TQueryCache>
+template <typename Traits>
 class FenceManager {
+    using TFence = typename Traits::FenceType;
+    using TTextureCache = typename Traits::TextureCacheType;
+    using TBufferCache = typename Traits::BufferCacheType;
+    using TQueryCache = typename Traits::QueryCacheType;
+
+    static constexpr bool can_async_check = Traits::HAS_ASYNC_CHECK;
+
 public:
     /// Notify the fence manager about a new frame
     void TickFrame() {
+        std::unique_lock lock(ring_guard);
         delayed_destruction_ring.Tick();
     }
@@ -46,17 +64,27 @@
     }

     void SignalFence(std::function<void()>&& func) {
-        TryReleasePendingFences();
+        if constexpr (!can_async_check) {
+            TryReleasePendingFences<false>();
+        }
+        std::function<void()> callback = std::move(func);
         const bool should_flush = ShouldFlush();
         CommitAsyncFlushes();
-        uncommitted_operations.emplace_back(std::move(func));
-        CommitOperations();
         TFence new_fence = CreateFence(!should_flush);
-        fences.push(new_fence);
+        if constexpr (can_async_check) {
+            guard.lock();
+        }
+        pending_operations.emplace_back(std::move(uncommitted_operations));
         QueueFence(new_fence);
+        callback();
+        fences.push(std::move(new_fence));
         if (should_flush) {
             rasterizer.FlushCommands();
         }
+        if constexpr (can_async_check) {
+            guard.unlock();
+            cv.notify_all();
+        }
     }

     void SignalSyncPoint(u32 value) {
@@ -66,29 +94,30 @@
     }

     void WaitPendingFences() {
-        while (!fences.empty()) {
-            TFence& current_fence = fences.front();
-            if (ShouldWait()) {
-                WaitFence(current_fence);
-            }
-            PopAsyncFlushes();
-            auto operations = std::move(pending_operations.front());
-            pending_operations.pop_front();
-            for (auto& operation : operations) {
-                operation();
-            }
-            PopFence();
+        if constexpr (!can_async_check) {
+            TryReleasePendingFences<true>();
         }
     }

 protected:
     explicit FenceManager(VideoCore::RasterizerInterface& rasterizer_, Tegra::GPU& gpu_,
-                          TTextureCache& texture_cache_, TTBufferCache& buffer_cache_,
+                          TTextureCache& texture_cache_, TBufferCache& buffer_cache_,
                           TQueryCache& query_cache_)
         : rasterizer{rasterizer_}, gpu{gpu_}, syncpoint_manager{gpu.Host1x().GetSyncpointManager()},
-          texture_cache{texture_cache_}, buffer_cache{buffer_cache_}, query_cache{query_cache_} {}
+          texture_cache{texture_cache_}, buffer_cache{buffer_cache_}, query_cache{query_cache_} {
+        if constexpr (can_async_check) {
+            fence_thread =
+                std::jthread([this](std::stop_token token) { ReleaseThreadFunc(token); });
+        }
+    }

-    virtual ~FenceManager() = default;
+    virtual ~FenceManager() {
+        if constexpr (can_async_check) {
+            fence_thread.request_stop();
+            cv.notify_all();
+            fence_thread.join();
+        }
+    }

     /// Creates a Fence Interface, does not create a backend fence if 'is_stubbed' is
     /// true
@@ -104,23 +133,70 @@
     Tegra::GPU& gpu;
     Tegra::Host1x::SyncpointManager& syncpoint_manager;
     TTextureCache& texture_cache;
-    TTBufferCache& buffer_cache;
+    TBufferCache& buffer_cache;
     TQueryCache& query_cache;

 private:
+    template <bool force_wait>
     void TryReleasePendingFences() {
         while (!fences.empty()) {
             TFence& current_fence = fences.front();
             if (ShouldWait() && !IsFenceSignaled(current_fence)) {
-                return;
+                if constexpr (force_wait) {
+                    WaitFence(current_fence);
+                } else {
+                    return;
+                }
             }
             PopAsyncFlushes();
             auto operations = std::move(pending_operations.front());
             pending_operations.pop_front();
             for (auto& operation : operations) {
                 operation();
             }
-            PopFence();
+            {
+                std::unique_lock lock(ring_guard);
+                delayed_destruction_ring.Push(std::move(current_fence));
+            }
+            fences.pop();
+        }
+    }
+
+    void ReleaseThreadFunc(std::stop_token stop_token) {
+        std::string name = "GPUFencingThread";
+        MicroProfileOnThreadCreate(name.c_str());
+
+        // Cleanup
+        SCOPE_EXIT({ MicroProfileOnThreadExit(); });
+
+        Common::SetCurrentThreadName(name.c_str());
+        Common::SetCurrentThreadPriority(Common::ThreadPriority::High);
+
+        TFence current_fence;
+        std::deque<std::function<void()>> current_operations;
+        while (!stop_token.stop_requested()) {
+            {
+                std::unique_lock lock(guard);
+                cv.wait(lock, [&] { return stop_token.stop_requested() || !fences.empty(); });
+                if (stop_token.stop_requested()) [[unlikely]] {
+                    return;
+                }
+                current_fence = std::move(fences.front());
+                current_operations = std::move(pending_operations.front());
+                fences.pop();
+                pending_operations.pop_front();
+            }
+            if (!current_fence->IsStubbed()) {
+                WaitFence(current_fence);
+            }
+            PopAsyncFlushes();
+            for (auto& operation : current_operations) {
+                operation();
+            }
+            {
+                std::unique_lock lock(ring_guard);
+                delayed_destruction_ring.Push(std::move(current_fence));
+            }
         }
     }
@@ -154,19 +230,16 @@
         query_cache.CommitAsyncFlushes();
     }

-    void PopFence() {
-        delayed_destruction_ring.Push(std::move(fences.front()));
-        fences.pop();
-    }
-
-    void CommitOperations() {
-        pending_operations.emplace_back(std::move(uncommitted_operations));
-    }
-
     std::queue<TFence> fences;
     std::deque<std::function<void()>> uncommitted_operations;
     std::deque<std::deque<std::function<void()>>> pending_operations;

+    std::mutex guard;
+    std::mutex ring_guard;
+    std::condition_variable cv;
+    std::jthread fence_thread;
+
     DelayedDestructionRing<TFence, 6> delayed_destruction_ring;
 };
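The heart of the new header is a producer/consumer handoff: SignalFence pushes the fence and its pending operations under guard and notifies cv, while ReleaseThreadFunc sleeps on the condition variable until work arrives or the std::jthread's stop_token fires. The following is a minimal standalone sketch of that pattern under C++20; the names (AsyncReleaser, Push, Run) are invented for illustration and this is not the yuzu implementation.

    // Minimal C++20 sketch of the handoff used by ReleaseThreadFunc: a producer
    // queues work under a mutex and notifies a condition variable, and a
    // std::jthread consumer drains the queue until stop is requested.
    #include <chrono>
    #include <condition_variable>
    #include <deque>
    #include <functional>
    #include <iostream>
    #include <mutex>
    #include <stop_token>
    #include <thread>

    class AsyncReleaser {
    public:
        AsyncReleaser() {
            worker = std::jthread([this](std::stop_token token) { Run(token); });
        }

        ~AsyncReleaser() {
            {
                // Request stop while holding the lock so the waiter cannot miss it
                // between checking the predicate and blocking on the wait.
                std::scoped_lock lock{guard};
                worker.request_stop();
            }
            cv.notify_all();
            // std::jthread joins automatically when `worker` is destroyed.
        }

        void Push(std::function<void()> operation) {
            {
                std::scoped_lock lock{guard};
                pending.push_back(std::move(operation));
            }
            cv.notify_all();
        }

    private:
        void Run(std::stop_token token) {
            while (!token.stop_requested()) {
                std::function<void()> operation;
                {
                    std::unique_lock lock{guard};
                    cv.wait(lock, [&] { return token.stop_requested() || !pending.empty(); });
                    if (token.stop_requested()) {
                        return;
                    }
                    operation = std::move(pending.front());
                    pending.pop_front();
                }
                operation(); // run the deferred work outside the lock
            }
        }

        std::mutex guard;
        std::condition_variable cv;
        std::deque<std::function<void()>> pending;
        std::jthread worker; // declared last: joined before the mutex and cv are destroyed
    };

    int main() {
        AsyncReleaser releaser;
        releaser.Push([] { std::cout << "fence released\n"; });
        // Give the worker a moment to drain the queue before shutdown (demo only).
        std::this_thread::sleep_for(std::chrono::milliseconds(50));
    }

Requesting the stop while the mutex is held, before notifying, closes the classic missed-wakeup window between the waiter's predicate check and its block on the condition variable; declaring the jthread as the last member means it is joined before the synchronisation primitives it uses go away.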

src/video_core/query_cache.h

@@ -173,15 +173,18 @@
     }

     void CommitAsyncFlushes() {
+        std::unique_lock lock{mutex};
         committed_flushes.push_back(uncommitted_flushes);
         uncommitted_flushes.reset();
     }

     bool HasUncommittedFlushes() const {
+        std::unique_lock lock{mutex};
         return uncommitted_flushes != nullptr;
     }

     bool ShouldWaitAsyncFlushes() const {
+        std::unique_lock lock{mutex};
         if (committed_flushes.empty()) {
             return false;
         }
@@ -189,6 +192,7 @@
     }

     void PopAsyncFlushes() {
+        std::unique_lock lock{mutex};
         if (committed_flushes.empty()) {
             return;
         }
@@ -265,7 +269,7 @@
     VideoCore::RasterizerInterface& rasterizer;

-    std::recursive_mutex mutex;
+    mutable std::recursive_mutex mutex;
     std::unordered_map<u64, std::vector<CachedQuery>> cached_queries;
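The locking added here has a small C++ wrinkle: HasUncommittedFlushes and ShouldWaitAsyncFlushes are const, so they can only take the lock because the recursive_mutex is now declared mutable. A minimal illustration of that idiom, using an invented Counter class:

    // Illustrative only: a const member function may still lock a mutex that is
    // declared `mutable`, which is why the query cache's mutex gains that qualifier.
    #include <iostream>
    #include <mutex>

    class Counter {
    public:
        void Add(int v) {
            std::scoped_lock lock{mutex};
            value += v;
        }

        int Get() const {
            std::scoped_lock lock{mutex}; // allowed because `mutex` is mutable
            return value;
        }

    private:
        mutable std::mutex mutex;
        int value = 0;
    };

    int main() {
        Counter counter;
        counter.Add(3);
        std::cout << counter.Get() << '\n'; // prints 3
    }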

src/video_core/renderer_opengl/gl_fence_manager.h

@@ -30,7 +30,17 @@
 };
 using Fence = std::shared_ptr<GLInnerFence>;

-using GenericFenceManager = VideoCommon::FenceManager<Fence, TextureCache, BufferCache, QueryCache>;
+struct FenceManagerParams {
+    using FenceType = Fence;
+    using BufferCacheType = BufferCache;
+    using TextureCacheType = TextureCache;
+    using QueryCacheType = QueryCache;
+
+    static constexpr bool HAS_ASYNC_CHECK = false;
+};
+
+using GenericFenceManager = VideoCommon::FenceManager<FenceManagerParams>;

 class FenceManagerOpenGL final : public GenericFenceManager {
 public:
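FenceManagerParams is the OpenGL instantiation of the new Traits parameter, and HAS_ASYNC_CHECK = false means every if constexpr (can_async_check) branch in the common FenceManager is compiled out, so no fence thread is ever started for this backend and fences keep being released inline. A small sketch of how such a constexpr trait flag steers the code path; SyncParams, AsyncParams and Signal are invented names for illustration:

    // Illustrative only: a compile-time trait flag selects a branch with
    // `if constexpr`, so the unused path is never instantiated for that backend.
    #include <iostream>

    struct SyncParams {
        static constexpr bool HAS_ASYNC_CHECK = false;
    };

    struct AsyncParams {
        static constexpr bool HAS_ASYNC_CHECK = true;
    };

    template <typename Traits>
    void Signal() {
        if constexpr (Traits::HAS_ASYNC_CHECK) {
            std::cout << "hand the fence to the background thread\n";
        } else {
            std::cout << "check and release fences inline\n";
        }
    }

    int main() {
        Signal<SyncParams>();  // OpenGL-style configuration
        Signal<AsyncParams>(); // Vulkan-style configuration
    }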

src/video_core/renderer_vulkan/vk_fence_manager.cpp

@@ -5,10 +5,12 @@
 #include "video_core/renderer_vulkan/vk_buffer_cache.h"
 #include "video_core/renderer_vulkan/vk_fence_manager.h"
+#include "video_core/renderer_vulkan/vk_query_cache.h"
 #include "video_core/renderer_vulkan/vk_scheduler.h"
 #include "video_core/renderer_vulkan/vk_texture_cache.h"
 #include "video_core/vulkan_common/vulkan_device.h"

 namespace Vulkan {

 InnerFence::InnerFence(Scheduler& scheduler_, bool is_stubbed_)

src/video_core/renderer_vulkan/vk_fence_manager.h

@@ -40,7 +40,16 @@
 };
 using Fence = std::shared_ptr<InnerFence>;

-using GenericFenceManager = VideoCommon::FenceManager<Fence, TextureCache, BufferCache, QueryCache>;
+struct FenceManagerParams {
+    using FenceType = Fence;
+    using BufferCacheType = BufferCache;
+    using TextureCacheType = TextureCache;
+    using QueryCacheType = QueryCache;
+
+    static constexpr bool HAS_ASYNC_CHECK = true;
+};
+
+using GenericFenceManager = VideoCommon::FenceManager<FenceManagerParams>;

 class FenceManager final : public GenericFenceManager {
 public:
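The Vulkan backend sets HAS_ASYNC_CHECK to true, so this is the configuration that actually starts the GPUFencingThread: SignalFence only queues the fence together with its pending operations, and the background thread waits on the fence, runs the deferred callbacks, and hands the fence to the delayed destruction ring.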