2022-04-23 10:59:50 +02:00
|
|
|
// SPDX-FileCopyrightText: Copyright 2018 yuzu Emulator Project
|
|
|
|
// SPDX-License-Identifier: GPL-2.0-or-later
|
2018-02-08 03:54:35 +01:00
|
|
|
|
|
|
|
#pragma once
|
|
|
|
|
2019-03-04 05:54:16 +01:00
|
|
|
#include <map>
|
2018-10-30 05:03:25 +01:00
|
|
|
#include <optional>
|
2020-07-26 06:16:21 +02:00
|
|
|
#include <vector>
|
2018-04-21 18:31:30 +02:00
|
|
|
|
2018-02-08 03:54:35 +01:00
|
|
|
#include "common/common_types.h"
|
|
|
|
|
2020-02-15 23:47:15 +01:00
|
|
|
namespace VideoCore {
|
|
|
|
class RasterizerInterface;
|
|
|
|
}
|
|
|
|
|
2019-07-09 08:17:44 +02:00
|
|
|
namespace Core {
|
|
|
|
class System;
|
|
|
|
}
|
|
|
|
|
2018-02-12 05:44:12 +01:00
|
|
|
namespace Tegra {
|
|
|
|
|
2020-07-26 06:16:21 +02:00
|
|
|
class PageEntry final {
|
|
|
|
public:
|
|
|
|
enum class State : u32 {
|
|
|
|
Unmapped = static_cast<u32>(-1),
|
|
|
|
Allocated = static_cast<u32>(-2),
|
2019-03-04 05:54:16 +01:00
|
|
|
};
|
|
|
|
|
2020-07-26 06:16:21 +02:00
|
|
|
constexpr PageEntry() = default;
|
2020-12-04 20:39:12 +01:00
|
|
|
constexpr PageEntry(State state_) : state{state_} {}
|
2020-07-26 06:16:21 +02:00
|
|
|
constexpr PageEntry(VAddr addr) : state{static_cast<State>(addr >> ShiftBits)} {}
|
|
|
|
|
2020-08-27 02:14:13 +02:00
|
|
|
[[nodiscard]] constexpr bool IsUnmapped() const {
|
2020-07-26 06:16:21 +02:00
|
|
|
return state == State::Unmapped;
|
|
|
|
}
|
|
|
|
|
2020-08-27 02:14:13 +02:00
|
|
|
[[nodiscard]] constexpr bool IsAllocated() const {
|
2020-07-26 06:16:21 +02:00
|
|
|
return state == State::Allocated;
|
|
|
|
}
|
|
|
|
|
2020-08-27 02:14:13 +02:00
|
|
|
[[nodiscard]] constexpr bool IsValid() const {
|
2020-07-26 06:16:21 +02:00
|
|
|
return !IsUnmapped() && !IsAllocated();
|
|
|
|
}
|
|
|
|
|
2020-08-27 02:14:13 +02:00
|
|
|
[[nodiscard]] constexpr VAddr ToAddress() const {
|
2020-07-26 06:16:21 +02:00
|
|
|
if (!IsValid()) {
|
|
|
|
return {};
|
|
|
|
}
|
|
|
|
|
|
|
|
return static_cast<VAddr>(state) << ShiftBits;
|
|
|
|
}
|
|
|
|
|
2020-08-27 02:14:13 +02:00
|
|
|
[[nodiscard]] constexpr PageEntry operator+(u64 offset) const {
|
2020-07-26 06:16:21 +02:00
|
|
|
// If this is a reserved value, offsets do not apply
|
|
|
|
if (!IsValid()) {
|
|
|
|
return *this;
|
|
|
|
}
|
|
|
|
return PageEntry{(static_cast<VAddr>(state) << ShiftBits) + offset};
|
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
static constexpr std::size_t ShiftBits{12};
|
|
|
|
|
|
|
|
State state{State::Unmapped};
|
2019-03-04 05:54:16 +01:00
|
|
|
};
|
2020-07-26 06:16:21 +02:00
|
|
|
static_assert(sizeof(PageEntry) == 4, "PageEntry is too large");
|
2019-03-04 05:54:16 +01:00
|
|
|
|
2018-02-08 03:54:35 +01:00
|
|
|
/// Manages the GPU's virtual address space: mapping/unmapping CPU memory into
/// GPU virtual addresses, translating addresses, and reading/writing guest
/// memory through those mappings. Definitions live in the matching .cpp file.
class MemoryManager final {
public:
    explicit MemoryManager(Core::System& system_);
    ~MemoryManager();

    /// Binds a renderer to the memory manager.
    void BindRasterizer(VideoCore::RasterizerInterface* rasterizer);

    /// Translates a GPU virtual address into its backing CPU address, if mapped.
    [[nodiscard]] std::optional<VAddr> GpuToCpuAddress(GPUVAddr addr) const;

    /// Sized variant of the translation above — presumably searches within
    /// [addr, addr + size); confirm exact semantics against the .cpp.
    [[nodiscard]] std::optional<VAddr> GpuToCpuAddress(GPUVAddr addr, std::size_t size) const;

    /// Reads one value of type T from GPU virtual memory.
    template <typename T>
    [[nodiscard]] T Read(GPUVAddr addr) const;

    /// Writes one value of type T to GPU virtual memory.
    template <typename T>
    void Write(GPUVAddr addr, T data);

    /// Returns a host pointer to the memory backing the GPU address (nullable).
    [[nodiscard]] u8* GetPointer(GPUVAddr addr);

    [[nodiscard]] const u8* GetPointer(GPUVAddr addr) const;

    /// Returns the number of bytes until the end of the memory map containing the given GPU address
    [[nodiscard]] size_t BytesToMapEnd(GPUVAddr gpu_addr) const noexcept;

    /**
     * ReadBlock and WriteBlock are full read and write operations over virtual
     * GPU Memory. It's important to use these when GPU memory may not be continuous
     * in the Host Memory counterpart. Note: These functions cause Host GPU Memory
     * Flushes and Invalidations, respectively to each operation.
     */
    void ReadBlock(GPUVAddr gpu_src_addr, void* dest_buffer, std::size_t size) const;

    void WriteBlock(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size);

    void CopyBlock(GPUVAddr gpu_dest_addr, GPUVAddr gpu_src_addr, std::size_t size);

    /**
     * ReadBlockUnsafe and WriteBlockUnsafe are special versions of ReadBlock and
     * WriteBlock respectively. In these versions, no flushing or invalidation is actually
     * done and their performance is similar to a memcpy. These functions can be used
     * in either of these two scenarios instead of their safe counterpart:
     * - Memory which is sure to never be represented in the Host GPU.
     * - Memory Managed by a Cache Manager. Example: Texture Flushing should use
     *   WriteBlockUnsafe instead of WriteBlock since it shouldn't invalidate the texture
     *   being flushed.
     */
    void ReadBlockUnsafe(GPUVAddr gpu_src_addr, void* dest_buffer, std::size_t size) const;

    void WriteBlockUnsafe(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size);

    /**
     * Checks if a gpu region can be simply read with a pointer.
     */
    [[nodiscard]] bool IsGranularRange(GPUVAddr gpu_addr, std::size_t size) const;

    /**
     * Checks if a gpu region is mapped by a single range of cpu addresses.
     * (Name keeps the historical "Continous" spelling; renaming would break callers.)
     */
    [[nodiscard]] bool IsContinousRange(GPUVAddr gpu_addr, std::size_t size) const;

    /**
     * Checks if a gpu region is mapped entirely.
     */
    [[nodiscard]] bool IsFullyMappedRange(GPUVAddr gpu_addr, std::size_t size) const;

    /**
     * Returns a vector with all the subranges of cpu addresses mapped beneath.
     * if the region is continuous, a single pair will be returned. If it's unmapped, an empty
     * vector will be returned;
     */
    std::vector<std::pair<GPUVAddr, std::size_t>> GetSubmappedRange(GPUVAddr gpu_addr,
                                                                    std::size_t size) const;

    /// Maps an existing CPU range at a caller-chosen GPU address.
    [[nodiscard]] GPUVAddr Map(VAddr cpu_addr, GPUVAddr gpu_addr, std::size_t size);

    /// Maps a CPU range at a freshly chosen GPU address with the given alignment.
    [[nodiscard]] GPUVAddr MapAllocate(VAddr cpu_addr, std::size_t size, std::size_t align);

    /// Like MapAllocate, but the GPU address is picked from the low (32-bit) region.
    [[nodiscard]] GPUVAddr MapAllocate32(VAddr cpu_addr, std::size_t size);

    /// Reserves address space at a fixed GPU address; nullopt signals failure.
    [[nodiscard]] std::optional<GPUVAddr> AllocateFixed(GPUVAddr gpu_addr, std::size_t size);

    /// Reserves address space at a freshly chosen, aligned GPU address.
    [[nodiscard]] GPUVAddr Allocate(std::size_t size, std::size_t align);

    void Unmap(GPUVAddr gpu_addr, std::size_t size);

    void FlushRegion(GPUVAddr gpu_addr, size_t size) const;

private:
    [[nodiscard]] PageEntry GetPageEntry(GPUVAddr gpu_addr) const;

    void SetPageEntry(GPUVAddr gpu_addr, PageEntry page_entry, std::size_t size = page_size);

    GPUVAddr UpdateRange(GPUVAddr gpu_addr, PageEntry page_entry, std::size_t size);

    [[nodiscard]] std::optional<GPUVAddr> FindFreeRange(std::size_t size, std::size_t align,
                                                        bool start_32bit_address = false) const;

    void TryLockPage(PageEntry page_entry, std::size_t size);

    void TryUnlockPage(PageEntry page_entry, std::size_t size);

    // Shared implementations behind the safe/unsafe block APIs; is_safe selects
    // whether cache flush/invalidate handling is performed.
    void ReadBlockImpl(GPUVAddr gpu_src_addr, void* dest_buffer, std::size_t size,
                       bool is_safe) const;

    void WriteBlockImpl(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size,
                        bool is_safe);

    /// Index into page_table for the page containing gpu_addr.
    [[nodiscard]] static constexpr std::size_t PageEntryIndex(GPUVAddr gpu_addr) {
        return (gpu_addr >> page_bits) & page_table_mask;
    }

    // 40-bit GPU address space; general allocations start above 4 GiB, and the
    // dedicated low region (MapAllocate32) starts at 64 KiB.
    static constexpr u64 address_space_size = 1ULL << 40;

    static constexpr u64 address_space_start = 1ULL << 32;

    static constexpr u64 address_space_start_low = 1ULL << 16;

    // 64 KiB pages (page_bits = 16).
    static constexpr u64 page_bits{16};

    static constexpr u64 page_size{1 << page_bits};

    static constexpr u64 page_mask{page_size - 1};

    // Flat page table with 2^24 entries, covering page_bits + page_table_bits = 40 bits.
    static constexpr u64 page_table_bits{24};

    static constexpr u64 page_table_size{1 << page_table_bits};

    static constexpr u64 page_table_mask{page_table_size - 1};

    Core::System& system;

    // Non-owning; set via BindRasterizer, null until then.
    VideoCore::RasterizerInterface* rasterizer = nullptr;

    // One PageEntry per 64 KiB page, indexed by PageEntryIndex.
    std::vector<PageEntry> page_table;

    // (start GPU address, size) of each live map; presumably what BytesToMapEnd
    // searches — confirm against the .cpp.
    using MapRange = std::pair<GPUVAddr, size_t>;

    std::vector<MapRange> map_ranges;

    // CPU ranges pending cache invalidation — semantics defined in the .cpp.
    std::vector<std::pair<VAddr, std::size_t>> cache_invalidate_queue;
};
|
|
|
|
|
2018-02-12 05:44:12 +01:00
|
|
|
} // namespace Tegra
|