
core: hle: kernel: k_slab_heap: Refresh to use guest allocations.

bunnei 2022-03-11 16:29:53 -08:00
parent a25cd4bb4b
commit 15d9b0418f
2 changed files with 114 additions and 132 deletions
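
For context, the refreshed interface ends up being driven roughly as in the sketch below. This is illustrative only: the object type, the guest-memory pointer, and the region size are hypothetical placeholders rather than values taken from this commit.

// Hypothetical usage sketch of the post-commit interface (not part of the diff).
// `kernel`, `guest_slab`, and `guest_slab_size` are assumed to exist elsewhere:
// a KernelCore instance and a reserved block of emulated guest memory.
Kernel::KSlabHeap<Kernel::KEvent> event_slab;

// Carve the guest region into sizeof(KEvent)-sized slots and push each one onto
// the intrusive free list (see KSlabHeapBase::Initialize below).
event_slab.Initialize(guest_slab, guest_slab_size);

// Pop a free slot and construct the object in place via std::construct_at.
Kernel::KEvent* event = event_slab.Allocate(kernel);

// Freeing simply pushes the slot back onto the free list; no host new/delete involved.
event_slab.Free(event);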

src/core/hle/kernel/k_slab_heap.h

@@ -16,39 +16,34 @@ class KernelCore;
 namespace impl {
 
-class KSlabHeapImpl final {
-public:
+class KSlabHeapImpl {
     YUZU_NON_COPYABLE(KSlabHeapImpl);
     YUZU_NON_MOVEABLE(KSlabHeapImpl);
 
+public:
     struct Node {
         Node* next{};
     };
 
+public:
     constexpr KSlabHeapImpl() = default;
-    constexpr ~KSlabHeapImpl() = default;
 
-    void Initialize(std::size_t size) {
-        ASSERT(head == nullptr);
-        obj_size = size;
-    }
-
-    constexpr std::size_t GetObjectSize() const {
-        return obj_size;
+    void Initialize() {
+        ASSERT(m_head == nullptr);
     }
 
     Node* GetHead() const {
-        return head;
+        return m_head;
     }
 
     void* Allocate() {
-        Node* ret = head.load();
+        Node* ret = m_head.load();
 
         do {
             if (ret == nullptr) {
                 break;
             }
-        } while (!head.compare_exchange_weak(ret, ret->next));
+        } while (!m_head.compare_exchange_weak(ret, ret->next));
 
         return ret;
     }
@@ -56,170 +51,157 @@ public:
     void Free(void* obj) {
         Node* node = static_cast<Node*>(obj);
 
-        Node* cur_head = head.load();
+        Node* cur_head = m_head.load();
         do {
             node->next = cur_head;
-        } while (!head.compare_exchange_weak(cur_head, node));
+        } while (!m_head.compare_exchange_weak(cur_head, node));
     }
 
 private:
-    std::atomic<Node*> head{};
-    std::size_t obj_size{};
+    std::atomic<Node*> m_head{};
 };
 
 } // namespace impl
 
-class KSlabHeapBase {
-public:
+template <bool SupportDynamicExpansion>
+class KSlabHeapBase : protected impl::KSlabHeapImpl {
     YUZU_NON_COPYABLE(KSlabHeapBase);
     YUZU_NON_MOVEABLE(KSlabHeapBase);
 
+private:
+    size_t m_obj_size{};
+    uintptr_t m_peak{};
+    uintptr_t m_start{};
+    uintptr_t m_end{};
+
+private:
+    void UpdatePeakImpl(uintptr_t obj) {
+        static_assert(std::atomic_ref<uintptr_t>::is_always_lock_free);
+        std::atomic_ref<uintptr_t> peak_ref(m_peak);
+
+        const uintptr_t alloc_peak = obj + this->GetObjectSize();
+        uintptr_t cur_peak = m_peak;
+        do {
+            if (alloc_peak <= cur_peak) {
+                break;
+            }
+        } while (!peak_ref.compare_exchange_strong(cur_peak, alloc_peak));
+    }
+
+public:
     constexpr KSlabHeapBase() = default;
-    constexpr ~KSlabHeapBase() = default;
 
-    constexpr bool Contains(uintptr_t addr) const {
-        return start <= addr && addr < end;
+    bool Contains(uintptr_t address) const {
+        return m_start <= address && address < m_end;
     }
 
-    constexpr std::size_t GetSlabHeapSize() const {
-        return (end - start) / GetObjectSize();
-    }
-
-    constexpr std::size_t GetObjectSize() const {
-        return impl.GetObjectSize();
-    }
-
-    constexpr uintptr_t GetSlabHeapAddress() const {
-        return start;
-    }
-
-    std::size_t GetObjectIndexImpl(const void* obj) const {
-        return (reinterpret_cast<uintptr_t>(obj) - start) / GetObjectSize();
-    }
-
-    std::size_t GetPeakIndex() const {
-        return GetObjectIndexImpl(reinterpret_cast<const void*>(peak));
-    }
-
-    void* AllocateImpl() {
-        return impl.Allocate();
-    }
-
-    void FreeImpl(void* obj) {
-        // Don't allow freeing an object that wasn't allocated from this heap
-        ASSERT(Contains(reinterpret_cast<uintptr_t>(obj)));
-        impl.Free(obj);
-    }
-
-    void InitializeImpl(std::size_t obj_size, void* memory, std::size_t memory_size) {
-        // Ensure we don't initialize a slab using null memory
+    void Initialize(size_t obj_size, void* memory, size_t memory_size) {
+        // Ensure we don't initialize a slab using null memory.
         ASSERT(memory != nullptr);
 
-        // Initialize the base allocator
-        impl.Initialize(obj_size);
+        // Set our object size.
+        m_obj_size = obj_size;
 
-        // Set our tracking variables
-        const std::size_t num_obj = (memory_size / obj_size);
-        start = reinterpret_cast<uintptr_t>(memory);
-        end = start + num_obj * obj_size;
-        peak = start;
+        // Initialize the base allocator.
+        KSlabHeapImpl::Initialize();
 
-        // Free the objects
-        u8* cur = reinterpret_cast<u8*>(end);
+        // Set our tracking variables.
+        const size_t num_obj = (memory_size / obj_size);
+        m_start = reinterpret_cast<uintptr_t>(memory);
+        m_end = m_start + num_obj * obj_size;
+        m_peak = m_start;
 
-        for (std::size_t i{}; i < num_obj; i++) {
+        // Free the objects.
+        u8* cur = reinterpret_cast<u8*>(m_end);
+
+        for (size_t i = 0; i < num_obj; i++) {
             cur -= obj_size;
-            impl.Free(cur);
+            KSlabHeapImpl::Free(cur);
         }
     }
 
-private:
-    using Impl = impl::KSlabHeapImpl;
+    size_t GetSlabHeapSize() const {
+        return (m_end - m_start) / this->GetObjectSize();
+    }
 
-    Impl impl;
-    uintptr_t peak{};
-    uintptr_t start{};
-    uintptr_t end{};
+    size_t GetObjectSize() const {
+        return m_obj_size;
+    }
+
+    void* Allocate() {
+        void* obj = KSlabHeapImpl::Allocate();
+
+        return obj;
+    }
+
+    void Free(void* obj) {
+        // Don't allow freeing an object that wasn't allocated from this heap.
+        const bool contained = this->Contains(reinterpret_cast<uintptr_t>(obj));
+        ASSERT(contained);
+        KSlabHeapImpl::Free(obj);
+    }
+
+    size_t GetObjectIndex(const void* obj) const {
+        if constexpr (SupportDynamicExpansion) {
+            if (!this->Contains(reinterpret_cast<uintptr_t>(obj))) {
+                return std::numeric_limits<size_t>::max();
+            }
+        }
+
+        return (reinterpret_cast<uintptr_t>(obj) - m_start) / this->GetObjectSize();
+    }
+
+    size_t GetPeakIndex() const {
+        return this->GetObjectIndex(reinterpret_cast<const void*>(m_peak));
+    }
+
+    uintptr_t GetSlabHeapAddress() const {
+        return m_start;
+    }
+
+    size_t GetNumRemaining() const {
+        // Only calculate the number of remaining objects under debug configuration.
+        return 0;
+    }
 };
 
 template <typename T>
-class KSlabHeap final : public KSlabHeapBase {
+class KSlabHeap final : public KSlabHeapBase<false> {
+private:
+    using BaseHeap = KSlabHeapBase<false>;
+
 public:
-    enum class AllocationType {
-        Host,
-        Guest,
-    };
+    constexpr KSlabHeap() = default;
 
-    explicit constexpr KSlabHeap(AllocationType allocation_type_ = AllocationType::Host)
-        : KSlabHeapBase(), allocation_type{allocation_type_} {}
-
-    void Initialize(void* memory, std::size_t memory_size) {
-        if (allocation_type == AllocationType::Guest) {
-            InitializeImpl(sizeof(T), memory, memory_size);
-        }
+    void Initialize(void* memory, size_t memory_size) {
+        BaseHeap::Initialize(sizeof(T), memory, memory_size);
     }
 
     T* Allocate() {
-        switch (allocation_type) {
-        case AllocationType::Host:
-            // Fallback for cases where we do not yet support allocating guest memory from the slab
-            // heap, such as for kernel memory regions.
-            return new T;
+        T* obj = static_cast<T*>(BaseHeap::Allocate());
 
-        case AllocationType::Guest:
-            T* obj = static_cast<T*>(AllocateImpl());
-            if (obj != nullptr) {
-                new (obj) T();
-            }
-            return obj;
+        if (obj != nullptr) [[likely]] {
+            std::construct_at(obj);
         }
 
-        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
-        return nullptr;
+        return obj;
     }
 
-    T* AllocateWithKernel(KernelCore& kernel) {
-        switch (allocation_type) {
-        case AllocationType::Host:
-            // Fallback for cases where we do not yet support allocating guest memory from the slab
-            // heap, such as for kernel memory regions.
-            return new T(kernel);
+    T* Allocate(KernelCore& kernel) {
+        T* obj = static_cast<T*>(BaseHeap::Allocate());
 
-        case AllocationType::Guest:
-            T* obj = static_cast<T*>(AllocateImpl());
-            if (obj != nullptr) {
-                new (obj) T(kernel);
-            }
-            return obj;
+        if (obj != nullptr) [[likely]] {
+            std::construct_at(obj, kernel);
         }
 
-        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
-        return nullptr;
+        return obj;
     }
 
     void Free(T* obj) {
-        switch (allocation_type) {
-        case AllocationType::Host:
-            // Fallback for cases where we do not yet support allocating guest memory from the slab
-            // heap, such as for kernel memory regions.
-            delete obj;
-            return;
-
-        case AllocationType::Guest:
-            FreeImpl(obj);
-            return;
-        }
-
-        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
+        BaseHeap::Free(obj);
     }
 
-    constexpr std::size_t GetObjectIndex(const T* obj) const {
-        return GetObjectIndexImpl(obj);
+    size_t GetObjectIndex(const T* obj) const {
+        return BaseHeap::GetObjectIndex(obj);
     }
-
-private:
-    const AllocationType allocation_type;
 };
 
 } // namespace Kernel
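
The core of the file above is the lock-free intrusive free list in impl::KSlabHeapImpl: every free slot stores a pointer to the next free slot, and Allocate/Free are single compare-and-swap loops on the list head. The standalone sketch below demonstrates the same technique over an ordinary buffer; the names and sizes are invented for illustration, and it is not yuzu code.

// Self-contained illustration of the CAS-based intrusive free list (not yuzu code).
#include <atomic>
#include <cassert>
#include <cstddef>
#include <vector>

struct FreeNode {
    FreeNode* next{};
};

class TinySlab {
public:
    // Carve `count` slots of `obj_size` bytes out of `memory` and push each one
    // onto the free list, mirroring KSlabHeapBase::Initialize above.
    void Initialize(void* memory, std::size_t obj_size, std::size_t count) {
        assert(obj_size >= sizeof(FreeNode));
        auto* cur = static_cast<unsigned char*>(memory) + obj_size * count;
        for (std::size_t i = 0; i < count; ++i) {
            cur -= obj_size;
            Free(cur);
        }
    }

    // Pop the head slot; retry the CAS if another thread won the race.
    void* Allocate() {
        FreeNode* ret = head.load();
        do {
            if (ret == nullptr) {
                break; // slab exhausted
            }
        } while (!head.compare_exchange_weak(ret, ret->next));
        return ret;
    }

    // Push the slot back as the new list head.
    void Free(void* obj) {
        auto* node = static_cast<FreeNode*>(obj);
        FreeNode* cur_head = head.load();
        do {
            node->next = cur_head;
        } while (!head.compare_exchange_weak(cur_head, node));
    }

private:
    std::atomic<FreeNode*> head{};
};

int main() {
    std::vector<unsigned char> backing(64 * 16); // 16 slots of 64 bytes each
    TinySlab slab;
    slab.Initialize(backing.data(), 64, 16);

    void* a = slab.Allocate();
    void* b = slab.Allocate();
    assert(a != nullptr && b != nullptr && a != b);

    slab.Free(a);
    slab.Free(b);
}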

src/core/hle/kernel/slab_helpers.h

@@ -59,7 +59,7 @@ class KAutoObjectWithSlabHeapAndContainer : public Base {
 
 private:
     static Derived* Allocate(KernelCore& kernel) {
-        return kernel.SlabHeap<Derived>().AllocateWithKernel(kernel);
+        return kernel.SlabHeap<Derived>().Allocate(kernel);
     }
 
     static void Free(KernelCore& kernel, Derived* obj) {
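
One detail shared by both files: the new Allocate() paths construct T in the popped slot with std::construct_at (C++20) rather than placement new, and the KernelCore& overload simply forwards the constructor argument. A minimal self-contained illustration of that pattern, with an invented Widget type standing in for a kernel object:

// Illustration of in-place construction with std::construct_at (invented types, not yuzu code).
#include <memory>

struct Widget {
    int id;
    explicit Widget(int id_) : id{id_} {}
};

int main() {
    // Raw, suitably aligned storage standing in for a slab slot.
    alignas(Widget) unsigned char slot[sizeof(Widget)];

    // Equivalent to `new (slot) Widget(42)`; this is how the refreshed slab heap
    // constructs T (optionally forwarding KernelCore&) into guest-backed slots.
    Widget* w = std::construct_at(reinterpret_cast<Widget*>(slot), 42);
    const int value = w->id;

    // Objects constructed in place are destroyed explicitly, not deleted.
    std::destroy_at(w);

    return value == 42 ? 0 : 1;
}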