forked from suyu/suyu

kernel: fix single-core preemption points

Liam 2022-07-07 12:34:46 -04:00
parent 21945ae127
commit da07e13e07
6 changed files with 27 additions and 39 deletions

View file

@@ -144,39 +144,25 @@ void CpuManager::SingleCoreRunIdleThread() {
 }
 
 void CpuManager::PreemptSingleCore(bool from_running_environment) {
-    {
-        auto& kernel = system.Kernel();
-        auto& scheduler = kernel.Scheduler(current_core);
-        Kernel::KThread* current_thread = scheduler.GetSchedulerCurrentThread();
-        if (idle_count >= 4 || from_running_environment) {
-            if (!from_running_environment) {
-                system.CoreTiming().Idle();
-                idle_count = 0;
-            }
-            kernel.SetIsPhantomModeForSingleCore(true);
-            system.CoreTiming().Advance();
-            kernel.SetIsPhantomModeForSingleCore(false);
-        }
-        current_core.store((current_core + 1) % Core::Hardware::NUM_CPU_CORES);
-        system.CoreTiming().ResetTicks();
-        scheduler.Unload(scheduler.GetSchedulerCurrentThread());
-
-        auto& next_scheduler = kernel.Scheduler(current_core);
-
-        // Disable dispatch. We're about to preempt this thread.
-        Kernel::KScopedDisableDispatch dd{kernel};
-        Common::Fiber::YieldTo(current_thread->GetHostContext(), *next_scheduler.GetSwitchFiber());
-    }
+    auto& kernel = system.Kernel();
+
+    if (idle_count >= 4 || from_running_environment) {
+        if (!from_running_environment) {
+            system.CoreTiming().Idle();
+            idle_count = 0;
+        }
+        kernel.SetIsPhantomModeForSingleCore(true);
+        system.CoreTiming().Advance();
+        kernel.SetIsPhantomModeForSingleCore(false);
+    }
+
+    current_core.store((current_core + 1) % Core::Hardware::NUM_CPU_CORES);
+    system.CoreTiming().ResetTicks();
+    kernel.Scheduler(current_core).PreemptSingleCore();
 
     // We've now been scheduled again, and we may have exchanged schedulers.
     // Reload the scheduler in case it's different.
-    {
-        auto& scheduler = system.Kernel().Scheduler(current_core);
-        scheduler.Reload(scheduler.GetSchedulerCurrentThread());
-        if (!scheduler.IsIdle()) {
-            idle_count = 0;
-        }
+    if (!kernel.Scheduler(current_core).IsIdle()) {
+        idle_count = 0;
     }
 }
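
For context, the rewritten CpuManager::PreemptSingleCore above now does three things at every single-core preemption point: advance host core timing (under phantom mode) once the guest has idled through several consecutive preemptions or when called from a running environment, rotate execution round-robin to the next emulated core, and reset the idle streak once real work is scheduled again. The sketch below only models that flow with hypothetical stand-ins (ToyCpuManager, NumCpuCores); it is not emulator code.

// Hypothetical stand-ins; not the emulator's real CpuManager.
#include <atomic>
#include <cstddef>
#include <cstdio>

constexpr std::size_t NumCpuCores = 4;

struct ToyCpuManager {
    std::atomic<std::size_t> current_core{0};
    std::size_t idle_count = 0;

    // One single-core preemption point. In the real code the idle counter is
    // maintained by the idle-thread loop; it is folded in here so the sketch
    // stays self-contained.
    void PreemptSingleCore(bool from_running_environment, bool core_was_idle) {
        if (idle_count >= 4 || from_running_environment) {
            if (!from_running_environment) {
                idle_count = 0;
            }
            // Bracketed by "phantom mode" in the real code, so scheduler
            // asserts know no guest thread is genuinely running here.
            std::puts("advance host core timing");
        }

        // Rotate to the next emulated core, round-robin.
        current_core.store((current_core.load() + 1) % NumCpuCores);
        std::printf("yield to the scheduler of core %zu\n", current_core.load());

        // Once scheduled again, reset the idle streak if there is real work.
        if (core_was_idle) {
            ++idle_count;
        } else {
            idle_count = 0;
        }
    }
};

int main() {
    ToyCpuManager cpu;
    for (int i = 0; i < 8; ++i) {
        cpu.PreemptSingleCore(/*from_running_environment=*/false,
                              /*core_was_idle=*/true);
    }
}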

View file

@@ -5,7 +5,6 @@
 #include <array>
 #include <atomic>
-#include <csetjmp>
 #include <functional>
 #include <memory>
 #include <thread>

View file

@@ -103,7 +103,20 @@ void KScheduler::ScheduleOnInterrupt() {
     GetCurrentThread(kernel).EnableDispatch();
 }
 
+void KScheduler::PreemptSingleCore() {
+    GetCurrentThread(kernel).DisableDispatch();
+
+    auto* thread = GetCurrentThreadPointer(kernel);
+    auto& previous_scheduler = kernel.Scheduler(thread->GetCurrentCore());
+    previous_scheduler.Unload(thread);
+
+    Common::Fiber::YieldTo(thread->GetHostContext(), *m_switch_fiber);
+
+    GetCurrentThread(kernel).EnableDispatch();
+}
+
 void KScheduler::RescheduleCurrentCore() {
+    ASSERT(!kernel.IsPhantomModeForSingleCore());
     ASSERT(GetCurrentThread(kernel).GetDisableDispatchCount() == 1);
 
     GetCurrentThread(kernel).EnableDispatch();
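
The new KScheduler::PreemptSingleCore above brackets a context switch with DisableDispatch/EnableDispatch: it unloads the current thread from its previous scheduler, yields to the scheduler's switch fiber, and re-enables dispatch once it is eventually resumed. Common::Fiber is a stackful fiber; the sketch below models only the symmetric "suspend me, resume you" handoff, using C++20 coroutines and hypothetical names (ToyFiber, GuestThread, SwitchFiber).

// Hypothetical model of the fiber handoff; compile as C++20.
#include <coroutine>
#include <cstdio>

// Minimal coroutine "fiber" whose only job is to be suspended and resumed.
struct ToyFiber {
    struct promise_type {
        ToyFiber get_return_object() {
            return ToyFiber{std::coroutine_handle<promise_type>::from_promise(*this)};
        }
        std::suspend_always initial_suspend() noexcept { return {}; }
        std::suspend_always final_suspend() noexcept { return {}; }
        void return_void() {}
        void unhandled_exception() {}
    };
    std::coroutine_handle<promise_type> handle;
};

// Awaitable modelling a YieldTo-style switch: suspend the caller and hand
// control straight to the target context (symmetric transfer).
struct YieldTo {
    std::coroutine_handle<> target;
    bool await_ready() const noexcept { return false; }
    std::coroutine_handle<> await_suspend(std::coroutine_handle<>) const noexcept {
        return target;
    }
    void await_resume() const noexcept {}
};

std::coroutine_handle<> g_guest;   // the "current thread" context
std::coroutine_handle<> g_switch;  // the scheduler's switch context

ToyFiber SwitchFiber() {
    // Pretend to pick the next thread; here it is always the same guest.
    std::puts("switch fiber: select next thread, then yield back");
    co_await YieldTo{g_guest};
}

ToyFiber GuestThread() {
    std::puts("guest: disable dispatch, unload self, yield to switch fiber");
    co_await YieldTo{g_switch};
    std::puts("guest: resumed, enable dispatch, keep running");
}

int main() {
    ToyFiber sw = SwitchFiber();
    ToyFiber guest = GuestThread();
    g_switch = sw.handle;
    g_guest = guest.handle;

    guest.handle.resume();  // run the guest through one preemption round-trip

    guest.handle.destroy();
    sw.handle.destroy();
}

Running it prints the guest line, then the switch-fiber line, then the guest's resumed line, mirroring the unload, yield, resume order in the hunk above.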

View file

@@ -49,6 +49,7 @@ public:
     void SetInterruptTaskRunnable();
     void RequestScheduleOnInterrupt();
+    void PreemptSingleCore();
 
     u64 GetIdleCount() {
        return m_state.idle_count;
@@ -62,10 +63,6 @@ public:
        return m_current_thread.load() == m_idle_thread;
    }
 
-    std::shared_ptr<Common::Fiber> GetSwitchFiber() {
-        return m_switch_fiber;
-    }
-
     KThread* GetPreviousThread() const {
        return m_state.prev_thread;
    }

View file

@@ -1204,12 +1204,6 @@ KScopedDisableDispatch::~KScopedDisableDispatch() {
         return;
     }
 
-    // Skip the reschedule if single-core.
-    if (!Settings::values.use_multi_core.GetValue()) {
-        GetCurrentThread(kernel).EnableDispatch();
-        return;
-    }
-
     if (GetCurrentThread(kernel).GetDisableDispatchCount() <= 1) {
         auto scheduler = kernel.CurrentScheduler();
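
With the single-core early-out removed above, the KScopedDisableDispatch destructor takes the same path on every configuration: when the disable-dispatch count drops back to the outermost level, the current scheduler gets a chance to reschedule. The sketch below shows that RAII pattern with hypothetical ToyKernel/ToyThread types; it is not the emulator's implementation.

// Hypothetical ToyKernel/ToyThread types; not the emulator's real classes.
#include <cstdio>

struct ToyThread {
    int disable_dispatch_count = 0;
};

struct ToyKernel {
    ToyThread current_thread;
    void RescheduleCurrentCore() { std::puts("reschedule current core"); }
};

class ScopedDisableDispatch {
public:
    explicit ScopedDisableDispatch(ToyKernel& kernel) : kernel_{kernel} {
        // Entering the guarded region: block dispatch for the current thread.
        ++kernel_.current_thread.disable_dispatch_count;
    }

    ~ScopedDisableDispatch() {
        ToyThread& thread = kernel_.current_thread;
        if (thread.disable_dispatch_count <= 1) {
            // Outermost guard released: re-enable dispatch and let the
            // scheduler run, regardless of the core configuration (the
            // single-core early-out the commit removes would skip this).
            --thread.disable_dispatch_count;
            kernel_.RescheduleCurrentCore();
        } else {
            // Nested guard: just drop one level.
            --thread.disable_dispatch_count;
        }
    }

private:
    ToyKernel& kernel_;
};

int main() {
    ToyKernel kernel;
    {
        ScopedDisableDispatch outer{kernel};
        {
            ScopedDisableDispatch inner{kernel};  // nested: no reschedule yet
        }
    }  // outermost guard destroyed: reschedule happens here
}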

View file

@@ -110,7 +110,6 @@ void SetCurrentThread(KernelCore& kernel, KThread* thread);
 [[nodiscard]] KThread* GetCurrentThreadPointer(KernelCore& kernel);
 [[nodiscard]] KThread& GetCurrentThread(KernelCore& kernel);
 [[nodiscard]] s32 GetCurrentCoreId(KernelCore& kernel);
-size_t CaptureBacktrace(void** buffer, size_t max);
 
 class KThread final : public KAutoObjectWithSlabHeapAndContainer<KThread, KWorkerTask>,
                       public boost::intrusive::list_base_hook<> {