kern: implement improved [new page tables are zero] invariant

Michael Scire 2021-06-17 13:03:46 -07:00
parent 25305257d6
commit 4892ffae15
5 changed files with 33 additions and 12 deletions

View file

@@ -64,8 +64,11 @@ namespace ams::kern {
     m_page_bitmap.Initialize(management_ptr, m_count);
     /* Free the pages to the bitmap. */
-    std::memset(GetPointer<PageBuffer>(m_address), 0, m_count * sizeof(PageBuffer));
     for (size_t i = 0; i < m_count; i++) {
+        /* Ensure the freed page is all-zero. */
+        cpu::ClearPageToZero(GetPointer<PageBuffer>(m_address) + i);
+        /* Set the bit for the free page. */
         m_page_bitmap.SetBit(i);
     }
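
For context, cpu::ClearPageToZero zeroes exactly one page, so moving the clearing inside the loop replaces the single bulk memset with a per-page scrub that the same helper can also perform at every free site. A hedged, portable stand-in (the kernel's real helper presumably uses an architecture-specific fast path; the 4 KiB PageSize below is assumed):

#include <cstddef>
#include <cstring>

constexpr std::size_t PageSize = 0x1000; /* assumed 4 KiB pages */

/* Hypothetical portable stand-in for cpu::ClearPageToZero. */
inline void ClearPageToZero(void *page) {
    /* Zero one full page; same net effect as the removed bulk memset, but
     * applied per page so the clearing can also happen at each free site. */
    std::memset(page, 0, PageSize);
}
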
@@ -99,6 +102,9 @@ namespace ams::kern {
 }
 void Free(PageBuffer *pb) {
+    /* Ensure all pages in the heap are zero. */
+    cpu::ClearPageToZero(pb);
     /* Take the lock. */
     KScopedInterruptDisable di;
     KScopedSpinLock lk(m_lock);
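
Taken together, these two hunks establish the invariant that every page sitting in this allocator's free pool is all-zero: pages are scrubbed when they enter the pool (during initialization and in Free()), so nothing on the allocation path has to clear them. A minimal sketch of that zero-on-free pattern, with hypothetical names and none of the kernel's locking, interrupt masking, or bitmap bookkeeping:

#include <cstddef>
#include <cstring>
#include <vector>

constexpr std::size_t PageSize = 0x1000;

class ZeroedPagePool {
    private:
        std::vector<void *> m_free_pages;
    public:
        /* Mirrors the Free() above: scrub the page before it re-enters the pool. */
        void Free(void *page) {
            std::memset(page, 0, PageSize);
            m_free_pages.push_back(page);
        }

        /* No clearing needed here: by construction every pooled page is zero. */
        void *Allocate() {
            if (m_free_pages.empty()) {
                return nullptr;
            }
            void *page = m_free_pages.back();
            m_free_pages.pop_back();
            return page;
        }
};

The clearing cost moves to the free side, and the allocation side can rely on the pooled pages already being in the required state instead of re-clearing them.
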

View file

@@ -22,7 +22,7 @@
 namespace ams::kern {
-    template<typename T>
+    template<typename T, bool ClearNode = false>
     class KDynamicSlabHeap {
         NON_COPYABLE(KDynamicSlabHeap);
         NON_MOVEABLE(KDynamicSlabHeap);
@@ -97,6 +97,13 @@ namespace ams::kern {
 T *Allocate() {
     T *allocated = reinterpret_cast<T *>(this->GetImpl()->Allocate());
+    /* If we successfully allocated and we should clear the node, do so. */
+    if constexpr (ClearNode) {
+        if (AMS_LIKELY(allocated != nullptr)) {
+            reinterpret_cast<Impl::Node *>(allocated)->next = nullptr;
+        }
+    }
     /* If we fail to allocate, try to get a new page from our next allocator. */
     if (AMS_UNLIKELY(allocated == nullptr)) {
         if (m_page_allocator != nullptr) {
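
The heap's free list is intrusive: while an object is free, its first bytes are reused as an Impl::Node holding the next-pointer, so a freshly popped object still contains that stale link even if the rest of its page is zero. The new ClearNode parameter (defaulting to false, presumably so existing instantiations keep their old behavior) makes Allocate() overwrite the link with nullptr, so heaps instantiated with ClearNode = true hand back fully zeroed storage. A simplified sketch of that mechanism, with hypothetical names rather than the kernel's Impl:

/* Simplified model of an intrusive free list: the link is stored inside the
 * freed object itself. */
struct FreeNode {
    FreeNode *next;
};

template<bool ClearNode>
void *PopFreeObject(FreeNode *&head) {
    FreeNode * const n = head;
    if (n != nullptr) {
        head = n->next;
        if constexpr (ClearNode) {
            /* Erase the stale link so the object's first bytes are zero again. */
            n->next = nullptr;
        }
    }
    return n;
}
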

View file

@@ -107,8 +107,11 @@ namespace ams::kern {
 Node *Peek() const { return m_root; }
 Node *Pop() {
-    Node *r = m_root;
-    m_root = m_root->m_next;
+    Node * const r = m_root;
+    m_root = r->m_next;
+    r->m_next = nullptr;
     return r;
 }
 };
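
Another of the kernel's intrusive free lists gets the same treatment: Pop() now erases the link it just consumed, so a node that was all-zero when it was pushed is all-zero again the moment it leaves the list. A small self-contained illustration (hypothetical harness, not kernel code; object-lifetime formalities around the reinterpret_cast are elided):

#include <cassert>

struct Node { Node *m_next; };

struct FreeList {
    Node *m_root = nullptr;

    void Push(Node *n) {
        n->m_next = m_root;
        m_root = n;
    }

    Node *Pop() {
        Node * const r = m_root;
        m_root = r->m_next;
        r->m_next = nullptr; /* as in the hunk above: do not leak the stale link */
        return r;
    }
};

int main() {
    /* Two all-zero "pages"; pushing the second links it to the first,
     * dirtying its first bytes. */
    alignas(alignof(Node)) unsigned char page_a[0x1000] = {};
    alignas(alignof(Node)) unsigned char page_b[0x1000] = {};

    FreeList list;
    list.Push(reinterpret_cast<Node *>(page_a));
    list.Push(reinterpret_cast<Node *>(page_b));

    Node *popped = list.Pop(); /* returns the page_b node */
    assert(popped == reinterpret_cast<Node *>(page_b));

    /* Because Pop() cleared m_next, page_b is all-zero again. */
    for (unsigned char byte : page_b) {
        assert(byte == 0);
    }
    return 0;
}
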

View file

@@ -25,18 +25,20 @@ namespace ams::kern {
 class PageTablePage {
     private:
         u8 m_buffer[PageSize];
+    public:
+        ALWAYS_INLINE PageTablePage() { /* Do not initialize anything. */ }
 };
 static_assert(sizeof(PageTablePage) == PageSize);
 }
-class KPageTableManager : public KDynamicSlabHeap<impl::PageTablePage> {
+class KPageTableManager : public KDynamicSlabHeap<impl::PageTablePage, true> {
     public:
         using RefCount = u16;
         static constexpr size_t PageTableSize = sizeof(impl::PageTablePage);
         static_assert(PageTableSize == PageSize);
     private:
-        using BaseHeap = KDynamicSlabHeap<impl::PageTablePage>;
+        using BaseHeap = KDynamicSlabHeap<impl::PageTablePage, true>;
     private:
         RefCount *m_ref_counts;
     public:
@@ -72,9 +74,6 @@ namespace ams::kern {
 }
 void Free(KVirtualAddress addr) {
-    /* Ensure all pages in the heap are zero. */
-    cpu::ClearPageToZero(GetVoidPointer(addr));
     /* Free the page. */
     BaseHeap::Free(GetPointer<impl::PageTablePage>(addr));
 }
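
The net effect for page tables: KPageTableManager is now instantiated with ClearNode = true, the backing page heap scrubs pages as they are freed, and popped free-list nodes have their links erased, so every page table page returned by Allocate() is already all-zero. That is why this Free() no longer calls cpu::ClearPageToZero, and why PageTablePage's new constructor deliberately initializes nothing. A hedged sketch of how a debug build might check that contract (the helper below is hypothetical; MESOSPHERE_ASSERT, KVirtualAddress, GetVoidPointer, and the Allocate() signature are assumed from the surrounding codebase):

#include <cstddef>

/* Hypothetical debug helper: returns true iff the given page is entirely zero. */
inline bool IsPageAllZero(const void *page, std::size_t size) {
    const unsigned char *bytes = static_cast<const unsigned char *>(page);
    for (std::size_t i = 0; i < size; ++i) {
        if (bytes[i] != 0) {
            return false;
        }
    }
    return true;
}

/* Sketch of a call site, assuming the manager's Allocate() hands out one
 * page-table page as a KVirtualAddress (as elsewhere in this class):
 *
 *     const KVirtualAddress table = page_table_manager.Allocate();
 *     MESOSPHERE_ASSERT(IsPageAllZero(GetVoidPointer(table), PageSize));
 */
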