strat: use m_ for member variables

Michael Scire 2021-10-10 00:14:06 -07:00
parent ce28591ab2
commit a595c232b9
425 changed files with 8531 additions and 8484 deletions
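
The change is purely a rename: private data members across the heap implementation move from plain names accessed through this-> to an m_ prefix, with no behavioral difference. A minimal sketch of the convention, using a hypothetical Example class rather than any type from this repository:

/* Before: the member carries no prefix and is accessed via this->. */
class Example {
    private:
        int counter;
    public:
        void Increment() { ++this->counter; }
};

/* After: the same member is renamed with an m_ prefix, so accesses read m_counter. */
class Example {
    private:
        int m_counter;
    public:
        void Increment() { ++m_counter; }
};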

@ -21,43 +21,43 @@
namespace ams::mem::impl::heap {
void *CachedHeap::Allocate(size_t n) {
return this->tls_heap_cache->Allocate(n);
return m_tls_heap_cache->Allocate(n);
}
void *CachedHeap::Allocate(size_t n, size_t align) {
return this->tls_heap_cache->Allocate(n, align);
return m_tls_heap_cache->Allocate(n, align);
}
size_t CachedHeap::GetAllocationSize(const void *ptr) {
return this->tls_heap_cache->GetAllocationSize(ptr);
return m_tls_heap_cache->GetAllocationSize(ptr);
}
errno_t CachedHeap::Free(void *p) {
return this->tls_heap_cache->Free(p);
return m_tls_heap_cache->Free(p);
}
errno_t CachedHeap::FreeWithSize(void *p, size_t size) {
return this->tls_heap_cache->FreeWithSize(p, size);
return m_tls_heap_cache->FreeWithSize(p, size);
}
errno_t CachedHeap::Reallocate(void *ptr, size_t size, void **p) {
return this->tls_heap_cache->Reallocate(ptr, size, p);
return m_tls_heap_cache->Reallocate(ptr, size, p);
}
errno_t CachedHeap::Shrink(void *ptr, size_t size) {
return this->tls_heap_cache->Shrink(ptr, size);
return m_tls_heap_cache->Shrink(ptr, size);
}
void CachedHeap::ReleaseAllCache() {
if (this->tls_heap_cache) {
this->tls_heap_cache->ReleaseAllCache();
if (m_tls_heap_cache) {
m_tls_heap_cache->ReleaseAllCache();
}
}
void CachedHeap::Finalize() {
if (this->tls_heap_cache) {
this->tls_heap_cache->Finalize();
this->tls_heap_cache = nullptr;
if (m_tls_heap_cache) {
m_tls_heap_cache->Finalize();
m_tls_heap_cache = nullptr;
}
}
@ -78,7 +78,7 @@ namespace ams::mem::impl::heap {
{
bool *out = va_arg(vl, bool *);
if (out) {
*out = (this->tls_heap_cache == nullptr) || this->tls_heap_cache->CheckCache();
*out = (m_tls_heap_cache == nullptr) || m_tls_heap_cache->CheckCache();
}
return 0;
}
@ -107,12 +107,12 @@ namespace ams::mem::impl::heap {
void CachedHeap::Reset(TlsHeapCache *thc) {
this->Finalize();
this->tls_heap_cache = thc;
m_tls_heap_cache = thc;
}
TlsHeapCache *CachedHeap::Release() {
TlsHeapCache *ret = this->tls_heap_cache;
this->tls_heap_cache = nullptr;
TlsHeapCache *ret = m_tls_heap_cache;
m_tls_heap_cache = nullptr;
return ret;
}

@ -27,7 +27,7 @@ namespace ams::mem::impl::heap {
}
/* Don't allow initializing twice. */
if (this->start) {
if (m_start) {
return EEXIST;
}
@ -39,17 +39,17 @@ namespace ams::mem::impl::heap {
return EINVAL;
}
this->start = aligned_start;
this->end = aligned_end;
this->option = option;
this->tls_heap_central = std::construct_at(reinterpret_cast<TlsHeapCentral *>(this->start));
if (auto err = this->tls_heap_central->Initialize(this->start, this->end - this->start, false); err != 0) {
std::destroy_at(this->tls_heap_central);
this->tls_heap_central = nullptr;
m_start = aligned_start;
m_end = aligned_end;
m_option = option;
m_tls_heap_central = std::construct_at(reinterpret_cast<TlsHeapCentral *>(m_start));
if (auto err = m_tls_heap_central->Initialize(m_start, m_end - m_start, false); err != 0) {
std::destroy_at(m_tls_heap_central);
m_tls_heap_central = nullptr;
AMS_ASSERT(err == 0);
return err;
}
this->use_virtual_memory = false;
m_use_virtual_memory = false;
} else {
/* We were not provided with a region to use as backing. */
void *mem = nullptr;
@ -63,39 +63,39 @@ namespace ams::mem::impl::heap {
return err;
}
}
this->start = static_cast<u8 *>(mem);
this->end = this->start + size;
this->option = option;
m_start = static_cast<u8 *>(mem);
m_end = m_start + size;
m_option = option;
void *central = reinterpret_cast<void *>(util::AlignUp(reinterpret_cast<uintptr_t>(mem), PageSize));
if (auto err = AllocatePhysicalMemory(central, sizeof(TlsHeapCentral)); err != 0) {
return err;
}
this->tls_heap_central = std::construct_at(static_cast<TlsHeapCentral *>(central));
if (auto err = this->tls_heap_central->Initialize(central, size, true); err != 0) {
std::destroy_at(this->tls_heap_central);
this->tls_heap_central = nullptr;
m_tls_heap_central = std::construct_at(static_cast<TlsHeapCentral *>(central));
if (auto err = m_tls_heap_central->Initialize(central, size, true); err != 0) {
std::destroy_at(m_tls_heap_central);
m_tls_heap_central = nullptr;
AMS_ASSERT(err == 0);
return err;
}
this->use_virtual_memory = true;
m_use_virtual_memory = true;
}
return 0;
}
void CentralHeap::Finalize() {
if (this->tls_heap_central) {
std::destroy_at(this->tls_heap_central);
if (m_tls_heap_central) {
std::destroy_at(m_tls_heap_central);
}
if (this->use_virtual_memory) {
mem::impl::physical_free(util::AlignUp(static_cast<void *>(this->start), PageSize), this->end - this->start);
mem::impl::virtual_free(this->start, this->end - this->start);
if (m_use_virtual_memory) {
mem::impl::physical_free(util::AlignUp(static_cast<void *>(m_start), PageSize), m_end - m_start);
mem::impl::virtual_free(m_start, m_end - m_start);
}
this->tls_heap_central = nullptr;
this->use_virtual_memory = false;
this->option = 0;
this->start = nullptr;
this->end = nullptr;
m_tls_heap_central = nullptr;
m_use_virtual_memory = false;
m_option = 0;
m_start = nullptr;
m_end = nullptr;
}
void *CentralHeap::Allocate(size_t n, size_t align) {
@ -106,23 +106,23 @@ namespace ams::mem::impl::heap {
return nullptr;
}
if (align > PageSize) {
return this->tls_heap_central->CacheLargeMemoryWithBigAlign(util::AlignUp(n, PageSize), align);
return m_tls_heap_central->CacheLargeMemoryWithBigAlign(util::AlignUp(n, PageSize), align);
}
const size_t real_size = TlsHeapStatic::GetRealSizeFromSizeAndAlignment(util::AlignUp(n, align), align);
const auto cls = TlsHeapStatic::GetClassFromSize(real_size);
if (!cls) {
return this->tls_heap_central->CacheLargeMemory(real_size);
return m_tls_heap_central->CacheLargeMemory(real_size);
}
if (real_size == 0) {
return nullptr;
}
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
return this->tls_heap_central->CacheSmallMemory(cls, align);
return m_tls_heap_central->CacheSmallMemory(cls, align);
}
size_t CentralHeap::GetAllocationSize(const void *ptr) {
const auto cls = this->tls_heap_central->GetClassFromPointer(ptr);
const auto cls = m_tls_heap_central->GetClassFromPointer(ptr);
if (cls > 0) {
/* Check that the pointer has alignment from our allocator. */
if (!util::IsAligned(reinterpret_cast<uintptr_t>(ptr), MinimumAlignment)) {
@ -131,7 +131,7 @@ namespace ams::mem::impl::heap {
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
return TlsHeapStatic::GetChunkSize(cls);
} else if (ptr != nullptr) {
return this->tls_heap_central->GetAllocationSize(ptr);
return m_tls_heap_central->GetAllocationSize(ptr);
} else {
return 0;
}
@ -149,13 +149,13 @@ namespace ams::mem::impl::heap {
return EFAULT;
}
const auto cls = this->tls_heap_central->GetClassFromPointer(ptr);
const auto cls = m_tls_heap_central->GetClassFromPointer(ptr);
if (cls >= 0) {
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
if (cls) {
return this->tls_heap_central->UncacheSmallMemory(ptr);
return m_tls_heap_central->UncacheSmallMemory(ptr);
} else {
return this->tls_heap_central->UncacheLargeMemory(ptr);
return m_tls_heap_central->UncacheLargeMemory(ptr);
}
} else {
AMS_ASSERT(cls >= 0);
@ -165,9 +165,9 @@ namespace ams::mem::impl::heap {
errno_t CentralHeap::FreeWithSize(void *ptr, size_t size) {
if (TlsHeapStatic::GetClassFromSize(size)) {
return this->tls_heap_central->UncacheSmallMemory(ptr);
return m_tls_heap_central->UncacheSmallMemory(ptr);
} else {
return this->tls_heap_central->UncacheLargeMemory(ptr);
return m_tls_heap_central->UncacheLargeMemory(ptr);
}
}
@ -181,7 +181,7 @@ namespace ams::mem::impl::heap {
}
const auto cls_from_size = TlsHeapStatic::GetClassFromSize(size);
const auto cls_from_ptr = this->tls_heap_central->GetClassFromPointer(ptr);
const auto cls_from_ptr = m_tls_heap_central->GetClassFromPointer(ptr);
if (cls_from_ptr) {
if (cls_from_ptr <= 0) {
return EFAULT;
@ -193,7 +193,7 @@ namespace ams::mem::impl::heap {
*p = this->Allocate(new_chunk_size);
if (*p) {
std::memcpy(*p, ptr, size);
return this->tls_heap_central->UncacheSmallMemory(ptr);
return m_tls_heap_central->UncacheSmallMemory(ptr);
} else {
return ENOMEM;
}
@ -202,12 +202,12 @@ namespace ams::mem::impl::heap {
*p = this->Allocate(size);
if (*p) {
std::memcpy(*p, ptr, size);
return this->tls_heap_central->UncacheLargeMemory(ptr);
return m_tls_heap_central->UncacheLargeMemory(ptr);
} else {
return ENOMEM;
}
} else {
return this->tls_heap_central->ReallocateLargeMemory(ptr, size, p);
return m_tls_heap_central->ReallocateLargeMemory(ptr, size, p);
}
}
@ -221,7 +221,7 @@ namespace ams::mem::impl::heap {
}
const auto cls_from_size = TlsHeapStatic::GetClassFromSize(size);
const auto cls_from_ptr = this->tls_heap_central->GetClassFromPointer(ptr);
const auto cls_from_ptr = m_tls_heap_central->GetClassFromPointer(ptr);
if (cls_from_ptr) {
if (cls_from_ptr <= 0) {
return EFAULT;
@ -231,9 +231,9 @@ namespace ams::mem::impl::heap {
return EINVAL;
}
} else if (cls_from_size) {
return this->tls_heap_central->ShrinkLargeMemory(ptr, PageSize);
return m_tls_heap_central->ShrinkLargeMemory(ptr, PageSize);
} else {
return this->tls_heap_central->ShrinkLargeMemory(ptr, size);
return m_tls_heap_central->ShrinkLargeMemory(ptr, size);
}
}
@ -242,16 +242,16 @@ namespace ams::mem::impl::heap {
return false;
}
AMS_ASSERT(this->tls_heap_central != nullptr);
AMS_ASSERT(m_tls_heap_central != nullptr);
const auto cls = TlsHeapStatic::GetClassFromSize(sizeof(*cached_heap));
void *tls_heap_cache = this->tls_heap_central->CacheSmallMemoryForSystem(cls);
void *tls_heap_cache = m_tls_heap_central->CacheSmallMemoryForSystem(cls);
if (tls_heap_cache == nullptr) {
return false;
}
std::construct_at(static_cast<TlsHeapCache *>(tls_heap_cache), this->tls_heap_central, this->option);
if (this->tls_heap_central->AddThreadCache(reinterpret_cast<TlsHeapCache *>(tls_heap_cache)) != 0) {
this->tls_heap_central->UncacheSmallMemory(tls_heap_cache);
std::construct_at(static_cast<TlsHeapCache *>(tls_heap_cache), m_tls_heap_central, m_option);
if (m_tls_heap_central->AddThreadCache(reinterpret_cast<TlsHeapCache *>(tls_heap_cache)) != 0) {
m_tls_heap_central->UncacheSmallMemory(tls_heap_cache);
return false;
}
@ -260,10 +260,10 @@ namespace ams::mem::impl::heap {
}
errno_t CentralHeap::WalkAllocatedPointers(HeapWalkCallback callback, void *user_data) {
if (!callback || !this->tls_heap_central) {
if (!callback || !m_tls_heap_central) {
return EINVAL;
}
return this->tls_heap_central->WalkAllocatedPointers(callback, user_data);
return m_tls_heap_central->WalkAllocatedPointers(callback, user_data);
}
errno_t CentralHeap::QueryV(int query, std::va_list vl) {
@ -286,8 +286,8 @@ namespace ams::mem::impl::heap {
{
auto dump_mode = static_cast<DumpMode>(va_arg(*vl_ptr, int));
auto fd = va_arg(*vl_ptr, int);
if (this->tls_heap_central) {
this->tls_heap_central->Dump(dump_mode, fd, query == AllocQuery_DumpJson);
if (m_tls_heap_central) {
m_tls_heap_central->Dump(dump_mode, fd, query == AllocQuery_DumpJson);
}
return 0;
}
@ -308,12 +308,12 @@ namespace ams::mem::impl::heap {
if (!out) {
return 0;
}
if (!this->tls_heap_central) {
if (!m_tls_heap_central) {
*out = 0;
return 0;
}
TlsHeapMemStats stats;
this->tls_heap_central->GetMemStats(std::addressof(stats));
m_tls_heap_central->GetMemStats(std::addressof(stats));
switch (query) {
case AllocQuery_AllocatedSize:
default:
@ -335,7 +335,7 @@ namespace ams::mem::impl::heap {
{
int *out = va_arg(*vl_ptr, int *);
if (out) {
*out = !this->tls_heap_central || this->tls_heap_central->IsClean();
*out = !m_tls_heap_central || m_tls_heap_central->IsClean();
}
return 0;
}
@ -343,8 +343,8 @@ namespace ams::mem::impl::heap {
{
HeapHash *out = va_arg(*vl_ptr, HeapHash *);
if (out) {
if (this->tls_heap_central) {
this->tls_heap_central->CalculateHeapHash(out);
if (m_tls_heap_central) {
m_tls_heap_central->CalculateHeapHash(out);
} else {
*out = {};
}
@ -353,28 +353,28 @@ namespace ams::mem::impl::heap {
}
case AllocQuery_UnifyFreeList:
/* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
this->tls_heap_central->IsClean();
m_tls_heap_central->IsClean();
return 0;
case AllocQuery_SetColor:
{
/* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
void *ptr = va_arg(*vl_ptr, void *);
int color = va_arg(*vl_ptr, int);
return this->tls_heap_central->SetColor(ptr, color);
return m_tls_heap_central->SetColor(ptr, color);
}
case AllocQuery_GetColor:
{
/* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
void *ptr = va_arg(*vl_ptr, void *);
int *out = va_arg(*vl_ptr, int *);
return this->tls_heap_central->GetColor(ptr, out);
return m_tls_heap_central->GetColor(ptr, out);
}
case AllocQuery_SetName:
{
/* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
void *ptr = va_arg(*vl_ptr, void *);
const char *name = va_arg(*vl_ptr, const char *);
return this->tls_heap_central->SetName(ptr, name);
return m_tls_heap_central->SetName(ptr, name);
}
case AllocQuery_GetName:
{
@ -382,7 +382,7 @@ namespace ams::mem::impl::heap {
void *ptr = va_arg(*vl_ptr, void *);
char *dst = va_arg(*vl_ptr, char *);
size_t dst_size = va_arg(*vl_ptr, size_t);
return this->tls_heap_central->GetName(ptr, dst, dst_size);
return m_tls_heap_central->GetName(ptr, dst, dst_size);
}
case AllocQuery_FreeSizeMapped:
case AllocQuery_MaxAllocatableSizeMapped:
@ -391,7 +391,7 @@ namespace ams::mem::impl::heap {
size_t *out = va_arg(*vl_ptr, size_t *);
size_t free_size;
size_t max_allocatable_size;
auto err = this->tls_heap_central->GetMappedMemStats(std::addressof(free_size), std::addressof(max_allocatable_size));
auto err = m_tls_heap_central->GetMappedMemStats(std::addressof(free_size), std::addressof(max_allocatable_size));
if (err == 0) {
if (query == AllocQuery_FreeSizeMapped) {
*out = free_size;

@ -24,54 +24,54 @@ namespace ams::mem::impl::heap {
TlsHeapCache::TlsHeapCache(TlsHeapCentral *central, u32 option) {
/* Choose function impls based on option. */
if ((option & HeapOption_DisableCache) != 0) {
this->allocate = AllocateImpl<false>;
this->allocate_aligned = AllocateAlignedImpl<false>;
this->free = FreeImpl<false>;
this->free_with_size = FreeWithSizeImpl<false>;
this->get_allocation_size = GetAllocationSizeImpl<false>;
this->reallocate = ReallocateImpl<false>;
this->shrink = ShrinkImpl<false>;
m_allocate = AllocateImpl<false>;
m_allocate_aligned = AllocateAlignedImpl<false>;
m_free = FreeImpl<false>;
m_free_with_size = FreeWithSizeImpl<false>;
m_get_allocation_size = GetAllocationSizeImpl<false>;
m_reallocate = ReallocateImpl<false>;
m_shrink = ShrinkImpl<false>;
} else {
this->allocate = AllocateImpl<true>;
this->allocate_aligned = AllocateAlignedImpl<true>;
this->free = FreeImpl<true>;
this->free_with_size = FreeWithSizeImpl<true>;
this->get_allocation_size = GetAllocationSizeImpl<true>;
this->reallocate = ReallocateImpl<true>;
this->shrink = ShrinkImpl<true>;
m_allocate = AllocateImpl<true>;
m_allocate_aligned = AllocateAlignedImpl<true>;
m_free = FreeImpl<true>;
m_free_with_size = FreeWithSizeImpl<true>;
m_get_allocation_size = GetAllocationSizeImpl<true>;
m_reallocate = ReallocateImpl<true>;
m_shrink = ShrinkImpl<true>;
}
/* Generate random bytes to mangle pointers. */
if (auto err = gen_random(std::addressof(this->mangle_val), sizeof(this->mangle_val)); err != 0) {
if (auto err = gen_random(std::addressof(m_mangle_val), sizeof(m_mangle_val)); err != 0) {
s64 epoch_time;
epochtime(std::addressof(epoch_time));
this->mangle_val = reinterpret_cast<uintptr_t>(std::addressof(epoch_time)) ^ static_cast<u64>(epoch_time);
m_mangle_val = reinterpret_cast<uintptr_t>(std::addressof(epoch_time)) ^ static_cast<u64>(epoch_time);
}
/* Set member variables. */
this->central = central;
this->total_heap_size = central->GetTotalHeapSize();
this->heap_option = option;
this->total_cached_size = 0;
this->largest_class = 0;
m_central = central;
m_total_heap_size = central->GetTotalHeapSize();
m_heap_option = option;
m_total_cached_size = 0;
m_largest_class = 0;
/* Setup chunks. */
for (size_t i = 0; i < TlsHeapStatic::NumClassInfo; i++) {
this->small_mem_lists[i] = nullptr;
this->cached_size[i] = 0;
this->chunk_count[i] = 1;
m_small_mem_lists[i] = nullptr;
m_cached_size[i] = 0;
m_chunk_count[i] = 1;
}
/* Set fixed chunk counts for particularly small chunks. */
this->chunk_count[1] = MaxChunkCount;
this->chunk_count[2] = MaxChunkCount;
this->chunk_count[3] = MaxChunkCount;
this->chunk_count[4] = MaxChunkCount / 2;
this->chunk_count[5] = MaxChunkCount / 2;
this->chunk_count[6] = MaxChunkCount / 2;
this->chunk_count[7] = MaxChunkCount / 4;
this->chunk_count[8] = MaxChunkCount / 4;
this->chunk_count[9] = MaxChunkCount / 4;
m_chunk_count[1] = MaxChunkCount;
m_chunk_count[2] = MaxChunkCount;
m_chunk_count[3] = MaxChunkCount;
m_chunk_count[4] = MaxChunkCount / 2;
m_chunk_count[5] = MaxChunkCount / 2;
m_chunk_count[6] = MaxChunkCount / 2;
m_chunk_count[7] = MaxChunkCount / 4;
m_chunk_count[8] = MaxChunkCount / 4;
m_chunk_count[9] = MaxChunkCount / 4;
}
void TlsHeapCache::Finalize() {
@ -79,15 +79,15 @@ namespace ams::mem::impl::heap {
this->ReleaseAllCache();
/* Remove this cache from the owner central heap. */
this->central->RemoveThreadCache(this);
this->central->UncacheSmallMemory(this);
m_central->RemoveThreadCache(this);
m_central->UncacheSmallMemory(this);
}
bool TlsHeapCache::CheckCache() const {
for (size_t i = 0; i < util::size(this->small_mem_lists); i++) {
void *ptr = this->small_mem_lists[i];
for (size_t i = 0; i < util::size(m_small_mem_lists); i++) {
void *ptr = m_small_mem_lists[i];
if (ptr) {
s64 depth = -static_cast<s64>(this->cached_size[i] / TlsHeapStatic::GetChunkSize(i));
s64 depth = -static_cast<s64>(m_cached_size[i] / TlsHeapStatic::GetChunkSize(i));
while (ptr) {
ptr = *reinterpret_cast<void **>(this->ManglePointer(ptr));
if ((++depth) == 0) {
@ -102,20 +102,20 @@ namespace ams::mem::impl::heap {
}
void TlsHeapCache::ReleaseAllCache() {
for (size_t i = 0; i < util::size(this->small_mem_lists); i++) {
if (this->small_mem_lists[i]) {
this->central->UncacheSmallMemoryList(this, this->small_mem_lists[i]);
this->small_mem_lists[i] = nullptr;
this->cached_size[i] = 0;
for (size_t i = 0; i < util::size(m_small_mem_lists); i++) {
if (m_small_mem_lists[i]) {
m_central->UncacheSmallMemoryList(this, m_small_mem_lists[i]);
m_small_mem_lists[i] = nullptr;
m_cached_size[i] = 0;
}
}
this->total_cached_size = 0;
this->largest_class = 0;
m_total_cached_size = 0;
m_largest_class = 0;
}
template<>
void *TlsHeapCache::AllocateImpl<false>(TlsHeapCache *_this, size_t size) {
void *TlsHeapCache::AllocateImpl<false>(TlsHeapCache *tls_heap_cache, size_t size) {
/* Validate allocation size. */
if (size == 0 || size > MaxSize) {
return nullptr;
@ -123,18 +123,18 @@ namespace ams::mem::impl::heap {
if (const size_t cls = TlsHeapStatic::GetClassFromSize(size); cls != 0) {
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
return _this->central->CacheSmallMemory(cls);
return tls_heap_cache->m_central->CacheSmallMemory(cls);
} else {
/* If allocating a huge size, release our cache. */
if (size >= _this->total_heap_size / 4) {
_this->ReleaseAllCache();
if (size >= tls_heap_cache->m_total_heap_size / 4) {
tls_heap_cache->ReleaseAllCache();
}
return _this->central->CacheLargeMemory(size);
return tls_heap_cache->m_central->CacheLargeMemory(size);
}
}
template<>
void *TlsHeapCache::AllocateImpl<true>(TlsHeapCache *_this, size_t size) {
void *TlsHeapCache::AllocateImpl<true>(TlsHeapCache *tls_heap_cache, size_t size) {
/* Validate allocation size. */
if (size == 0 || size > MaxSize) {
return nullptr;
@ -143,12 +143,12 @@ namespace ams::mem::impl::heap {
if (size_t cls = TlsHeapStatic::GetClassFromSize(size); cls != 0) {
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
/* Allocate a chunk. */
void *ptr = _this->small_mem_lists[cls];
void *ptr = tls_heap_cache->m_small_mem_lists[cls];
if (ptr == nullptr) {
const size_t prev_cls = cls;
size_t count = _this->chunk_count[cls];
size_t count = tls_heap_cache->m_chunk_count[cls];
size_t n = _this->central->CacheSmallMemoryList(_this, std::addressof(cls), count, std::addressof(ptr));
size_t n = tls_heap_cache->m_central->CacheSmallMemoryList(tls_heap_cache, std::addressof(cls), count, std::addressof(ptr));
if (n == 0) {
return nullptr;
}
@ -157,35 +157,35 @@ namespace ams::mem::impl::heap {
if (count < MaxChunkCount) {
count++;
}
_this->chunk_count[cls] = std::max(count, n);
tls_heap_cache->m_chunk_count[cls] = std::max(count, n);
} else {
AMS_ASSERT(n == 1);
}
const size_t csize = TlsHeapStatic::GetChunkSize(cls) * (n - 1);
_this->cached_size[cls] += csize;
if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
_this->largest_class = cls;
tls_heap_cache->m_cached_size[cls] += csize;
if (tls_heap_cache->m_cached_size[cls] > tls_heap_cache->m_cached_size[tls_heap_cache->m_largest_class]) {
tls_heap_cache->m_largest_class = cls;
}
_this->total_cached_size += csize;
tls_heap_cache->m_total_cached_size += csize;
}
/* Demangle our pointer, update free list. */
ptr = _this->ManglePointer(ptr);
_this->small_mem_lists[cls] = *reinterpret_cast<void **>(ptr);
ptr = tls_heap_cache->ManglePointer(ptr);
tls_heap_cache->m_small_mem_lists[cls] = *reinterpret_cast<void **>(ptr);
return ptr;
} else {
/* If allocating a huge size, release our cache. */
if (size >= _this->total_heap_size / 4) {
_this->ReleaseAllCache();
if (size >= tls_heap_cache->m_total_heap_size / 4) {
tls_heap_cache->ReleaseAllCache();
}
return _this->central->CacheLargeMemory(size);
return tls_heap_cache->m_central->CacheLargeMemory(size);
}
}
template<>
void *TlsHeapCache::AllocateAlignedImpl<false>(TlsHeapCache *_this, size_t size, size_t align) {
void *TlsHeapCache::AllocateAlignedImpl<false>(TlsHeapCache *tls_heap_cache, size_t size, size_t align) {
/* Ensure valid alignment. */
if (!util::IsPowerOfTwo(align)) {
return nullptr;
@ -198,7 +198,7 @@ namespace ams::mem::impl::heap {
/* Handle big alignment. */
if (align > TlsHeapStatic::PageSize) {
return _this->central->CacheLargeMemoryWithBigAlign(util::AlignUp(size, TlsHeapStatic::PageSize), align);
return tls_heap_cache->m_central->CacheLargeMemoryWithBigAlign(util::AlignUp(size, TlsHeapStatic::PageSize), align);
}
const size_t real_size = TlsHeapStatic::GetRealSizeFromSizeAndAlignment(util::AlignUp(size, align), align);
@ -209,18 +209,18 @@ namespace ams::mem::impl::heap {
}
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
return _this->central->CacheSmallMemory(cls, align);
return tls_heap_cache->m_central->CacheSmallMemory(cls, align);
} else {
/* If allocating a huge size, release our cache. */
if (real_size >= _this->total_heap_size / 4) {
_this->ReleaseAllCache();
if (real_size >= tls_heap_cache->m_total_heap_size / 4) {
tls_heap_cache->ReleaseAllCache();
}
return _this->central->CacheLargeMemory(real_size);
return tls_heap_cache->m_central->CacheLargeMemory(real_size);
}
}
template<>
void *TlsHeapCache::AllocateAlignedImpl<true>(TlsHeapCache *_this, size_t size, size_t align) {
void *TlsHeapCache::AllocateAlignedImpl<true>(TlsHeapCache *tls_heap_cache, size_t size, size_t align) {
/* Ensure valid alignment. */
if (!util::IsPowerOfTwo(align)) {
return nullptr;
@ -233,7 +233,7 @@ namespace ams::mem::impl::heap {
/* Handle big alignment. */
if (align > TlsHeapStatic::PageSize) {
return _this->central->CacheLargeMemoryWithBigAlign(util::AlignUp(size, TlsHeapStatic::PageSize), align);
return tls_heap_cache->m_central->CacheLargeMemoryWithBigAlign(util::AlignUp(size, TlsHeapStatic::PageSize), align);
}
const size_t real_size = TlsHeapStatic::GetRealSizeFromSizeAndAlignment(util::AlignUp(size, align), align);
@ -246,12 +246,12 @@ namespace ams::mem::impl::heap {
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
/* Allocate a chunk. */
void *ptr = _this->small_mem_lists[cls];
void *ptr = tls_heap_cache->m_small_mem_lists[cls];
if (ptr == nullptr) {
const size_t prev_cls = cls;
size_t count = _this->chunk_count[cls];
size_t count = tls_heap_cache->m_chunk_count[cls];
size_t n = _this->central->CacheSmallMemoryList(_this, std::addressof(cls), count, std::addressof(ptr), align);
size_t n = tls_heap_cache->m_central->CacheSmallMemoryList(tls_heap_cache, std::addressof(cls), count, std::addressof(ptr), align);
if (n == 0) {
return nullptr;
}
@ -260,44 +260,44 @@ namespace ams::mem::impl::heap {
if (count < MaxChunkCount) {
count++;
}
_this->chunk_count[cls] = std::max(count, n);
tls_heap_cache->m_chunk_count[cls] = std::max(count, n);
} else {
AMS_ASSERT(n == 1);
}
const s32 csize = TlsHeapStatic::GetChunkSize(cls) * (n - 1);
_this->total_cached_size += csize;
_this->cached_size[cls] += csize;
if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
_this->largest_class = cls;
tls_heap_cache->m_total_cached_size += csize;
tls_heap_cache->m_cached_size[cls] += csize;
if (tls_heap_cache->m_cached_size[cls] > tls_heap_cache->m_cached_size[tls_heap_cache->m_largest_class]) {
tls_heap_cache->m_largest_class = cls;
}
}
/* Demangle our pointer, update free list. */
ptr = _this->ManglePointer(ptr);
_this->small_mem_lists[cls] = *reinterpret_cast<void **>(ptr);
ptr = tls_heap_cache->ManglePointer(ptr);
tls_heap_cache->m_small_mem_lists[cls] = *reinterpret_cast<void **>(ptr);
return ptr;
} else {
/* If allocating a huge size, release our cache. */
if (size >= _this->total_heap_size / 4) {
_this->ReleaseAllCache();
if (size >= tls_heap_cache->m_total_heap_size / 4) {
tls_heap_cache->ReleaseAllCache();
}
return _this->central->CacheLargeMemory(size);
return tls_heap_cache->m_central->CacheLargeMemory(size);
}
}
template<>
errno_t TlsHeapCache::FreeImpl<false>(TlsHeapCache *_this, void *ptr) {
const auto cls = _this->central->GetClassFromPointer(ptr);
errno_t TlsHeapCache::FreeImpl<false>(TlsHeapCache *tls_heap_cache, void *ptr) {
const auto cls = tls_heap_cache->m_central->GetClassFromPointer(ptr);
if (cls == 0) {
return _this->central->UncacheLargeMemory(ptr);
return tls_heap_cache->m_central->UncacheLargeMemory(ptr);
}
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
if (cls >= 0) {
return _this->central->UncacheSmallMemory(ptr);
return tls_heap_cache->m_central->UncacheSmallMemory(ptr);
} else if (ptr == nullptr) {
return 0;
} else {
@ -306,41 +306,41 @@ namespace ams::mem::impl::heap {
}
template<>
errno_t TlsHeapCache::FreeImpl<true>(TlsHeapCache *_this, void *ptr) {
const auto cls = _this->central->GetClassFromPointer(ptr);
errno_t TlsHeapCache::FreeImpl<true>(TlsHeapCache *tls_heap_cache, void *ptr) {
const auto cls = tls_heap_cache->m_central->GetClassFromPointer(ptr);
if (cls == 0) {
return _this->central->UncacheLargeMemory(ptr);
return tls_heap_cache->m_central->UncacheLargeMemory(ptr);
}
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
if (cls >= 0) {
*reinterpret_cast<void **>(ptr) = _this->small_mem_lists[cls];
_this->small_mem_lists[cls] = _this->ManglePointer(ptr);
*reinterpret_cast<void **>(ptr) = tls_heap_cache->m_small_mem_lists[cls];
tls_heap_cache->m_small_mem_lists[cls] = tls_heap_cache->ManglePointer(ptr);
const s32 csize = TlsHeapStatic::GetChunkSize(cls);
_this->total_cached_size += csize;
_this->cached_size[cls] += csize;
if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
_this->largest_class = cls;
tls_heap_cache->m_total_cached_size += csize;
tls_heap_cache->m_cached_size[cls] += csize;
if (tls_heap_cache->m_cached_size[cls] > tls_heap_cache->m_cached_size[tls_heap_cache->m_largest_class]) {
tls_heap_cache->m_largest_class = cls;
}
errno_t err = 0;
if (!_this->central->CheckCachedSize(_this->total_cached_size)) {
_this->central->UncacheSmallMemoryList(_this, _this->small_mem_lists[_this->largest_class]);
_this->small_mem_lists[_this->largest_class] = nullptr;
_this->total_cached_size -= _this->cached_size[_this->largest_class];
_this->cached_size[_this->largest_class] = 0;
if (!tls_heap_cache->m_central->CheckCachedSize(tls_heap_cache->m_total_cached_size)) {
tls_heap_cache->m_central->UncacheSmallMemoryList(tls_heap_cache, tls_heap_cache->m_small_mem_lists[tls_heap_cache->m_largest_class]);
tls_heap_cache->m_small_mem_lists[tls_heap_cache->m_largest_class] = nullptr;
tls_heap_cache->m_total_cached_size -= tls_heap_cache->m_cached_size[tls_heap_cache->m_largest_class];
tls_heap_cache->m_cached_size[tls_heap_cache->m_largest_class] = 0;
s32 largest_class = 0;
s32 biggest_size = -1;
for (size_t i = 0; i < TlsHeapStatic::NumClassInfo; i++) {
if (biggest_size < _this->cached_size[i]) {
biggest_size = _this->cached_size[i];
if (biggest_size < tls_heap_cache->m_cached_size[i]) {
biggest_size = tls_heap_cache->m_cached_size[i];
largest_class = static_cast<s32>(i);
}
}
_this->largest_class = largest_class;
tls_heap_cache->m_largest_class = largest_class;
}
return err;
} else if (ptr == nullptr) {
@ -351,72 +351,72 @@ namespace ams::mem::impl::heap {
}
template<>
errno_t TlsHeapCache::FreeWithSizeImpl<false>(TlsHeapCache *_this, void *ptr, size_t size) {
errno_t TlsHeapCache::FreeWithSizeImpl<false>(TlsHeapCache *tls_heap_cache, void *ptr, size_t size) {
if (ptr == nullptr) {
return 0;
}
const size_t cls = TlsHeapStatic::GetClassFromSize(size);
if (cls == 0) {
return _this->central->UncacheLargeMemory(ptr);
return tls_heap_cache->m_central->UncacheLargeMemory(ptr);
} else {
return _this->central->UncacheSmallMemory(ptr);
return tls_heap_cache->m_central->UncacheSmallMemory(ptr);
}
}
template<>
errno_t TlsHeapCache::FreeWithSizeImpl<true>(TlsHeapCache *_this, void *ptr, size_t size) {
errno_t TlsHeapCache::FreeWithSizeImpl<true>(TlsHeapCache *tls_heap_cache, void *ptr, size_t size) {
if (ptr == nullptr) {
return 0;
}
const size_t cls = TlsHeapStatic::GetClassFromSize(size);
if (cls == 0) {
return _this->central->UncacheLargeMemory(ptr);
return tls_heap_cache->m_central->UncacheLargeMemory(ptr);
} else {
*reinterpret_cast<void **>(ptr) = _this->small_mem_lists[cls];
_this->small_mem_lists[cls] = _this->ManglePointer(ptr);
*reinterpret_cast<void **>(ptr) = tls_heap_cache->m_small_mem_lists[cls];
tls_heap_cache->m_small_mem_lists[cls] = tls_heap_cache->ManglePointer(ptr);
const s32 csize = TlsHeapStatic::GetChunkSize(cls);
_this->total_cached_size += csize;
_this->cached_size[cls] += csize;
if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
_this->largest_class = cls;
tls_heap_cache->m_total_cached_size += csize;
tls_heap_cache->m_cached_size[cls] += csize;
if (tls_heap_cache->m_cached_size[cls] > tls_heap_cache->m_cached_size[tls_heap_cache->m_largest_class]) {
tls_heap_cache->m_largest_class = cls;
}
errno_t err = 0;
if (!_this->central->CheckCachedSize(_this->total_cached_size)) {
_this->central->UncacheSmallMemoryList(_this, _this->small_mem_lists[_this->largest_class]);
_this->small_mem_lists[_this->largest_class] = nullptr;
_this->total_cached_size -= _this->cached_size[_this->largest_class];
_this->cached_size[_this->largest_class] = 0;
if (!tls_heap_cache->m_central->CheckCachedSize(tls_heap_cache->m_total_cached_size)) {
tls_heap_cache->m_central->UncacheSmallMemoryList(tls_heap_cache, tls_heap_cache->m_small_mem_lists[tls_heap_cache->m_largest_class]);
tls_heap_cache->m_small_mem_lists[tls_heap_cache->m_largest_class] = nullptr;
tls_heap_cache->m_total_cached_size -= tls_heap_cache->m_cached_size[tls_heap_cache->m_largest_class];
tls_heap_cache->m_cached_size[tls_heap_cache->m_largest_class] = 0;
s32 largest_class = 0;
s32 biggest_size = -1;
for (size_t i = 0; i < TlsHeapStatic::NumClassInfo; i++) {
if (biggest_size < _this->cached_size[i]) {
biggest_size = _this->cached_size[i];
if (biggest_size < tls_heap_cache->m_cached_size[i]) {
biggest_size = tls_heap_cache->m_cached_size[i];
largest_class = static_cast<s32>(i);
}
}
_this->largest_class = largest_class;
tls_heap_cache->m_largest_class = largest_class;
}
return err;
}
}
template<>
size_t TlsHeapCache::GetAllocationSizeImpl<false>(TlsHeapCache *_this, const void *ptr) {
return _this->GetAllocationSizeCommonImpl(ptr);
size_t TlsHeapCache::GetAllocationSizeImpl<false>(TlsHeapCache *tls_heap_cache, const void *ptr) {
return tls_heap_cache->GetAllocationSizeCommonImpl(ptr);
}
template<>
size_t TlsHeapCache::GetAllocationSizeImpl<true>(TlsHeapCache *_this, const void *ptr) {
return _this->GetAllocationSizeCommonImpl(ptr);
size_t TlsHeapCache::GetAllocationSizeImpl<true>(TlsHeapCache *tls_heap_cache, const void *ptr) {
return tls_heap_cache->GetAllocationSizeCommonImpl(ptr);
}
size_t TlsHeapCache::GetAllocationSizeCommonImpl(const void *ptr) const {
const s32 cls = this->central->GetClassFromPointer(ptr);
const s32 cls = m_central->GetClassFromPointer(ptr);
if (cls > 0) {
if (!util::IsAligned(ptr, alignof(u64))) {
/* All pointers we allocate have alignment at least 8. */
@ -431,14 +431,14 @@ namespace ams::mem::impl::heap {
return TlsHeapStatic::GetChunkSize(cls);
} else if (ptr != nullptr) {
return this->central->GetAllocationSize(ptr);
return m_central->GetAllocationSize(ptr);
} else {
return 0;
}
}
template<>
errno_t TlsHeapCache::ReallocateImpl<false>(TlsHeapCache *_this, void *ptr, size_t size, void **p) {
errno_t TlsHeapCache::ReallocateImpl<false>(TlsHeapCache *tls_heap_cache, void *ptr, size_t size, void **p) {
AMS_ASSERT(ptr != nullptr && size != 0);
if (size > MaxSize) {
return ENOMEM;
@ -447,7 +447,7 @@ namespace ams::mem::impl::heap {
size_t alloc_size, copy_size;
const s32 cls_from_size = TlsHeapStatic::GetClassFromSize(size);
const s32 cls_from_ptr = _this->central->GetClassFromPointer(ptr);
const s32 cls_from_ptr = tls_heap_cache->m_central->GetClassFromPointer(ptr);
if (cls_from_ptr < 0) {
/* error case. */
return EFAULT;
@ -465,22 +465,22 @@ namespace ams::mem::impl::heap {
copy_size = size;
}
} else if (cls_from_ptr == 0) {
return _this->central->ReallocateLargeMemory(ptr, size, p);
return tls_heap_cache->m_central->ReallocateLargeMemory(ptr, size, p);
} else /* if (cls_from_ptr > 0) */ {
alloc_size = size;
copy_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
}
*p = AllocateImpl<false>(_this, alloc_size);
*p = AllocateImpl<false>(tls_heap_cache, alloc_size);
if (*p == nullptr) {
return ENOMEM;
}
std::memcpy(*p, ptr, copy_size);
return FreeImpl<false>(_this, ptr);
return FreeImpl<false>(tls_heap_cache, ptr);
}
template<>
errno_t TlsHeapCache::ReallocateImpl<true>(TlsHeapCache *_this, void *ptr, size_t size, void **p) {
errno_t TlsHeapCache::ReallocateImpl<true>(TlsHeapCache *tls_heap_cache, void *ptr, size_t size, void **p) {
AMS_ASSERT(ptr != nullptr && size != 0);
if (size > MaxSize) {
return ENOMEM;
@ -489,7 +489,7 @@ namespace ams::mem::impl::heap {
size_t alloc_size, copy_size;
const s32 cls_from_size = TlsHeapStatic::GetClassFromSize(size);
const s32 cls_from_ptr = _this->central->GetClassFromPointer(ptr);
const s32 cls_from_ptr = tls_heap_cache->m_central->GetClassFromPointer(ptr);
if (cls_from_ptr < 0) {
/* error case. */
return EFAULT;
@ -507,28 +507,28 @@ namespace ams::mem::impl::heap {
copy_size = size;
}
} else if (cls_from_ptr == 0) {
return _this->central->ReallocateLargeMemory(ptr, size, p);
return tls_heap_cache->m_central->ReallocateLargeMemory(ptr, size, p);
} else /* if (cls_from_ptr > 0) */ {
alloc_size = size;
copy_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
}
*p = AllocateImpl<true>(_this, alloc_size);
*p = AllocateImpl<true>(tls_heap_cache, alloc_size);
if (*p == nullptr) {
return ENOMEM;
}
std::memcpy(*p, ptr, copy_size);
return FreeImpl<true>(_this, ptr);
return FreeImpl<true>(tls_heap_cache, ptr);
}
template<>
errno_t TlsHeapCache::ShrinkImpl<false>(TlsHeapCache *_this, void *ptr, size_t size) {
return _this->ShrinkCommonImpl(ptr, size);
errno_t TlsHeapCache::ShrinkImpl<false>(TlsHeapCache *tls_heap_cache, void *ptr, size_t size) {
return tls_heap_cache->ShrinkCommonImpl(ptr, size);
}
template<>
errno_t TlsHeapCache::ShrinkImpl<true>(TlsHeapCache *_this, void *ptr, size_t size) {
return _this->ShrinkCommonImpl(ptr, size);
errno_t TlsHeapCache::ShrinkImpl<true>(TlsHeapCache *tls_heap_cache, void *ptr, size_t size) {
return tls_heap_cache->ShrinkCommonImpl(ptr, size);
}
errno_t TlsHeapCache::ShrinkCommonImpl(void *ptr, size_t size) const {
@ -538,7 +538,7 @@ namespace ams::mem::impl::heap {
}
const s32 cls_from_size = TlsHeapStatic::GetClassFromSize(size);
const s32 cls_from_ptr = this->central->GetClassFromPointer(ptr);
const s32 cls_from_ptr = m_central->GetClassFromPointer(ptr);
if (cls_from_ptr) {
if (cls_from_ptr <= 0) {
return EFAULT;
@ -548,9 +548,9 @@ namespace ams::mem::impl::heap {
return EINVAL;
}
} else if (cls_from_size) {
return this->central->ShrinkLargeMemory(ptr, TlsHeapStatic::PageSize);
return m_central->ShrinkLargeMemory(ptr, TlsHeapStatic::PageSize);
} else {
return this->central->ShrinkLargeMemory(ptr, size);
return m_central->ShrinkLargeMemory(ptr, size);
}
}

@ -22,14 +22,14 @@ namespace ams::mem::impl::heap {
class TlsHeapCentral;
#define FOREACH_TLS_HEAP_CACHE_FUNC(HANDLER) \
HANDLER(void *, Allocate, allocate, size_t size); \
HANDLER(void *, AllocateAligned, allocate_aligned, size_t size, size_t align); \
HANDLER(errno_t, Free, free, void *ptr); \
HANDLER(errno_t, FreeWithSize, free_with_size, void *ptr, size_t size); \
HANDLER(size_t, GetAllocationSize, get_allocation_size, const void *ptr); \
HANDLER(errno_t, Reallocate, reallocate, void *ptr, size_t size, void **p); \
HANDLER(errno_t, Shrink, shrink, void *ptr, size_t size);
#define FOREACH_TLS_HEAP_CACHE_FUNC(HANDLER) \
HANDLER(void *, Allocate, m_allocate, size_t size); \
HANDLER(void *, AllocateAligned, m_allocate_aligned, size_t size, size_t align); \
HANDLER(errno_t, Free, m_free, void *ptr); \
HANDLER(errno_t, FreeWithSize, m_free_with_size, void *ptr, size_t size); \
HANDLER(size_t, GetAllocationSize, m_get_allocation_size, const void *ptr); \
HANDLER(errno_t, Reallocate, m_reallocate, void *ptr, size_t size, void **p); \
HANDLER(errno_t, Shrink, m_shrink, void *ptr, size_t size);
class TlsHeapCache {
public:
@ -49,21 +49,21 @@ namespace ams::mem::impl::heap {
#undef TLS_HEAP_CACHE_DECLARE_MEMBER
uintptr_t mangle_val;
TlsHeapCentral *central;
size_t total_heap_size;
u32 heap_option;
s32 total_cached_size;
s32 largest_class;
void *small_mem_lists[TlsHeapStatic::NumClassInfo];
s32 cached_size[TlsHeapStatic::NumClassInfo];
u8 chunk_count[TlsHeapStatic::NumClassInfo];
uintptr_t m_mangle_val;
TlsHeapCentral *m_central;
size_t m_total_heap_size;
u32 m_heap_option;
s32 m_total_cached_size;
s32 m_largest_class;
void *m_small_mem_lists[TlsHeapStatic::NumClassInfo];
s32 m_cached_size[TlsHeapStatic::NumClassInfo];
u8 m_chunk_count[TlsHeapStatic::NumClassInfo];
public:
TlsHeapCache(TlsHeapCentral *central, u32 option);
void Finalize();
void *ManglePointer(void *ptr) const {
return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(ptr) ^ this->mangle_val);
return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(ptr) ^ m_mangle_val);
}
bool CheckCache() const;
@ -71,16 +71,16 @@ namespace ams::mem::impl::heap {
public:
/* TODO: Better handler with type info to macro this? */
ALWAYS_INLINE void *Allocate(size_t size) { return this->allocate(this, size); }
ALWAYS_INLINE void *Allocate(size_t size, size_t align) { return this->allocate_aligned(this, size, align); }
ALWAYS_INLINE errno_t Free(void *ptr) { return this->free(this, ptr); }
ALWAYS_INLINE errno_t FreeWithSize(void *ptr, size_t size) { return this->free_with_size(this, ptr, size); }
ALWAYS_INLINE size_t GetAllocationSize(const void *ptr) { return this->get_allocation_size(this, ptr); }
ALWAYS_INLINE errno_t Reallocate(void *ptr, size_t size, void **p) { return this->reallocate(this, ptr, size, p); }
ALWAYS_INLINE errno_t Shrink(void *ptr, size_t size) { return this->shrink(this, ptr, size); }
ALWAYS_INLINE void *Allocate(size_t size) { return m_allocate(this, size); }
ALWAYS_INLINE void *Allocate(size_t size, size_t align) { return m_allocate_aligned(this, size, align); }
ALWAYS_INLINE errno_t Free(void *ptr) { return m_free(this, ptr); }
ALWAYS_INLINE errno_t FreeWithSize(void *ptr, size_t size) { return m_free_with_size(this, ptr, size); }
ALWAYS_INLINE size_t GetAllocationSize(const void *ptr) { return m_get_allocation_size(this, ptr); }
ALWAYS_INLINE errno_t Reallocate(void *ptr, size_t size, void **p) { return m_reallocate(this, ptr, size, p); }
ALWAYS_INLINE errno_t Shrink(void *ptr, size_t size) { return m_shrink(this, ptr, size); }
private:
#define TLS_HEAP_CACHE_DECLARE_TEMPLATE(RETURN, NAME, MEMBER_NAME, ...) \
template<bool Cache> static RETURN NAME##Impl(TlsHeapCache *_this, ## __VA_ARGS__ )
template<bool Cache> static RETURN NAME##Impl(TlsHeapCache *tls_heap_cache, ## __VA_ARGS__ )
FOREACH_TLS_HEAP_CACHE_FUNC(TLS_HEAP_CACHE_DECLARE_TEMPLATE)
@ -90,9 +90,9 @@ namespace ams::mem::impl::heap {
errno_t ShrinkCommonImpl(void *ptr, size_t size) const;
};
#define TLS_HEAP_CACHE_DECLARE_INSTANTIATION(RETURN, NAME, MEMBER_NAME, ...) \
template<> RETURN TlsHeapCache::NAME##Impl<false>(TlsHeapCache *_this, ##__VA_ARGS__); \
template<> RETURN TlsHeapCache::NAME##Impl<true>(TlsHeapCache *_this, ##__VA_ARGS__)
#define TLS_HEAP_CACHE_DECLARE_INSTANTIATION(RETURN, NAME, MEMBER_NAME, ...) \
template<> RETURN TlsHeapCache::NAME##Impl<false>(TlsHeapCache *tls_heap_cache, ##__VA_ARGS__); \
template<> RETURN TlsHeapCache::NAME##Impl<true>(TlsHeapCache *tls_heap_cache, ##__VA_ARGS__)
FOREACH_TLS_HEAP_CACHE_FUNC(TLS_HEAP_CACHE_DECLARE_INSTANTIATION)

@ -301,35 +301,35 @@ namespace ams::mem::impl::heap {
AMS_ASSERT(TlsHeapStatic::IsPageAligned(size));
/* Clear lists. */
ListClearLink(std::addressof(this->spanpage_list));
ListClearLink(std::addressof(this->full_spanpage_list));
for (size_t i = 0; i < util::size(this->freelists); i++) {
ListClearLink(std::addressof(this->freelists[i]));
ListClearLink(std::addressof(m_spanpage_list));
ListClearLink(std::addressof(m_full_spanpage_list));
for (size_t i = 0; i < util::size(m_freelists); i++) {
ListClearLink(std::addressof(m_freelists[i]));
}
for (size_t i = 0; i < util::size(this->freelists_bitmap); i++) {
this->freelists_bitmap[i] = 0;
for (size_t i = 0; i < util::size(m_freelists_bitmap); i++) {
m_freelists_bitmap[i] = 0;
}
for (size_t i = 0; i < util::size(this->smallmem_lists); i++) {
ListClearLink(std::addressof(this->smallmem_lists[i]));
for (size_t i = 0; i < util::size(m_smallmem_lists); i++) {
ListClearLink(std::addressof(m_smallmem_lists[i]));
}
/* Setup span table. */
const size_t total_pages = TlsHeapStatic::GetPageIndex(size);
const size_t n = total_pages * sizeof(Span *);
this->span_table.total_pages = total_pages;
this->span_table.page_to_span = reinterpret_cast<Span **>(static_cast<u8 *>(start) + sizeof(*this));
this->span_table.pageclass_cache = static_cast<u8 *>(start) + sizeof(*this) + n;
m_span_table.total_pages = total_pages;
m_span_table.page_to_span = reinterpret_cast<Span **>(static_cast<u8 *>(start) + sizeof(*this));
m_span_table.pageclass_cache = static_cast<u8 *>(start) + sizeof(*this) + n;
u8 *meta_end = this->span_table.pageclass_cache + total_pages;
u8 *meta_end = m_span_table.pageclass_cache + total_pages;
size_t num_physical_page_flags;
if (use_virtual_memory) {
this->physical_page_flags = meta_end;
m_physical_page_flags = meta_end;
const uintptr_t phys_start = TlsHeapStatic::AlignDownPhysicalPage(reinterpret_cast<uintptr_t>(start));
const uintptr_t phys_end = TlsHeapStatic::AlignUpPhysicalPage(reinterpret_cast<uintptr_t>(start) + size);
num_physical_page_flags = TlsHeapStatic::GetPhysicalPageIndex(phys_end - phys_start);
meta_end = TlsHeapStatic::AlignUpPage(meta_end + num_physical_page_flags);
} else {
this->physical_page_flags = nullptr;
m_physical_page_flags = nullptr;
num_physical_page_flags = 0;
meta_end = TlsHeapStatic::AlignUpPage(meta_end);
}
@ -340,19 +340,19 @@ namespace ams::mem::impl::heap {
size_t phys_size = phys_end - reinterpret_cast<uintptr_t>(start);
phys_size = std::min(phys_size, size);
if (auto err = AllocatePhysicalMemory(start, phys_size); err != 0) {
this->span_table.total_pages = 0;
m_span_table.total_pages = 0;
return err;
}
std::memset(this->physical_page_flags, 0, num_physical_page_flags);
std::memset(this->physical_page_flags, 1, TlsHeapStatic::GetPhysicalPageIndex(phys_end) - TlsHeapStatic::GetPhysicalPageIndex(reinterpret_cast<uintptr_t>(start)));
std::memset(m_physical_page_flags, 0, num_physical_page_flags);
std::memset(m_physical_page_flags, 1, TlsHeapStatic::GetPhysicalPageIndex(phys_end) - TlsHeapStatic::GetPhysicalPageIndex(reinterpret_cast<uintptr_t>(start)));
}
std::memset(this->span_table.page_to_span, 0, n);
std::memset(this->span_table.pageclass_cache, 0, total_pages);
std::memset(m_span_table.page_to_span, 0, n);
std::memset(m_span_table.pageclass_cache, 0, total_pages);
SpanPage *span_page = reinterpret_cast<SpanPage *>(meta_end);
InitializeSpanPage(span_page);
ListInsertAfter(std::addressof(this->spanpage_list), span_page);
ListInsertAfter(std::addressof(m_spanpage_list), span_page);
meta_end += TlsHeapStatic::PageSize;
AMS_ASSERT(TlsHeapStatic::IsPageAligned(meta_end));
@ -387,28 +387,28 @@ namespace ams::mem::impl::heap {
span_admin->status = Span::Status_InUseSystem;
span_admin->id = 0;
RegisterSpan(std::addressof(this->span_table), span_admin);
RegisterSpan(std::addressof(this->span_table), std::addressof(span_page->info.span_of_spanpage));
RegisterSpan(std::addressof(this->span_table), span);
RegisterSpan(std::addressof(m_span_table), span_admin);
RegisterSpan(std::addressof(m_span_table), std::addressof(span_page->info.span_of_spanpage));
RegisterSpan(std::addressof(m_span_table), span);
this->AddToFreeBlockList(span);
this->num_threads = 1;
this->static_thread_quota = std::min((this->span_table.total_pages * TlsHeapStatic::PageSize) / sizeof(void *), 2_MB);
this->dynamic_thread_quota = this->static_thread_quota;
this->use_virtual_memory = use_virtual_memory;
m_num_threads = 1;
m_static_thread_quota = std::min((m_span_table.total_pages * TlsHeapStatic::PageSize) / sizeof(void *), 2_MB);
m_dynamic_thread_quota = m_static_thread_quota;
m_use_virtual_memory = use_virtual_memory;
return 0;
}
bool TlsHeapCentral::IsClean() {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
this->MakeFreeSpan(std::numeric_limits<size_t>::max());
Span *span = this->GetFirstSpan();
Span *next = GetNextSpan(std::addressof(this->span_table), span);
if (next && next->status == Span::Status_InFreeList && GetNextSpan(std::addressof(this->span_table), next) == nullptr) {
Span *next = GetNextSpan(std::addressof(m_span_table), span);
if (next && next->status == Span::Status_InFreeList && GetNextSpan(std::addressof(m_span_table), next) == nullptr) {
return true;
} else {
return false;
@ -424,12 +424,12 @@ namespace ams::mem::impl::heap {
/* NOTE: This function uses locks unsafely (unscoped) */
this->lock.Lock();
m_lock.Lock();
Span *ptr_span = GetSpanFromPointer(std::addressof(this->span_table), ptr);
Span *ptr_span = GetSpanFromPointer(std::addressof(m_span_table), ptr);
if (!ptr_span) {
AMS_ASSERT(ptr_span != nullptr);
this->lock.Unlock();
m_lock.Unlock();
return EFAULT;
}
@ -442,16 +442,16 @@ namespace ams::mem::impl::heap {
span->num_pages = ptr_span->num_pages - num_pages;
span->id = 0;
span->status = Span::Status_InUse;
ChangeRangeOfSpan(std::addressof(this->span_table), ptr_span, ptr_span->start.u, num_pages);
RegisterSpan(std::addressof(this->span_table), span);
ChangeRangeOfSpan(std::addressof(m_span_table), ptr_span, ptr_span->start.u, num_pages);
RegisterSpan(std::addressof(m_span_table), span);
this->FreePagesImpl(span);
}
} else {
Span *next_span = GetNextSpan(std::addressof(this->span_table), ptr_span);
Span *next_span = GetNextSpan(std::addressof(m_span_table), ptr_span);
if (!next_span || next_span->status != Span::Status_InFreeList || next_span->num_pages < num_pages - ptr_span->num_pages) {
this->lock.Unlock();
m_lock.Unlock();
this->lock.Lock();
m_lock.Lock();
Span *span = this->AllocatePagesImpl(num_pages);
if (span) {
@ -461,44 +461,44 @@ namespace ams::mem::impl::heap {
*p = nullptr;
}
this->lock.Unlock();
m_lock.Unlock();
if (*p == nullptr) {
return ENOMEM;
}
std::memcpy(*p, ptr, num_pages * TlsHeapStatic::PageSize);
this->lock.Lock();
m_lock.Lock();
this->FreePagesImpl(ptr_span);
this->lock.Unlock();
m_lock.Unlock();
return 0;
}
if (this->use_virtual_memory && this->AllocatePhysical(next_span->start.p, (num_pages - ptr_span->num_pages) * TlsHeapStatic::PageSize)) {
this->lock.Unlock();
if (m_use_virtual_memory && this->AllocatePhysical(next_span->start.p, (num_pages - ptr_span->num_pages) * TlsHeapStatic::PageSize)) {
m_lock.Unlock();
return ENOMEM;
}
this->RemoveFromFreeBlockList(next_span);
if (next_span->num_pages == num_pages - ptr_span->num_pages) {
UnregisterSpan(std::addressof(this->span_table), next_span);
ChangeRangeOfSpan(std::addressof(this->span_table), ptr_span, ptr_span->start.u, num_pages);
UnregisterSpan(std::addressof(m_span_table), next_span);
ChangeRangeOfSpan(std::addressof(m_span_table), ptr_span, ptr_span->start.u, num_pages);
SpanPage *sp = GetSpanPage(next_span);
this->FreeSpanToSpanPage(next_span, sp);
this->DestroySpanPageIfEmpty(sp, false);
} else {
const uintptr_t new_end = ptr_span->start.u + num_pages * TlsHeapStatic::PageSize;
const size_t new_num_pages = next_span->num_pages - (num_pages - ptr_span->num_pages);
ChangeRangeOfSpan(std::addressof(this->span_table), next_span, new_end, new_num_pages);
ChangeRangeOfSpan(std::addressof(this->span_table), ptr_span, ptr_span->start.u, num_pages);
ChangeRangeOfSpan(std::addressof(m_span_table), next_span, new_end, new_num_pages);
ChangeRangeOfSpan(std::addressof(m_span_table), ptr_span, ptr_span->start.u, num_pages);
this->MergeIntoFreeList(next_span);
}
}
}
*p = ptr;
this->lock.Unlock();
m_lock.Unlock();
return 0;
}
@ -509,9 +509,9 @@ namespace ams::mem::impl::heap {
AMS_ASSERT(size <= MaxSize);
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
Span *ptr_span = GetSpanFromPointer(std::addressof(this->span_table), ptr);
Span *ptr_span = GetSpanFromPointer(std::addressof(m_span_table), ptr);
if (!ptr_span) {
AMS_ASSERT(ptr_span != nullptr);
return EFAULT;
@ -528,8 +528,8 @@ namespace ams::mem::impl::heap {
span->num_pages = ptr_span->num_pages - num_pages;
span->id = 0;
span->status = Span::Status_InUse;
ChangeRangeOfSpan(std::addressof(this->span_table), ptr_span, ptr_span->start.u, num_pages);
RegisterSpan(std::addressof(this->span_table), span);
ChangeRangeOfSpan(std::addressof(m_span_table), ptr_span, ptr_span->start.u, num_pages);
RegisterSpan(std::addressof(m_span_table), span);
this->FreePagesImpl(span);
}
}
@ -543,9 +543,9 @@ namespace ams::mem::impl::heap {
size_t hash = 0;
{
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
for (Span *span = GetSpanFromPointer(std::addressof(this->span_table), this); span != nullptr; span = GetNextSpan(std::addressof(this->span_table), span)) {
for (Span *span = GetSpanFromPointer(std::addressof(m_span_table), this); span != nullptr; span = GetNextSpan(std::addressof(m_span_table), span)) {
if (span->status != Span::Status_InUse) {
continue;
}
@ -594,7 +594,7 @@ namespace ams::mem::impl::heap {
}
AMS_ASSERT(span->page_class == 0);
if (this->use_virtual_memory && this->AllocatePhysical(span->start.p, TlsHeapStatic::PageSize) != 0) {
if (m_use_virtual_memory && this->AllocatePhysical(span->start.p, TlsHeapStatic::PageSize) != 0) {
return nullptr;
}
@ -604,7 +604,7 @@ namespace ams::mem::impl::heap {
Span *new_span = GetSpanPageSpan(sp);
if (span->num_pages == 1) {
this->RemoveFromFreeBlockList(span);
MigrateSpan(std::addressof(this->span_table), span, new_span);
MigrateSpan(std::addressof(m_span_table), span, new_span);
AMS_ASSERT(new_span->num_pages == 1);
new_span->status = Span::Status_InUseSystem;
@ -620,15 +620,15 @@ namespace ams::mem::impl::heap {
if (span->num_pages - 1 < FreeListCount) {
this->RemoveFromFreeBlockList(span);
}
ChangeRangeOfSpan(std::addressof(this->span_table), span, span->start.u + TlsHeapStatic::PageSize, span->num_pages - 1);
RegisterSpan(std::addressof(this->span_table), new_span);
ChangeRangeOfSpan(std::addressof(m_span_table), span, span->start.u + TlsHeapStatic::PageSize, span->num_pages - 1);
RegisterSpan(std::addressof(m_span_table), new_span);
if (span->num_pages < FreeListCount) {
this->AddToFreeBlockList(span);
}
}
ListInsertAfter(std::addressof(this->spanpage_list), sp);
ListInsertAfter(std::addressof(m_spanpage_list), sp);
return sp;
}
@ -648,7 +648,7 @@ namespace ams::mem::impl::heap {
if (sp->info.free_count == 0) {
ListRemoveSelf(sp);
ListInsertAfter(std::addressof(this->full_spanpage_list), sp);
ListInsertAfter(std::addressof(m_full_spanpage_list), sp);
}
return span;
@ -671,10 +671,10 @@ namespace ams::mem::impl::heap {
if (remaining_pages >= FreeListCount) {
ChangeRangeOfSpan(std::addressof(this->span_table), span, old_start, remaining_pages);
ChangeRangeOfSpan(std::addressof(m_span_table), span, old_start, remaining_pages);
} else {
this->RemoveFromFreeBlockList(span);
ChangeRangeOfSpan(std::addressof(this->span_table), span, old_start, remaining_pages);
ChangeRangeOfSpan(std::addressof(m_span_table), span, old_start, remaining_pages);
this->AddToFreeBlockList(span);
}
@ -686,22 +686,22 @@ namespace ams::mem::impl::heap {
new_span->aux.large_clear.zero = 0;
span->aux.large_clear.zero = 0;
if (this->use_virtual_memory && this->AllocatePhysical(new_span->start.p, new_span->num_pages * TlsHeapStatic::PageSize) != 0) {
if (m_use_virtual_memory && this->AllocatePhysical(new_span->start.p, new_span->num_pages * TlsHeapStatic::PageSize) != 0) {
new_span->status = Span::Status_InFreeList;
this->MergeIntoFreeList(new_span);
return nullptr;
}
RegisterSpan(std::addressof(this->span_table), new_span);
RegisterSpan(std::addressof(m_span_table), new_span);
return new_span;
}
void TlsHeapCentral::MergeFreeSpans(Span *span, Span *span_to_merge, uintptr_t start) {
const size_t total_pages = span->num_pages + span_to_merge->num_pages;
UnregisterSpan(std::addressof(this->span_table), span_to_merge);
UnregisterSpan(std::addressof(m_span_table), span_to_merge);
SpanPage *span_page = GetSpanPage(span_to_merge);
this->FreeSpanToSpanPage(span_to_merge, span_page);
ChangeRangeOfSpan(std::addressof(this->span_table), span, start, total_pages);
ChangeRangeOfSpan(std::addressof(m_span_table), span, start, total_pages);
}
bool TlsHeapCentral::DestroySpanPageIfEmpty(SpanPage *sp, bool full) {
@ -716,7 +716,7 @@ namespace ams::mem::impl::heap {
size_t first = this->FreeListFirstNonEmpty(0);
while (first < FreeListCount) {
for (Span *target = ListGetNext(std::addressof(this->freelists[first])); target; target = ListGetNext(target)) {
for (Span *target = ListGetNext(std::addressof(m_freelists[first])); target; target = ListGetNext(target)) {
AMS_ASSERT(target->status == Span::Status_InFreeList);
SpanPage *target_sp = GetSpanPage(target);
@ -724,7 +724,7 @@ namespace ams::mem::impl::heap {
Span *new_span = this->AllocateSpanFromSpanPage(sp);
AMS_ASSERT(new_span != nullptr);
MigrateSpan(std::addressof(this->span_table), target, new_span);
MigrateSpan(std::addressof(m_span_table), target, new_span);
this->FreeSpanToSpanPage(target, target_sp);
this->DestroySpanPageIfEmpty(target_sp, full);
}
@ -743,7 +743,7 @@ namespace ams::mem::impl::heap {
if (other_sp->info.free_count > 0x10) {
target = other_sp;
} else {
for (target = ListGetNext(std::addressof(this->spanpage_list)); target && (target == sp || !target->info.free_count); target = ListGetNext(target)) {
for (target = ListGetNext(std::addressof(m_spanpage_list)); target && (target == sp || !target->info.free_count); target = ListGetNext(target)) {
/* ... */
}
if (!target) {
@ -757,7 +757,7 @@ namespace ams::mem::impl::heap {
Span *new_span = this->AllocateSpanFromSpanPage(target);
AMS_ASSERT(new_span != nullptr);
MigrateSpan(std::addressof(this->span_table), GetSpanPageSpan(sp), new_span);
MigrateSpan(std::addressof(m_span_table), GetSpanPageSpan(sp), new_span);
ListRemoveSelf(sp);
this->FreePagesImpl(new_span);
@ -768,15 +768,15 @@ namespace ams::mem::impl::heap {
}
Span *TlsHeapCentral::GetFirstSpan() const {
Span *span = GetSpanFromPointer(std::addressof(this->span_table), reinterpret_cast<const void *>(this));
Span *span = GetSpanFromPointer(std::addressof(m_span_table), reinterpret_cast<const void *>(this));
AMS_ASSERT(span != nullptr);
return GetNextSpan(std::addressof(this->span_table), span);
return GetNextSpan(std::addressof(m_span_table), span);
}
Span *TlsHeapCentral::MakeFreeSpan(size_t num_pages) {
while (true) {
SpanPage *sp;
for (sp = ListGetNext(std::addressof(this->spanpage_list)); sp && !this->DestroySpanPageIfEmpty(sp, true); sp = ListGetNext(sp)) {
for (sp = ListGetNext(std::addressof(m_spanpage_list)); sp && !this->DestroySpanPageIfEmpty(sp, true); sp = ListGetNext(sp)) {
/* ... */
}
if (!sp) {
@@ -798,7 +798,7 @@ namespace ams::mem::impl::heap {
}
}
Span *cur = ListGetNext(std::addressof(this->freelists[start]));
Span *cur = ListGetNext(std::addressof(m_freelists[start]));
Span *best = cur;
if (start == FreeListCount - 1) {
if (num_pages >= FreeListCount) {
@@ -849,7 +849,7 @@ namespace ams::mem::impl::heap {
sp->info.alloc_bitmap &= ~(TopBit >> span_idx);
if ((++(sp->info.free_count)) == 1) {
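/* free_count just went from zero to one, so move the span page back onto the list of pages that still have free Span slots. */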
ListRemoveSelf(sp);
ListInsertAfter(std::addressof(this->spanpage_list), sp);
ListInsertAfter(std::addressof(m_spanpage_list), sp);
}
}
@@ -861,8 +861,8 @@ namespace ams::mem::impl::heap {
AMS_ASSERT(!span->list_prev && !span->list_next);
AMS_ASSERT(span->status != Span::Status_InUse);
Span *prev_span = GetPrevSpan(std::addressof(this->span_table), span);
Span *next_span = GetNextSpan(std::addressof(this->span_table), span);
Span *prev_span = GetPrevSpan(std::addressof(m_span_table), span);
Span *next_span = GetNextSpan(std::addressof(m_span_table), span);
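/* Look up the physically adjacent spans so the freed span can be coalesced with any neighbors that are already in the free list. */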
const bool prev_free = prev_span && prev_span->status == Span::Status_InFreeList;
const bool prev_small = prev_span && prev_span->num_pages < FreeListCount;
const bool next_free = next_span && next_span->status == Span::Status_InFreeList;
@@ -936,16 +936,16 @@ namespace ams::mem::impl::heap {
AMS_ASSERT(i < idx_end);
if (i + 1 == idx_end) {
if (this->physical_page_flags[i]) {
this->physical_page_flags[i] = 2;
if (m_physical_page_flags[i]) {
m_physical_page_flags[i] = 2;
}
} else {
const void *set_flag = ::memchr(std::addressof(this->physical_page_flags[i]), 1, idx_end - i);
const void *set_flag = ::memchr(std::addressof(m_physical_page_flags[i]), 1, idx_end - i);
if (set_flag) {
const uintptr_t set_idx = reinterpret_cast<const u8 *>(set_flag) - this->physical_page_flags;
const void *lst_flag = ::memrchr(std::addressof(this->physical_page_flags[set_idx]), 1, idx_end - set_idx);
const uintptr_t lst_idx = (lst_flag) ? (reinterpret_cast<const u8 *>(lst_flag) - this->physical_page_flags + 1) : idx_end;
std::memset(std::addressof(this->physical_page_flags[set_idx]), 2, lst_idx - set_idx);
const uintptr_t set_idx = reinterpret_cast<const u8 *>(set_flag) - m_physical_page_flags;
const void *lst_flag = ::memrchr(std::addressof(m_physical_page_flags[set_idx]), 1, idx_end - set_idx);
const uintptr_t lst_idx = (lst_flag) ? (reinterpret_cast<const u8 *>(lst_flag) - m_physical_page_flags + 1) : idx_end;
std::memset(std::addressof(m_physical_page_flags[set_idx]), 2, lst_idx - set_idx);
}
}
@@ -953,7 +953,7 @@ namespace ams::mem::impl::heap {
}
Span *TlsHeapCentral::AllocatePagesImpl(size_t num_pages) {
if (num_pages >= this->span_table.total_pages / 4) {
if (num_pages >= m_span_table.total_pages / 4) {
this->MakeFreeSpan(std::numeric_limits<size_t>::max());
}
@@ -968,7 +968,7 @@ namespace ams::mem::impl::heap {
AMS_ASSERT(span->status == Span::Status_InFreeList);
if (num_pages == span->num_pages) {
if (this->use_virtual_memory && this->AllocatePhysical(span->start.p, span->num_pages * TlsHeapStatic::PageSize) != 0) {
if (m_use_virtual_memory && this->AllocatePhysical(span->start.p, span->num_pages * TlsHeapStatic::PageSize) != 0) {
return nullptr;
} else {
this->RemoveFromFreeBlockList(span);
@@ -990,7 +990,7 @@ namespace ams::mem::impl::heap {
auto new_span_guard = SCOPE_GUARD { this->FreeSpanToSpanPage(new_span); };
/* Allocating the new span potentially invalidates the span we were looking at, so find the span for it in the table. */
span = GetSpanFromPointer(std::addressof(this->span_table), prev_ptr);
span = GetSpanFromPointer(std::addressof(m_span_table), prev_ptr);
const size_t cur_pages = span->num_pages;
/* If the span was partially allocated, we need to find a new one that's big enough. */
@@ -1010,7 +1010,7 @@ namespace ams::mem::impl::heap {
new_span_guard.Cancel();
return this->SplitSpan(span, num_pages, new_span);
} else if (this->use_virtual_memory && this->AllocatePhysical(span->start.p, span->num_pages * TlsHeapStatic::PageSize) != 0) {
} else if (m_use_virtual_memory && this->AllocatePhysical(span->start.p, span->num_pages * TlsHeapStatic::PageSize) != 0) {
return nullptr;
} else {
this->RemoveFromFreeBlockList(span);
@@ -1023,7 +1023,7 @@ namespace ams::mem::impl::heap {
}
Span *TlsHeapCentral::AllocatePagesWithBigAlignImpl(size_t num_pages, size_t align) {
if (num_pages >= this->span_table.total_pages / 4) {
if (num_pages >= m_span_table.total_pages / 4) {
this->MakeFreeSpan(std::numeric_limits<size_t>::max());
}
@@ -1054,7 +1054,7 @@ namespace ams::mem::impl::heap {
AMS_ASSERT(span->status == Span::Status_InFreeList);
const uintptr_t aligned_start = util::AlignUp(span->start.u, align);
if (this->use_virtual_memory && this->AllocatePhysical(reinterpret_cast<void *>(aligned_start), num_pages * TlsHeapStatic::PageSize) != 0) {
if (m_use_virtual_memory && this->AllocatePhysical(reinterpret_cast<void *>(aligned_start), num_pages * TlsHeapStatic::PageSize) != 0) {
this->FreeSpanToSpanPage(before_span);
this->FreeSpanToSpanPage(after_span);
return nullptr;
@@ -1072,9 +1072,9 @@ namespace ams::mem::impl::heap {
span->status = Span::Status_InUse;
span->aux.large_clear.zero = 0;
ChangeRangeOfSpan(std::addressof(this->span_table), span, aligned_start, num_pages);
ChangeRangeOfSpan(std::addressof(m_span_table), span, aligned_start, num_pages);
RegisterSpan(std::addressof(this->span_table), after_span);
RegisterSpan(std::addressof(m_span_table), after_span);
this->MergeIntoFreeList(after_span);
return span;
@@ -1100,10 +1100,10 @@ namespace ams::mem::impl::heap {
span->status = Span::Status_InUse;
span->aux.large_clear.zero = 0;
ChangeRangeOfSpan(std::addressof(this->span_table), span, aligned_start, num_pages);
ChangeRangeOfSpan(std::addressof(m_span_table), span, aligned_start, num_pages);
RegisterSpan(std::addressof(this->span_table), before_span);
RegisterSpan(std::addressof(this->span_table), after_span);
RegisterSpan(std::addressof(m_span_table), before_span);
RegisterSpan(std::addressof(m_span_table), after_span);
this->MergeIntoFreeList(before_span);
this->MergeIntoFreeList(after_span);
@@ -1117,9 +1117,9 @@ namespace ams::mem::impl::heap {
span->status = Span::Status_InUse;
span->aux.large_clear.zero = 0;
ChangeRangeOfSpan(std::addressof(this->span_table), span, aligned_start, num_pages);
ChangeRangeOfSpan(std::addressof(m_span_table), span, aligned_start, num_pages);
RegisterSpan(std::addressof(this->span_table), before_span);
RegisterSpan(std::addressof(m_span_table), before_span);
this->MergeIntoFreeList(before_span);
return span;
@@ -1134,7 +1134,7 @@ namespace ams::mem::impl::heap {
/* Double free error. */
} else {
span->status = Span::Status_InFreeList;
if (this->use_virtual_memory) {
if (m_use_virtual_memory) {
const uintptr_t start = span->start.u;
const uintptr_t end = span->start.u + (span->num_pages * TlsHeapStatic::PageSize);
uintptr_t start_alignup = TlsHeapStatic::AlignUpPhysicalPage(start);
@@ -1166,7 +1166,7 @@ namespace ams::mem::impl::heap {
void *TlsHeapCentral::CacheSmallMemoryImpl(size_t cls, size_t align, bool for_system) {
AMS_ASSERT(cls != 0 && cls < TlsHeapStatic::NumClassInfo);
Span *span = ListGetNext(std::addressof(this->smallmem_lists[cls]));
Span *span = ListGetNext(std::addressof(m_smallmem_lists[cls]));
while (true) {
if (for_system) {
while (span && span->status != Span::Status_InUseSystem) {
@@ -1200,8 +1200,8 @@ namespace ams::mem::impl::heap {
Span *new_span = this->AllocatePagesImpl(TlsHeapStatic::GetNumPages(cls));
if (new_span) {
SpanToSmallMemorySpan(std::addressof(this->span_table), new_span, cls);
ListInsertAfter(std::addressof(this->smallmem_lists[cls]), new_span);
SpanToSmallMemorySpan(std::addressof(m_span_table), new_span, cls);
ListInsertAfter(std::addressof(m_smallmem_lists[cls]), new_span);
InitSmallMemorySpan(new_span, cls, for_system, 0);
void *mem = AllocateSmallMemory(new_span);
@@ -1212,7 +1212,7 @@ namespace ams::mem::impl::heap {
} else {
for (size_t cur_cls = cls; cur_cls < TlsHeapStatic::NumClassInfo; cur_cls++) {
if (align == 0 || util::IsAligned(TlsHeapStatic::GetChunkSize(cur_cls), align)) {
span = ListGetNext(std::addressof(this->smallmem_lists[cur_cls]));
span = ListGetNext(std::addressof(m_smallmem_lists[cur_cls]));
if (for_system) {
while (span && span->status != Span::Status_InUseSystem) {
span = ListGetNext(span);
@@ -1242,10 +1242,10 @@ namespace ams::mem::impl::heap {
}
errno_t TlsHeapCentral::UncacheSmallMemoryImpl(void *ptr) {
Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr);
Span *span = GetSpanFromPointer(std::addressof(m_span_table), ptr);
if (span && span->page_class) {
if (!span->aux.small.objects) {
ListInsertAfter(std::addressof(this->smallmem_lists[span->page_class]), span);
ListInsertAfter(std::addressof(m_smallmem_lists[span->page_class]), span);
}
ReleaseSmallMemory(span, ptr);
@@ -1254,7 +1254,7 @@ namespace ams::mem::impl::heap {
span->aux.small.objects = nullptr;
ListRemoveSelf(span);
AMS_ASSERT(span->page_class != 0);
SmallMemorySpanToSpan(std::addressof(this->span_table), span);
SmallMemorySpanToSpan(std::addressof(m_span_table), span);
this->FreePagesImpl(span);
}
@@ -1272,7 +1272,7 @@ namespace ams::mem::impl::heap {
Span::SmallMemory head = {};
Span::SmallMemory *hptr = std::addressof(head);
Span *span = ListGetNext(std::addressof(this->smallmem_lists[*cls]));
Span *span = ListGetNext(std::addressof(m_smallmem_lists[*cls]));
size_t n = 0;
while (span) {
@@ -1304,8 +1304,8 @@ namespace ams::mem::impl::heap {
Span *new_span = this->AllocatePagesImpl(TlsHeapStatic::GetNumPages(*cls));
if (new_span) {
SpanToSmallMemorySpan(std::addressof(this->span_table), new_span, *cls);
ListInsertAfter(std::addressof(this->smallmem_lists[*cls]), new_span);
SpanToSmallMemorySpan(std::addressof(m_span_table), new_span, *cls);
ListInsertAfter(std::addressof(m_smallmem_lists[*cls]), new_span);
InitSmallMemorySpan(new_span, *cls, false, cpu_id);
MangledSmallMemory memlist;
@@ -1329,7 +1329,7 @@ namespace ams::mem::impl::heap {
} else {
for (size_t cur_cls = *cls; cur_cls < TlsHeapStatic::NumClassInfo; cur_cls++) {
if (align == 0 || util::IsAligned(TlsHeapStatic::GetChunkSize(cur_cls), align)) {
span = ListGetNext(std::addressof(this->smallmem_lists[cur_cls]));
span = ListGetNext(std::addressof(m_smallmem_lists[cur_cls]));
while (span && (span->status == Span::Status_InUseSystem)) {
span = ListGetNext(span);
@@ -1363,7 +1363,7 @@ namespace ams::mem::impl::heap {
errno_t TlsHeapCentral::WalkAllocatedPointersImpl(HeapWalkCallback callback, void *user_data) {
errno_t err = ENOENT;
for (Span *span = GetSpanFromPointer(std::addressof(this->span_table), this); span != nullptr; span = GetNextSpan(std::addressof(this->span_table), span)) {
for (Span *span = GetSpanFromPointer(std::addressof(m_span_table), this); span != nullptr; span = GetNextSpan(std::addressof(m_span_table), span)) {
if (span->status != Span::Status_InUse) {
continue;
}
@@ -1416,7 +1416,7 @@ namespace ams::mem::impl::heap {
}
errno_t TlsHeapCentral::GetMappedMemStatsImpl(size_t *out_free_size, size_t *out_max_allocatable_size) {
if (!this->use_virtual_memory) {
if (!m_use_virtual_memory) {
return EOPNOTSUPP;
}
@@ -1434,7 +1434,7 @@ namespace ams::mem::impl::heap {
size_t num_free_spans = 0;
size_t wip_allocatable_size = 0;
Span *span = GetSpanFromPointer(std::addressof(this->span_table), this);
Span *span = GetSpanFromPointer(std::addressof(m_span_table), this);
while (span) {
const size_t size = span->num_pages * TlsHeapStatic::PageSize;
@@ -1463,13 +1463,13 @@ namespace ams::mem::impl::heap {
wip_allocatable_size += size;
}
span = GetNextSpan(std::addressof(this->span_table), span);
span = GetNextSpan(std::addressof(m_span_table), span);
}
max_allocatable_size = std::max(max_allocatable_size, wip_allocatable_size);
bool sp_full = true;
for (SpanPage *sp = ListGetNext(std::addressof(this->spanpage_list)); sp != nullptr; sp = ListGetNext(sp)) {
for (SpanPage *sp = ListGetNext(std::addressof(m_spanpage_list)); sp != nullptr; sp = ListGetNext(sp)) {
if (sp->info.is_sticky == 0 && CanAllocateSpan(sp)) {
sp_full = false;
break;

View file

@@ -195,21 +195,21 @@ namespace ams::mem::impl::heap {
static_assert(NumFreeListBitmaps * BITSIZEOF(FreeListAvailableWord) == FreeListCount);
private:
SpanTable span_table;
u8 *physical_page_flags;
s32 num_threads;
s32 static_thread_quota;
s32 dynamic_thread_quota;
bool use_virtual_memory;
os::SdkRecursiveMutex lock;
ListHeader<SpanPage> spanpage_list;
ListHeader<SpanPage> full_spanpage_list;
ListHeader<Span> freelists[FreeListCount];
FreeListAvailableWord freelists_bitmap[NumFreeListBitmaps];
ListHeader<Span> smallmem_lists[TlsHeapStatic::NumClassInfo];
SpanTable m_span_table;
u8 *m_physical_page_flags;
s32 m_num_threads;
s32 m_static_thread_quota;
s32 m_dynamic_thread_quota;
bool m_use_virtual_memory;
os::SdkRecursiveMutex m_lock;
ListHeader<SpanPage> m_spanpage_list;
ListHeader<SpanPage> m_full_spanpage_list;
ListHeader<Span> m_freelists[FreeListCount];
FreeListAvailableWord m_freelists_bitmap[NumFreeListBitmaps];
ListHeader<Span> m_smallmem_lists[TlsHeapStatic::NumClassInfo];
public:
TlsHeapCentral() : lock() {
this->span_table.total_pages = 0;
TlsHeapCentral() : m_lock() {
m_span_table.total_pages = 0;
}
errno_t Initialize(void *start, size_t size, bool use_virtual_memory);
@@ -223,11 +223,11 @@ namespace ams::mem::impl::heap {
errno_t AddThreadCache(TlsHeapCache *cache) {
AMS_UNUSED(cache);
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
/* Add thread and recalculate. */
this->num_threads++;
this->dynamic_thread_quota = this->GetTotalHeapSize() / (2 * this->num_threads);
m_num_threads++;
m_dynamic_thread_quota = this->GetTotalHeapSize() / (2 * m_num_threads);
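/* e.g., with a 64 MiB heap and four registered thread caches, the dynamic per-thread quota becomes 64 MiB / (2 * 4) = 8 MiB. */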
return 0;
}
@@ -235,17 +235,17 @@ namespace ams::mem::impl::heap {
errno_t RemoveThreadCache(TlsHeapCache *cache) {
AMS_UNUSED(cache);
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
/* Remove thread and recalculate. */
this->num_threads--;
this->dynamic_thread_quota = this->GetTotalHeapSize() / (2 * this->num_threads);
m_num_threads--;
m_dynamic_thread_quota = this->GetTotalHeapSize() / (2 * m_num_threads);
return 0;
}
void *CacheLargeMemory(size_t size) {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
const size_t num_pages = util::AlignUp(size, TlsHeapStatic::PageSize) / TlsHeapStatic::PageSize;
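/* Round the request up to whole pages; assuming the usual 4 KiB page size, a 5000-byte request becomes two pages. */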
if (Span *span = this->AllocatePagesImpl(num_pages); span != nullptr) {
@@ -256,7 +256,7 @@ namespace ams::mem::impl::heap {
}
void *CacheLargeMemoryWithBigAlign(size_t size, size_t align) {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
const size_t num_pages = util::AlignUp(size, TlsHeapStatic::PageSize) / TlsHeapStatic::PageSize;
@@ -275,19 +275,19 @@ namespace ams::mem::impl::heap {
}
void *CacheSmallMemory(size_t cls, size_t align = 0) {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
return this->CacheSmallMemoryImpl(cls, align, false);
}
void *CacheSmallMemoryForSystem(size_t cls) {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
return this->CacheSmallMemoryImpl(cls, 0, true);
}
size_t CacheSmallMemoryList(TlsHeapCache *cache, size_t *cls, size_t count, void **p, size_t align = 0) {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
s32 cpu_id = 0;
if (*cls < 8) {
@@ -298,11 +298,11 @@ namespace ams::mem::impl::heap {
}
bool CheckCachedSize(s32 size) const {
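/* True only while size is below both the static per-thread quota and the dynamic quota recomputed as thread caches are added and removed. */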
return size < this->dynamic_thread_quota && size < this->static_thread_quota;
return size < m_dynamic_thread_quota && size < m_static_thread_quota;
}
void Dump(DumpMode dump_mode, int fd, bool json) {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
return this->DumpImpl(dump_mode, fd, json);
}
@@ -310,8 +310,8 @@ namespace ams::mem::impl::heap {
if (TlsHeapStatic::IsPageAligned(ptr)) {
Span *span = nullptr;
{
std::scoped_lock lk(this->lock);
span = GetSpanFromPointer(std::addressof(this->span_table), ptr);
std::scoped_lock lk(m_lock);
span = GetSpanFromPointer(std::addressof(m_span_table), ptr);
}
if (span != nullptr) {
return span->num_pages * TlsHeapStatic::PageSize;
@@ -329,17 +329,17 @@ namespace ams::mem::impl::heap {
std::atomic_thread_fence(std::memory_order_acquire);
const size_t idx = (reinterpret_cast<uintptr_t>(ptr) - reinterpret_cast<uintptr_t>(this)) / TlsHeapStatic::PageSize;
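/* The page index is the pointer's page offset from the heap base (this); it indexes the span table's page class cache directly. */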
if (idx < this->span_table.total_pages) {
if (idx < m_span_table.total_pages) {
if (ptr != nullptr) {
std::scoped_lock lk(this->lock);
Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr);
std::scoped_lock lk(m_lock);
Span *span = GetSpanFromPointer(std::addressof(m_span_table), ptr);
if (span != nullptr) {
AMS_ASSERT(span->page_class == this->span_table.pageclass_cache[idx]);
AMS_ASSERT(span->page_class == m_span_table.pageclass_cache[idx]);
} else {
AMS_ASSERT(span != nullptr);
}
}
return this->span_table.pageclass_cache[idx];
return m_span_table.pageclass_cache[idx];
} else {
/* TODO: Handle error? */
return -1;
@@ -351,8 +351,8 @@ namespace ams::mem::impl::heap {
return EINVAL;
}
std::scoped_lock lk(this->lock);
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
std::scoped_lock lk(m_lock);
if (Span *span = GetSpanFromPointer(std::addressof(m_span_table), ptr); span != nullptr && !span->page_class) {
*out = (span->aux.large.color[0] << 0) | (span->aux.large.color[1] << 8) | (span->aux.large.color[2] << 16);
return 0;
} else {
@@ -361,8 +361,8 @@ namespace ams::mem::impl::heap {
}
errno_t SetColor(const void *ptr, int color) {
std::scoped_lock lk(this->lock);
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
std::scoped_lock lk(m_lock);
if (Span *span = GetSpanFromPointer(std::addressof(m_span_table), ptr); span != nullptr && !span->page_class) {
span->aux.large.color[0] = (color >> 0) & 0xFF;
span->aux.large.color[1] = (color >> 8) & 0xFF;
span->aux.large.color[2] = (color >> 16) & 0xFF;
@@ -373,20 +373,20 @@ namespace ams::mem::impl::heap {
}
errno_t GetMappedMemStats(size_t *out_free_size, size_t *out_max_allocatable_size) {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
return this->GetMappedMemStatsImpl(out_free_size, out_max_allocatable_size);
}
errno_t GetMemStats(TlsHeapMemStats *out) {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
return this->GetMemStatsImpl(out);
}
errno_t GetName(const void *ptr, char *dst, size_t dst_size) {
std::scoped_lock lk(this->lock);
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
std::scoped_lock lk(m_lock);
if (Span *span = GetSpanFromPointer(std::addressof(m_span_table), ptr); span != nullptr && !span->page_class) {
util::Strlcpy(dst, span->aux.large.name, dst_size);
return 0;
} else {
@@ -395,8 +395,8 @@ namespace ams::mem::impl::heap {
}
errno_t SetName(const void *ptr, const char *name) {
std::scoped_lock lk(this->lock);
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
std::scoped_lock lk(m_lock);
if (Span *span = GetSpanFromPointer(std::addressof(m_span_table), ptr); span != nullptr && !span->page_class) {
util::Strlcpy(span->aux.large.name, name, sizeof(span->aux.large.name));
return 0;
} else {
@@ -405,13 +405,13 @@ namespace ams::mem::impl::heap {
}
size_t GetTotalHeapSize() const {
return this->span_table.total_pages * TlsHeapStatic::PageSize;
return m_span_table.total_pages * TlsHeapStatic::PageSize;
}
errno_t UncacheLargeMemory(void *ptr) {
if (TlsHeapStatic::IsPageAligned(ptr)) {
std::scoped_lock lk(this->lock);
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr) {
std::scoped_lock lk(m_lock);
if (Span *span = GetSpanFromPointer(std::addressof(m_span_table), ptr); span != nullptr) {
this->FreePagesImpl(span);
return 0;
} else {
@@ -423,12 +423,12 @@ namespace ams::mem::impl::heap {
}
errno_t UncacheSmallMemory(void *ptr) {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
return this->UncacheSmallMemoryImpl(ptr);
}
errno_t UncacheSmallMemoryList(TlsHeapCache *cache, void *ptr) {
std::scoped_lock lk(this->lock);
std::scoped_lock lk(m_lock);
while (true) {
if (ptr == nullptr) {
@@ -445,8 +445,8 @@ namespace ams::mem::impl::heap {
errno_t WalkAllocatedPointers(HeapWalkCallback callback, void *user_data) {
/* Explicitly handle locking, as we will release the lock during callback. */
this->lock.lock();
ON_SCOPE_EXIT { this->lock.unlock(); };
m_lock.lock();
ON_SCOPE_EXIT { m_lock.unlock(); };
return this->WalkAllocatedPointersImpl(callback, user_data);
}
@@ -488,8 +488,8 @@ namespace ams::mem::impl::heap {
private:
size_t FreeListFirstNonEmpty(size_t start) const {
if (start < FreeListCount) {
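/* Scan the availability bitmap for the first free list at or above start that still holds spans; a count-trailing-zeros on the masked word gives the bucket index within that word. */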
for (size_t i = FreeListAvailableIndex(start); i < util::size(this->freelists_bitmap); i++) {
const FreeListAvailableWord masked = this->freelists_bitmap[i] & ~(FreeListAvailableMask(start) - 1);
for (size_t i = FreeListAvailableIndex(start); i < util::size(m_freelists_bitmap); i++) {
const FreeListAvailableWord masked = m_freelists_bitmap[i] & ~(FreeListAvailableMask(start) - 1);
if (masked) {
const size_t b = __builtin_ctzll(masked);
const size_t res = i * BITSIZEOF(FreeListAvailableWord) + b;
@@ -506,20 +506,20 @@ namespace ams::mem::impl::heap {
AMS_ASSERT(GetSpanPageSpan(GetSpanPage(span)) != span);
AMS_ASSERT(span->status == Span::Status_InFreeList);
const size_t which = std::min(span->num_pages, FreeListCount) - 1;
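/* Spans of FreeListCount pages or more all share the last free-list bucket. */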
ListInsertAfter(std::addressof(this->freelists[which]), span);
this->freelists_bitmap[FreeListAvailableIndex(which)] |= FreeListAvailableMask(which);
ListInsertAfter(std::addressof(m_freelists[which]), span);
m_freelists_bitmap[FreeListAvailableIndex(which)] |= FreeListAvailableMask(which);
}
ALWAYS_INLINE void RemoveFromFreeBlockList(Span *span) {
const size_t which = std::min(span->num_pages, FreeListCount) - 1;
ListRemoveSelf(span);
if (!ListGetNext(std::addressof(this->freelists[which]))) {
this->freelists_bitmap[FreeListAvailableIndex(which)] &= ~FreeListAvailableMask(which);
if (!ListGetNext(std::addressof(m_freelists[which]))) {
m_freelists_bitmap[FreeListAvailableIndex(which)] &= ~FreeListAvailableMask(which);
}
}
Span *AllocateSpanStruct() {
SpanPage *sp = ListGetNext(std::addressof(this->spanpage_list));
SpanPage *sp = ListGetNext(std::addressof(m_spanpage_list));
while (sp && (sp->info.is_sticky || !CanAllocateSpan(sp))) {
sp = ListGetNext(sp);
}
@@ -536,9 +536,9 @@ namespace ams::mem::impl::heap {
}
s32 CallWalkCallback(HeapWalkCallback callback, void *ptr, size_t size, void *user_data) {
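/* The heap lock is dropped for the duration of the user callback and re-acquired before returning. */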
this->lock.unlock();
m_lock.unlock();
int res = callback(ptr, size, user_data);
this->lock.lock();
m_lock.lock();
if (res) {
return 0;
} else {