mirror of
https://github.com/Atmosphere-NX/Atmosphere.git
synced 2025-06-04 16:53:48 -04:00
mem: implement most of StandardAllocator (#860)
This was tested using `https://github.com/node-dot-cpp/alloc-test` plus a few other by-hand tests. It seems to work for the case we care about (sysmodules without thread caching). External users are advised to build with assertions on and to contact SciresM if they find issues. This is a lot of code to have gotten right in one go, and it was written mostly after midnight while sick, so there are probably unnoticed issues.
This commit is contained in:
parent
7502e2174f
commit
87ec045a98
47 changed files with 5473 additions and 43 deletions
|
@ -0,0 +1,116 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
#include "mem_impl_heap_tls_heap_static.hpp"
|
||||
#include "mem_impl_heap_tls_heap_central.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
void *CachedHeap::Allocate(size_t n) {
    /* Delegate the allocation to the thread-local heap cache. */
    auto * const cache = this->tls_heap_cache;
    return cache->Allocate(n);
}
|
||||
|
||||
void *CachedHeap::Allocate(size_t n, size_t align) {
    /* Delegate the aligned allocation to the thread-local heap cache. */
    auto * const cache = this->tls_heap_cache;
    return cache->Allocate(n, align);
}
|
||||
|
||||
size_t CachedHeap::GetAllocationSize(const void *ptr) {
    /* Ask the thread-local heap cache for the usable size of the allocation. */
    auto * const cache = this->tls_heap_cache;
    return cache->GetAllocationSize(ptr);
}
|
||||
|
||||
errno_t CachedHeap::Free(void *p) {
    /* Delegate the free to the thread-local heap cache. */
    auto * const cache = this->tls_heap_cache;
    return cache->Free(p);
}
|
||||
|
||||
errno_t CachedHeap::FreeWithSize(void *p, size_t size) {
    /* Delegate the sized free to the thread-local heap cache. */
    auto * const cache = this->tls_heap_cache;
    return cache->FreeWithSize(p, size);
}
|
||||
|
||||
errno_t CachedHeap::Reallocate(void *ptr, size_t size, void **p) {
    /* Delegate the reallocation to the thread-local heap cache. */
    auto * const cache = this->tls_heap_cache;
    return cache->Reallocate(ptr, size, p);
}
|
||||
|
||||
errno_t CachedHeap::Shrink(void *ptr, size_t size) {
    /* Delegate the shrink request to the thread-local heap cache. */
    auto * const cache = this->tls_heap_cache;
    return cache->Shrink(ptr, size);
}
|
||||
|
||||
void CachedHeap::ReleaseAllCache() {
|
||||
if (this->tls_heap_cache) {
|
||||
this->tls_heap_cache->ReleaseAllCache();
|
||||
}
|
||||
}
|
||||
|
||||
void CachedHeap::Finalize() {
|
||||
if (this->tls_heap_cache) {
|
||||
this->tls_heap_cache->Finalize();
|
||||
this->tls_heap_cache = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
bool CachedHeap::CheckCache() {
|
||||
bool cache = false;
|
||||
auto err = this->Query(AllocQuery_CheckCache, std::addressof(cache));
|
||||
AMS_ASSERT(err != 0);
|
||||
return cache;
|
||||
}
|
||||
|
||||
/* Handle a varargs query against the cached heap.                        */
/* Returns 0 on success, EINVAL for queries this layer does not support.  */
/* NOTE: only cache-related queries are handled here; everything else is  */
/* rejected rather than forwarded (confirm against callers if a central   */
/* forward was intended).                                                 */
errno_t CachedHeap::QueryV(int _query, std::va_list vl) {
    const AllocQuery query = static_cast<AllocQuery>(_query);
    switch (query) {
        case AllocQuery_CheckCache:
            {
                /* Optional out-parameter: true if there is no cache, or if the cache's free lists are consistent. */
                bool *out = va_arg(vl, bool *);
                if (out) {
                    *out = (this->tls_heap_cache == nullptr) || this->tls_heap_cache->CheckCache();
                }
                return 0;
            }
        case AllocQuery_ClearCache:
            {
                /* Return all cached memory to the central heap, keeping the cache attached. */
                this->ReleaseAllCache();
                return 0;
            }
        case AllocQuery_FinalizeCache:
            {
                /* Tear down and detach the cache entirely. */
                this->Finalize();
                return 0;
            }
        default:
            return EINVAL;
    }
}
|
||||
|
||||
/* Varargs front-end for QueryV; see QueryV for the supported queries. */
errno_t CachedHeap::Query(int query, ...) {
    std::va_list vl;
    va_start(vl, query);
    auto err = this->QueryV(query, vl);
    va_end(vl);
    return err;
}
|
||||
|
||||
void CachedHeap::Reset(TlsHeapCache *thc) {
    /* Dispose of any currently-attached cache, then adopt the new one. */
    this->Finalize();

    this->tls_heap_cache = thc;
}
|
||||
|
||||
TlsHeapCache *CachedHeap::Release() {
    /* Transfer ownership of the attached cache to the caller, leaving this heap detached. */
    auto * const released = this->tls_heap_cache;
    this->tls_heap_cache = nullptr;
    return released;
}
|
||||
|
||||
}
|
|
@ -0,0 +1,409 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
#include "mem_impl_heap_tls_heap_static.hpp"
|
||||
#include "mem_impl_heap_tls_heap_central.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
/* Initialize the central heap over either a caller-provided region (start != nullptr) */
/* or freshly reserved virtual memory (start == nullptr).                              */
/* Returns 0 on success; EINVAL for a bad size/region, EEXIST if already initialized,  */
/* or the error from the underlying virtual/physical allocation.                       */
errno_t CentralHeap::Initialize(void *start, size_t size, u32 option) {
    /* Validate size. */
    if (size == 0 || !util::IsAligned(size, PageSize)) {
        return EINVAL;
    }

    /* Don't allow initializing twice. */
    if (this->start) {
        return EEXIST;
    }

    if (start) {
        /* We were provided with a region to use as backing memory. */
        /* Trim it to whole pages; it must still contain at least one page. */
        u8 *aligned_start = reinterpret_cast<u8 *>(util::AlignUp(reinterpret_cast<uintptr_t>(start), PageSize));
        u8 *aligned_end = reinterpret_cast<u8 *>(util::AlignDown(reinterpret_cast<uintptr_t>(start) + size, PageSize));
        if (aligned_start >= aligned_end) {
            return EINVAL;
        }

        this->start = aligned_start;
        this->end = aligned_end;
        this->option = option;
        /* The TlsHeapCentral bookkeeping object lives at the very start of the managed region. */
        this->tls_heap_central = new (this->start) TlsHeapCentral;
        if (auto err = this->tls_heap_central->Initialize(this->start, this->end - this->start, false); err != 0) {
            this->tls_heap_central->~TlsHeapCentral();
            this->tls_heap_central = nullptr;
            AMS_ASSERT(err == 0);
            return err;
        }
        this->use_virtual_memory = false;
    } else {
        /* We were not provided with a region to use as backing. */
        void *mem;
        if (auto err = AllocateVirtualMemory(std::addressof(mem), size); err != 0) {
            return err;
        }
        /* If the reservation came back unaligned, retry with one extra page of slack. */
        if (!util::IsAligned(reinterpret_cast<uintptr_t>(mem), PageSize)) {
            FreeVirtualMemory(mem, size);
            size += PageSize;
            if (auto err = AllocateVirtualMemory(std::addressof(mem), size); err != 0) {
                return err;
            }
        }
        this->start = static_cast<u8 *>(mem);
        this->end = this->start + size;
        this->option = option;
        /* Commit physical memory only for the TlsHeapCentral header; the rest is committed on demand. */
        void *central = reinterpret_cast<void *>(util::AlignUp(reinterpret_cast<uintptr_t>(mem), PageSize));
        /* NOTE(review): on the error paths below the virtual reservation is not freed and       */
        /* this->start/end stay set — presumably acceptable since the process will not continue, */
        /* but verify against callers.                                                           */
        if (auto err = AllocatePhysicalMemory(central, sizeof(TlsHeapCentral)); err != 0) {
            return err;
        }
        this->tls_heap_central = new (central) TlsHeapCentral;
        if (auto err = this->tls_heap_central->Initialize(central, size, true); err != 0) {
            this->tls_heap_central->~TlsHeapCentral();
            this->tls_heap_central = nullptr;
            AMS_ASSERT(err == 0);
            return err;
        }
        this->use_virtual_memory = true;
    }

    return 0;
}
|
||||
|
||||
/* Tear down the central heap, releasing backing memory if we reserved it ourselves. */
void CentralHeap::Finalize() {
    /* Destroy the bookkeeping object (it lives inside the managed region, so no free is needed). */
    if (this->tls_heap_central) {
        this->tls_heap_central->~TlsHeapCentral();
    }
    /* Only memory we reserved in Initialize(nullptr, ...) is ours to return to the system. */
    if (this->use_virtual_memory) {
        mem::impl::physical_free(util::AlignUp(static_cast<void *>(this->start), PageSize), this->end - this->start);
        mem::impl::virtual_free(this->start, this->end - this->start);
    }
    /* Reset all state so Initialize may be called again. */
    this->tls_heap_central = nullptr;
    this->use_virtual_memory = false;
    this->option = 0;
    this->start = nullptr;
    this->end = nullptr;
}
|
||||
|
||||
/* Allocate n bytes with the given (power-of-two) alignment.                      */
/* Small sizes are served from per-class chunk lists; anything without a size     */
/* class, or with alignment above a page, falls through to the large-memory path.*/
/* Returns nullptr on invalid arguments or exhaustion.                           */
void *CentralHeap::Allocate(size_t n, size_t align) {
    if (!util::IsPowerOfTwo(align)) {
        return nullptr;
    }
    if (n > MaxSize) {
        return nullptr;
    }
    /* Alignments beyond a page cannot be satisfied by size classes. */
    if (align > PageSize) {
        return this->tls_heap_central->CacheLargeMemoryWithBigAlign(util::AlignUp(n, PageSize), align);
    }

    /* Inflate the request so the chosen size class guarantees the alignment. */
    const size_t real_size = TlsHeapStatic::GetRealSizeFromSizeAndAlignment(util::AlignUp(n, align), align);
    const auto cls = TlsHeapStatic::GetClassFromSize(real_size);
    /* Class 0 means "no small class fits": serve from large memory. */
    if (!cls) {
        return this->tls_heap_central->CacheLargeMemory(real_size);
    }
    if (real_size == 0) {
        return nullptr;
    }
    AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
    return this->tls_heap_central->CacheSmallMemory(cls, align);
}
|
||||
|
||||
/* Return the usable size of an allocation made by this heap, or 0 if the */
/* pointer is null or misaligned.                                         */
size_t CentralHeap::GetAllocationSize(const void *ptr) {
    const auto cls = this->tls_heap_central->GetClassFromPointer(ptr);
    if (cls > 0) {
        /* Check that the pointer has alignment from our allocator. */
        if (!util::IsAligned(reinterpret_cast<uintptr_t>(ptr), MinimumAlignment)) {
            return 0;
        }
        AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
        /* Small allocations report their full chunk size, not the requested size. */
        return TlsHeapStatic::GetChunkSize(cls);
    } else if (ptr != nullptr) {
        /* Class <= 0 with a non-null pointer: treat as a large allocation. */
        return this->tls_heap_central->GetAllocationSize(ptr);
    } else {
        return 0;
    }
}
|
||||
|
||||
/* Free an allocation made by this heap.                                   */
/* Returns 0 on success (including Free(nullptr)); EFAULT for pointers the */
/* heap does not recognize or that are misaligned.                         */
errno_t CentralHeap::Free(void *ptr) {
    /* Allow Free(nullptr) */
    if (ptr == nullptr) {
        return 0;
    }

    /* Check that the pointer has alignment from our allocator. */
    if(!util::IsAligned(reinterpret_cast<uintptr_t>(ptr), MinimumAlignment)) {
        AMS_ASSERT(util::IsAligned(reinterpret_cast<uintptr_t>(ptr), MinimumAlignment));
        return EFAULT;
    }

    /* Class > 0: small chunk; class 0: large allocation; negative: not ours. */
    const auto cls = this->tls_heap_central->GetClassFromPointer(ptr);
    if (cls >= 0) {
        AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
        if (cls) {
            return this->tls_heap_central->UncacheSmallMemory(ptr);
        } else {
            return this->tls_heap_central->UncacheLargeMemory(ptr);
        }
    } else {
        AMS_ASSERT(cls >= 0);
        return EFAULT;
    }
}
|
||||
|
||||
errno_t CentralHeap::FreeWithSize(void *ptr, size_t size) {
    /* Use the caller-stated size to decide which pool the pointer belongs to: */
    /* a nonzero size class means it was served from the small-memory chunks.  */
    const bool is_small = TlsHeapStatic::GetClassFromSize(size) != 0;
    return is_small ? this->tls_heap_central->UncacheSmallMemory(ptr)
                    : this->tls_heap_central->UncacheLargeMemory(ptr);
}
|
||||
|
||||
/* Reallocate ptr to hold size bytes, storing the (possibly moved) result in *p. */
/* Returns 0 on success; EINVAL/ENOMEM/EFAULT on failure. On failure *p is not   */
/* a valid allocation and ptr remains live.                                      */
errno_t CentralHeap::Reallocate(void *ptr, size_t size, void **p) {
    AMS_ASSERT(ptr != nullptr && size != 0);
    if (!size) {
        return EINVAL;
    }
    if (size > MaxSize) {
        return ENOMEM;
    }

    const auto cls_from_size = TlsHeapStatic::GetClassFromSize(size);
    const auto cls_from_ptr = this->tls_heap_central->GetClassFromPointer(ptr);
    if (cls_from_ptr) {
        if (cls_from_ptr <= 0) {
            /* Negative class: ptr is not an allocation we know about. */
            return EFAULT;
        } else if (cls_from_size && cls_from_size <= cls_from_ptr) {
            /* The new size still fits in the existing chunk; reallocation is a no-op. */
            *p = ptr;
            return 0;
        } else {
            /* Growing out of a small chunk: allocate the requested size and copy the */
            /* old chunk's contents over.                                             */
            /* BUG FIX: the original swapped the two sizes — it allocated only the    */
            /* OLD chunk size (undersized result) and memcpy'd `size` bytes out of    */
            /* the smaller old chunk (out-of-bounds read). Since this branch implies  */
            /* size > GetChunkSize(cls_from_ptr), copying the old chunk size is both  */
            /* sufficient and safe.                                                   */
            const size_t old_chunk_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
            *p = this->Allocate(size);
            if (*p) {
                std::memcpy(*p, ptr, old_chunk_size);
                return this->tls_heap_central->UncacheSmallMemory(ptr);
            } else {
                return ENOMEM;
            }
        }
    } else if (cls_from_size) {
        /* Shrinking a large allocation into a small chunk: the old allocation is at  */
        /* least a page, so copying `size` bytes from it is in bounds.                */
        *p = this->Allocate(size);
        if (*p) {
            std::memcpy(*p, ptr, size);
            return this->tls_heap_central->UncacheLargeMemory(ptr);
        } else {
            return ENOMEM;
        }
    } else {
        /* Large to large: let the central heap resize in place / remap. */
        return this->tls_heap_central->ReallocateLargeMemory(ptr, size, p);
    }
}
|
||||
|
||||
/* Shrink an allocation in place to `size` bytes (never moves the data). */
/* Returns 0 on success, EINVAL if an in-place shrink is impossible,     */
/* EFAULT for unknown pointers, ENOMEM for oversized requests.           */
errno_t CentralHeap::Shrink(void *ptr, size_t size) {
    AMS_ASSERT(ptr != nullptr && size != 0);
    if (!size) {
        return EINVAL;
    }
    if (size > MaxSize) {
        return ENOMEM;
    }

    const auto cls_from_size = TlsHeapStatic::GetClassFromSize(size);
    const auto cls_from_ptr = this->tls_heap_central->GetClassFromPointer(ptr);
    if (cls_from_ptr) {
        if (cls_from_ptr <= 0) {
            /* Negative class: not an allocation we know about. */
            return EFAULT;
        } else if (cls_from_size && cls_from_size <= cls_from_ptr) {
            /* Small chunks are fixed-size; "shrinking" within the chunk is free. */
            return 0;
        } else {
            /* Cannot grow a small chunk (or move it) via Shrink. */
            return EINVAL;
        }
    } else if (cls_from_size) {
        /* Large allocation shrunk to a small size: trim it to the one-page minimum for large memory. */
        return this->tls_heap_central->ShrinkLargeMemory(ptr, PageSize);
    } else {
        return this->tls_heap_central->ShrinkLargeMemory(ptr, size);
    }
}
|
||||
|
||||
/* Create a thread-local cache backed by this central heap and attach it to */
/* *cached_heap. Returns true on success.                                   */
bool CentralHeap::MakeCache(CachedHeap *cached_heap) {
    if (cached_heap == nullptr) {
        return false;
    }

    AMS_ASSERT(this->tls_heap_central != nullptr);
    /* BUG FIX: the allocation below holds a TlsHeapCache, so it must be sized  */
    /* by sizeof(TlsHeapCache). The original used sizeof(*cached_heap), i.e.    */
    /* sizeof(CachedHeap) — essentially one pointer — so the placement-new of   */
    /* the (much larger) TlsHeapCache overflowed the chunk.                     */
    const auto cls = TlsHeapStatic::GetClassFromSize(sizeof(TlsHeapCache));
    void *tls_heap_cache = this->tls_heap_central->CacheSmallMemoryForSystem(cls);
    if (tls_heap_cache == nullptr) {
        return false;
    }

    /* Construct the cache in place and register it with the central heap. */
    new (tls_heap_cache) TlsHeapCache(this->tls_heap_central, this->option);
    if (this->tls_heap_central->AddThreadCache(reinterpret_cast<TlsHeapCache *>(tls_heap_cache)) != 0) {
        this->tls_heap_central->UncacheSmallMemory(tls_heap_cache);
        return false;
    }

    /* Hand the new cache to the cached heap (finalizing any cache it already had). */
    cached_heap->Reset(reinterpret_cast<TlsHeapCache *>(tls_heap_cache));
    return true;
}
|
||||
|
||||
errno_t CentralHeap::WalkAllocatedPointers(HeapWalkCallback callback, void *user_data) {
    /* A callback and an initialized central heap are both required. */
    if (callback == nullptr || this->tls_heap_central == nullptr) {
        return EINVAL;
    }

    return this->tls_heap_central->WalkAllocatedPointers(callback, user_data);
}
|
||||
|
||||
errno_t CentralHeap::QueryV(int query, std::va_list vl) {
    /* Forward to the shared implementation, which consumes arguments through a va_list pointer. */
    std::va_list *vl_ptr = std::addressof(vl);
    return this->QueryVImpl(query, vl_ptr);
}
|
||||
|
||||
/* Varargs front-end for QueryVImpl; see QueryVImpl for the supported queries. */
errno_t CentralHeap::Query(int query, ...) {
    std::va_list vl;
    va_start(vl, query);
    auto err = this->QueryVImpl(query, std::addressof(vl));
    va_end(vl);
    return err;
}
|
||||
|
||||
/* Shared implementation of the heap query interface. Arguments are pulled   */
/* from *vl_ptr according to the query; out-pointers are generally optional. */
/* Returns 0 on success, EINVAL for unknown queries, or the underlying error.*/
errno_t CentralHeap::QueryVImpl(int _query, std::va_list *vl_ptr) {
    const AllocQuery query = static_cast<AllocQuery>(_query);
    switch (query) {
        case AllocQuery_Dump:
        case AllocQuery_DumpJson:
            {
                /* (DumpMode, int fd): dump heap state to the descriptor. */
                auto dump_mode = static_cast<DumpMode>(va_arg(*vl_ptr, int));
                auto fd = va_arg(*vl_ptr, int);
                if (this->tls_heap_central) {
                    this->tls_heap_central->Dump(dump_mode, fd, query == AllocQuery_DumpJson);
                }
                return 0;
            }
        case AllocQuery_PageSize:
            {
                /* (size_t *out): report the heap page size. */
                size_t *out = va_arg(*vl_ptr, size_t *);
                if (out) {
                    *out = PageSize;
                }
                return 0;
            }
        case AllocQuery_AllocatedSize:
        case AllocQuery_FreeSize:
        case AllocQuery_SystemSize:
        case AllocQuery_MaxAllocatableSize:
            {
                /* (size_t *out): report one field of the heap memory statistics. */
                size_t *out = va_arg(*vl_ptr, size_t *);
                if (!out) {
                    return 0;
                }
                if (!this->tls_heap_central) {
                    *out = 0;
                    return 0;
                }
                TlsHeapMemStats stats;
                this->tls_heap_central->GetMemStats(std::addressof(stats));
                switch (query) {
                    case AllocQuery_AllocatedSize:
                    default:
                        *out = stats.allocated_size;
                        break;
                    case AllocQuery_FreeSize:
                        *out = stats.free_size;
                        break;
                    case AllocQuery_SystemSize:
                        *out = stats.system_size;
                        break;
                    case AllocQuery_MaxAllocatableSize:
                        *out = stats.max_allocatable_size;
                        break;
                }
                return 0;
            }
        case AllocQuery_IsClean:
            {
                /* (int *out): nonzero if the heap has no outstanding allocations (or is uninitialized). */
                int *out = va_arg(*vl_ptr, int *);
                if (out) {
                    *out = !this->tls_heap_central || this->tls_heap_central->IsClean();
                }
                return 0;
            }
        case AllocQuery_HeapHash:
            {
                /* (HeapHash *out): hash of the heap's current contents, zeroed if uninitialized. */
                HeapHash *out = va_arg(*vl_ptr, HeapHash *);
                if (out) {
                    if (this->tls_heap_central) {
                        this->tls_heap_central->CalculateHeapHash(out);
                    } else {
                        *out = {};
                    }
                }
                return 0;
            }
        case AllocQuery_UnifyFreeList:
            /* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
            /* IsClean() has the side effect of coalescing the free list. */
            this->tls_heap_central->IsClean();
            return 0;
        case AllocQuery_SetColor:
            {
                /* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
                /* (void *ptr, int color): tag an allocation with a debug color. */
                void *ptr = va_arg(*vl_ptr, void *);
                int color = va_arg(*vl_ptr, int);
                return this->tls_heap_central->SetColor(ptr, color);
            }
        case AllocQuery_GetColor:
            {
                /* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
                /* (void *ptr, int *out): read an allocation's debug color. */
                void *ptr = va_arg(*vl_ptr, void *);
                int *out = va_arg(*vl_ptr, int *);
                return this->tls_heap_central->GetColor(ptr, out);
            }
        case AllocQuery_SetName:
            {
                /* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
                /* (void *ptr, const char *name): tag an allocation with a debug name. */
                void *ptr = va_arg(*vl_ptr, void *);
                const char *name = va_arg(*vl_ptr, const char *);
                return this->tls_heap_central->SetName(ptr, name);
            }
        case AllocQuery_GetName:
            {
                /* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
                /* (void *ptr, char *dst, size_t dst_size): read an allocation's debug name. */
                void *ptr = va_arg(*vl_ptr, void *);
                char *dst = va_arg(*vl_ptr, char *);
                size_t dst_size = va_arg(*vl_ptr, size_t);
                return this->tls_heap_central->GetName(ptr, dst, dst_size);
            }
        case AllocQuery_FreeSizeMapped:
        case AllocQuery_MaxAllocatableSizeMapped:
            {
                /* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
                /* (size_t *out): statistics restricted to currently-mapped memory. */
                size_t *out = va_arg(*vl_ptr, size_t *);
                size_t free_size;
                size_t max_allocatable_size;
                auto err = this->tls_heap_central->GetMappedMemStats(std::addressof(free_size), std::addressof(max_allocatable_size));
                if (err == 0) {
                    if (query == AllocQuery_FreeSizeMapped) {
                        *out = free_size;
                    } else {
                        *out = max_allocatable_size;
                    }
                }
                return err;
            }
        default:
            return EINVAL;
    }
}
|
||||
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#pragma once
|
||||
#include <stratosphere.hpp>
|
||||
#include "../mem_impl_platform.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
using Prot = mem::impl::Prot;
|
||||
|
||||
/* Reserve size bytes of virtual address space; *ptr receives the base address. */
inline errno_t AllocateVirtualMemory(void **ptr, size_t size) {
    return ::ams::mem::impl::virtual_alloc(ptr, size);
}
|
||||
|
||||
/* Release a virtual address space reservation made by AllocateVirtualMemory. */
inline errno_t FreeVirtualMemory(void *ptr, size_t size) {
    return ::ams::mem::impl::virtual_free(ptr, size);
}
|
||||
|
||||
/* Commit read/write physical memory behind an already-reserved virtual range. */
inline errno_t AllocatePhysicalMemory(void *ptr, size_t size) {
    return ::ams::mem::impl::physical_alloc(ptr, size, static_cast<Prot>(Prot_read | Prot_write));
}
|
||||
|
||||
/* Decommit physical memory committed by AllocatePhysicalMemory (the virtual reservation remains). */
inline errno_t FreePhysicalMemory(void *ptr, size_t size) {
    return ::ams::mem::impl::physical_free(ptr, size);
}
|
||||
|
||||
}
|
|
@ -0,0 +1,557 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
#include "mem_impl_heap_tls_heap_static.hpp"
|
||||
#include "mem_impl_heap_tls_heap_cache.hpp"
|
||||
#include "mem_impl_heap_tls_heap_central.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
/* Construct a thread-local cache over the given central heap. The cache-   */
/* enabled/disabled behavior is baked in at construction by selecting the   */
/* templated function-pointer implementations.                              */
TlsHeapCache::TlsHeapCache(TlsHeapCentral *central, u32 option) {
    /* Choose function impls based on option. */
    if ((option & HeapOption_DisableCache) != 0) {
        this->allocate = AllocateImpl<false>;
        this->allocate_aligned = AllocateAlignedImpl<false>;
        this->free = FreeImpl<false>;
        this->free_with_size = FreeWithSizeImpl<false>;
        this->get_allocation_size = GetAllocationSizeImpl<false>;
        this->reallocate = ReallocateImpl<false>;
        this->shrink = ShrinkImpl<false>;
    } else {
        this->allocate = AllocateImpl<true>;
        this->allocate_aligned = AllocateAlignedImpl<true>;
        this->free = FreeImpl<true>;
        this->free_with_size = FreeWithSizeImpl<true>;
        this->get_allocation_size = GetAllocationSizeImpl<true>;
        this->reallocate = ReallocateImpl<true>;
        this->shrink = ShrinkImpl<true>;
    }

    /* Generate random bytes to mangle pointers. */
    /* Fallback entropy is a stack address XOR the epoch time — weak, but only */
    /* used when the system RNG fails; mangling is an integrity aid, not a     */
    /* security boundary.                                                      */
    if (auto err = gen_random(std::addressof(this->mangle_val), sizeof(this->mangle_val)); err != 0) {
        s64 epoch_time;
        epochtime(std::addressof(epoch_time));
        this->mangle_val = reinterpret_cast<uintptr_t>(std::addressof(epoch_time)) ^ static_cast<u64>(epoch_time);
    }

    /* Set member variables. */
    this->central = central;
    this->total_heap_size = central->GetTotalHeapSize();
    this->heap_option = option;
    this->total_cached_size = 0;
    this->largest_class = 0;

    /* Setup chunks. */
    for (size_t i = 0; i < TlsHeapStatic::NumClassInfo; i++) {
        this->small_mem_lists[i] = nullptr;
        this->cached_size[i] = 0;
        this->chunk_count[i] = 1;
    }

    /* Set fixed chunk counts for particularly small chunks. */
    /* Smaller classes batch more chunks per refill from the central heap. */
    this->chunk_count[1] = MaxChunkCount;
    this->chunk_count[2] = MaxChunkCount;
    this->chunk_count[3] = MaxChunkCount;
    this->chunk_count[4] = MaxChunkCount / 2;
    this->chunk_count[5] = MaxChunkCount / 2;
    this->chunk_count[6] = MaxChunkCount / 2;
    this->chunk_count[7] = MaxChunkCount / 4;
    this->chunk_count[8] = MaxChunkCount / 4;
    this->chunk_count[9] = MaxChunkCount / 4;
}
|
||||
|
||||
/* Tear down this cache. NOTE: the final UncacheSmallMemory(this) frees the */
/* cache's own backing memory (it was allocated from the central heap in    */
/* CentralHeap::MakeCache), so `this` must not be touched afterwards.       */
void TlsHeapCache::Finalize() {
    /* Free all small mem lists. */
    this->ReleaseAllCache();

    /* Remove this cache from the owner central heap. */
    this->central->RemoveThreadCache(this);
    this->central->UncacheSmallMemory(this);
}
|
||||
|
||||
/* Sanity-check every cached free list: walk each mangled singly-linked list */
/* and assert its length matches the bookkeeping in cached_size.             */
/* Always returns true; inconsistencies are reported via AMS_ASSERT.         */
bool TlsHeapCache::CheckCache() const {
    for (size_t i = 0; i < util::size(this->small_mem_lists); i++) {
        void *ptr = this->small_mem_lists[i];
        if (ptr) {
            /* Start at minus the expected node count and count up toward zero. */
            s64 depth = -static_cast<s64>(this->cached_size[i] / TlsHeapStatic::GetChunkSize(i));
            while (ptr) {
                /* Links are stored mangled; un-mangle (XOR is its own inverse) to follow them. */
                ptr = *reinterpret_cast<void **>(this->ManglePointer(ptr));
                if ((++depth) == 0) {
                    /* Expected count consumed: the list must terminate exactly here. */
                    AMS_ASSERT(ptr == nullptr);
                    break;
                }
            }
        }
    }

    return true;
}
|
||||
|
||||
void TlsHeapCache::ReleaseAllCache() {
|
||||
for (size_t i = 0; i < util::size(this->small_mem_lists); i++) {
|
||||
if (this->small_mem_lists[i]) {
|
||||
this->central->UncacheSmallMemoryList(this, this->small_mem_lists[i]);
|
||||
this->small_mem_lists[i] = nullptr;
|
||||
this->cached_size[i] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
this->total_cached_size = 0;
|
||||
this->largest_class = 0;
|
||||
}
|
||||
|
||||
/* Cache-disabled allocate: every request goes straight to the central heap. */
template<>
void *TlsHeapCache::AllocateImpl<false>(TlsHeapCache *_this, size_t size) {
    /* Validate allocation size. */
    if (size == 0 || size > MaxSize) {
        return nullptr;
    }

    if (const size_t cls = TlsHeapStatic::GetClassFromSize(size); cls != 0) {
        AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
        return _this->central->CacheSmallMemory(cls);
    } else {
        /* If allocating a huge size, release our cache. */
        if (size >= _this->total_heap_size / 4) {
            _this->ReleaseAllCache();
        }
        return _this->central->CacheLargeMemory(size);
    }
}
|
||||
|
||||
/* Cache-enabled allocate: serve small requests from the per-class mangled   */
/* free lists, refilling a batch from the central heap when a list is empty. */
template<>
void *TlsHeapCache::AllocateImpl<true>(TlsHeapCache *_this, size_t size) {
    /* Validate allocation size. */
    if (size == 0 || size > MaxSize) {
        return nullptr;
    }

    if (size_t cls = TlsHeapStatic::GetClassFromSize(size); cls != 0) {
        AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
        /* Allocate a chunk. */
        void *ptr = _this->small_mem_lists[cls];
        if (ptr == nullptr) {
            /* List empty: refill. CacheSmallMemoryList may change cls (it is passed by address). */
            const size_t prev_cls = cls;
            size_t count = _this->chunk_count[cls];

            size_t n = _this->central->CacheSmallMemoryList(_this, std::addressof(cls), count, std::addressof(ptr));
            if (n == 0) {
                return nullptr;
            }

            if (cls == prev_cls) {
                /* Same class satisfied the refill: adaptively grow the batch size. */
                if (count < MaxChunkCount) {
                    count++;
                }
                _this->chunk_count[cls] = std::max(count, n);
            } else {
                /* The central heap substituted a different class; it returns exactly one chunk then. */
                AMS_ASSERT(n == 1);
            }

            /* n - 1 chunks remain cached after we hand one out below. */
            const size_t csize = TlsHeapStatic::GetChunkSize(cls) * (n - 1);
            _this->cached_size[cls] += csize;
            if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
                _this->largest_class = cls;
            }
            _this->total_cached_size += csize;
        }

        /* Demangle our pointer, update free list. */
        ptr = _this->ManglePointer(ptr);
        _this->small_mem_lists[cls] = *reinterpret_cast<void **>(ptr);

        return ptr;
    } else {
        /* If allocating a huge size, release our cache. */
        if (size >= _this->total_heap_size / 4) {
            _this->ReleaseAllCache();
        }
        return _this->central->CacheLargeMemory(size);
    }
}
|
||||
|
||||
/* Cache-disabled aligned allocate: inflate the request to a class that */
/* guarantees the alignment, then go straight to the central heap.      */
template<>
void *TlsHeapCache::AllocateAlignedImpl<false>(TlsHeapCache *_this, size_t size, size_t align) {
    /* Ensure valid alignment. */
    if (!util::IsPowerOfTwo(align)) {
        return nullptr;
    }

    /* NOTE: Nintendo does not check size == 0 here, despite doing so in Alloc */
    if (size > MaxSize) {
        return nullptr;
    }

    /* Handle big alignment. */
    if (align > TlsHeapStatic::PageSize) {
        return _this->central->CacheLargeMemoryWithBigAlign(util::AlignUp(size, TlsHeapStatic::PageSize), align);
    }

    const size_t real_size = TlsHeapStatic::GetRealSizeFromSizeAndAlignment(util::AlignUp(size, align), align);

    if (const size_t cls = TlsHeapStatic::GetClassFromSize(real_size); cls != 0) {
        if (real_size == 0) {
            return nullptr;
        }

        AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
        return _this->central->CacheSmallMemory(cls, align);
    } else {
        /* If allocating a huge size, release our cache. */
        if (real_size >= _this->total_heap_size / 4) {
            _this->ReleaseAllCache();
        }
        return _this->central->CacheLargeMemory(real_size);
    }
}
|
||||
|
||||
template<>
|
||||
void *TlsHeapCache::AllocateAlignedImpl<true>(TlsHeapCache *_this, size_t size, size_t align) {
|
||||
/* Ensure valid alignment. */
|
||||
if (!util::IsPowerOfTwo(align)) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
/* NOTE: Nintendo does not check size == 0 here, despite doing so in Alloc */
|
||||
if (size > MaxSize) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
/* Handle big alignment. */
|
||||
if (align > TlsHeapStatic::PageSize) {
|
||||
return _this->central->CacheLargeMemoryWithBigAlign(util::AlignUp(size, TlsHeapStatic::PageSize), align);
|
||||
}
|
||||
|
||||
const size_t real_size = TlsHeapStatic::GetRealSizeFromSizeAndAlignment(util::AlignUp(size, align), align);
|
||||
|
||||
if (size_t cls = TlsHeapStatic::GetClassFromSize(real_size); cls != 0) {
|
||||
if (real_size == 0) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
|
||||
|
||||
/* Allocate a chunk. */
|
||||
void *ptr = _this->small_mem_lists[cls];
|
||||
if (ptr == nullptr) {
|
||||
const size_t prev_cls = cls;
|
||||
size_t count = _this->chunk_count[cls];
|
||||
|
||||
size_t n = _this->central->CacheSmallMemoryList(_this, std::addressof(cls), count, std::addressof(ptr), align);
|
||||
if (n == 0) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (cls == prev_cls) {
|
||||
if (count < MaxChunkCount) {
|
||||
count++;
|
||||
}
|
||||
_this->chunk_count[cls] = std::max(count, n);
|
||||
} else {
|
||||
AMS_ASSERT(n == 1);
|
||||
}
|
||||
|
||||
const s32 csize = TlsHeapStatic::GetChunkSize(cls) * (n - 1);
|
||||
_this->total_cached_size += csize;
|
||||
_this->cached_size[cls] += csize;
|
||||
if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
|
||||
_this->largest_class = cls;
|
||||
}
|
||||
}
|
||||
|
||||
/* Demangle our pointer, update free list. */
|
||||
ptr = _this->ManglePointer(ptr);
|
||||
_this->small_mem_lists[cls] = *reinterpret_cast<void **>(ptr);
|
||||
|
||||
return ptr;
|
||||
} else {
|
||||
/* If allocating a huge size, release our cache. */
|
||||
if (size >= _this->total_heap_size / 4) {
|
||||
_this->ReleaseAllCache();
|
||||
}
|
||||
return _this->central->CacheLargeMemory(size);
|
||||
}
|
||||
}
|
||||
|
||||
/* Cache-disabled free: return the pointer directly to the central heap. */
template<>
errno_t TlsHeapCache::FreeImpl<false>(TlsHeapCache *_this, void *ptr) {
    /* NOTE(review): GetClassFromPointer presumably returns a signed class      */
    /* (negative = not ours); storing it in size_t and recovering the sign via  */
    /* static_cast<s32> below relies on that round-trip — confirm its signature.*/
    const size_t cls = _this->central->GetClassFromPointer(ptr);
    if (cls == 0) {
        /* Class 0: large allocation. */
        return _this->central->UncacheLargeMemory(ptr);
    }

    /* NOTE(review): this assert runs before the negative-class/EFAULT handling */
    /* below, so it also fires for foreign pointers.                            */
    AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);

    if (static_cast<s32>(cls) >= 0) {
        return _this->central->UncacheSmallMemory(ptr);
    } else if (ptr == nullptr) {
        /* Free(nullptr) is a successful no-op. */
        return 0;
    } else {
        return EFAULT;
    }
}
|
||||
|
||||
/* Cache-enabled free: push small chunks onto the per-class mangled free  */
/* list; if the cache then exceeds the central heap's budget, flush the   */
/* largest class back and recompute which class is largest.               */
template<>
errno_t TlsHeapCache::FreeImpl<true>(TlsHeapCache *_this, void *ptr) {
    const size_t cls = _this->central->GetClassFromPointer(ptr);
    if (cls == 0) {
        /* Class 0: large allocation, bypasses the cache. */
        return _this->central->UncacheLargeMemory(ptr);
    }

    AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);

    if (static_cast<s32>(cls) >= 0) {
        /* Push onto the free list: the link stored in the chunk is the old head, */
        /* and the new head is stored mangled.                                    */
        *reinterpret_cast<void **>(ptr) = _this->small_mem_lists[cls];
        _this->small_mem_lists[cls] = _this->ManglePointer(ptr);

        /* Account for the newly cached chunk. */
        const s32 csize = TlsHeapStatic::GetChunkSize(cls);
        _this->total_cached_size += csize;
        _this->cached_size[cls] += csize;
        if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
            _this->largest_class = cls;
        }

        errno_t err = 0;
        /* If we now cache more than the central heap allows, flush the largest class. */
        if (!_this->central->CheckCachedSize(_this->total_cached_size)) {
            _this->central->UncacheSmallMemoryList(_this, _this->small_mem_lists[_this->largest_class]);
            _this->small_mem_lists[_this->largest_class] = nullptr;
            _this->total_cached_size -= _this->cached_size[_this->largest_class];
            _this->cached_size[_this->largest_class] = 0;

            /* Find the new largest class by cached size. */
            s32 largest_class = 0;
            s32 biggest_size = -1;
            for (size_t i = 0; i < TlsHeapStatic::NumClassInfo; i++) {
                if (biggest_size < _this->cached_size[i]) {
                    biggest_size = _this->cached_size[i];
                    largest_class = static_cast<s32>(i);
                }
            }
            _this->largest_class = largest_class;
        }
        return err;
    } else if (ptr == nullptr) {
        /* Free(nullptr) is a successful no-op. */
        return 0;
    } else {
        return EFAULT;
    }
}
|
||||
|
||||
template<>
|
||||
errno_t TlsHeapCache::FreeWithSizeImpl<false>(TlsHeapCache *_this, void *ptr, size_t size) {
|
||||
if (ptr == nullptr) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const size_t cls = TlsHeapStatic::GetClassFromSize(size);
|
||||
if (cls == 0) {
|
||||
return _this->central->UncacheLargeMemory(ptr);
|
||||
} else {
|
||||
return _this->central->UncacheSmallMemory(ptr);
|
||||
}
|
||||
}
|
||||
|
||||
template<>
errno_t TlsHeapCache::FreeWithSizeImpl<true>(TlsHeapCache *_this, void *ptr, size_t size) {
    /* Caching sized free: like FreeImpl<true>, but the class is computed from */
    /* the caller-provided size instead of a central-heap pointer lookup. */
    if (ptr == nullptr) {
        return 0;
    }

    const size_t cls = TlsHeapStatic::GetClassFromSize(size);
    if (cls == 0) {
        /* Large (page-backed) allocations are never cached. */
        return _this->central->UncacheLargeMemory(ptr);
    } else {
        /* Push the chunk onto the class's free list (mangled head, raw link). */
        *reinterpret_cast<void **>(ptr) = _this->small_mem_lists[cls];
        _this->small_mem_lists[cls] = _this->ManglePointer(ptr);

        /* Update cache accounting and the current largest (victim) class. */
        const s32 csize = TlsHeapStatic::GetChunkSize(cls);
        _this->total_cached_size += csize;
        _this->cached_size[cls] += csize;
        if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
            _this->largest_class = cls;
        }

        errno_t err = 0;
        if (!_this->central->CheckCachedSize(_this->total_cached_size)) {
            /* Over quota: flush the largest class back to the central heap and */
            /* rescan for the new largest class. */
            _this->central->UncacheSmallMemoryList(_this, _this->small_mem_lists[_this->largest_class]);
            _this->small_mem_lists[_this->largest_class] = nullptr;
            _this->total_cached_size -= _this->cached_size[_this->largest_class];
            _this->cached_size[_this->largest_class] = 0;

            s32 largest_class = 0;
            s32 biggest_size = -1;
            for (size_t i = 0; i < TlsHeapStatic::NumClassInfo; i++) {
                if (biggest_size < _this->cached_size[i]) {
                    biggest_size = _this->cached_size[i];
                    largest_class = static_cast<s32>(i);
                }
            }
            _this->largest_class = largest_class;
        }
        return err;
    }
}
|
||||
|
||||
template<>
size_t TlsHeapCache::GetAllocationSizeImpl<false>(TlsHeapCache *_this, const void *ptr) {
    /* Size lookup does not depend on whether caching is enabled; both */
    /* specializations delegate to the shared implementation. */
    return _this->GetAllocationSizeCommonImpl(ptr);
}
|
||||
|
||||
template<>
size_t TlsHeapCache::GetAllocationSizeImpl<true>(TlsHeapCache *_this, const void *ptr) {
    /* Identical to the <false> specialization; caching does not change how */
    /* an allocation's size is determined. */
    return _this->GetAllocationSizeCommonImpl(ptr);
}
|
||||
|
||||
size_t TlsHeapCache::GetAllocationSizeCommonImpl(const void *ptr) const {
|
||||
const s32 cls = this->central->GetClassFromPointer(ptr);
|
||||
if (cls > 0) {
|
||||
if (!util::IsAligned(ptr, alignof(u64))) {
|
||||
/* All pointers we allocate have alignment at least 8. */
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Validate class. */
|
||||
AMS_ASSERT(cls < static_cast<s32>(TlsHeapStatic::NumClassInfo));
|
||||
if (cls < 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return TlsHeapStatic::GetChunkSize(cls);
|
||||
} else if (ptr != nullptr) {
|
||||
return this->central->GetAllocationSize(ptr);
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
template<>
errno_t TlsHeapCache::ReallocateImpl<false>(TlsHeapCache *_this, void *ptr, size_t size, void **p) {
    /* Non-caching realloc. On success *p receives the (possibly unchanged) */
    /* pointer; returns 0, or ENOMEM/EFAULT on failure. */
    AMS_ASSERT(ptr != nullptr && size != 0);
    if (size > MaxSize) {
        return ENOMEM;
    }

    size_t alloc_size, copy_size;

    /* Classify both the requested size and the existing allocation. */
    /* cls == 0 means "large" (page-backed); < 0 means invalid pointer. */
    const s32 cls_from_size = TlsHeapStatic::GetClassFromSize(size);
    const s32 cls_from_ptr = _this->central->GetClassFromPointer(ptr);
    if (cls_from_ptr < 0) {
        /* error case. */
        return EFAULT;
    } else if (cls_from_size) {
        if (cls_from_ptr > 0) {
            if (cls_from_size <= cls_from_ptr) {
                /* New size fits in the existing chunk's class: keep in place. */
                *p = ptr;
                return 0;
            } else {
                /* Growing small -> larger small: copy the whole old chunk. */
                alloc_size = size;
                copy_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
            }
        } else /* if (cls_from_ptr == 0) */ {
            /* Shrinking large -> small: only the requested bytes survive. */
            alloc_size = size;
            copy_size = size;
        }
    } else if (cls_from_ptr == 0) {
        /* Large -> large: the central heap can resize in place/move itself. */
        return _this->central->ReallocateLargeMemory(ptr, size, p);
    } else /* if (cls_from_ptr > 0) */ {
        /* Small -> large: copy the old chunk's full contents. */
        alloc_size = size;
        copy_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
    }

    /* Allocate-copy-free path for the cases that could not resize in place. */
    *p = AllocateImpl<false>(_this, alloc_size);
    if (*p == nullptr) {
        return ENOMEM;
    }
    std::memcpy(*p, ptr, copy_size);
    return FreeImpl<false>(_this, ptr);
}
|
||||
|
||||
template<>
errno_t TlsHeapCache::ReallocateImpl<true>(TlsHeapCache *_this, void *ptr, size_t size, void **p) {
    /* Caching realloc: identical logic to the <false> specialization, except */
    /* the allocate/free legs go through the caching AllocateImpl/FreeImpl. */
    AMS_ASSERT(ptr != nullptr && size != 0);
    if (size > MaxSize) {
        return ENOMEM;
    }

    size_t alloc_size, copy_size;

    /* cls == 0 means "large" (page-backed); < 0 means invalid pointer. */
    const s32 cls_from_size = TlsHeapStatic::GetClassFromSize(size);
    const s32 cls_from_ptr = _this->central->GetClassFromPointer(ptr);
    if (cls_from_ptr < 0) {
        /* error case. */
        return EFAULT;
    } else if (cls_from_size) {
        if (cls_from_ptr > 0) {
            if (cls_from_size <= cls_from_ptr) {
                /* New size fits in the existing chunk's class: keep in place. */
                *p = ptr;
                return 0;
            } else {
                /* Growing small -> larger small: copy the whole old chunk. */
                alloc_size = size;
                copy_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
            }
        } else /* if (cls_from_ptr == 0) */ {
            /* Shrinking large -> small: only the requested bytes survive. */
            alloc_size = size;
            copy_size = size;
        }
    } else if (cls_from_ptr == 0) {
        /* Large -> large: the central heap handles resizing. */
        return _this->central->ReallocateLargeMemory(ptr, size, p);
    } else /* if (cls_from_ptr > 0) */ {
        /* Small -> large: copy the old chunk's full contents. */
        alloc_size = size;
        copy_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
    }

    /* Allocate-copy-free path for the cases that could not resize in place. */
    *p = AllocateImpl<true>(_this, alloc_size);
    if (*p == nullptr) {
        return ENOMEM;
    }
    std::memcpy(*p, ptr, copy_size);
    return FreeImpl<true>(_this, ptr);
}
|
||||
|
||||
template<>
errno_t TlsHeapCache::ShrinkImpl<false>(TlsHeapCache *_this, void *ptr, size_t size) {
    /* Shrinking does not interact with the cache; both specializations */
    /* delegate to the shared implementation. */
    return _this->ShrinkCommonImpl(ptr, size);
}
|
||||
|
||||
template<>
errno_t TlsHeapCache::ShrinkImpl<true>(TlsHeapCache *_this, void *ptr, size_t size) {
    /* Identical to the <false> specialization; see ShrinkCommonImpl. */
    return _this->ShrinkCommonImpl(ptr, size);
}
|
||||
|
||||
errno_t TlsHeapCache::ShrinkCommonImpl(void *ptr, size_t size) const {
    /* Shrink an allocation in place to at least `size` bytes. Returns 0 on */
    /* success, ENOMEM/EINVAL/EFAULT on failure. */
    AMS_ASSERT(ptr != nullptr && size != 0);

    /* Reject sizes beyond what the heap supports. */
    if (size > MaxSize) {
        return ENOMEM;
    }

    const s32 cls_from_size = TlsHeapStatic::GetClassFromSize(size);
    const s32 cls_from_ptr  = this->central->GetClassFromPointer(ptr);

    /* Class zero from the pointer: this is a large (page-backed) allocation. */
    if (cls_from_ptr == 0) {
        /* A chunk-classed target size still occupies at least one page; */
        /* otherwise shrink to the requested size. */
        const size_t shrink_to = (cls_from_size != 0) ? TlsHeapStatic::PageSize : size;
        return this->central->ShrinkLargeMemory(ptr, shrink_to);
    }

    /* A negative class indicates an invalid pointer. */
    if (cls_from_ptr < 0) {
        return EFAULT;
    }

    /* Small chunk: shrinking succeeds (as a no-op) only when the new size */
    /* fits within the chunk's existing class. */
    return (cls_from_size != 0 && cls_from_size <= cls_from_ptr) ? 0 : EINVAL;
}
|
||||
|
||||
}
|
|
@ -0,0 +1,102 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#pragma once
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
#include "mem_impl_heap_tls_heap_static.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {

    class TlsHeapCentral;

    /* X-macro describing every cache operation as (return type, PascalCase */
    /* name, member/function-pointer name, parameters). Used below to declare */
    /* the function-pointer typedefs, the member pointers, the per-operation */
    /* template implementations, and their explicit instantiations. */
    #define FOREACH_TLS_HEAP_CACHE_FUNC(HANDLER) \
        HANDLER(void *, Allocate, allocate, size_t size); \
        HANDLER(void *, AllocateAligned, allocate_aligned, size_t size, size_t align); \
        HANDLER(errno_t, Free, free, void *ptr); \
        HANDLER(errno_t, FreeWithSize, free_with_size, void *ptr, size_t size); \
        HANDLER(size_t, GetAllocationSize, get_allocation_size, const void *ptr); \
        HANDLER(errno_t, Reallocate, reallocate, void *ptr, size_t size, void **p); \
        HANDLER(errno_t, Shrink, shrink, void *ptr, size_t size);

    /* Per-thread front-end for TlsHeapCentral. Each operation dispatches */
    /* through a stored function pointer, selecting either the caching or the */
    /* non-caching template specialization at runtime. */
    class TlsHeapCache {
        public:
            static constexpr size_t MaxChunkCount = BITSIZEOF(u64);
        public:
            /* Declare OperationFunc pointer types, e.g. AllocateFunc. */
            #define TLS_HEAP_CACHE_DECLARE_TYPEDEF(RETURN, NAME, MEMBER_NAME, ...) \
                using NAME##Func = RETURN (*)(TlsHeapCache *, ## __VA_ARGS__)

            FOREACH_TLS_HEAP_CACHE_FUNC(TLS_HEAP_CACHE_DECLARE_TYPEDEF)

            #undef TLS_HEAP_CACHE_DECLARE_TYPEDEF
        private:
            /* One function pointer per operation (allocate, free, ...). */
            #define TLS_HEAP_CACHE_DECLARE_MEMBER(RETURN, NAME, MEMBER_NAME, ...) \
                NAME##Func MEMBER_NAME;

            FOREACH_TLS_HEAP_CACHE_FUNC(TLS_HEAP_CACHE_DECLARE_MEMBER)

            #undef TLS_HEAP_CACHE_DECLARE_MEMBER

            uintptr_t mangle_val;                                  /* XOR key for cached free-list heads. */
            TlsHeapCentral *central;                               /* Backing central heap. */
            size_t total_heap_size;
            u32 heap_option;
            s32 total_cached_size;                                 /* Bytes currently held across all class lists. */
            s32 largest_class;                                     /* Class with the most cached bytes (eviction victim). */
            void *small_mem_lists[TlsHeapStatic::NumClassInfo];    /* Per-class singly-linked free lists (mangled heads). */
            s32 cached_size[TlsHeapStatic::NumClassInfo];          /* Cached bytes per class. */
            u8 chunk_count[TlsHeapStatic::NumClassInfo];           /* Adaptive batch size per class. */
        public:
            TlsHeapCache(TlsHeapCentral *central, u32 option);
            void Finalize();

            /* XOR-mangles/unmangles a free-list pointer (the operation is its */
            /* own inverse). */
            void *ManglePointer(void *ptr) const {
                return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(ptr) ^ this->mangle_val);
            }

            bool CheckCache() const;
            void ReleaseAllCache();

        public:
            /* TODO: Better handler with type info to macro this? */
            ALWAYS_INLINE void *Allocate(size_t size) { return this->allocate(this, size); }
            ALWAYS_INLINE void *Allocate(size_t size, size_t align) { return this->allocate_aligned(this, size, align); }
            ALWAYS_INLINE errno_t Free(void *ptr) { return this->free(this, ptr); }
            ALWAYS_INLINE errno_t FreeWithSize(void *ptr, size_t size) { return this->free_with_size(this, ptr, size); }
            ALWAYS_INLINE size_t GetAllocationSize(const void *ptr) { return this->get_allocation_size(this, ptr); }
            ALWAYS_INLINE errno_t Reallocate(void *ptr, size_t size, void **p) { return this->reallocate(this, ptr, size, p); }
            ALWAYS_INLINE errno_t Shrink(void *ptr, size_t size) { return this->shrink(this, ptr, size); }
        private:
            /* Declare template<bool Cache> static OperationImpl(...) for each */
            /* operation; the bool selects caching vs. pass-through behavior. */
            #define TLS_HEAP_CACHE_DECLARE_TEMPLATE(RETURN, NAME, MEMBER_NAME, ...) \
                template<bool Cache> static RETURN NAME##Impl(TlsHeapCache *_this, ## __VA_ARGS__ )

            FOREACH_TLS_HEAP_CACHE_FUNC(TLS_HEAP_CACHE_DECLARE_TEMPLATE)

            #undef TLS_HEAP_CACHE_DECLARE_TEMPLATE

            size_t GetAllocationSizeCommonImpl(const void *ptr) const;
            errno_t ShrinkCommonImpl(void *ptr, size_t size) const;
    };

    /* Declare the explicit specializations (defined in the .cpp) so other */
    /* translation units do not instantiate the primary template. */
    #define TLS_HEAP_CACHE_DECLARE_INSTANTIATION(RETURN, NAME, MEMBER_NAME, ...) \
        template<> RETURN TlsHeapCache::NAME##Impl<false>(TlsHeapCache *_this, ##__VA_ARGS__); \
        template<> RETURN TlsHeapCache::NAME##Impl<true>(TlsHeapCache *_this, ##__VA_ARGS__)

    FOREACH_TLS_HEAP_CACHE_FUNC(TLS_HEAP_CACHE_DECLARE_INSTANTIATION)

    #undef FOREACH_TLS_HEAP_CACHE_FUNC


}
|
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,547 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#pragma once
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
#include "mem_impl_heap_tls_heap_static.hpp"
|
||||
#include "mem_impl_heap_tls_heap_cache.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
/* Simple intrusive list. */
|
||||
template<typename T>
|
||||
struct ListHeader {
|
||||
T *list_next;
|
||||
};
|
||||
|
||||
template<typename T>
|
||||
struct ListElement : public ListHeader<T> {
|
||||
T *list_prev;
|
||||
};
|
||||
|
||||
template<typename T>
|
||||
constexpr inline void ListClearLink(ListHeader<T> *l) {
|
||||
l->list_next = nullptr;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline void ListClearLink(ListElement<T> *l) {
|
||||
l->list_next = nullptr;
|
||||
l->list_prev = nullptr;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline T *ListGetNext(const ListHeader<T> *l) {
|
||||
return l->list_next;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline T *ListGetNext(const ListElement<T> *l) {
|
||||
return l->list_next;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline T *ListGetPrev(const ListElement<T> *l) {
|
||||
return l->list_prev;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline void ListInsertAfter(ListHeader<T> *hdr, T *e) {
|
||||
e->list_next = hdr->list_next;
|
||||
e->list_prev = static_cast<T *>(hdr);
|
||||
|
||||
if (hdr->list_next != nullptr) {
|
||||
hdr->list_next->list_prev = e;
|
||||
}
|
||||
hdr->list_next = e;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline void ListRemoveSelf(T *e) {
|
||||
if (e->list_next != nullptr) {
|
||||
e->list_next->list_prev = e->list_prev;
|
||||
}
|
||||
if (e->list_prev != nullptr) {
|
||||
e->list_prev->list_next = e->list_next;
|
||||
}
|
||||
e->list_next = nullptr;
|
||||
e->list_prev = nullptr;
|
||||
}
|
||||
|
||||
struct Span : public ListElement<Span> {
|
||||
struct SmallMemory {
|
||||
SmallMemory *next;
|
||||
};
|
||||
|
||||
enum Status : u8 {
|
||||
Status_NotUsed = 0,
|
||||
Status_InUse = 1,
|
||||
Status_InFreeList = 2,
|
||||
Status_InUseSystem = 3,
|
||||
};
|
||||
|
||||
u16 object_count;
|
||||
u8 page_class;
|
||||
u8 status;
|
||||
s32 id;
|
||||
union {
|
||||
uintptr_t u;
|
||||
void *p;
|
||||
SmallMemory *sm;
|
||||
char *cp;
|
||||
} start;
|
||||
uintptr_t num_pages;
|
||||
union {
|
||||
struct {
|
||||
SmallMemory *objects;
|
||||
u64 is_allocated[8];
|
||||
} small;
|
||||
struct {
|
||||
u8 color[3];
|
||||
char name[0x10];
|
||||
} large;
|
||||
struct {
|
||||
u32 zero;
|
||||
} large_clear;
|
||||
} aux;
|
||||
};
|
||||
|
||||
struct SpanPage : public ListElement<SpanPage> {
|
||||
struct Info {
|
||||
u64 alloc_bitmap;
|
||||
u16 free_count;
|
||||
u8 is_sticky;
|
||||
Span span_of_spanpage;
|
||||
} info;
|
||||
Span spans[(TlsHeapStatic::PageSize - sizeof(Info) - sizeof(ListElement<SpanPage>)) / sizeof(Span)];
|
||||
|
||||
static constexpr size_t MaxSpanCount = sizeof(spans) / sizeof(spans[0]);
|
||||
};
|
||||
static_assert(sizeof(SpanPage) <= TlsHeapStatic::PageSize);
|
||||
|
||||
static constexpr ALWAYS_INLINE bool CanAllocateSpan(const SpanPage *span_page) {
|
||||
return span_page->info.alloc_bitmap != ~(decltype(span_page->info.alloc_bitmap){});
|
||||
}
|
||||
|
||||
struct SpanTable {
|
||||
uintptr_t total_pages;
|
||||
Span **page_to_span;
|
||||
u8 *pageclass_cache;
|
||||
};
|
||||
|
||||
struct TlsHeapMemStats {
|
||||
size_t allocated_size;
|
||||
size_t free_size;
|
||||
size_t system_size;
|
||||
size_t max_allocatable_size;
|
||||
};
|
||||
|
||||
ALWAYS_INLINE Span *GetSpanFromPointer(const SpanTable *table, const void *ptr) {
|
||||
const size_t idx = TlsHeapStatic::GetPageIndex(reinterpret_cast<uintptr_t>(ptr) - reinterpret_cast<uintptr_t>(table));
|
||||
if (idx < table->total_pages) {
|
||||
return table->page_to_span[idx];
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
ALWAYS_INLINE SpanPage *GetSpanPage(Span *span) {
|
||||
return reinterpret_cast<SpanPage *>(TlsHeapStatic::AlignDownPage(reinterpret_cast<uintptr_t>(span)));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Span *GetSpanPageSpan(SpanPage *span_page) {
|
||||
return std::addressof(span_page->info.span_of_spanpage);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Span *GetPrevSpan(const SpanTable *span_table, const Span *span) {
|
||||
return GetSpanFromPointer(span_table, reinterpret_cast<const void *>(span->start.u - 1));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Span *GetNextSpan(const SpanTable *span_table, const Span *span) {
|
||||
return GetSpanFromPointer(span_table, reinterpret_cast<const void *>(span->start.u + span->num_pages * TlsHeapStatic::PageSize));
|
||||
}
|
||||
|
||||
class TlsHeapCentral {
|
||||
private:
|
||||
using FreeListAvailableWord = u64;
|
||||
|
||||
static constexpr size_t FreeListCount = 0x100;
|
||||
static constexpr size_t NumFreeListBitmaps = FreeListCount / BITSIZEOF(FreeListAvailableWord);
|
||||
|
||||
static constexpr ALWAYS_INLINE size_t FreeListAvailableIndex(size_t which) {
|
||||
return which / BITSIZEOF(FreeListAvailableWord);
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE size_t FreeListAvailableBit(size_t which) {
|
||||
return which % BITSIZEOF(FreeListAvailableWord);
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE FreeListAvailableWord FreeListAvailableMask(size_t which) {
|
||||
return static_cast<FreeListAvailableWord>(1) << FreeListAvailableBit(which);
|
||||
}
|
||||
|
||||
static_assert(NumFreeListBitmaps * BITSIZEOF(FreeListAvailableWord) == FreeListCount);
|
||||
private:
|
||||
SpanTable span_table;
|
||||
u8 *physical_page_flags;
|
||||
s32 num_threads;
|
||||
s32 static_thread_quota;
|
||||
s32 dynamic_thread_quota;
|
||||
bool use_virtual_memory;
|
||||
os::RecursiveMutex lock;
|
||||
ListHeader<SpanPage> spanpage_list;
|
||||
ListHeader<SpanPage> full_spanpage_list;
|
||||
ListHeader<Span> freelists[FreeListCount];
|
||||
FreeListAvailableWord freelists_bitmap[NumFreeListBitmaps];
|
||||
ListHeader<Span> smallmem_lists[TlsHeapStatic::NumClassInfo];
|
||||
public:
|
||||
TlsHeapCentral() {
|
||||
this->span_table.total_pages = 0;
|
||||
}
|
||||
|
||||
errno_t Initialize(void *start, size_t size, bool use_virtual_memory);
|
||||
bool IsClean();
|
||||
|
||||
errno_t ReallocateLargeMemory(void *ptr, size_t size, void **p);
|
||||
errno_t ShrinkLargeMemory(void *ptr, size_t size);
|
||||
|
||||
void CalculateHeapHash(HeapHash *out);
|
||||
|
||||
errno_t AddThreadCache(TlsHeapCache *cache) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
/* Add thread and recalculate. */
|
||||
this->num_threads++;
|
||||
this->dynamic_thread_quota = this->GetTotalHeapSize() / (2 * this->num_threads);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
errno_t RemoveThreadCache(TlsHeapCache *cache) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
/* Remove thread and recalculate. */
|
||||
this->num_threads--;
|
||||
this->dynamic_thread_quota = this->GetTotalHeapSize() / (2 * this->num_threads);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
void *CacheLargeMemory(size_t size) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
const size_t num_pages = util::AlignUp(size, TlsHeapStatic::PageSize) / TlsHeapStatic::PageSize;
|
||||
if (Span *span = this->AllocatePagesImpl(num_pages); span != nullptr) {
|
||||
return span->start.p;
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void *CacheLargeMemoryWithBigAlign(size_t size, size_t align) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
const size_t num_pages = util::AlignUp(size, TlsHeapStatic::PageSize) / TlsHeapStatic::PageSize;
|
||||
|
||||
Span *span = nullptr;
|
||||
if (align > TlsHeapStatic::PageSize) {
|
||||
span = this->AllocatePagesWithBigAlignImpl(num_pages, align);
|
||||
} else {
|
||||
span = this->AllocatePagesImpl(num_pages);
|
||||
}
|
||||
|
||||
if (span != nullptr) {
|
||||
return span->start.p;
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void *CacheSmallMemory(size_t cls, size_t align = 0) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
return this->CacheSmallMemoryImpl(cls, align, false);
|
||||
}
|
||||
|
||||
void *CacheSmallMemoryForSystem(size_t cls) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
return this->CacheSmallMemoryImpl(cls, 0, true);
|
||||
}
|
||||
|
||||
size_t CacheSmallMemoryList(TlsHeapCache *cache, size_t *cls, size_t count, void **p, size_t align = 0) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
s32 cpu_id = 0;
|
||||
if (*cls < 8) {
|
||||
getcpu(std::addressof(cpu_id));
|
||||
}
|
||||
|
||||
return this->CacheSmallMemoryListImpl(cache, cls, count, p, cpu_id, 0);
|
||||
}
|
||||
|
||||
bool CheckCachedSize(s32 size) const {
|
||||
return size < this->dynamic_thread_quota && size < this->static_thread_quota;
|
||||
}
|
||||
|
||||
void Dump(DumpMode dump_mode, int fd, bool json) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
return this->DumpImpl(dump_mode, fd, json);
|
||||
}
|
||||
|
||||
size_t GetAllocationSize(const void *ptr) {
|
||||
if (TlsHeapStatic::IsPageAligned(ptr)) {
|
||||
Span *span = nullptr;
|
||||
{
|
||||
std::scoped_lock lk(this->lock);
|
||||
span = GetSpanFromPointer(std::addressof(this->span_table), ptr);
|
||||
}
|
||||
if (span != nullptr) {
|
||||
return span->num_pages * TlsHeapStatic::PageSize;
|
||||
} else {
|
||||
AMS_ASSERT(span != nullptr);
|
||||
return 0;
|
||||
}
|
||||
} else {
|
||||
/* TODO: Handle error? */
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
size_t GetClassFromPointer(const void *ptr) {
|
||||
std::atomic_thread_fence(std::memory_order_acquire);
|
||||
|
||||
const size_t idx = (reinterpret_cast<uintptr_t>(ptr) - reinterpret_cast<uintptr_t>(this)) / TlsHeapStatic::PageSize;
|
||||
if (idx < this->span_table.total_pages) {
|
||||
if (ptr != nullptr) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr);
|
||||
if (span != nullptr) {
|
||||
AMS_ASSERT(span->page_class == this->span_table.pageclass_cache[idx]);
|
||||
} else {
|
||||
AMS_ASSERT(span != nullptr);
|
||||
}
|
||||
}
|
||||
return this->span_table.pageclass_cache[idx];
|
||||
} else {
|
||||
/* TODO: Handle error? */
|
||||
return 0xFFFFFFFF;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t GetColor(const void *ptr, int *out) {
|
||||
if (out == nullptr) {
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
std::scoped_lock lk(this->lock);
|
||||
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
|
||||
*out = (span->aux.large.color[0] << 0) | (span->aux.large.color[1] << 0) | (span->aux.large.color[2] << 16);
|
||||
return 0;
|
||||
} else {
|
||||
return EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t SetColor(const void *ptr, int color) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
|
||||
span->aux.large.color[0] = (color >> 0) & 0xFF;
|
||||
span->aux.large.color[1] = (color >> 8) & 0xFF;
|
||||
span->aux.large.color[2] = (color >> 16) & 0xFF;
|
||||
return 0;
|
||||
} else {
|
||||
return EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t GetMappedMemStats(size_t *out_free_size, size_t *out_max_allocatable_size) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
return this->GetMappedMemStatsImpl(out_free_size, out_max_allocatable_size);
|
||||
}
|
||||
|
||||
errno_t GetMemStats(TlsHeapMemStats *out) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
return this->GetMemStatsImpl(out);
|
||||
}
|
||||
|
||||
errno_t GetName(const void *ptr, char *dst, size_t dst_size) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
|
||||
strlcpy(dst, span->aux.large.name, dst_size);
|
||||
return 0;
|
||||
} else {
|
||||
return EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t SetName(const void *ptr, const char *name) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
|
||||
strlcpy(span->aux.large.name, name, sizeof(span->aux.large.name));
|
||||
return 0;
|
||||
} else {
|
||||
return EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
size_t GetTotalHeapSize() const {
|
||||
return this->span_table.total_pages * TlsHeapStatic::PageSize;
|
||||
}
|
||||
|
||||
errno_t UncacheLargeMemory(void *ptr) {
|
||||
if (TlsHeapStatic::IsPageAligned(ptr)) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr) {
|
||||
this->FreePagesImpl(span);
|
||||
return 0;
|
||||
} else {
|
||||
return EFAULT;
|
||||
}
|
||||
} else {
|
||||
return EFAULT;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t UncacheSmallMemory(void *ptr) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
return this->UncacheSmallMemoryImpl(ptr);
|
||||
}
|
||||
|
||||
errno_t UncacheSmallMemoryList(TlsHeapCache *cache, void *ptr) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
while (true) {
|
||||
if (ptr == nullptr) {
|
||||
return 0;
|
||||
}
|
||||
ptr = cache->ManglePointer(ptr);
|
||||
void *next = *reinterpret_cast<void **>(ptr);
|
||||
if (auto err = this->UncacheSmallMemoryImpl(ptr); err != 0) {
|
||||
return err;
|
||||
}
|
||||
ptr = next;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t WalkAllocatedPointers(HeapWalkCallback callback, void *user_data) {
|
||||
/* Explicitly handle locking, as we will release the lock during callback. */
|
||||
this->lock.lock();
|
||||
ON_SCOPE_EXIT { this->lock.unlock(); };
|
||||
|
||||
return this->WalkAllocatedPointersImpl(callback, user_data);
|
||||
}
|
||||
private:
|
||||
SpanPage *AllocateSpanPage();
|
||||
Span *AllocateSpanFromSpanPage(SpanPage *sp);
|
||||
|
||||
Span *SplitSpan(Span *span, size_t num_pages, Span *new_span);
|
||||
void MergeFreeSpans(Span *span, Span *span_to_merge, uintptr_t start);
|
||||
|
||||
bool DestroySpanPageIfEmpty(SpanPage *sp, bool full);
|
||||
Span *GetFirstSpan() const;
|
||||
Span *MakeFreeSpan(size_t num_pages);
|
||||
Span *SearchFreeSpan(size_t num_pages) const;
|
||||
|
||||
void FreeSpanToSpanPage(Span *span, SpanPage *sp);
|
||||
void FreeSpanToSpanPage(Span *span);
|
||||
|
||||
void MergeIntoFreeList(Span *&span);
|
||||
|
||||
errno_t AllocatePhysical(void *start, size_t size);
|
||||
errno_t FreePhysical(void *start, size_t size);
|
||||
private:
|
||||
Span *AllocatePagesImpl(size_t num_pages);
|
||||
Span *AllocatePagesWithBigAlignImpl(size_t num_pages, size_t align);
|
||||
void FreePagesImpl(Span *span);
|
||||
|
||||
void *CacheSmallMemoryImpl(size_t cls, size_t align, bool for_system);
|
||||
errno_t UncacheSmallMemoryImpl(void *ptr);
|
||||
|
||||
size_t CacheSmallMemoryListImpl(TlsHeapCache *cache, size_t *cls, size_t count, void **p, s32 cpu_id, size_t align);
|
||||
|
||||
errno_t WalkAllocatedPointersImpl(HeapWalkCallback callback, void *user_data);
|
||||
|
||||
errno_t GetMappedMemStatsImpl(size_t *out_free_size, size_t *out_max_allocatable_size);
|
||||
errno_t GetMemStatsImpl(TlsHeapMemStats *out);
|
||||
|
||||
void DumpImpl(DumpMode dump_mode, int fd, bool json);
|
||||
private:
|
||||
size_t FreeListFirstNonEmpty(size_t start) const {
|
||||
if (start < FreeListCount) {
|
||||
for (size_t i = FreeListAvailableIndex(start); i < util::size(this->freelists_bitmap); i++) {
|
||||
const FreeListAvailableWord masked = this->freelists_bitmap[i] & ~(FreeListAvailableMask(start) - 1);
|
||||
if (masked) {
|
||||
const size_t b = __builtin_ctzll(masked);
|
||||
const size_t res = i * BITSIZEOF(FreeListAvailableWord) + b;
|
||||
AMS_ASSERT(res < FreeListCount);
|
||||
return res;
|
||||
}
|
||||
start = (i + 1) * BITSIZEOF(FreeListAvailableWord);
|
||||
}
|
||||
}
|
||||
return FreeListCount;
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void AddToFreeBlockList(Span *span) {
|
||||
AMS_ASSERT(GetSpanPageSpan(GetSpanPage(span)) != span);
|
||||
AMS_ASSERT(span->status == Span::Status_InFreeList);
|
||||
const size_t which = std::min(span->num_pages, FreeListCount) - 1;
|
||||
ListInsertAfter(std::addressof(this->freelists[which]), span);
|
||||
this->freelists_bitmap[FreeListAvailableIndex(which)] |= FreeListAvailableMask(which);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void RemoveFromFreeBlockList(Span *span) {
|
||||
const size_t which = std::min(span->num_pages, FreeListCount) - 1;
|
||||
ListRemoveSelf(span);
|
||||
if (!ListGetNext(std::addressof(this->freelists[which]))) {
|
||||
this->freelists_bitmap[FreeListAvailableIndex(which)] &= ~FreeListAvailableMask(which);
|
||||
}
|
||||
}
|
||||
|
||||
Span *AllocateSpanStruct() {
|
||||
SpanPage *sp = ListGetNext(std::addressof(this->spanpage_list));
|
||||
while (sp && (sp->info.is_sticky || !CanAllocateSpan(sp))) {
|
||||
sp = ListGetNext(sp);
|
||||
}
|
||||
|
||||
if (sp == nullptr) {
|
||||
sp = this->AllocateSpanPage();
|
||||
}
|
||||
|
||||
if (sp != nullptr) {
|
||||
return this->AllocateSpanFromSpanPage(sp);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
s32 CallWalkCallback(HeapWalkCallback callback, void *ptr, size_t size, void *user_data) {
|
||||
this->lock.unlock();
|
||||
int res = callback(ptr, size, user_data);
|
||||
this->lock.lock();
|
||||
if (res) {
|
||||
return 0;
|
||||
} else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,210 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#pragma once
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
/* Static helper class for the TLS heap: compile-time size-class tables and
 * page-alignment utilities. All members are static/constexpr; the class is
 * never instantiated. */
class TlsHeapStatic {
    public:
        /* Descriptor for one allocation size class. */
        struct ClassInfo {
            u16 num_pages;  /* Page count associated with this class (presumably pages per span; TODO confirm against span allocation code). */
            u16 chunk_size; /* Size in bytes of each chunk of this class. */
        };

        /* Number of entries in ClassInfos (class 0 is the "no class" sentinel). */
        static constexpr size_t NumClassInfo = 57;

        /* Sizes strictly below this have a size class; larger requests fall back to page allocation. */
        static constexpr size_t MaxSizeWithClass = 0xC00;
        /* All chunk sizes are multiples of this granularity. */
        static constexpr size_t ChunkGranularity = 0x10;
        static constexpr size_t PageSize = 4_KB;
        static constexpr size_t PhysicalPageSize = 256_KB;
    public:
        /* Size-class table; chunk sizes ascend from 0x10 to 0xAA0.
         * Entry 0 is the sentinel with chunk_size 0. */
        static constexpr inline std::array<ClassInfo, NumClassInfo> ClassInfos = {
            ClassInfo{ .num_pages = 0, .chunk_size = 0x000, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x010, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x020, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x030, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x040, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x050, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x060, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0x070, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x080, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x090, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0x0A0, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x0B0, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x0C0, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x0D0, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x0E0, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x0F0, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x100, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x110, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x120, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x130, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x140, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x150, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0x160, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x170, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x180, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x190, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x1A0, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x1B0, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x1C0, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x1D0, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0x1E0, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x200, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x210, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0x220, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x240, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x260, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0x270, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x280, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x2A0, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x2D0, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0x2E0, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x300, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x330, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x360, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0x380, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x3B0, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x400, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x450, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0x490, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x4C0, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x550, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x600, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0x660, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x6D0, },
            ClassInfo{ .num_pages = 1, .chunk_size = 0x800, },
            ClassInfo{ .num_pages = 3, .chunk_size = 0x990, },
            ClassInfo{ .num_pages = 2, .chunk_size = 0xAA0, },
        };

        /* Compile-time lookup: (size / ChunkGranularity) -> smallest class whose
         * chunk_size can hold that size. Index 0 (size 0) maps to class 1, the
         * smallest non-zero chunk (0x10). */
        static constexpr inline std::array<size_t, MaxSizeWithClass / ChunkGranularity> SizeToClass = [] {
            std::array<size_t, MaxSizeWithClass / ChunkGranularity> arr = {};
            arr[0] = 1;
            for (size_t i = 1; i < arr.size(); i++) {
                const size_t cur_size = i * ChunkGranularity;
                /* Linear scan for the first class big enough; fine at compile time. */
                for (size_t j = 0; j < ClassInfos.size(); j++) {
                    if (ClassInfos[j].chunk_size >= cur_size) {
                        arr[i] = j;
                        break;
                    }
                }
            }
            return arr;
        }();
    public:
        /* Returns the size class for a request of the given size, or 0 when the
         * request is too large for any class (caller must fall back to pages). */
        static constexpr ALWAYS_INLINE size_t GetClassFromSize(size_t size) {
            AMS_ASSERT(size <= MaxSize);
            const size_t idx = util::AlignUp(size, ChunkGranularity) / ChunkGranularity;
            if (idx < MaxSizeWithClass / ChunkGranularity) {
                return SizeToClass[idx];
            } else {
                return 0;
            }
        }

        /* Returns the actual allocation size for (size, align): the smallest class
         * chunk size that both holds `size` and is a multiple of `align`, falling
         * back to PageSize when no such class exists. Sizes with no class are
         * returned unchanged. */
        static constexpr ALWAYS_INLINE size_t GetRealSizeFromSizeAndAlignment(size_t size, size_t align) {
            AMS_ASSERT(size <= MaxSize);
            const size_t idx = util::AlignUp(size, ChunkGranularity) / ChunkGranularity;
            if (size == 0 || idx >= MaxSizeWithClass / ChunkGranularity) {
                return size;
            }
            const auto cls = SizeToClass[idx];
            if (!cls) {
                return PageSize;
            }
            AMS_ASSERT(align != 0);
            const size_t mask = align - 1;
            /* Scan upward from the minimal class for one whose chunk size is aligned. */
            for (auto i = cls; i < ClassInfos.size(); i++) {
                if ((ClassInfos[i].chunk_size & mask) == 0) {
                    return ClassInfos[i].chunk_size;
                }
            }
            return PageSize;
        }

        static constexpr ALWAYS_INLINE bool IsPageAligned(uintptr_t ptr) {
            return util::IsAligned(ptr, PageSize);
        }

        static ALWAYS_INLINE bool IsPageAligned(const void *ptr) {
            return IsPageAligned(reinterpret_cast<uintptr_t>(ptr));
        }

        /* Index of the 4 KB page containing the address. */
        static constexpr ALWAYS_INLINE size_t GetPageIndex(uintptr_t ptr) {
            return ptr / PageSize;
        }

        /* Index of the 256 KB physical page containing the address. */
        static constexpr ALWAYS_INLINE size_t GetPhysicalPageIndex(uintptr_t ptr) {
            return ptr / PhysicalPageSize;
        }

        static constexpr ALWAYS_INLINE uintptr_t AlignUpPage(uintptr_t ptr) {
            return util::AlignUp(ptr, PageSize);
        }

        /* Pointer overload; requires a trivially-copyable T whose alignment divides PageSize. */
        template<typename T>
        static ALWAYS_INLINE T *AlignUpPage(T *ptr) {
            static_assert(std::is_pod<T>::value);
            static_assert(util::IsAligned(PageSize, alignof(T)));
            return reinterpret_cast<T *>(AlignUpPage(reinterpret_cast<uintptr_t>(ptr)));
        }

        static constexpr ALWAYS_INLINE uintptr_t AlignDownPage(uintptr_t ptr) {
            return util::AlignDown(ptr, PageSize);
        }

        template<typename T>
        static ALWAYS_INLINE T *AlignDownPage(T *ptr) {
            static_assert(std::is_pod<T>::value);
            static_assert(util::IsAligned(PageSize, alignof(T)));
            return reinterpret_cast<T *>(AlignDownPage(reinterpret_cast<uintptr_t>(ptr)));
        }

        static constexpr ALWAYS_INLINE uintptr_t AlignUpPhysicalPage(uintptr_t ptr) {
            return util::AlignUp(ptr, PhysicalPageSize);
        }

        template<typename T>
        static ALWAYS_INLINE T *AlignUpPhysicalPage(T *ptr) {
            static_assert(std::is_pod<T>::value);
            static_assert(util::IsAligned(PhysicalPageSize, alignof(T)));
            return reinterpret_cast<T *>(AlignUpPhysicalPage(reinterpret_cast<uintptr_t>(ptr)));
        }

        static constexpr ALWAYS_INLINE uintptr_t AlignDownPhysicalPage(uintptr_t ptr) {
            return util::AlignDown(ptr, PhysicalPageSize);
        }

        template<typename T>
        static ALWAYS_INLINE T *AlignDownPhysicalPage(T *ptr) {
            static_assert(std::is_pod<T>::value);
            static_assert(util::IsAligned(PhysicalPageSize, alignof(T)));
            return reinterpret_cast<T *>(AlignDownPhysicalPage(reinterpret_cast<uintptr_t>(ptr)));
        }

        static constexpr ALWAYS_INLINE size_t GetChunkSize(size_t cls) {
            return ClassInfos[cls].chunk_size;
        }

        static constexpr ALWAYS_INLINE size_t GetNumPages(size_t cls) {
            return ClassInfos[cls].num_pages;
        }
};
|
||||
|
||||
}
|
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#pragma once
|
||||
#include <stratosphere.hpp>
|
||||
|
||||
namespace ams::mem::impl {
|
||||
|
||||
/* Memory protection flags (bitwise-combinable), consumed by physical_alloc. */
enum Prot {
    Prot_none = (0 << 0),
    Prot_read = (1 << 0),
    Prot_write = (1 << 1),
    Prot_exec = (1 << 2),
};

/* Reserve address space of the given size; on success writes the base address to *ptr. */
errno_t virtual_alloc(void **ptr, size_t size);
/* Release address space previously obtained from virtual_alloc. */
errno_t virtual_free(void *ptr, size_t size);
/* Back [ptr, ptr+size) with accessible memory using the given protection. */
errno_t physical_alloc(void *ptr, size_t size, Prot prot);
/* Remove backing/access from [ptr, ptr+size). */
errno_t physical_free(void *ptr, size_t size);

/* BSD-style bounded string copy: copies up to size-1 chars and NUL-terminates;
 * returns strlen(src). */
size_t strlcpy(char *dst, const char *src, size_t size);

/* Fill dst with dst_size cryptographically random bytes. */
errno_t gen_random(void *dst, size_t dst_size);

/* Current time in 100ns units (epoch bias applied by the implementation). */
errno_t epochtime(s64 *dst);

/* Index of the CPU core the caller is currently running on. */
errno_t getcpu(s32 *out);
|
||||
|
||||
}
|
|
@ -0,0 +1,161 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_platform.hpp"
|
||||
|
||||
namespace ams::mem::impl {
|
||||
|
||||
namespace {
|
||||
|
||||
os::Mutex g_virt_mem_enabled_lock;
|
||||
bool g_virt_mem_enabled_detected;
|
||||
bool g_virt_mem_enabled;
|
||||
|
||||
void EnsureVirtualAddressMemoryDetected() {
|
||||
std::scoped_lock lk(g_virt_mem_enabled_lock);
|
||||
if (AMS_LIKELY(g_virt_mem_enabled_detected)) {
|
||||
return;
|
||||
}
|
||||
g_virt_mem_enabled = os::IsVirtualAddressMemoryEnabled();
|
||||
}
|
||||
|
||||
ALWAYS_INLINE bool IsVirtualAddressMemoryEnabled() {
|
||||
EnsureVirtualAddressMemoryDetected();
|
||||
return g_virt_mem_enabled;
|
||||
}
|
||||
|
||||
ALWAYS_INLINE errno_t ConvertResult(Result result) {
|
||||
/* TODO: Actually implement this in a meaningful way. */
|
||||
if (R_FAILED(result)) {
|
||||
return EINVAL;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
ALWAYS_INLINE os::MemoryPermission ConvertToOsPermission(Prot prot) {
|
||||
static_assert(static_cast<int>(Prot_read) == static_cast<int>(os::MemoryPermission_ReadOnly));
|
||||
static_assert(static_cast<int>(Prot_write) == static_cast<int>(os::MemoryPermission_WriteOnly));
|
||||
return static_cast<os::MemoryPermission>(prot & os::MemoryPermission_ReadWrite);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/* Reserve `size` bytes of address space.
 * On success, writes the base address to *ptr and returns 0; returns EINVAL
 * if size exceeds the implementation maximum, or a converted error code if
 * the os allocation fails. */
errno_t virtual_alloc(void **ptr, size_t size) {
    /* Ensure size isn't too large. */
    if (size > mem::impl::MaxSize) {
        return EINVAL;
    }

    /* Allocate virtual memory. */
    uintptr_t addr = 0;
    if (IsVirtualAddressMemoryEnabled()) {
        /* TODO: Support virtual address memory. */
        AMS_ABORT("Virtual address memory not supported yet");
    } else {
        if (auto err = ConvertResult(os::AllocateMemoryBlock(std::addressof(addr), util::AlignUp(size, os::MemoryBlockUnitSize))); err != 0) {
            return err;
        }
        /* Start with no permissions; physical_alloc grants access later. */
        os::SetMemoryPermission(addr, size, os::MemoryPermission_None);
    }

    /* Fix: report the allocated address to the caller; previously *ptr was
     * never written, leaving the out-parameter indeterminate. */
    *ptr = reinterpret_cast<void *>(addr);
    return 0;
}
|
||||
|
||||
/* Release address space previously reserved with virtual_alloc.
 * Returns EINVAL for a zero-sized request, 0 otherwise. */
errno_t virtual_free(void *ptr, size_t size) {
    /* A zero-size free is invalid. */
    if (size == 0) {
        return EINVAL;
    }

    if (IsVirtualAddressMemoryEnabled()) {
        /* TODO: Support virtual address memory. */
        AMS_ABORT("Virtual address memory not supported yet");
    }

    const uintptr_t address = reinterpret_cast<uintptr_t>(ptr);
    os::FreeMemoryBlock(address, util::AlignUp(size, os::MemoryBlockUnitSize));
    return 0;
}
|
||||
|
||||
/* Make [ptr, ptr+size) accessible with the given protection, expanded to page
 * granularity. Empty regions are a no-op. Always returns 0 (aborts when
 * virtual address memory is enabled, which is not yet supported). */
errno_t physical_alloc(void *ptr, size_t size, Prot prot) {
    /* Expand the request to whole pages. */
    const uintptr_t region_start = util::AlignDown(reinterpret_cast<uintptr_t>(ptr), os::MemoryPageSize);
    const uintptr_t region_end   = util::AlignUp(reinterpret_cast<uintptr_t>(ptr) + size, os::MemoryPageSize);

    /* An empty region requires no work. */
    if (region_end <= region_start) {
        return 0;
    }

    if (IsVirtualAddressMemoryEnabled()) {
        /* TODO: Support virtual address memory. */
        AMS_ABORT("Virtual address memory not supported yet");
    } else {
        /* Grant access by applying the requested permissions. */
        os::SetMemoryPermission(region_start, region_end - region_start, ConvertToOsPermission(prot));
    }

    return 0;
}
|
||||
|
||||
/* Revoke access to [ptr, ptr+size), expanded to page granularity. Empty
 * regions are a no-op. Always returns 0 (aborts when virtual address memory
 * is enabled, which is not yet supported). */
errno_t physical_free(void *ptr, size_t size) {
    /* Expand the request to whole pages. */
    const uintptr_t region_start = util::AlignDown(reinterpret_cast<uintptr_t>(ptr), os::MemoryPageSize);
    const uintptr_t region_end   = util::AlignUp(reinterpret_cast<uintptr_t>(ptr) + size, os::MemoryPageSize);

    /* An empty region requires no work. */
    if (region_end <= region_start) {
        return 0;
    }

    if (IsVirtualAddressMemoryEnabled()) {
        /* TODO: Support virtual address memory. */
        AMS_ABORT("Virtual address memory not supported yet");
    } else {
        /* Drop all permissions on the region. */
        os::SetMemoryPermission(region_start, region_end - region_start, os::MemoryPermission_None);
    }

    return 0;
}
|
||||
|
||||
/* BSD-style bounded copy: copies at most size-1 characters of src into dst
 * and NUL-terminates (when size > 0). Returns strlen(src), so a return value
 * >= size indicates truncation. */
size_t strlcpy(char *dst, const char *src, size_t size) {
    const size_t src_len = std::strlen(src);
    if (size != 0) {
        /* Copy the whole string if it fits, else as much as the buffer allows. */
        const size_t copy_len = (src_len < size) ? src_len : size - 1;
        std::memcpy(dst, src, copy_len);
        dst[copy_len] = 0;
    }
    return src_len;
}
|
||||
|
||||
/* Fill dst with dst_size random bytes from the os random source.
 * Always returns 0 (os::GenerateRandomBytes reports no error here). */
errno_t gen_random(void *dst, size_t dst_size) {
    os::GenerateRandomBytes(dst, dst_size);
    return 0;
}
|
||||
|
||||
/* Write the current time to *dst in 100ns units, derived from the system tick.
 * The added constant biases the value to an absolute epoch — presumably a
 * Windows-FILETIME-style epoch; TODO confirm what epoch 0x8A09F909AE60000
 * corresponds to. Always returns 0. */
errno_t epochtime(s64 *dst) {
    /* TODO: What is this calc? */
    auto ts = os::ConvertToTimeSpan(os::GetSystemTick());
    *dst = (ts.GetNanoSeconds() / INT64_C(100)) + INT64_C(0x8A09F909AE60000);
    return 0;
}
|
||||
|
||||
/* Write the index of the core the caller is currently running on to *out.
 * Always returns 0. */
errno_t getcpu(s32 *out) {
    *out = os::GetCurrentCoreNumber();
    return 0;
}
|
||||
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue