kern: s/MemoryBlock/MemoryRegion/g

Michael Scire 2020-02-05 14:16:56 -08:00
parent 5f857cb079
commit 323858cf96
5 changed files with 217 additions and 217 deletions


@@ -49,9 +49,9 @@ namespace ams::kern::init {
 void MapStackForCore(KInitialPageTable &page_table, KMemoryRegionType type, u32 core_id) {
 constexpr size_t StackSize = PageSize;
 constexpr size_t StackAlign = PageSize;
-const KVirtualAddress stack_start_virt = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegionWithGuard(StackSize, StackAlign, KMemoryRegionType_KernelMisc, PageSize);
+const KVirtualAddress stack_start_virt = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard(StackSize, StackAlign, KMemoryRegionType_KernelMisc, PageSize);
 const KPhysicalAddress stack_start_phys = g_initial_page_allocator.Allocate();
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(stack_start_virt), StackSize, type, core_id));
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(stack_start_virt), StackSize, type, core_id));
 page_table.Map(stack_start_virt, StackSize, stack_start_phys, KernelRwDataAttribute, g_initial_page_allocator);
 }
@@ -98,11 +98,11 @@ namespace ams::kern::init {
 /* Initialize the slab allocator counts. */
 InitializeSlabResourceCounts();
-/* Insert the root block for the virtual memory tree, from which all other blocks will derive. */
-KMemoryLayout::GetVirtualMemoryBlockTree().insert(*KMemoryLayout::GetMemoryBlockAllocator().Create(KernelVirtualAddressSpaceBase, KernelVirtualAddressSpaceSize, 0, 0));
+/* Insert the root region for the virtual memory tree, from which all other regions will derive. */
+KMemoryLayout::GetVirtualMemoryRegionTree().insert(*KMemoryLayout::GetMemoryRegionAllocator().Create(KernelVirtualAddressSpaceBase, KernelVirtualAddressSpaceSize, 0, 0));
-/* Insert the root block for the physical memory tree, from which all other blocks will derive. */
-KMemoryLayout::GetPhysicalMemoryBlockTree().insert(*KMemoryLayout::GetMemoryBlockAllocator().Create(KernelPhysicalAddressSpaceBase, KernelPhysicalAddressSpaceSize, 0, 0));
+/* Insert the root region for the physical memory tree, from which all other regions will derive. */
+KMemoryLayout::GetPhysicalMemoryRegionTree().insert(*KMemoryLayout::GetMemoryRegionAllocator().Create(KernelPhysicalAddressSpaceBase, KernelPhysicalAddressSpaceSize, 0, 0));
 /* Save start and end for ease of use. */
 const uintptr_t code_start_virt_addr = reinterpret_cast<uintptr_t>(_start);
@@ -116,26 +116,26 @@ namespace ams::kern::init {
 if (!(kernel_region_start + KernelRegionSize - 1 <= KernelVirtualAddressSpaceLast)) {
 kernel_region_size = KernelVirtualAddressSpaceEnd - GetInteger(kernel_region_start);
 }
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(kernel_region_start), kernel_region_size, KMemoryRegionType_Kernel));
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(kernel_region_start), kernel_region_size, KMemoryRegionType_Kernel));
 /* Setup the code region. */
 constexpr size_t CodeRegionAlign = PageSize;
 const KVirtualAddress code_region_start = util::AlignDown(code_start_virt_addr, CodeRegionAlign);
 const KVirtualAddress code_region_end = util::AlignUp(code_end_virt_addr, CodeRegionAlign);
 const size_t code_region_size = GetInteger(code_region_end) - GetInteger(code_region_start);
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(code_region_start), code_region_size, KMemoryRegionType_KernelCode));
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(code_region_start), code_region_size, KMemoryRegionType_KernelCode));
 /* Setup the misc region. */
 constexpr size_t MiscRegionSize = 32_MB;
 constexpr size_t MiscRegionAlign = KernelAslrAlignment;
-const KVirtualAddress misc_region_start = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegion(MiscRegionSize, MiscRegionAlign, KMemoryRegionType_Kernel);
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(misc_region_start), MiscRegionSize, KMemoryRegionType_KernelMisc));
+const KVirtualAddress misc_region_start = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegion(MiscRegionSize, MiscRegionAlign, KMemoryRegionType_Kernel);
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(misc_region_start), MiscRegionSize, KMemoryRegionType_KernelMisc));
 /* Setup the stack region. */
 constexpr size_t StackRegionSize = 14_MB;
 constexpr size_t StackRegionAlign = KernelAslrAlignment;
-const KVirtualAddress stack_region_start = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegion(StackRegionSize, StackRegionAlign, KMemoryRegionType_Kernel);
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(stack_region_start), StackRegionSize, KMemoryRegionType_KernelStack));
+const KVirtualAddress stack_region_start = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegion(StackRegionSize, StackRegionAlign, KMemoryRegionType_Kernel);
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(stack_region_start), StackRegionSize, KMemoryRegionType_KernelStack));
 /* Decide if Kernel should have enlarged resource region (slab region + page table heap region). */
 const bool use_extra_resources = KSystemControl::Init::ShouldIncreaseThreadResourceLimit();
@@ -152,66 +152,66 @@ namespace ams::kern::init {
 const KPhysicalAddress slab_end_phys_addr = slab_start_phys_addr + slab_region_size;
 constexpr size_t SlabRegionAlign = KernelAslrAlignment;
 const size_t slab_region_needed_size = util::AlignUp(GetInteger(code_end_phys_addr) + slab_region_size, SlabRegionAlign) - util::AlignDown(GetInteger(code_end_phys_addr), SlabRegionAlign);
-const KVirtualAddress slab_region_start = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegion(slab_region_needed_size, SlabRegionAlign, KMemoryRegionType_Kernel) + (GetInteger(code_end_phys_addr) % SlabRegionAlign);
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(slab_region_start), slab_region_size, KMemoryRegionType_KernelSlab));
+const KVirtualAddress slab_region_start = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegion(slab_region_needed_size, SlabRegionAlign, KMemoryRegionType_Kernel) + (GetInteger(code_end_phys_addr) % SlabRegionAlign);
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(slab_region_start), slab_region_size, KMemoryRegionType_KernelSlab));
-/* Set the slab region's pair block. */
-KMemoryLayout::GetVirtualMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_KernelSlab)->SetPairAddress(GetInteger(slab_start_phys_addr));
+/* Set the slab region's pair region. */
+KMemoryLayout::GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelSlab)->SetPairAddress(GetInteger(slab_start_phys_addr));
 /* Setup the temp region. */
 constexpr size_t TempRegionSize = 128_MB;
 constexpr size_t TempRegionAlign = KernelAslrAlignment;
-const KVirtualAddress temp_region_start = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegion(TempRegionSize, TempRegionAlign, KMemoryRegionType_Kernel);
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(temp_region_start), TempRegionSize, KMemoryRegionType_KernelTemp));
+const KVirtualAddress temp_region_start = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegion(TempRegionSize, TempRegionAlign, KMemoryRegionType_Kernel);
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(temp_region_start), TempRegionSize, KMemoryRegionType_KernelTemp));
 /* Setup the Misc Unknown Debug region, if it's not zero. */
 if (misc_unk_debug_phys_addr) {
 constexpr size_t MiscUnknownDebugRegionSize = PageSize;
 constexpr size_t MiscUnknownDebugRegionAlign = PageSize;
-const KVirtualAddress misc_unk_debug_virt_addr = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegionWithGuard(MiscUnknownDebugRegionSize, MiscUnknownDebugRegionAlign, KMemoryRegionType_KernelMisc, PageSize);
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(misc_unk_debug_virt_addr), MiscUnknownDebugRegionSize, KMemoryRegionType_KernelMiscUnknownDebug));
+const KVirtualAddress misc_unk_debug_virt_addr = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard(MiscUnknownDebugRegionSize, MiscUnknownDebugRegionAlign, KMemoryRegionType_KernelMisc, PageSize);
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(misc_unk_debug_virt_addr), MiscUnknownDebugRegionSize, KMemoryRegionType_KernelMiscUnknownDebug));
 ttbr1_table.Map(misc_unk_debug_virt_addr, MiscUnknownDebugRegionSize, misc_unk_debug_phys_addr, KernelRoDataAttribute, g_initial_page_allocator);
 }
-/* Setup board-specific device physical blocks. */
-SetupDevicePhysicalMemoryBlocks();
+/* Setup board-specific device physical regions. */
+SetupDevicePhysicalMemoryRegions();
 /* Automatically map in devices that have auto-map attributes. */
-for (auto &block : KMemoryLayout::GetPhysicalMemoryBlockTree()) {
-/* We only care about automatically-mapped blocks. */
-if (!block.IsDerivedFrom(KMemoryRegionType_KernelAutoMap)) {
+for (auto &region : KMemoryLayout::GetPhysicalMemoryRegionTree()) {
+/* We only care about automatically-mapped regions. */
+if (!region.IsDerivedFrom(KMemoryRegionType_KernelAutoMap)) {
 continue;
 }
-/* If this block has already been mapped, no need to consider it. */
-if (block.HasTypeAttribute(KMemoryRegionAttr_DidKernelMap)) {
+/* If this region has already been mapped, no need to consider it. */
+if (region.HasTypeAttribute(KMemoryRegionAttr_DidKernelMap)) {
 continue;
 }
-/* Set the attribute to note we've mapped this block. */
-block.SetTypeAttribute(KMemoryRegionAttr_DidKernelMap);
+/* Set the attribute to note we've mapped this region. */
+region.SetTypeAttribute(KMemoryRegionAttr_DidKernelMap);
-/* Create a virtual pair block and insert it into the tree. */
-const KPhysicalAddress map_phys_addr = util::AlignDown(block.GetAddress(), PageSize);
-const size_t map_size = util::AlignUp(block.GetEndAddress(), PageSize) - GetInteger(map_phys_addr);
-const KVirtualAddress map_virt_addr = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegionWithGuard(map_size, PageSize, KMemoryRegionType_KernelMisc, PageSize);
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(map_virt_addr), map_size, KMemoryRegionType_KernelMiscMappedDevice));
-block.SetPairAddress(GetInteger(map_virt_addr) + block.GetAddress() - GetInteger(map_phys_addr));
+/* Create a virtual pair region and insert it into the tree. */
+const KPhysicalAddress map_phys_addr = util::AlignDown(region.GetAddress(), PageSize);
+const size_t map_size = util::AlignUp(region.GetEndAddress(), PageSize) - GetInteger(map_phys_addr);
+const KVirtualAddress map_virt_addr = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard(map_size, PageSize, KMemoryRegionType_KernelMisc, PageSize);
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(map_virt_addr), map_size, KMemoryRegionType_KernelMiscMappedDevice));
+region.SetPairAddress(GetInteger(map_virt_addr) + region.GetAddress() - GetInteger(map_phys_addr));
 /* Map the page in to our page table. */
 ttbr1_table.Map(map_virt_addr, map_size, map_phys_addr, KernelMmioAttribute, g_initial_page_allocator);
 }
-/* Setup the basic DRAM blocks. */
-SetupDramPhysicalMemoryBlocks();
+/* Setup the basic DRAM regions. */
+SetupDramPhysicalMemoryRegions();
-/* Insert a physical block for the kernel code region. */
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(code_start_phys_addr), (code_end_virt_addr - code_start_virt_addr), KMemoryRegionType_DramKernelCode));
-KMemoryLayout::GetPhysicalMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_DramKernelCode)->SetPairAddress(code_start_virt_addr);
+/* Insert a physical region for the kernel code region. */
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(code_start_phys_addr), (code_end_virt_addr - code_start_virt_addr), KMemoryRegionType_DramKernelCode));
+KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_DramKernelCode)->SetPairAddress(code_start_virt_addr);
-/* Insert a physical block for the kernel slab region. */
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(slab_start_phys_addr), slab_region_size, KMemoryRegionType_DramKernelSlab));
-KMemoryLayout::GetPhysicalMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_DramKernelSlab)->SetPairAddress(GetInteger(slab_region_start));
+/* Insert a physical region for the kernel slab region. */
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(slab_start_phys_addr), slab_region_size, KMemoryRegionType_DramKernelSlab));
+KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_DramKernelSlab)->SetPairAddress(GetInteger(slab_region_start));
 /* Map and clear the slab region. */
 ttbr1_table.Map(slab_region_start, slab_region_size, slab_start_phys_addr, KernelRwDataAttribute, g_initial_page_allocator);
@@ -222,69 +222,69 @@ namespace ams::kern::init {
 const size_t page_table_heap_size = GetInteger(resource_end_phys_addr) - GetInteger(slab_end_phys_addr);
 MESOSPHERE_INIT_ABORT_UNLESS(page_table_heap_size / 4_MB > 2);
-/* Insert a physical block for the kernel page table heap region */
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(slab_end_phys_addr), page_table_heap_size, KMemoryRegionType_DramKernelPtHeap));
+/* Insert a physical region for the kernel page table heap region */
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(slab_end_phys_addr), page_table_heap_size, KMemoryRegionType_DramKernelPtHeap));
-/* All DRAM blocks that we haven't tagged by this point will be mapped under the linear mapping. Tag them. */
-for (auto &block : KMemoryLayout::GetPhysicalMemoryBlockTree()) {
-if (block.GetType() == KMemoryRegionType_Dram) {
-block.SetTypeAttribute(KMemoryRegionAttr_LinearMapped);
+/* All DRAM regions that we haven't tagged by this point will be mapped under the linear mapping. Tag them. */
+for (auto &region : KMemoryLayout::GetPhysicalMemoryRegionTree()) {
+if (region.GetType() == KMemoryRegionType_Dram) {
+region.SetTypeAttribute(KMemoryRegionAttr_LinearMapped);
 }
 }
 /* Setup the linear mapping region. */
 constexpr size_t LinearRegionAlign = 1_GB;
-const auto linear_extents = KMemoryLayout::GetPhysicalMemoryBlockTree().GetDerivedRegionExtents(KMemoryRegionAttr_LinearMapped);
-const KPhysicalAddress aligned_linear_phys_start = util::AlignDown(linear_extents.first_block->GetAddress(), LinearRegionAlign);
-const size_t linear_region_size = util::AlignUp(linear_extents.last_block->GetEndAddress(), LinearRegionAlign) - GetInteger(aligned_linear_phys_start);
-const KVirtualAddress linear_region_start = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegionWithGuard(linear_region_size, LinearRegionAlign, KMemoryRegionType_None, LinearRegionAlign);
+const auto linear_extents = KMemoryLayout::GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionAttr_LinearMapped);
+const KPhysicalAddress aligned_linear_phys_start = util::AlignDown(linear_extents.first_region->GetAddress(), LinearRegionAlign);
+const size_t linear_region_size = util::AlignUp(linear_extents.last_region->GetEndAddress(), LinearRegionAlign) - GetInteger(aligned_linear_phys_start);
+const KVirtualAddress linear_region_start = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard(linear_region_size, LinearRegionAlign, KMemoryRegionType_None, LinearRegionAlign);
 const uintptr_t linear_region_phys_to_virt_diff = GetInteger(linear_region_start) - GetInteger(aligned_linear_phys_start);
-/* Map and create blocks for all the linearly-mapped data. */
-for (auto &block : KMemoryLayout::GetPhysicalMemoryBlockTree()) {
-if (!block.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
+/* Map and create regions for all the linearly-mapped data. */
+for (auto &region : KMemoryLayout::GetPhysicalMemoryRegionTree()) {
+if (!region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
 continue;
 }
-const uintptr_t block_virt_addr = block.GetAddress() + linear_region_phys_to_virt_diff;
-ttbr1_table.Map(block_virt_addr, block.GetSize(), block.GetAddress(), KernelRwDataAttribute, g_initial_page_allocator);
+const uintptr_t region_virt_addr = region.GetAddress() + linear_region_phys_to_virt_diff;
+ttbr1_table.Map(region_virt_addr, region.GetSize(), region.GetAddress(), KernelRwDataAttribute, g_initial_page_allocator);
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(block_virt_addr, block.GetSize(), GetTypeForVirtualLinearMapping(block.GetType())));
-block.SetPairAddress(block_virt_addr);
-KMemoryLayout::GetVirtualMemoryBlockTree().FindContainingBlock(block_virt_addr)->SetPairAddress(block.GetAddress());
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(region_virt_addr, region.GetSize(), GetTypeForVirtualLinearMapping(region.GetType())));
+region.SetPairAddress(region_virt_addr);
+KMemoryLayout::GetVirtualMemoryRegionTree().FindContainingRegion(region_virt_addr)->SetPairAddress(region.GetAddress());
 }
-/* Create blocks for and map all core-specific stacks. */
+/* Create regions for and map all core-specific stacks. */
 for (size_t i = 0; i < cpu::NumCores; i++) {
 MapStackForCore(ttbr1_table, KMemoryRegionType_KernelMiscMainStack, i);
 MapStackForCore(ttbr1_table, KMemoryRegionType_KernelMiscIdleStack, i);
 MapStackForCore(ttbr1_table, KMemoryRegionType_KernelMiscExceptionStack, i);
 }
-/* Setup the KCoreLocalRegion blocks. */
-SetupCoreLocalRegionMemoryBlocks(ttbr1_table, g_initial_page_allocator);
+/* Setup the KCoreLocalRegion regions. */
+SetupCoreLocalRegionMemoryRegions(ttbr1_table, g_initial_page_allocator);
 /* Finalize the page allocator, we're done allocating at this point. */
 const KPhysicalAddress final_init_page_table_end_address = g_initial_page_allocator.GetFinalNextAddress();
 const size_t init_page_table_region_size = GetInteger(final_init_page_table_end_address) - GetInteger(resource_end_phys_addr);
-/* Insert blocks for the initial page table region. */
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(resource_end_phys_addr), init_page_table_region_size, KMemoryRegionType_DramKernelInitPt));
-MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(resource_end_phys_addr) + linear_region_phys_to_virt_diff, init_page_table_region_size, KMemoryRegionType_VirtualKernelInitPt));
+/* Insert regions for the initial page table region. */
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(resource_end_phys_addr), init_page_table_region_size, KMemoryRegionType_DramKernelInitPt));
+MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(resource_end_phys_addr) + linear_region_phys_to_virt_diff, init_page_table_region_size, KMemoryRegionType_VirtualKernelInitPt));
-/* All linear-mapped DRAM blocks that we haven't tagged by this point will be allocated to some pool partition. Tag them. */
-for (auto &block : KMemoryLayout::GetPhysicalMemoryBlockTree()) {
-if (block.GetType() == KMemoryRegionType_DramLinearMapped) {
-block.SetType(KMemoryRegionType_DramPoolPartition);
+/* All linear-mapped DRAM regions that we haven't tagged by this point will be allocated to some pool partition. Tag them. */
+for (auto &region : KMemoryLayout::GetPhysicalMemoryRegionTree()) {
+if (region.GetType() == KMemoryRegionType_DramLinearMapped) {
+region.SetType(KMemoryRegionType_DramPoolPartition);
 }
 }
-/* Setup all other memory blocks needed to arrange the pool partitions. */
-SetupPoolPartitionMemoryBlocks();
+/* Setup all other memory regions needed to arrange the pool partitions. */
+SetupPoolPartitionMemoryRegions();
-/* Cache all linear blocks in their own trees for faster access, later. */
-KMemoryLayout::InitializeLinearMemoryBlockTrees(aligned_linear_phys_start, linear_region_start);
+/* Cache all linear regions in their own trees for faster access, later. */
+KMemoryLayout::InitializeLinearMemoryRegionTrees(aligned_linear_phys_start, linear_region_start);
 /* Turn on all other cores. */
 TurnOnAllCores(GetInteger(ttbr1_table.GetPhysicalAddress(reinterpret_cast<uintptr_t>(::ams::kern::init::StartOtherCore))));