kern: s/MemoryBlock/MemoryRegion/g

Michael Scire 2020-02-05 14:16:56 -08:00
parent 6f2b517d86
commit eeda912cd1
4 changed files with 142 additions and 142 deletions
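
For orientation, a minimal before/after sketch of one call site affected by this rename (the accessor names are taken from the diff below; the surrounding mesosphere kernel context is assumed, so the fragment is not compilable on its own):

    /* Before this commit: lookups went through the *MemoryBlock* names. */
    KMemoryLayout::GetVirtualMemoryBlockTree().FindFirstBlockByType(KMemoryRegionType_KernelSlab)->GetAddress();

    /* After this commit: the same lookup through the *MemoryRegion* names. */
    KMemoryLayout::GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_KernelSlab)->GetAddress();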


@@ -112,17 +112,17 @@ namespace ams::kern {
}
}
class KMemoryBlock : public util::IntrusiveRedBlackTreeBaseNode<KMemoryBlock> {
NON_COPYABLE(KMemoryBlock);
NON_MOVEABLE(KMemoryBlock);
class KMemoryRegion : public util::IntrusiveRedBlackTreeBaseNode<KMemoryRegion> {
NON_COPYABLE(KMemoryRegion);
NON_MOVEABLE(KMemoryRegion);
private:
uintptr_t address;
uintptr_t pair_address;
size_t block_size;
size_t region_size;
u32 attributes;
u32 type_id;
public:
static constexpr ALWAYS_INLINE int Compare(const KMemoryBlock &lhs, const KMemoryBlock &rhs) {
static constexpr ALWAYS_INLINE int Compare(const KMemoryRegion &lhs, const KMemoryRegion &rhs) {
if (lhs.GetAddress() < rhs.GetAddress()) {
return -1;
} else if (lhs.GetLastAddress() > rhs.GetLastAddress()) {
@@ -132,13 +132,13 @@ namespace ams::kern {
}
}
public:
constexpr ALWAYS_INLINE KMemoryBlock() : address(0), pair_address(0), block_size(0), attributes(0), type_id(0) { /* ... */ }
constexpr ALWAYS_INLINE KMemoryBlock(uintptr_t a, size_t bl, uintptr_t p, u32 r, u32 t) :
address(a), pair_address(p), block_size(bl), attributes(r), type_id(t)
constexpr ALWAYS_INLINE KMemoryRegion() : address(0), pair_address(0), region_size(0), attributes(0), type_id(0) { /* ... */ }
constexpr ALWAYS_INLINE KMemoryRegion(uintptr_t a, size_t rs, uintptr_t p, u32 r, u32 t) :
address(a), pair_address(p), region_size(rs), attributes(r), type_id(t)
{
/* ... */
}
constexpr ALWAYS_INLINE KMemoryBlock(uintptr_t a, size_t bl, u32 r, u32 t) : KMemoryBlock(a, bl, std::numeric_limits<uintptr_t>::max(), r, t) { /* ... */ }
constexpr ALWAYS_INLINE KMemoryRegion(uintptr_t a, size_t rs, u32 r, u32 t) : KMemoryRegion(a, rs, std::numeric_limits<uintptr_t>::max(), r, t) { /* ... */ }
constexpr ALWAYS_INLINE uintptr_t GetAddress() const {
return this->address;
@@ -149,7 +149,7 @@ namespace ams::kern {
}
constexpr ALWAYS_INLINE size_t GetSize() const {
return this->block_size;
return this->region_size;
}
constexpr ALWAYS_INLINE uintptr_t GetEndAddress() const {
@@ -197,16 +197,16 @@ namespace ams::kern {
this->type_id |= attr;
}
};
static_assert(std::is_trivially_destructible<KMemoryBlock>::value);
static_assert(std::is_trivially_destructible<KMemoryRegion>::value);
class KMemoryBlockTree {
class KMemoryRegionTree {
public:
struct DerivedRegionExtents {
const KMemoryBlock *first_block;
const KMemoryBlock *last_block;
const KMemoryRegion *first_region;
const KMemoryRegion *last_region;
};
private:
using TreeType = util::IntrusiveRedBlackTreeBaseTraits<KMemoryBlock>::TreeType<KMemoryBlock>;
using TreeType = util::IntrusiveRedBlackTreeBaseTraits<KMemoryRegion>::TreeType<KMemoryRegion>;
using value_type = TreeType::value_type;
using size_type = TreeType::size_type;
using difference_type = TreeType::difference_type;
@@ -219,17 +219,17 @@ namespace ams::kern {
private:
TreeType tree;
public:
constexpr ALWAYS_INLINE KMemoryBlockTree() : tree() { /* ... */ }
constexpr ALWAYS_INLINE KMemoryRegionTree() : tree() { /* ... */ }
public:
iterator FindContainingBlock(uintptr_t address) {
auto it = this->find(KMemoryBlock(address, 1, 0, 0));
iterator FindContainingRegion(uintptr_t address) {
auto it = this->find(KMemoryRegion(address, 1, 0, 0));
MESOSPHERE_INIT_ABORT_UNLESS(it != this->end());
MESOSPHERE_INIT_ABORT_UNLESS(it->Contains(address));
return it;
}
iterator FindFirstBlockByTypeAttr(u32 type_id, u32 attr = 0) {
iterator FindFirstRegionByTypeAttr(u32 type_id, u32 attr = 0) {
for (auto it = this->begin(); it != this->end(); it++) {
if (it->GetType() == type_id && it->GetAttributes() == attr) {
return it;
@@ -238,7 +238,7 @@ namespace ams::kern {
MESOSPHERE_INIT_ABORT();
}
iterator FindFirstBlockByType(u32 type_id) {
iterator FindFirstRegionByType(u32 type_id) {
for (auto it = this->begin(); it != this->end(); it++) {
if (it->GetType() == type_id) {
return it;
@@ -247,7 +247,7 @@ namespace ams::kern {
MESOSPHERE_INIT_ABORT();
}
iterator FindFirstDerivedBlock(u32 type_id) {
iterator FindFirstDerivedRegion(u32 type_id) {
for (auto it = this->begin(); it != this->end(); it++) {
if (it->IsDerivedFrom(type_id)) {
return it;
@@ -258,19 +258,19 @@ namespace ams::kern {
DerivedRegionExtents GetDerivedRegionExtents(u32 type_id) {
DerivedRegionExtents extents = { .first_block = nullptr, .last_block = nullptr };
DerivedRegionExtents extents = { .first_region = nullptr, .last_region = nullptr };
for (auto it = this->cbegin(); it != this->cend(); it++) {
if (it->IsDerivedFrom(type_id)) {
if (extents.first_block == nullptr) {
extents.first_block = std::addressof(*it);
if (extents.first_region == nullptr) {
extents.first_region = std::addressof(*it);
}
extents.last_block = std::addressof(*it);
extents.last_region = std::addressof(*it);
}
}
MESOSPHERE_INIT_ABORT_UNLESS(extents.first_block != nullptr);
MESOSPHERE_INIT_ABORT_UNLESS(extents.last_block != nullptr);
MESOSPHERE_INIT_ABORT_UNLESS(extents.first_region != nullptr);
MESOSPHERE_INIT_ABORT_UNLESS(extents.last_region != nullptr);
return extents;
}
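
FindContainingRegion above relies on Compare treating a region that lies inside another as equivalent, so probing the tree with a degenerate one-byte KMemoryRegion(address, 1, 0, 0) returns the stored region that contains the address. A self-contained analogue of that idiom using std::set instead of the kernel's intrusive red-black tree (illustrative types and names, not the kernel API):

    #include <cassert>
    #include <cstdint>
    #include <set>

    struct Range { uintptr_t first; uintptr_t last; };

    /* Two ranges compare "equal" when they overlap, so find() with a
     * one-byte probe returns the stored range containing that address. */
    struct RangeLess {
        bool operator()(const Range &lhs, const Range &rhs) const {
            return lhs.last < rhs.first;
        }
    };

    int main() {
        std::set<Range, RangeLess> tree;
        tree.insert({0x1000, 0x1FFF});
        tree.insert({0x2000, 0x2FFF});

        /* Probe with a degenerate one-byte range, as FindContainingRegion does. */
        auto it = tree.find(Range{0x2468, 0x2468});
        assert(it != tree.end() && it->first == 0x2000 && it->last == 0x2FFF);
        return 0;
    }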
@@ -354,30 +354,30 @@ namespace ams::kern {
}
};
class KMemoryBlockAllocator {
NON_COPYABLE(KMemoryBlockAllocator);
NON_MOVEABLE(KMemoryBlockAllocator);
class KMemoryRegionAllocator {
NON_COPYABLE(KMemoryRegionAllocator);
NON_MOVEABLE(KMemoryRegionAllocator);
public:
static constexpr size_t MaxMemoryBlocks = 1000;
static constexpr size_t MaxMemoryRegions = 1000;
friend class KMemoryLayout;
private:
KMemoryBlock block_heap[MaxMemoryBlocks];
size_t num_blocks;
KMemoryRegion region_heap[MaxMemoryRegions];
size_t num_regions;
private:
constexpr ALWAYS_INLINE KMemoryBlockAllocator() : block_heap(), num_blocks() { /* ... */ }
constexpr ALWAYS_INLINE KMemoryRegionAllocator() : region_heap(), num_regions() { /* ... */ }
public:
ALWAYS_INLINE KMemoryBlock *Allocate() {
ALWAYS_INLINE KMemoryRegion *Allocate() {
/* Ensure we stay within the bounds of our heap. */
MESOSPHERE_INIT_ABORT_UNLESS(this->num_blocks < MaxMemoryBlocks);
MESOSPHERE_INIT_ABORT_UNLESS(this->num_regions < MaxMemoryRegions);
return &this->block_heap[this->num_blocks++];
return &this->region_heap[this->num_regions++];
}
template<typename... Args>
ALWAYS_INLINE KMemoryBlock *Create(Args&&... args) {
KMemoryBlock *block = this->Allocate();
new (block) KMemoryBlock(std::forward<Args>(args)...);
return block;
ALWAYS_INLINE KMemoryRegion *Create(Args&&... args) {
KMemoryRegion *region = this->Allocate();
new (region) KMemoryRegion(std::forward<Args>(args)...);
return region;
}
};
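
KMemoryRegionAllocator above is a fixed-capacity bump allocator: Allocate() hands out the next slot of a static array (aborting once MaxMemoryRegions are in use) and Create() placement-constructs a KMemoryRegion in that slot; nothing is ever destroyed, which is why KMemoryRegion must be trivially destructible. A self-contained sketch of the same pattern with illustrative names:

    #include <cstddef>
    #include <new>
    #include <utility>

    template<typename T, std::size_t N>
    class StaticBumpAllocator {
        private:
            /* Raw backing storage; objects are constructed lazily via placement new. */
            alignas(T) unsigned char storage[N * sizeof(T)];
            std::size_t count = 0;
        public:
            template<typename... Args>
            T *Create(Args &&... args) {
                /* The kernel aborts on exhaustion; this sketch reports failure instead. */
                if (count >= N) { return nullptr; }
                void *slot = storage + (count++) * sizeof(T);
                return new (slot) T(std::forward<Args>(args)...);
            }
    };

    /* Usage: StaticBumpAllocator<int, 8> alloc; int *p = alloc.Create(42); */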
@@ -385,17 +385,17 @@ namespace ams::kern {
private:
static /* constinit */ inline uintptr_t s_linear_phys_to_virt_diff;
static /* constinit */ inline uintptr_t s_linear_virt_to_phys_diff;
static /* constinit */ inline KMemoryBlockAllocator s_block_allocator;
static /* constinit */ inline KMemoryBlockTree s_virtual_tree;
static /* constinit */ inline KMemoryBlockTree s_physical_tree;
static /* constinit */ inline KMemoryBlockTree s_virtual_linear_tree;
static /* constinit */ inline KMemoryBlockTree s_physical_linear_tree;
static /* constinit */ inline KMemoryRegionAllocator s_region_allocator;
static /* constinit */ inline KMemoryRegionTree s_virtual_tree;
static /* constinit */ inline KMemoryRegionTree s_physical_tree;
static /* constinit */ inline KMemoryRegionTree s_virtual_linear_tree;
static /* constinit */ inline KMemoryRegionTree s_physical_linear_tree;
public:
static ALWAYS_INLINE KMemoryBlockAllocator &GetMemoryBlockAllocator() { return s_block_allocator; }
static ALWAYS_INLINE KMemoryBlockTree &GetVirtualMemoryBlockTree() { return s_virtual_tree; }
static ALWAYS_INLINE KMemoryBlockTree &GetPhysicalMemoryBlockTree() { return s_physical_tree; }
static ALWAYS_INLINE KMemoryBlockTree &GetVirtualLinearMemoryBlockTree() { return s_virtual_linear_tree; }
static ALWAYS_INLINE KMemoryBlockTree &GetPhysicalLinearMemoryBlockTree() { return s_physical_linear_tree; }
static ALWAYS_INLINE KMemoryRegionAllocator &GetMemoryRegionAllocator() { return s_region_allocator; }
static ALWAYS_INLINE KMemoryRegionTree &GetVirtualMemoryRegionTree() { return s_virtual_tree; }
static ALWAYS_INLINE KMemoryRegionTree &GetPhysicalMemoryRegionTree() { return s_physical_tree; }
static ALWAYS_INLINE KMemoryRegionTree &GetVirtualLinearMemoryRegionTree() { return s_virtual_linear_tree; }
static ALWAYS_INLINE KMemoryRegionTree &GetPhysicalLinearMemoryRegionTree() { return s_physical_linear_tree; }
static ALWAYS_INLINE KVirtualAddress GetLinearVirtualAddress(KPhysicalAddress address) {
return GetInteger(address) + s_linear_phys_to_virt_diff;
@@ -406,46 +406,46 @@ namespace ams::kern {
}
static NOINLINE KVirtualAddress GetMainStackTopAddress(s32 core_id) {
return GetVirtualMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_KernelMiscMainStack, static_cast<u32>(core_id))->GetEndAddress();
return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscMainStack, static_cast<u32>(core_id))->GetEndAddress();
}
static NOINLINE KVirtualAddress GetIdleStackTopAddress(s32 core_id) {
return GetVirtualMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_KernelMiscIdleStack, static_cast<u32>(core_id))->GetEndAddress();
return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscIdleStack, static_cast<u32>(core_id))->GetEndAddress();
}
static NOINLINE KVirtualAddress GetExceptionStackBottomAddress(s32 core_id) {
return GetVirtualMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_KernelMiscExceptionStack, static_cast<u32>(core_id))->GetAddress();
return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscExceptionStack, static_cast<u32>(core_id))->GetAddress();
}
static NOINLINE KVirtualAddress GetSlabRegionAddress() {
return GetVirtualMemoryBlockTree().FindFirstBlockByType(KMemoryRegionType_KernelSlab)->GetAddress();
return GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_KernelSlab)->GetAddress();
}
static NOINLINE KVirtualAddress GetCoreLocalRegionAddress() {
return GetVirtualMemoryBlockTree().FindFirstBlockByType(KMemoryRegionType_CoreLocal)->GetAddress();
return GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_CoreLocal)->GetAddress();
}
static NOINLINE KVirtualAddress GetInterruptDistributorAddress() {
return GetPhysicalMemoryBlockTree().FindFirstDerivedBlock(KMemoryRegionType_InterruptDistributor)->GetPairAddress();
return GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_InterruptDistributor)->GetPairAddress();
}
static NOINLINE KVirtualAddress GetInterruptCpuInterfaceAddress() {
return GetPhysicalMemoryBlockTree().FindFirstDerivedBlock(KMemoryRegionType_InterruptCpuInterface)->GetPairAddress();
return GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_InterruptCpuInterface)->GetPairAddress();
}
static void InitializeLinearMemoryBlockTrees(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start);
static void InitializeLinearMemoryRegionTrees(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start);
};
namespace init {
/* These should be generic, regardless of board. */
void SetupCoreLocalRegionMemoryBlocks(KInitialPageTable &page_table, KInitialPageAllocator &page_allocator);
void SetupPoolPartitionMemoryBlocks();
void SetupCoreLocalRegionMemoryRegions(KInitialPageTable &page_table, KInitialPageAllocator &page_allocator);
void SetupPoolPartitionMemoryRegions();
/* These may be implemented in a board-specific manner. */
void SetupDevicePhysicalMemoryBlocks();
void SetupDramPhysicalMemoryBlocks();
void SetupDevicePhysicalMemoryRegions();
void SetupDramPhysicalMemoryRegions();
}


@@ -26,29 +26,29 @@ namespace ams::kern {
namespace init {
void SetupDevicePhysicalMemoryBlocks() {
void SetupDevicePhysicalMemoryRegions() {
/* TODO: Give these constexpr defines somewhere? */
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x70006000, 0x40, KMemoryRegionType_Uart | KMemoryRegionAttr_ShouldKernelMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x70019000, 0x1000, KMemoryRegionType_MemoryController | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x7001C000, 0x1000, KMemoryRegionType_MemoryController0 | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x7001D000, 0x1000, KMemoryRegionType_MemoryController1 | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x7000E000, 0x400, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x7000E400, 0xC00, KMemoryRegionType_PowerManagementController | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x50040000, 0x1000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x50041000, 0x1000, KMemoryRegionType_InterruptDistributor | KMemoryRegionAttr_ShouldKernelMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x50042000, 0x1000, KMemoryRegionType_InterruptCpuInterface | KMemoryRegionAttr_ShouldKernelMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x50043000, 0x1D000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x6000F000, 0x1000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x6001DC00, 0x400, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x70006000, 0x40, KMemoryRegionType_Uart | KMemoryRegionAttr_ShouldKernelMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x70019000, 0x1000, KMemoryRegionType_MemoryController | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7001C000, 0x1000, KMemoryRegionType_MemoryController0 | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7001D000, 0x1000, KMemoryRegionType_MemoryController1 | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7000E000, 0x400, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7000E400, 0xC00, KMemoryRegionType_PowerManagementController | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x50040000, 0x1000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x50041000, 0x1000, KMemoryRegionType_InterruptDistributor | KMemoryRegionAttr_ShouldKernelMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x50042000, 0x1000, KMemoryRegionType_InterruptCpuInterface | KMemoryRegionAttr_ShouldKernelMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x50043000, 0x1D000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x6000F000, 0x1000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x6001DC00, 0x400, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
}
void SetupDramPhysicalMemoryBlocks() {
void SetupDramPhysicalMemoryRegions() {
const size_t intended_memory_size = KSystemControl::Init::GetIntendedMemorySize();
const KPhysicalAddress physical_memory_base_address = KSystemControl::Init::GetKernelPhysicalBaseAddress(DramPhysicalAddress);
/* Insert blocks into the tree. */
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(physical_memory_base_address), intended_memory_size, KMemoryRegionType_Dram));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(physical_memory_base_address), ReservedEarlyDramSize, KMemoryRegionType_DramReservedEarly));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(physical_memory_base_address), intended_memory_size, KMemoryRegionType_Dram));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(physical_memory_base_address), ReservedEarlyDramSize, KMemoryRegionType_DramReservedEarly));
}
}
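
The second Insert call above carves the DramReservedEarly range out of the Dram region inserted just before it; the splitting is implemented by KMemoryRegionTree::Insert (last file of this commit), which replaces the containing region with up to three pieces: the part before the new range, the new range itself, and the part after. A standalone sketch of that splitting step with illustrative types and hypothetical example values (not the kernel code):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct Piece { uintptr_t address; std::size_t size; uint32_t type; };

    /* Split a containing piece [old.address, old.address + old.size) around the
     * inserted range [address, address + size), mirroring KMemoryRegionTree::Insert. */
    std::vector<Piece> Split(const Piece &old, uintptr_t address, std::size_t size, uint32_t new_type) {
        std::vector<Piece> out;
        const uintptr_t old_end = old.address + old.size;
        const uintptr_t new_end = address + size;
        if (old.address != address) {
            out.push_back({old.address, address - old.address, old.type});   /* piece before */
        }
        out.push_back({address, size, new_type});                            /* inserted region */
        if (new_end != old_end) {
            out.push_back({new_end, old_end - new_end, old.type});           /* piece after */
        }
        return out;
    }

    int main() {
        /* Hypothetical values: reserve the first 0x80000 bytes of a 4 MiB DRAM region. */
        const auto pieces = Split({0x80000000, 0x400000, /* Dram */ 1}, 0x80000000, 0x80000, /* ReservedEarly */ 2);
        assert(pieces.size() == 2 && pieces[0].size == 0x80000 && pieces[1].size == 0x400000 - 0x80000);
        return 0;
    }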


@@ -98,7 +98,7 @@ namespace ams::kern::init {
KVirtualAddress start = util::AlignUp(GetInteger(address), alignof(T));
if (size > 0) {
MESOSPHERE_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().FindContainingBlock(GetInteger(start) + size - 1)->IsDerivedFrom(KMemoryRegionType_KernelSlab));
MESOSPHERE_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().FindContainingRegion(GetInteger(start) + size - 1)->IsDerivedFrom(KMemoryRegionType_KernelSlab));
T::InitializeSlabHeap(GetVoidPointer(start), size);
}


@@ -17,19 +17,19 @@
namespace ams::kern {
bool KMemoryBlockTree::Insert(uintptr_t address, size_t size, u32 type_id, u32 new_attr, u32 old_attr) {
/* Locate the memory block that contains the address. */
auto it = this->FindContainingBlock(address);
bool KMemoryRegionTree::Insert(uintptr_t address, size_t size, u32 type_id, u32 new_attr, u32 old_attr) {
/* Locate the memory region that contains the address. */
auto it = this->FindContainingRegion(address);
/* We require that the old attr is correct. */
if (it->GetAttributes() != old_attr) {
return false;
}
/* We further require that the block can be split from the old block. */
const uintptr_t inserted_block_end = address + size;
const uintptr_t inserted_block_last = inserted_block_end - 1;
if (it->GetLastAddress() < inserted_block_last) {
/* We further require that the region can be split from the old region. */
const uintptr_t inserted_region_end = address + size;
const uintptr_t inserted_region_last = inserted_region_end - 1;
if (it->GetLastAddress() < inserted_region_last) {
return false;
}
@@ -38,8 +38,8 @@ namespace ams::kern {
return false;
}
/* Cache information from the block before we remove it. */
KMemoryBlock *cur_block = std::addressof(*it);
/* Cache information from the region before we remove it. */
KMemoryRegion *cur_region = std::addressof(*it);
const uintptr_t old_address = it->GetAddress();
const size_t old_size = it->GetSize();
const uintptr_t old_end = old_address + old_size;
@@ -47,39 +47,39 @@ namespace ams::kern {
const uintptr_t old_pair = it->GetPairAddress();
const u32 old_type = it->GetType();
/* Erase the existing block from the tree. */
/* Erase the existing region from the tree. */
this->erase(it);
/* If we need to insert a block before the region, do so. */
/* If we need to insert a region before the region, do so. */
if (old_address != address) {
new (cur_block) KMemoryBlock(old_address, address - old_address, old_pair, old_attr, old_type);
this->insert(*cur_block);
cur_block = KMemoryLayout::GetMemoryBlockAllocator().Allocate();
new (cur_region) KMemoryRegion(old_address, address - old_address, old_pair, old_attr, old_type);
this->insert(*cur_region);
cur_region = KMemoryLayout::GetMemoryRegionAllocator().Allocate();
}
/* Insert a new block. */
/* Insert a new region. */
const uintptr_t new_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (address - old_address) : old_pair;
new (cur_block) KMemoryBlock(address, size, new_pair, new_attr, type_id);
this->insert(*cur_block);
new (cur_region) KMemoryRegion(address, size, new_pair, new_attr, type_id);
this->insert(*cur_region);
/* If we need to insert a block after the region, do so. */
if (old_last != inserted_block_last) {
const uintptr_t after_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (inserted_block_end - old_address) : old_pair;
this->insert(*KMemoryLayout::GetMemoryBlockAllocator().Create(inserted_block_end, old_end - inserted_block_end, after_pair, old_attr, old_type));
/* If we need to insert a region after the region, do so. */
if (old_last != inserted_region_last) {
const uintptr_t after_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (inserted_region_end - old_address) : old_pair;
this->insert(*KMemoryLayout::GetMemoryRegionAllocator().Create(inserted_region_end, old_end - inserted_region_end, after_pair, old_attr, old_type));
}
return true;
}
KVirtualAddress KMemoryBlockTree::GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id) {
KVirtualAddress KMemoryRegionTree::GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id) {
/* We want to find the total extents of the type id. */
const auto extents = this->GetDerivedRegionExtents(type_id);
/* Ensure that our alignment is correct. */
MESOSPHERE_INIT_ABORT_UNLESS(util::IsAligned(extents.first_block->GetAddress(), alignment));
MESOSPHERE_INIT_ABORT_UNLESS(util::IsAligned(extents.first_region->GetAddress(), alignment));
const uintptr_t first_address = extents.first_block->GetAddress();
const uintptr_t last_address = extents.last_block->GetLastAddress();
const uintptr_t first_address = extents.first_region->GetAddress();
const uintptr_t last_address = extents.last_region->GetLastAddress();
while (true) {
const uintptr_t candidate = util::AlignDown(KSystemControl::Init::GenerateRandomRange(first_address, last_address), alignment);
@@ -96,38 +96,38 @@ namespace ams::kern {
continue;
}
/* Locate the candidate block, and ensure it fits. */
const KMemoryBlock *candidate_block = std::addressof(*this->FindContainingBlock(candidate));
if (candidate_last > candidate_block->GetLastAddress()) {
/* Locate the candidate region, and ensure it fits. */
const KMemoryRegion *candidate_region = std::addressof(*this->FindContainingRegion(candidate));
if (candidate_last > candidate_region->GetLastAddress()) {
continue;
}
/* Ensure that the block has the correct type id. */
if (candidate_block->GetType() != type_id)
/* Ensure that the region has the correct type id. */
if (candidate_region->GetType() != type_id)
continue;
return candidate;
}
}
void KMemoryLayout::InitializeLinearMemoryBlockTrees(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start) {
void KMemoryLayout::InitializeLinearMemoryRegionTrees(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start) {
/* Set static differences. */
s_linear_phys_to_virt_diff = GetInteger(linear_virtual_start) - GetInteger(aligned_linear_phys_start);
s_linear_virt_to_phys_diff = GetInteger(aligned_linear_phys_start) - GetInteger(linear_virtual_start);
/* Initialize linear trees. */
for (auto &block : GetPhysicalMemoryBlockTree()) {
if (!block.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
for (auto &region : GetPhysicalMemoryRegionTree()) {
if (!region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
continue;
}
GetPhysicalLinearMemoryBlockTree().insert(*GetMemoryBlockAllocator().Create(block.GetAddress(), block.GetSize(), block.GetAttributes(), block.GetType()));
GetPhysicalLinearMemoryRegionTree().insert(*GetMemoryRegionAllocator().Create(region.GetAddress(), region.GetSize(), region.GetAttributes(), region.GetType()));
}
for (auto &block : GetVirtualMemoryBlockTree()) {
if (!block.IsDerivedFrom(KMemoryRegionType_Dram)) {
for (auto &region : GetVirtualMemoryRegionTree()) {
if (!region.IsDerivedFrom(KMemoryRegionType_Dram)) {
continue;
}
GetVirtualLinearMemoryBlockTree().insert(*GetMemoryBlockAllocator().Create(block.GetAddress(), block.GetSize(), block.GetAttributes(), block.GetType()));
GetVirtualLinearMemoryRegionTree().insert(*GetMemoryRegionAllocator().Create(region.GetAddress(), region.GetSize(), region.GetAttributes(), region.GetType()));
}
}
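
Because s_linear_phys_to_virt_diff and s_linear_virt_to_phys_diff set above are each other's negation under unsigned wraparound, translating between the linearly mapped physical and virtual windows is a single addition in each direction, and the two translations round-trip. A tiny illustration of that arithmetic (hypothetical 64-bit addresses, not the real layout):

    #include <cassert>
    #include <cstdint>

    int main() {
        /* Hypothetical linear-mapping bases (64-bit addresses assumed). */
        const uintptr_t phys_start = 0x80000000;
        const uintptr_t virt_start = 0xFFFFFF8000000000;

        /* Computed as in InitializeLinearMemoryRegionTrees above; unsigned
         * arithmetic makes the two diffs negations of each other. */
        const uintptr_t phys_to_virt_diff = virt_start - phys_start;
        const uintptr_t virt_to_phys_diff = phys_start - virt_start;

        const uintptr_t phys = phys_start + 0x1234;
        const uintptr_t virt = phys + phys_to_virt_diff;     /* phys -> virt translation */
        assert(virt == virt_start + 0x1234);
        assert(virt + virt_to_phys_diff == phys);            /* round-trips back */
        return 0;
    }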
@@ -149,17 +149,17 @@ namespace ams::kern {
KVirtualAddress GetCoreLocalRegionVirtualAddress() {
while (true) {
const uintptr_t candidate_start = GetInteger(KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegion(CoreLocalRegionSizeWithGuards, CoreLocalRegionAlign, KMemoryRegionType_None));
const uintptr_t candidate_start = GetInteger(KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegion(CoreLocalRegionSizeWithGuards, CoreLocalRegionAlign, KMemoryRegionType_None));
const uintptr_t candidate_end = candidate_start + CoreLocalRegionSizeWithGuards;
const uintptr_t candidate_last = candidate_end - 1;
const KMemoryBlock *containing_block = std::addressof(*KMemoryLayout::GetVirtualMemoryBlockTree().FindContainingBlock(candidate_start));
const KMemoryRegion *containing_region = std::addressof(*KMemoryLayout::GetVirtualMemoryRegionTree().FindContainingRegion(candidate_start));
if (candidate_last > containing_block->GetLastAddress()) {
if (candidate_last > containing_region->GetLastAddress()) {
continue;
}
if (containing_block->GetType() != KMemoryRegionType_None) {
if (containing_region->GetType() != KMemoryRegionType_None) {
continue;
}
@@ -167,11 +167,11 @@ namespace ams::kern {
continue;
}
if (containing_block->GetAddress() > util::AlignDown(candidate_start, CoreLocalRegionBoundsAlign)) {
if (containing_region->GetAddress() > util::AlignDown(candidate_start, CoreLocalRegionBoundsAlign)) {
continue;
}
if (util::AlignUp(candidate_last, CoreLocalRegionBoundsAlign) - 1 > containing_block->GetLastAddress()) {
if (util::AlignUp(candidate_last, CoreLocalRegionBoundsAlign) - 1 > containing_region->GetLastAddress()) {
continue;
}
@@ -180,17 +180,17 @@ namespace ams::kern {
}
void InsertPoolPartitionBlockIntoBothTrees(size_t start, size_t size, KMemoryRegionType phys_type, KMemoryRegionType virt_type, u32 &cur_attr) {
void InsertPoolPartitionRegionIntoBothTrees(size_t start, size_t size, KMemoryRegionType phys_type, KMemoryRegionType virt_type, u32 &cur_attr) {
const u32 attr = cur_attr++;
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(start, size, phys_type, attr));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(KMemoryLayout::GetPhysicalMemoryBlockTree().FindFirstBlockByTypeAttr(phys_type, attr)->GetPairAddress(), size, virt_type, attr));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(start, size, phys_type, attr));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstRegionByTypeAttr(phys_type, attr)->GetPairAddress(), size, virt_type, attr));
}
}
void SetupCoreLocalRegionMemoryBlocks(KInitialPageTable &page_table, KInitialPageAllocator &page_allocator) {
void SetupCoreLocalRegionMemoryRegions(KInitialPageTable &page_table, KInitialPageAllocator &page_allocator) {
const KVirtualAddress core_local_virt_start = GetCoreLocalRegionVirtualAddress();
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(core_local_virt_start), CoreLocalRegionSize, KMemoryRegionType_CoreLocal));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(core_local_virt_start), CoreLocalRegionSize, KMemoryRegionType_CoreLocal));
/* Allocate a page for each core. */
KPhysicalAddress core_local_region_start_phys[cpu::NumCores] = {};
@@ -222,9 +222,9 @@ namespace ams::kern {
StoreInitArguments();
}
void SetupPoolPartitionMemoryBlocks() {
void SetupPoolPartitionMemoryRegions() {
/* Start by identifying the extents of the DRAM memory region. */
const auto dram_extents = KMemoryLayout::GetPhysicalMemoryBlockTree().GetDerivedRegionExtents(KMemoryRegionType_Dram);
const auto dram_extents = KMemoryLayout::GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Dram);
/* Get Application and Applet pool sizes. */
const size_t application_pool_size = KSystemControl::Init::GetApplicationPoolSize();
@@ -232,40 +232,40 @@ namespace ams::kern {
const size_t unsafe_system_pool_min_size = KSystemControl::Init::GetMinimumNonSecureSystemPoolSize();
/* Find the start of the kernel DRAM region. */
const uintptr_t kernel_dram_start = KMemoryLayout::GetPhysicalMemoryBlockTree().FindFirstDerivedBlock(KMemoryRegionType_DramKernel)->GetAddress();
const uintptr_t kernel_dram_start = KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_DramKernel)->GetAddress();
MESOSPHERE_INIT_ABORT_UNLESS(util::IsAligned(kernel_dram_start, CarveoutAlignment));
/* Find the start of the pool partitions region. */
const uintptr_t pool_partitions_start = KMemoryLayout::GetPhysicalMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_DramPoolPartition)->GetAddress();
const uintptr_t pool_partitions_start = KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_DramPoolPartition)->GetAddress();
/* Decide on starting addresses for our pools. */
const uintptr_t application_pool_start = dram_extents.last_block->GetEndAddress() - application_pool_size;
const uintptr_t application_pool_start = dram_extents.last_region->GetEndAddress() - application_pool_size;
const uintptr_t applet_pool_start = application_pool_start - applet_pool_size;
const uintptr_t unsafe_system_pool_start = std::min(kernel_dram_start + CarveoutSizeMax, util::AlignDown(applet_pool_start - unsafe_system_pool_min_size, CarveoutAlignment));
const size_t unsafe_system_pool_size = applet_pool_start - unsafe_system_pool_start;
/* We want to arrange application pool depending on where the middle of dram is. */
const uintptr_t dram_midpoint = (dram_extents.first_block->GetAddress() + dram_extents.last_block->GetEndAddress()) / 2;
const uintptr_t dram_midpoint = (dram_extents.first_region->GetAddress() + dram_extents.last_region->GetEndAddress()) / 2;
u32 cur_pool_attr = 0;
size_t total_overhead_size = 0;
if (dram_extents.last_block->GetEndAddress() <= dram_midpoint || dram_midpoint <= application_pool_start) {
InsertPoolPartitionBlockIntoBothTrees(application_pool_start, application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
if (dram_extents.last_region->GetEndAddress() <= dram_midpoint || dram_midpoint <= application_pool_start) {
InsertPoolPartitionRegionIntoBothTrees(application_pool_start, application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
total_overhead_size += KMemoryManager::CalculateMetadataOverheadSize(application_pool_size);
} else {
const size_t first_application_pool_size = dram_midpoint - application_pool_start;
const size_t second_application_pool_size = application_pool_start + application_pool_size - dram_midpoint;
InsertPoolPartitionBlockIntoBothTrees(application_pool_start, first_application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
InsertPoolPartitionBlockIntoBothTrees(dram_midpoint, second_application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
InsertPoolPartitionRegionIntoBothTrees(application_pool_start, first_application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
InsertPoolPartitionRegionIntoBothTrees(dram_midpoint, second_application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
total_overhead_size += KMemoryManager::CalculateMetadataOverheadSize(first_application_pool_size);
total_overhead_size += KMemoryManager::CalculateMetadataOverheadSize(second_application_pool_size);
}
/* Insert the applet pool. */
InsertPoolPartitionBlockIntoBothTrees(applet_pool_start, applet_pool_size, KMemoryRegionType_DramAppletPool, KMemoryRegionType_VirtualDramAppletPool, cur_pool_attr);
InsertPoolPartitionRegionIntoBothTrees(applet_pool_start, applet_pool_size, KMemoryRegionType_DramAppletPool, KMemoryRegionType_VirtualDramAppletPool, cur_pool_attr);
total_overhead_size += KMemoryManager::CalculateMetadataOverheadSize(applet_pool_size);
/* Insert the nonsecure system pool. */
InsertPoolPartitionBlockIntoBothTrees(unsafe_system_pool_start, unsafe_system_pool_size, KMemoryRegionType_DramSystemNonSecurePool, KMemoryRegionType_VirtualDramSystemNonSecurePool, cur_pool_attr);
InsertPoolPartitionRegionIntoBothTrees(unsafe_system_pool_start, unsafe_system_pool_size, KMemoryRegionType_DramSystemNonSecurePool, KMemoryRegionType_VirtualDramSystemNonSecurePool, cur_pool_attr);
total_overhead_size += KMemoryManager::CalculateMetadataOverheadSize(unsafe_system_pool_size);
/* Insert the metadata pool. */
@@ -273,11 +273,11 @@ namespace ams::kern {
const uintptr_t metadata_pool_start = unsafe_system_pool_start - total_overhead_size;
const size_t metadata_pool_size = total_overhead_size;
u32 metadata_pool_attr = 0;
InsertPoolPartitionBlockIntoBothTrees(metadata_pool_start, metadata_pool_size, KMemoryRegionType_DramMetadataPool, KMemoryRegionType_VirtualDramMetadataPool, metadata_pool_attr);
InsertPoolPartitionRegionIntoBothTrees(metadata_pool_start, metadata_pool_size, KMemoryRegionType_DramMetadataPool, KMemoryRegionType_VirtualDramMetadataPool, metadata_pool_attr);
/* Insert the system pool. */
const uintptr_t system_pool_size = metadata_pool_start - pool_partitions_start;
InsertPoolPartitionBlockIntoBothTrees(pool_partitions_start, system_pool_size, KMemoryRegionType_DramSystemPool, KMemoryRegionType_VirtualDramSystemPool, cur_pool_attr);
InsertPoolPartitionRegionIntoBothTrees(pool_partitions_start, system_pool_size, KMemoryRegionType_DramSystemPool, KMemoryRegionType_VirtualDramSystemPool, cur_pool_attr);
}