| | |
| | |
| |
|
| | #include "common/address_space.h" |
| | #include "common/assert.h" |
| |
|
// Helper macros that expand to the full template header + qualified class name for
// out-of-line member definitions below, so each definition doesn't have to repeat it.

// Member of FlatAddressSpaceMap returning `returnType`.
#define MAP_MEMBER(returnType)                                                                     \
    template <typename VaType, VaType UnmappedVa, typename PaType, PaType UnmappedPa,              \
              bool PaContigSplit, size_t AddressSpaceBits, typename ExtraBlockInfo>                \
    requires AddressSpaceValid<VaType, AddressSpaceBits> returnType FlatAddressSpaceMap<           \
        VaType, UnmappedVa, PaType, UnmappedPa, PaContigSplit, AddressSpaceBits, ExtraBlockInfo>

// Same as MAP_MEMBER but without a return type, for constructors.
#define MAP_MEMBER_CONST()                                                                         \
    template <typename VaType, VaType UnmappedVa, typename PaType, PaType UnmappedPa,              \
              bool PaContigSplit, size_t AddressSpaceBits, typename ExtraBlockInfo>                \
    requires AddressSpaceValid<VaType, AddressSpaceBits> FlatAddressSpaceMap<                      \
        VaType, UnmappedVa, PaType, UnmappedPa, PaContigSplit, AddressSpaceBits, ExtraBlockInfo>

// Member of FlatMemoryManager returning `returnType`.
#define MM_MEMBER(returnType)                                                                      \
    template <typename VaType, VaType UnmappedVa, size_t AddressSpaceBits>                         \
    requires AddressSpaceValid<VaType, AddressSpaceBits> returnType                                \
    FlatMemoryManager<VaType, UnmappedVa, AddressSpaceBits>

// Member of FlatAllocator returning `returnType`.
#define ALLOC_MEMBER(returnType)                                                                   \
    template <typename VaType, VaType UnmappedVa, size_t AddressSpaceBits>                         \
    requires AddressSpaceValid<VaType, AddressSpaceBits> returnType                                \
    FlatAllocator<VaType, UnmappedVa, AddressSpaceBits>

// Same as ALLOC_MEMBER but without a return type, for constructors.
#define ALLOC_MEMBER_CONST()                                                                       \
    template <typename VaType, VaType UnmappedVa, size_t AddressSpaceBits>                         \
    requires AddressSpaceValid<VaType, AddressSpaceBits>                                           \
    FlatAllocator<VaType, UnmappedVa, AddressSpaceBits>
| |
|
| | namespace Common { |
| | MAP_MEMBER_CONST()::FlatAddressSpaceMap(VaType va_limit_, |
| | std::function<void(VaType, VaType)> unmap_callback_) |
| | : va_limit{va_limit_}, unmap_callback{std::move(unmap_callback_)} { |
| | if (va_limit > VaMaximum) { |
| | ASSERT_MSG(false, "Invalid VA limit!"); |
| | } |
| | } |
| |
|
| | MAP_MEMBER(void)::MapLocked(VaType virt, PaType phys, VaType size, ExtraBlockInfo extra_info) { |
| | VaType virt_end{virt + size}; |
| |
|
| | if (virt_end > va_limit) { |
| | ASSERT_MSG(false, |
| | "Trying to map a block past the VA limit: virt_end: 0x{:X}, va_limit: 0x{:X}", |
| | virt_end, va_limit); |
| | } |
| |
|
| | auto block_end_successor{std::lower_bound(blocks.begin(), blocks.end(), virt_end)}; |
| | if (block_end_successor == blocks.begin()) { |
| | ASSERT_MSG(false, "Trying to map a block before the VA start: virt_end: 0x{:X}", virt_end); |
| | } |
| |
|
| | auto block_end_predecessor{std::prev(block_end_successor)}; |
| |
|
| | if (block_end_successor != blocks.end()) { |
| | |
| | |
| | if (block_end_successor->virt != virt_end) { |
| | PaType tailPhys{[&]() -> PaType { |
| | if constexpr (!PaContigSplit) { |
| | |
| | return block_end_predecessor->phys; |
| | } else { |
| | if (block_end_predecessor->Unmapped()) { |
| | |
| | return block_end_predecessor->phys; |
| | } else { |
| | return block_end_predecessor->phys + virt_end - block_end_predecessor->virt; |
| | } |
| | } |
| | }()}; |
| |
|
| | if (block_end_predecessor != blocks.begin() && block_end_predecessor->virt >= virt) { |
| | |
| | |
| | block_end_predecessor->virt = virt_end; |
| | block_end_predecessor->phys = tailPhys; |
| | block_end_predecessor->extra_info = block_end_predecessor->extra_info; |
| |
|
| | |
| | block_end_successor = block_end_predecessor--; |
| | } else { |
| | |
| | blocks.insert(block_end_successor, |
| | {Block(virt, phys, extra_info), |
| | Block(virt_end, tailPhys, block_end_predecessor->extra_info)}); |
| | if (unmap_callback) { |
| | unmap_callback(virt, size); |
| | } |
| |
|
| | return; |
| | } |
| | } |
| | } else { |
| | |
| | |
| | if (block_end_predecessor != blocks.begin() && block_end_predecessor->virt >= virt) { |
| | |
| | block_end_predecessor->virt = virt_end; |
| |
|
| | |
| | block_end_successor = block_end_predecessor--; |
| | } else { |
| | |
| | blocks.insert(block_end_successor, |
| | {Block(virt, phys, extra_info), Block(virt_end, UnmappedPa, {})}); |
| | if (unmap_callback) { |
| | unmap_callback(virt, size); |
| | } |
| |
|
| | return; |
| | } |
| | } |
| |
|
| | auto block_start_successor{block_end_successor}; |
| |
|
| | |
| | |
| | while (std::prev(block_start_successor)->virt >= virt) { |
| | block_start_successor--; |
| | } |
| |
|
| | |
| | if (block_start_successor->virt > virt_end) { |
| | ASSERT_MSG(false, "Unsorted block in AS map: virt: 0x{:X}", block_start_successor->virt); |
| | } else if (block_start_successor->virt == virt_end) { |
| | |
| | blocks.insert(block_start_successor, Block(virt, phys, extra_info)); |
| | } else { |
| | |
| | if (auto eraseStart{std::next(block_start_successor)}; eraseStart != block_end_successor) { |
| | blocks.erase(eraseStart, block_end_successor); |
| | } |
| |
|
| | |
| | block_start_successor->virt = virt; |
| | block_start_successor->phys = phys; |
| | block_start_successor->extra_info = extra_info; |
| | } |
| |
|
| | if (unmap_callback) { |
| | unmap_callback(virt, size); |
| | } |
| | } |
| |
|
| | MAP_MEMBER(void)::UnmapLocked(VaType virt, VaType size) { |
| | VaType virt_end{virt + size}; |
| |
|
| | if (virt_end > va_limit) { |
| | ASSERT_MSG(false, |
| | "Trying to map a block past the VA limit: virt_end: 0x{:X}, va_limit: 0x{:X}", |
| | virt_end, va_limit); |
| | } |
| |
|
| | auto block_end_successor{std::lower_bound(blocks.begin(), blocks.end(), virt_end)}; |
| | if (block_end_successor == blocks.begin()) { |
| | ASSERT_MSG(false, "Trying to unmap a block before the VA start: virt_end: 0x{:X}", |
| | virt_end); |
| | } |
| |
|
| | auto block_end_predecessor{std::prev(block_end_successor)}; |
| |
|
| | auto walk_back_to_predecessor{[&](auto iter) { |
| | while (iter->virt >= virt) { |
| | iter--; |
| | } |
| |
|
| | return iter; |
| | }}; |
| |
|
| | auto erase_blocks_with_end_unmapped{[&](auto unmappedEnd) { |
| | auto block_start_predecessor{walk_back_to_predecessor(unmappedEnd)}; |
| | auto block_start_successor{std::next(block_start_predecessor)}; |
| |
|
| | auto eraseEnd{[&]() { |
| | if (block_start_predecessor->Unmapped()) { |
| | |
| | |
| | return std::next(unmappedEnd); |
| | } else { |
| | |
| | |
| | unmappedEnd->virt = virt; |
| | return unmappedEnd; |
| | } |
| | }()}; |
| |
|
| | |
| | if (eraseEnd != blocks.end() && |
| | (eraseEnd == block_start_successor || |
| | (block_start_predecessor->Unmapped() && eraseEnd->Unmapped()))) { |
| | ASSERT_MSG(false, "Multiple contiguous unmapped regions are unsupported!"); |
| | } |
| |
|
| | blocks.erase(block_start_successor, eraseEnd); |
| | }}; |
| |
|
| | |
| | if (block_end_predecessor->Unmapped()) { |
| | if (block_end_predecessor->virt > virt) { |
| | erase_blocks_with_end_unmapped(block_end_predecessor); |
| | } |
| |
|
| | if (unmap_callback) { |
| | unmap_callback(virt, size); |
| | } |
| |
|
| | return; |
| | } else if (block_end_successor->virt == virt_end && block_end_successor->Unmapped()) { |
| | erase_blocks_with_end_unmapped(block_end_successor); |
| |
|
| | if (unmap_callback) { |
| | unmap_callback(virt, size); |
| | } |
| |
|
| | return; |
| | } else if (block_end_successor == blocks.end()) { |
| | |
| | ASSERT_MSG(false, "Unexpected Memory Manager state!"); |
| | } else if (block_end_successor->virt != virt_end) { |
| | |
| |
|
| | |
| | PaType tailPhys{[&]() { |
| | if constexpr (PaContigSplit) { |
| | return block_end_predecessor->phys + virt_end - block_end_predecessor->virt; |
| | } else { |
| | return block_end_predecessor->phys; |
| | } |
| | }()}; |
| |
|
| | if (block_end_predecessor->virt >= virt) { |
| | |
| | block_end_predecessor->virt = virt_end; |
| | block_end_predecessor->phys = tailPhys; |
| |
|
| | |
| | block_end_successor = block_end_predecessor--; |
| | } else { |
| | blocks.insert(block_end_successor, |
| | {Block(virt, UnmappedPa, {}), |
| | Block(virt_end, tailPhys, block_end_predecessor->extra_info)}); |
| | if (unmap_callback) { |
| | unmap_callback(virt, size); |
| | } |
| |
|
| | |
| | return; |
| | } |
| | } |
| |
|
| | |
| | |
| | auto block_start_predecessor{walk_back_to_predecessor(block_end_successor)}; |
| | auto block_start_successor{std::next(block_start_predecessor)}; |
| |
|
| | if (block_start_successor->virt > virt_end) { |
| | ASSERT_MSG(false, "Unsorted block in AS map: virt: 0x{:X}", block_start_successor->virt); |
| | } else if (block_start_successor->virt == virt_end) { |
| | |
| | |
| |
|
| | |
| | if (block_start_predecessor->Mapped()) { |
| | blocks.insert(block_start_successor, Block(virt, UnmappedPa, {})); |
| | } |
| | } else if (block_start_predecessor->Unmapped()) { |
| | |
| | blocks.erase(block_start_successor, block_end_predecessor); |
| | } else { |
| | |
| | |
| | if (auto eraseStart{std::next(block_start_successor)}; eraseStart != block_end_successor) { |
| | blocks.erase(eraseStart, block_end_successor); |
| | } |
| |
|
| | |
| | block_start_successor->virt = virt; |
| | block_start_successor->phys = UnmappedPa; |
| | } |
| |
|
| | if (unmap_callback) |
| | unmap_callback(virt, size); |
| | } |
| |
|
// Constructs an allocator over [virt_start_, va_limit_); linear allocation begins
// at virt_start_ (current_linear_alloc_end tracks the bump pointer).
ALLOC_MEMBER_CONST()::FlatAllocator(VaType virt_start_, VaType va_limit_)
    : Base{va_limit_}, virt_start{virt_start_}, current_linear_alloc_end{virt_start_} {}
| |
|
| | ALLOC_MEMBER(VaType)::Allocate(VaType size) { |
| | std::scoped_lock lock(this->block_mutex); |
| |
|
| | VaType alloc_start{UnmappedVa}; |
| | VaType alloc_end{current_linear_alloc_end + size}; |
| |
|
| | |
| | if (alloc_end >= current_linear_alloc_end && alloc_end <= this->va_limit) { |
| | auto alloc_end_successor{ |
| | std::lower_bound(this->blocks.begin(), this->blocks.end(), alloc_end)}; |
| | if (alloc_end_successor == this->blocks.begin()) { |
| | ASSERT_MSG(false, "First block in AS map is invalid!"); |
| | } |
| |
|
| | auto alloc_end_predecessor{std::prev(alloc_end_successor)}; |
| | if (alloc_end_predecessor->virt <= current_linear_alloc_end) { |
| | alloc_start = current_linear_alloc_end; |
| | } else { |
| | |
| | while (alloc_end_successor != this->blocks.end()) { |
| | if (alloc_end_successor->virt - alloc_end_predecessor->virt < size || |
| | alloc_end_predecessor->Mapped()) { |
| | alloc_start = alloc_end_predecessor->virt; |
| | break; |
| | } |
| |
|
| | alloc_end_predecessor = alloc_end_successor++; |
| |
|
| | |
| | |
| | if (alloc_end_successor == this->blocks.end()) { |
| | alloc_end = alloc_end_predecessor->virt + size; |
| |
|
| | if (alloc_end >= alloc_end_predecessor->virt && alloc_end <= this->va_limit) { |
| | alloc_start = alloc_end_predecessor->virt; |
| | } |
| | } |
| | } |
| | } |
| | } |
| |
|
| | if (alloc_start != UnmappedVa) { |
| | current_linear_alloc_end = alloc_start + size; |
| | } else { |
| | if (this->blocks.size() <= 2) { |
| | ASSERT_MSG(false, "Unexpected allocator state!"); |
| | } |
| |
|
| | auto search_predecessor{std::next(this->blocks.begin())}; |
| | auto search_successor{std::next(search_predecessor)}; |
| |
|
| | while (search_successor != this->blocks.end() && |
| | (search_successor->virt - search_predecessor->virt < size || |
| | search_predecessor->Mapped())) { |
| | search_predecessor = search_successor++; |
| | } |
| |
|
| | if (search_successor != this->blocks.end()) { |
| | alloc_start = search_predecessor->virt; |
| | } else { |
| | return {}; |
| | } |
| | } |
| |
|
| | this->MapLocked(alloc_start, true, size, {}); |
| | return alloc_start; |
| | } |
| |
|
// Reserves the fixed range [virt, virt + size) by marking it as allocated in the
// underlying map (delegates to the locking Map wrapper).
ALLOC_MEMBER(void)::AllocateFixed(VaType virt, VaType size) {
    this->Map(virt, true, size);
}
| |
|
// Releases a previously allocated range by unmapping it from the underlying map.
ALLOC_MEMBER(void)::Free(VaType virt, VaType size) {
    this->Unmap(virt, size);
}
| | } |
| |
|