some allocator changes
parent 10df1c6cfa
commit be015d1967

@@ -2,6 +2,7 @@
#![feature(default_alloc_error_handler)]
#![feature(panic_info_message)]
#![feature(asm_const)]
#![feature(const_mut_refs)]
#![feature(alloc_error_handler)]
#![no_std]
#![no_main]

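The hunk above only toggles crate feature gates. For context: with `alloc_error_handler` enabled, a kernel can define its own out-of-memory hook, while `default_alloc_error_handler` lets the compiler supply a default one that simply panics. A minimal hand-written handler, not part of this commit, would look roughly like this:

    #[alloc_error_handler]
    fn alloc_error_handler(layout: core::alloc::Layout) -> ! {
        // invoked when the global allocator returns null; `layout` describes the failed request
        panic!("allocation error: {:?}", layout)
    }
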
@@ -1,15 +1,102 @@
use core::alloc::GlobalAlloc;
use core::mem;
use core::ptr::NonNull;
use x86_64::structures::paging::{FrameAllocator, Mapper, Page, PageTableFlags, Size4KiB};
use x86_64::structures::paging::mapper::MapToError;
use x86_64::VirtAddr;
use super::Locked;

pub const HEAP_START: u64 = 0x_4444_4444_0000;
pub const HEAP_SIZE: u64 = 100 * 1024; // 100 KiB

#[cfg(feature = "f_ll_alloc")]
use linked_list_allocator::LockedHeap;

#[global_allocator]
static ALLOCATOR: LockedHeap = LockedHeap::empty();
static ALLOCATOR: Locked<FixedSizeBlockAllocator> = Locked::new(FixedSizeBlockAllocator::new());

const BLOCK_SIZES: &[usize] = &[8, 16, 32, 64, 128, 256, 512, 1024, 2048];

pub struct FixedSizeBlockAllocator {
    list_heads: [Option<&'static mut FixedSizeBlockNode>; BLOCK_SIZES.len()],
    #[cfg(feature = "f_ll_alloc")]
    fallback_allocator: linked_list_allocator::Heap,
}

pub struct FixedSizeBlockNode {
    next: Option<&'static mut FixedSizeBlockNode>,
}

impl FixedSizeBlockAllocator {
    pub const fn new() -> Self {
        const EMPTY: Option<&'static mut FixedSizeBlockNode> = None;
        Self {
            list_heads: [EMPTY; BLOCK_SIZES.len()],
            #[cfg(feature = "f_ll_alloc")]
            fallback_allocator: linked_list_allocator::Heap::empty(),
        }
    }

    pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
        self.fallback_allocator.init(heap_start, heap_size);
    }

    fn fallback_alloc(&mut self, layout: core::alloc::Layout) -> *mut u8 {
        match self.fallback_allocator.allocate_first_fit(layout) {
            Ok(ptr) => ptr.as_ptr(),
            Err(_) => core::ptr::null_mut(),
        }
    }
}

unsafe impl GlobalAlloc for Locked<FixedSizeBlockAllocator> {
    unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
        let mut allocator = self.lock();
        match list_index(&layout) {
            Some(index) => {
                match allocator.list_heads[index].take() {
                    Some(node) => {
                        allocator.list_heads[index] = node.next.take();
                        node as *mut FixedSizeBlockNode as *mut u8
                    }
                    None => {
                        // no block exists in list => allocate new block
                        let block_size = BLOCK_SIZES[index];
                        // only works if all block sizes are a power of 2
                        let block_align = block_size;
                        let layout = core::alloc::Layout::from_size_align(block_size, block_align)
                            .unwrap();
                        allocator.fallback_alloc(layout)
                    }
                }
            }
            None => allocator.fallback_alloc(layout),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
        let mut allocator = self.lock();
        match list_index(&layout) {
            Some(index) => {
                let new_node = FixedSizeBlockNode {
                    next: allocator.list_heads[index].take(),
                };
                // verify that block has size and alignment required for storing node
                assert!(mem::size_of::<FixedSizeBlockNode>() <= BLOCK_SIZES[index]);
                assert!(mem::align_of::<FixedSizeBlockNode>() <= BLOCK_SIZES[index]);
                let new_node_ptr = ptr as *mut FixedSizeBlockNode;
                new_node_ptr.write(new_node);
                allocator.list_heads[index] = Some(&mut *new_node_ptr);
            }
            None => {
                let ptr = NonNull::new(ptr).unwrap();
                allocator.fallback_allocator.deallocate(ptr, layout);
            }
        }
    }
}

fn list_index(layout: &core::alloc::Layout) -> Option<usize> {
    let required_block_size = layout.size().max(layout.align());
    BLOCK_SIZES.iter().position(|&s| s >= required_block_size)
}

pub fn init_heap(
    mapper: &mut impl Mapper<Size4KiB>,

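The allocation path above hinges on list_index: it picks the smallest BLOCK_SIZES entry that covers both the requested size and the alignment, and anything larger than 2048 bytes returns None so alloc falls back to the linked-list allocator. A rough illustration with hypothetical values, not part of the commit:

    use core::alloc::Layout;

    // 48 bytes with 8-byte alignment rounds up to the 64-byte class (index 3);
    // new blocks for this class are requested with size 64 and alignment 64
    assert_eq!(list_index(&Layout::from_size_align(48, 8).unwrap()), Some(3));
    // 4 KiB exceeds every block size, so alloc() takes the fallback path
    assert_eq!(list_index(&Layout::from_size_align(4096, 8).unwrap()), None);
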
@@ -3,6 +3,22 @@ pub mod allocator;
use x86_64::structures::paging::{FrameAllocator, OffsetPageTable, PageTable, PhysFrame, Size4KiB};
use x86_64::{PhysAddr, VirtAddr};

pub struct Locked<A> {
    inner: spin::Mutex<A>,
}

impl<A> Locked<A> {
    pub const fn new(inner: A) -> Self {
        Locked {
            inner: spin::Mutex::new(inner),
        }
    }

    pub fn lock(&self) -> spin::MutexGuard<A> {
        self.inner.lock()
    }
}

pub unsafe fn init(phys_mem_offset: VirtAddr) -> OffsetPageTable<'static> {
    let level_4_table = active_level_4_table(phys_mem_offset);
    OffsetPageTable::new(level_4_table, phys_mem_offset)
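
Locked<A> is a thin spin::Mutex wrapper that gives the crate a local type to implement GlobalAlloc on, since the trait cannot be implemented directly for the foreign spin::Mutex type. The init_heap body is cut off in this view; in the usual flow for this kind of kernel heap setup the heap pages are mapped first and the region is then handed to the allocator, roughly like this hypothetical call, which is not shown in the diff:

    // sketch only: assumes the [HEAP_START, HEAP_START + HEAP_SIZE) range is already mapped
    unsafe {
        ALLOCATOR.lock().init(HEAP_START as usize, HEAP_SIZE as usize);
    }

After that, alloc-backed types such as Box and Vec are served by the FixedSizeBlockAllocator, or by LockedHeap when the f_ll_alloc feature is enabled.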