Call rescue when low memory in heap allocator

Authored by Yuke Peng on 2024-10-06 13:55:43 +08:00, committed by Tate, Hongliang Tian
parent 96efd62007
commit 909fb23f8c
2 changed files with 101 additions and 67 deletions
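
In short, `Heap::allocate` now also reports how many bytes remain in the backing buddy allocator, and the global allocator calls a best-effort rescue whenever that figure drops to four pages or fewer, instead of rescuing only after an allocation has already failed. The control flow can be summarized by the following standalone sketch (simplified types; `PAGE_SIZE`, the stub `Heap`, and the free functions here are illustrative assumptions rather than the kernel's actual definitions):

const PAGE_SIZE: usize = 4096;

// Stand-in for the kernel heap: reports the allocation address plus the bytes
// still free in the backing buddy allocator, mirroring the new return type.
struct Heap {
    total_bytes: usize,
    used_bytes: usize,
}

impl Heap {
    fn allocate(&mut self, size: usize) -> Result<(*mut u8, usize), ()> {
        if self.used_bytes + size > self.total_bytes {
            return Err(());
        }
        self.used_bytes += size;
        // The real allocator hands out a pointer into slab/buddy memory.
        Ok((core::ptr::null_mut(), self.total_bytes - self.used_bytes))
    }
}

// Best-effort: request more frames when the buddy allocator is almost empty.
// Failure is tolerated because the current allocation already succeeded.
fn rescue_if_low_memory(remain_bytes: usize) {
    if remain_bytes <= PAGE_SIZE * 4 {
        // Ask the page allocator for more memory here.
    }
}

fn alloc(heap: &mut Heap, size: usize) -> *mut u8 {
    // Fast path: the allocation succeeded; rescue opportunistically if memory is low.
    if let Ok((ptr, remain_bytes)) = heap.allocate(size) {
        rescue_if_low_memory(remain_bytes);
        return ptr;
    }
    // Slow path: the heap is exhausted; the real implementation calls `rescue`
    // to grow the heap and then retries the allocation.
    core::ptr::null_mut()
}

fn main() {
    let mut heap = Heap { total_bytes: 8 * PAGE_SIZE, used_bytes: 0 };
    let _p = alloc(&mut heap, 6 * PAGE_SIZE); // leaves 2 pages free, so the check fires
}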


@@ -13,7 +13,8 @@ use super::paddr_to_vaddr;
use crate::{
mm::{page::allocator::PAGE_ALLOCATOR, PAGE_SIZE},
prelude::*,
sync::{LocalIrqDisabled, SpinLock, SpinLockGuard},
sync::SpinLock,
trap::disable_local,
Error,
};
@@ -54,25 +55,93 @@ impl LockedHeapWithRescue {
self.heap
.call_once(|| SpinLock::new(Heap::new(start as usize, size)));
}
/// SAFETY: The range [start, start + size) must be a valid memory region.
unsafe fn add_to_heap(&self, start: usize, size: usize) {
self.heap
.get()
.unwrap()
.disable_irq()
.lock()
.add_memory(start, size);
}
fn rescue_if_low_memory(&self, remain_bytes: usize, layout: Layout) {
if remain_bytes <= PAGE_SIZE * 4 {
debug!(
"Low memory in heap allocator, try to call rescue. Remaining bytes: {:x?}",
remain_bytes
);
// We don't care whether the rescue succeeds or not, since heap allocation can still proceed.
let _ = self.rescue(&layout);
}
}
fn rescue(&self, layout: &Layout) -> Result<()> {
const MIN_NUM_FRAMES: usize = 0x4000000 / PAGE_SIZE; // 64MB
debug!("enlarge heap, layout = {:?}", layout);
let mut num_frames = {
let align = PAGE_SIZE.max(layout.align());
debug_assert!(align % PAGE_SIZE == 0);
let size = layout.size().align_up(align);
size / PAGE_SIZE
};
let allocation_start = {
let mut page_allocator = PAGE_ALLOCATOR.get().unwrap().lock();
if num_frames >= MIN_NUM_FRAMES {
page_allocator.alloc(num_frames).ok_or(Error::NoMemory)?
} else {
match page_allocator.alloc(MIN_NUM_FRAMES) {
None => page_allocator.alloc(num_frames).ok_or(Error::NoMemory)?,
Some(start) => {
num_frames = MIN_NUM_FRAMES;
start
}
}
}
};
// FIXME: the alloc function internally allocates heap memory (inside FrameAllocator).
// So if the heap has nearly run out, allocating frames will fail too.
let vaddr = paddr_to_vaddr(allocation_start * PAGE_SIZE);
// SAFETY: the frames are allocated from the FrameAllocator and are never deallocated,
// so the address is always valid.
unsafe {
debug!(
"add frames to heap: addr = 0x{:x}, size = 0x{:x}",
vaddr,
PAGE_SIZE * num_frames
);
self.add_to_heap(vaddr, PAGE_SIZE * num_frames);
}
Ok(())
}
}
unsafe impl GlobalAlloc for LockedHeapWithRescue {
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
let mut heap = self.heap.get().unwrap().disable_irq().lock();
let _guard = disable_local();
if let Ok(allocation) = heap.allocate(layout) {
return allocation as *mut u8;
let res = self.heap.get().unwrap().lock().allocate(layout);
if let Ok((allocation, remain_bytes)) = res {
self.rescue_if_low_memory(remain_bytes, layout);
return allocation;
}
// Avoid locking self.heap when calling rescue.
if rescue(&mut heap, &layout).is_err() {
if self.rescue(&layout).is_err() {
return core::ptr::null_mut::<u8>();
}
heap.allocate(layout)
.map_or(core::ptr::null_mut::<u8>(), |allocation| {
allocation as *mut u8
})
let res = self.heap.get().unwrap().lock().allocate(layout);
if let Ok((allocation, remain_bytes)) = res {
self.rescue_if_low_memory(remain_bytes, layout);
allocation
} else {
core::ptr::null_mut::<u8>()
}
}
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
@@ -82,49 +151,6 @@ unsafe impl GlobalAlloc for LockedHeapWithRescue {
.unwrap()
.disable_irq()
.lock()
.deallocate(ptr as usize, layout)
.deallocate(ptr, layout)
}
}
fn rescue(heap: &mut SpinLockGuard<Heap, LocalIrqDisabled>, layout: &Layout) -> Result<()> {
const MIN_NUM_FRAMES: usize = 0x4000000 / PAGE_SIZE; // 64MB
debug!("enlarge heap, layout = {:?}", layout);
let mut num_frames = {
let align = PAGE_SIZE.max(layout.align());
debug_assert!(align % PAGE_SIZE == 0);
let size = layout.size().align_up(align);
size / PAGE_SIZE
};
let allocation_start = {
let mut page_allocator = PAGE_ALLOCATOR.get().unwrap().lock();
if num_frames >= MIN_NUM_FRAMES {
page_allocator.alloc(num_frames).ok_or(Error::NoMemory)?
} else {
match page_allocator.alloc(MIN_NUM_FRAMES) {
None => page_allocator.alloc(num_frames).ok_or(Error::NoMemory)?,
Some(start) => {
num_frames = MIN_NUM_FRAMES;
start
}
}
}
};
// FIXME: the alloc function internally allocates heap memory(inside FrameAllocator).
// So if the heap is nearly run out, allocating frame will fail too.
let vaddr = paddr_to_vaddr(allocation_start * PAGE_SIZE);
// SAFETY: the frame is allocated from FrameAllocator and never be deallocated,
// so the addr is always valid.
unsafe {
debug!(
"add frames to heap: addr = 0x{:x}, size = 0x{:x}",
vaddr,
PAGE_SIZE * num_frames
);
heap.add_memory(vaddr, PAGE_SIZE * num_frames);
}
Ok(())
}
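
For reference, the frame-request strategy inside `rescue` can be isolated as follows: it asks the page allocator for at least a 64 MiB run of frames so the heap will not need rescuing again soon, and falls back to exactly what the failing layout requires if the larger request cannot be satisfied. A minimal sketch under simplified assumptions (the `PageAlloc` trait is a hypothetical stand-in for the kernel's `PAGE_ALLOCATOR`, not its real interface):

const PAGE_SIZE: usize = 4096;
const MIN_NUM_FRAMES: usize = 0x4000000 / PAGE_SIZE; // 64 MiB worth of frames

// Hypothetical stand-in for the global frame allocator.
trait PageAlloc {
    // Returns the first frame number of a contiguous run, if one is available.
    fn alloc(&mut self, num_frames: usize) -> Option<usize>;
}

// Returns `(start_frame, num_frames)` for the run that was actually obtained.
fn request_frames(pa: &mut dyn PageAlloc, needed_frames: usize) -> Option<(usize, usize)> {
    if needed_frames >= MIN_NUM_FRAMES {
        // The layout alone already needs a large run; request exactly that.
        pa.alloc(needed_frames).map(|start| (start, needed_frames))
    } else {
        // Prefer a 64 MiB chunk, falling back to the minimal request.
        match pa.alloc(MIN_NUM_FRAMES) {
            Some(start) => Some((start, MIN_NUM_FRAMES)),
            None => pa.alloc(needed_frames).map(|start| (start, needed_frames)),
        }
    }
}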


@@ -152,38 +152,45 @@ impl Heap {
}
/// Allocates a chunk of the given size with the given alignment. Returns a pointer to the
/// beginning of that chunk if it was successful. Else it returns `Err`.
/// beginning of that chunk and the remaining bytes in the buddy system allocator if it was successful.
/// Else it returns `Err`.
///
/// This function finds the slab of lowest size which can still accommodate the given chunk.
/// The runtime is `O(1)` for chunks of size <= 4096, and `O(n)` when the chunk size is > 4096.
pub fn allocate(&mut self, layout: Layout) -> Result<usize, AllocError> {
match Heap::layout_to_allocator(&layout) {
pub fn allocate(&mut self, layout: Layout) -> Result<(*mut u8, usize), AllocError> {
let addr = match Heap::layout_to_allocator(&layout) {
HeapAllocator::Slab64Bytes => self
.slab_64_bytes
.allocate(layout, &mut self.buddy_allocator),
.allocate(layout, &mut self.buddy_allocator)?,
HeapAllocator::Slab128Bytes => self
.slab_128_bytes
.allocate(layout, &mut self.buddy_allocator),
.allocate(layout, &mut self.buddy_allocator)?,
HeapAllocator::Slab256Bytes => self
.slab_256_bytes
.allocate(layout, &mut self.buddy_allocator),
.allocate(layout, &mut self.buddy_allocator)?,
HeapAllocator::Slab512Bytes => self
.slab_512_bytes
.allocate(layout, &mut self.buddy_allocator),
.allocate(layout, &mut self.buddy_allocator)?,
HeapAllocator::Slab1024Bytes => self
.slab_1024_bytes
.allocate(layout, &mut self.buddy_allocator),
.allocate(layout, &mut self.buddy_allocator)?,
HeapAllocator::Slab2048Bytes => self
.slab_2048_bytes
.allocate(layout, &mut self.buddy_allocator),
.allocate(layout, &mut self.buddy_allocator)?,
HeapAllocator::Slab4096Bytes => self
.slab_4096_bytes
.allocate(layout, &mut self.buddy_allocator),
.allocate(layout, &mut self.buddy_allocator)?,
HeapAllocator::BuddyAllocator => self
.buddy_allocator
.alloc(layout)
.map(|ptr| ptr.as_ptr() as usize)
.map_err(|_| AllocError),
}
.map_err(|_| AllocError)?,
};
Ok((
addr as *mut u8,
self.buddy_allocator.stats_total_bytes() - self.buddy_allocator.stats_alloc_actual(),
))
}
/// Frees the given allocation. `ptr` must be a pointer returned
@@ -197,7 +204,8 @@ impl Heap {
/// # Safety
/// This function is unsafe because it can cause undefined behavior if the
/// given address is invalid.
pub unsafe fn deallocate(&mut self, ptr: usize, layout: Layout) {
pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
let ptr = ptr as usize;
match Heap::layout_to_allocator(&layout) {
HeapAllocator::Slab64Bytes => self.slab_64_bytes.deallocate(ptr),
HeapAllocator::Slab128Bytes => self.slab_128_bytes.deallocate(ptr),
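
The remaining-bytes figure returned above is simply the difference between the buddy allocator's statistics counters. A small user-space sketch of the same computation, assuming the `buddy_system_allocator` crate (only the two stats calls appear in the diff, so the exact crate and version backing `self.buddy_allocator` are an assumption here):

use buddy_system_allocator::Heap;
use core::alloc::Layout;

// Bytes handed to the buddy system that it has not yet carved out for
// allocations, i.e. `stats_total_bytes() - stats_alloc_actual()`.
fn remaining_bytes<const ORDER: usize>(buddy: &Heap<ORDER>) -> usize {
    buddy.stats_total_bytes() - buddy.stats_alloc_actual()
}

fn main() {
    // Back the buddy allocator with a leaked 64 KiB buffer.
    let backing = Box::leak(vec![0u8; 64 * 1024].into_boxed_slice());
    let mut buddy = Heap::<32>::empty();
    unsafe { buddy.init(backing.as_mut_ptr() as usize, backing.len()) };

    let layout = Layout::from_size_align(4096, 8).unwrap();
    let _chunk = buddy.alloc(layout).expect("allocation failed");
    println!("remaining bytes: {}", remaining_bytes(&buddy));
}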