Use slab allocator to allocate heap

Yuke Peng, 2024-09-10 14:41:51 +08:00; committed by Tate, Hongliang Tian
parent 228cfe2262
commit e995ed3ebc
3 changed files with 42 additions and 27 deletions
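In short: the kernel heap's backing store changes from buddy_system_allocator::Heap<32>, which is const-constructible and so could be built directly inside the static, to slab_allocator::Heap, whose constructor takes the backing memory region up front and therefore cannot run in a const context. The diff bridges this with spin::Once: the static holds an empty Once, and init() fills it in on first use. A minimal sketch of that pattern, using the slab_allocator method names exactly as they appear in the diff below (new, allocate) and spin::Mutex as a stand-in for OSTD's IRQ-disabling SpinLock; LockedHeap and its methods are illustrative names, not the committed code:

    use core::alloc::Layout;

    use slab_allocator::Heap;
    use spin::{Mutex, Once};

    struct LockedHeap {
        // `Heap::new(start, size)` needs a memory region, so construction
        // is deferred until `init` runs; `Once` makes that sound.
        heap: Once<Mutex<Heap>>,
    }

    impl LockedHeap {
        // A `static` requires a `const fn` constructor; `Once::new()`
        // satisfies that even though the heap itself cannot be built yet.
        const fn new() -> Self {
            Self { heap: Once::new() }
        }

        /// SAFETY: `[start, start + size)` must be valid, unused memory.
        unsafe fn init(&self, start: *const u8, size: usize) {
            self.heap
                .call_once(|| Mutex::new(unsafe { Heap::new(start as usize, size) }));
        }

        fn alloc(&self, layout: Layout) -> *mut u8 {
            self.heap
                .get()
                .expect("heap used before init") // mirrors the diff's unwrap()
                .lock()
                .allocate(layout)
                .map_or(core::ptr::null_mut(), |addr| addr as *mut u8)
        }
    }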

Cargo.lock (generated)

@@ -312,12 +312,9 @@ dependencies = [
 [[package]]
 name = "buddy_system_allocator"
-version = "0.9.0"
+version = "0.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43f9365b6b0c9e1663ca4ca9440c00eda46bc85a3407070be8b5e0d8d1f29629"
-dependencies = [
- "spin 0.9.8",
-]
+checksum = "a7913f22349ffcfc6ca0ca9a656ec26cfbba538ed49c31a273dff2c5d1ea83d9"
 
 [[package]]
 name = "bytemuck"
@@ -1087,6 +1084,7 @@ dependencies = [
  "ostd-test",
  "owo-colors 3.5.0",
  "rsdp",
+ "slab_allocator",
  "spin 0.9.8",
  "static_assertions",
  "tdx-guest",
@@ -1322,6 +1320,14 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "slab_allocator"
+version = "0.3.1"
+source = "git+https://github.com/arceos-org/slab_allocator/?rev=3c13499#3c13499d664ccd36f66786985b753340aea57f5a"
+dependencies = [
+ "buddy_system_allocator",
+]
+
 [[package]]
 name = "smallvec"
 version = "1.13.2"

ostd/Cargo.toml

@@ -17,7 +17,7 @@ targets = ["x86_64-unknown-none"]
 [dependencies]
 align_ext = { path = "libs/align_ext", version = "0.1.0" }
 bit_field = "0.10.1"
-buddy_system_allocator = "0.9.0"
+buddy_system_allocator = { version = "0.10", default-features = false, features = ["alloc"] }
 bitflags = "1.3"
 bitvec = { version = "1.0", default-features = false, features = ["alloc"] }
 cfg-if = "1.0"
@@ -42,6 +42,7 @@ static_assertions = "1.1.0"
 unwinding = { version = "0.2.2", default-features = false, features = ["fde-gnu-eh-frame-hdr", "hide-trace", "panic", "personality", "unwinder"] }
 volatile = { version = "0.4.5", features = ["unstable"] }
 xarray = { git = "https://github.com/asterinas/xarray", version = "0.1.0" }
+slab_allocator = { git = "https://github.com/arceos-org/slab_allocator/", rev = "3c13499" , version = "0.3.1" }
 
 [target.x86_64-unknown-none.dependencies]
 x86_64 = "0.14.2"
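Two details worth noting in the dependency wiring: slab_allocator is pinned to a specific git revision (rev 3c13499) rather than a crates.io release, and buddy_system_allocator does not go away; ostd keeps its direct dependency (bumped to 0.10), and the Cargo.lock hunk above shows the slab_allocator crate depending on it as well. That reflects the usual two-level design: a slab allocator serves small, power-of-two size classes from per-class free lists and falls back to a general allocator for larger requests. A conceptual sketch of that routing, not the crate's actual code; the 64 B and 4 KiB class bounds are typical values assumed here:

    use core::alloc::Layout;

    /// Where a slab-over-buddy heap would serve a request from.
    enum Route {
        /// A per-size-class slab free list (fast path).
        Slab { class: usize },
        /// Too large for any slab class; handed to the buddy allocator.
        Buddy,
    }

    fn route(layout: Layout) -> Route {
        // Slab classes are power-of-two sized, so round the request up.
        let size = layout
            .size()
            .max(layout.align())
            .next_power_of_two()
            .max(64); // assumed smallest class
        // e.g. route(Layout::new::<u64>()) yields Route::Slab { class: 64 }
        if size <= 4096 {
            Route::Slab { class: size }
        } else {
            Route::Buddy
        }
    }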

ostd/src/mm/heap_allocator.rs

@@ -1,13 +1,11 @@
 // SPDX-License-Identifier: MPL-2.0
 
-use core::{
-    alloc::{GlobalAlloc, Layout},
-    ptr::NonNull,
-};
+use core::alloc::{GlobalAlloc, Layout};
 
 use align_ext::AlignExt;
-use buddy_system_allocator::Heap;
 use log::debug;
+use slab_allocator::Heap;
+use spin::Once;
 
 use super::paddr_to_vaddr;
 use crate::{
@@ -19,7 +17,7 @@ use crate::{
 };
 
 #[global_allocator]
-static HEAP_ALLOCATOR: LockedHeapWithRescue<32> = LockedHeapWithRescue::new(rescue);
+static HEAP_ALLOCATOR: LockedHeapWithRescue = LockedHeapWithRescue::new(rescue);
 
 #[alloc_error_handler]
 pub fn handle_alloc_error(layout: core::alloc::Layout) -> ! {
@@ -28,49 +26,55 @@ pub fn handle_alloc_error(layout: core::alloc::Layout) -> ! {
 const INIT_KERNEL_HEAP_SIZE: usize = PAGE_SIZE * 256;
-static mut HEAP_SPACE: [u8; INIT_KERNEL_HEAP_SIZE] = [0; INIT_KERNEL_HEAP_SIZE];
+
+#[repr(align(4096))]
+struct InitHeapSpace([u8; INIT_KERNEL_HEAP_SIZE]);
+
+static mut HEAP_SPACE: InitHeapSpace = InitHeapSpace([0; INIT_KERNEL_HEAP_SIZE]);
 
 pub fn init() {
     // SAFETY: The HEAP_SPACE is a static memory range, so it's always valid.
     unsafe {
-        HEAP_ALLOCATOR.init(HEAP_SPACE.as_ptr(), INIT_KERNEL_HEAP_SIZE);
+        HEAP_ALLOCATOR.init(HEAP_SPACE.0.as_ptr(), INIT_KERNEL_HEAP_SIZE);
     }
 }
 
-struct LockedHeapWithRescue<const ORDER: usize> {
-    heap: SpinLock<Heap<ORDER>>,
+struct LockedHeapWithRescue {
+    heap: Once<SpinLock<Heap>>,
     rescue: fn(&Self, &Layout) -> Result<()>,
 }
 
-impl<const ORDER: usize> LockedHeapWithRescue<ORDER> {
+impl LockedHeapWithRescue {
     /// Creates an new heap
     pub const fn new(rescue: fn(&Self, &Layout) -> Result<()>) -> Self {
         Self {
-            heap: SpinLock::new(Heap::<ORDER>::new()),
+            heap: Once::new(),
             rescue,
         }
     }
 
     /// SAFETY: The range [start, start + size) must be a valid memory region.
     pub unsafe fn init(&self, start: *const u8, size: usize) {
-        self.heap.disable_irq().lock().init(start as usize, size);
+        self.heap
+            .call_once(|| SpinLock::new(Heap::new(start as usize, size)));
     }
 
     /// SAFETY: The range [start, start + size) must be a valid memory region.
     unsafe fn add_to_heap(&self, start: usize, size: usize) {
         self.heap
+            .get()
+            .unwrap()
             .disable_irq()
             .lock()
-            .add_to_heap(start, start + size)
+            .add_memory(start, size);
     }
 }
 
-unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeapWithRescue<ORDER> {
+unsafe impl GlobalAlloc for LockedHeapWithRescue {
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
         let _guard = disable_local();
 
-        if let Ok(allocation) = self.heap.lock().alloc(layout) {
-            return allocation.as_ptr();
+        if let Ok(allocation) = self.heap.get().unwrap().lock().allocate(layout) {
+            return allocation as *mut u8;
         }
 
         // Avoid locking self.heap when calling rescue.
@@ -80,10 +84,12 @@ unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeapWithRescue<ORDER> {
 
         let res = self
             .heap
+            .get()
+            .unwrap()
             .lock()
-            .alloc(layout)
+            .allocate(layout)
             .map_or(core::ptr::null_mut::<u8>(), |allocation| {
-                allocation.as_ptr()
+                allocation as *mut u8
             });
         res
     }
@@ -91,13 +97,15 @@ unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeapWithRescue<ORDER> {
 
     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
         debug_assert!(ptr as usize != 0);
         self.heap
+            .get()
+            .unwrap()
             .disable_irq()
             .lock()
-            .dealloc(NonNull::new_unchecked(ptr), layout)
+            .deallocate(ptr as usize, layout)
     }
 }
 
-fn rescue<const ORDER: usize>(heap: &LockedHeapWithRescue<ORDER>, layout: &Layout) -> Result<()> {
+fn rescue(heap: &LockedHeapWithRescue, layout: &Layout) -> Result<()> {
     const MIN_NUM_FRAMES: usize = 0x4000000 / PAGE_SIZE; // 64MB
     debug!("enlarge heap, layout = {:?}", layout);