Implementation of a segmented heap #84

Open
wants to merge 1 commit into base: main
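This PR replaces the single `Heap` type with a `SegmentedHeap<const SEGMENTS: usize>` backed by an array of `HoleList`s, and switches `allocate_first_fit` from `Result<_, ()>` to `Option`. A minimal caller-side sketch of the new API follows; it is not taken from the PR, it assumes `SegmentedHeap` is in scope, and the static regions, their sizes, and the function name are illustrative.

    use core::alloc::Layout;
    use core::mem::MaybeUninit;
    use core::ptr::addr_of_mut;

    // Hypothetical backing storage; any exclusively owned 'static memory works.
    static mut REGION_A: [MaybeUninit<u8>; 4096] = [MaybeUninit::uninit(); 4096];
    static mut REGION_B: [MaybeUninit<u8>; 8192] = [MaybeUninit::uninit(); 8192];

    fn example() {
        // SAFETY: the statics are used for nothing else and live for the 'static lifetime.
        let a: &'static mut [MaybeUninit<u8>] = unsafe { &mut *addr_of_mut!(REGION_A) };
        let b: &'static mut [MaybeUninit<u8>] = unsafe { &mut *addr_of_mut!(REGION_B) };

        let mut heap: SegmentedHeap<2> = SegmentedHeap::empty();
        heap.init_from_slices([a, b]);

        let layout = Layout::from_size_align(64, 8).unwrap();
        if let Some(ptr) = heap.allocate_first_fit(layout) {
            // ... use the allocation ...
            // SAFETY: `ptr` came from `allocate_first_fit` with this exact layout.
            unsafe { heap.deallocate(ptr, layout) };
        }
    }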
18 changes: 11 additions & 7 deletions src/hole.rs
@@ -388,17 +388,21 @@ impl HoleList {
// NOTE: We could probably replace this with an `Option` instead of a `Result` in a later
// release to remove this clippy warning
#[allow(clippy::result_unit_err)]
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<(NonNull<u8>, Layout), ()> {
let aligned_layout = Self::align_layout(layout).map_err(|_| ())?;
let mut cursor = self.cursor().ok_or(())?;
pub fn allocate_first_fit(&mut self, layout: Layout) -> Option<(NonNull<u8>, Layout)> {
let aligned_layout = Self::align_layout(layout).ok()?;
let mut cursor = self.cursor()?;

loop {
match cursor.split_current(aligned_layout) {
Ok((ptr, _len)) => {
return Ok((NonNull::new(ptr).ok_or(())?, aligned_layout));
return Some((
// SAFETY: This cannot be null, as it is derived from a NonNull pointer in `split_current`.
unsafe { NonNull::new_unchecked(ptr) },
aligned_layout,
));
}
Err(curs) => {
cursor = curs.next().ok_or(())?;
cursor = curs.next()?;
}
}
}
@@ -419,7 +423,7 @@ impl HoleList {
/// The function performs exactly the same layout adjustments as [`allocate_first_fit`] and
/// returns the aligned layout.
pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) -> Layout {
let aligned_layout = Self::align_layout(layout).unwrap();
let aligned_layout = Self::align_layout(layout).expect("This should never error, as the validity was checked during allocation.");
deallocate(self, ptr.as_ptr(), aligned_layout.size());
aligned_layout
}
@@ -689,7 +693,7 @@ pub mod test {
#[test]
fn cursor() {
let mut heap = new_heap();
let curs = heap.holes.cursor().unwrap();
let curs = heap.holes[0].cursor().unwrap();
// This is the "dummy" node
assert_eq!(curs.previous().size, 0);
// This is the "full" heap
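The first hunk above changes `HoleList::allocate_first_fit` to return `Option<(NonNull<u8>, Layout)>` instead of `Result<(NonNull<u8>, Layout), ()>`, so in-crate callers can thread the result through with `?` in `Option`-returning contexts. A short sketch, assuming `HoleList`, `Layout`, and `NonNull` are in scope; the helper function is illustrative, not part of the diff.

    fn first_fit_ptr(list: &mut HoleList, layout: Layout) -> Option<NonNull<u8>> {
        let (ptr, _aligned_layout) = list.allocate_first_fit(layout)?;
        Some(ptr)
    }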
230 changes: 160 additions & 70 deletions src/lib.rs
@@ -28,36 +28,180 @@ pub mod hole;
mod test;

/// A fixed size heap backed by a linked list of free memory blocks.
pub struct Heap {
/*pub struct Heap {
used: usize,
holes: HoleList,
} */
pub struct SegmentedHeap<const SEGMENTS: usize> {
used: usize,
holes: [HoleList; SEGMENTS]
}

// `Heap` must exist in all configurations: the `impl Heap` blocks and `unsafe impl Send for Heap`
// below rely on it, so the alias is not gated behind `cfg(fuzzing)`.
pub type Heap = SegmentedHeap<1>;

#[cfg(any(test, fuzzing))]
impl Heap {
pub fn debug(&mut self) {
println!(
"bottom: {:?}, top: {:?}, size: {}, pending: {}",
self.bottom(),
self.top(),
self.size(),
self.holes.first.size,
self.holes[0].first.size,
);
self.holes.debug();
self.holes[0].debug();
}
}

unsafe impl Send for Heap {}

impl Heap {

impl<const SEGMENTS: usize> SegmentedHeap<SEGMENTS> {
/// Creates an empty heap. All allocate calls will return `None`.
pub const fn empty() -> Heap {
Heap {
pub const fn empty() -> Self {
Self {
used: 0,
holes: HoleList::empty(),
holes: unsafe { core::mem::zeroed() },
}
}

/// Allocates a chunk of the given size with the given alignment. Returns a pointer to the
/// beginning of that chunk if it was successful. Else it returns `None`.
/// This function scans the list of free memory blocks and uses the first block that is big
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
/// reasonably fast for small allocations.
pub fn allocate_first_fit(&mut self, layout: Layout) -> Option<NonNull<u8>> {
let allocation = self.holes.iter_mut()
// Applies the allocation function to each hole list and returns the first successful allocation
.find_map(|list| list.allocate_first_fit(layout));

if let Some((allocation, layout)) = allocation {
self.used += layout.size();
Some(allocation)
} else {
None
}
}

/// Frees the given allocation. `ptr` must be a pointer returned
/// by a call to the `allocate_first_fit` function with identical size and alignment.
///
/// This function walks the list of free memory blocks and inserts the freed block at the
/// correct place. If the freed block is adjacent to another free block, the blocks are merged
/// again. This operation is in `O(n)` since the list needs to be sorted by address.
///
/// # Safety
///
/// `ptr` must be a pointer returned by a call to the [`allocate_first_fit`] function with
/// identical layout. Undefined behavior may occur for invalid arguments. Panics when none of the hole lists contain the passed pointer.
pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
for hole_list in self.holes.iter_mut() {
// If the pointer is in the hole list's memory range, deallocate from that list and exit
if ptr.as_ptr() >= hole_list.bottom && ptr.as_ptr() < hole_list.top {
self.used -= hole_list.deallocate(ptr, layout).size();
return;
}
}
panic!("The pointer for deallocation does not match any of the allocator's memory ranges.");
}

/// Returns the size of the heap.
///
/// This is the size the heap is using for allocations, not necessarily the
/// total amount of bytes given to the heap. To determine the exact memory
/// boundaries, use [`bottom`][Self::bottom] and [`top`][Self::top].
pub fn size(&self) -> usize {
unsafe {
self.holes
.iter()
.map(|hole_list| hole_list.top.offset_from(hole_list.bottom) as usize)
.sum()
}
}

/// Returns the size of the used part of the heap
pub fn used(&self) -> usize {
self.used
}

/// Returns the size of the free part of the heap
pub fn free(&self) -> usize {
self.size() - self.used
}

/// Initializes an empty heap
///
/// Each `heap_bottom` pointer in `heaps` is automatically aligned, so the [`bottom()`][Self::bottom]
/// method might return a pointer that is larger than the given `heap_bottom` after construction.
///
/// Each given `heap_size` must be large enough to store the required
/// metadata, otherwise this function will panic. Depending on the
/// alignment of the corresponding `heap_bottom` pointer, the minimum size is between
/// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
///
/// The usable size for allocations will be truncated to the nearest
/// alignment of `align_of::<usize>`. Any extra bytes left at the end
/// will be reclaimed once sufficient additional space is given to
/// [`extend`][Heap::extend].
///
/// # Safety
///
/// This function must be called at most once and must only be used on an
/// empty heap.
///
/// The bottom addresses must be valid and the memory in each
/// `[heap_bottom, heap_bottom + heap_size)` range must not be used for anything else; the ranges must not overlap each other.
/// This function is unsafe because it can cause undefined behavior if the given addresses
/// are invalid.
///
/// The provided memory ranges must be valid for the `'static` lifetime.
pub unsafe fn init_from_pointers(&mut self, heaps: [(*mut u8, usize); SEGMENTS]) {
self.used = 0;
for (index, (heap_bottom, heap_size)) in heaps.iter().enumerate() {
self.holes[index] = HoleList::new(*heap_bottom, *heap_size)
}
}

/// Initialize an empty heap with provided memory.
///
/// The caller is responsible for procuring a region of raw memory that may be utilized by the
/// allocator. This might be done via any method such as (unsafely) taking a region from the
/// program's memory, from a mutable static, or by allocating and leaking such memory from
/// another allocator.
///
/// The latter approach may be especially useful if the underlying allocator does not perform
/// deallocation (e.g. a simple bump allocator). Then the overlaid linked-list-allocator can
/// provide memory reclamation.
///
/// The usable size for allocations will be truncated to the nearest
/// alignment of `align_of::<usize>`. Any extra bytes left at the end
/// will be reclaimed once sufficient additional space is given to
/// [`extend`][Heap::extend].
///
/// # Panics
///
/// This method panics if the heap is already initialized.
///
/// It also panics when the length of the given `mem` slice is not large enough to
/// store the required metadata. Depending on the alignment of the slice, the minimum
/// size is between `2 * size_of::<usize>` and `3 * size_of::<usize>`.
pub fn init_from_slices(&mut self, mut regions: [&'static mut [MaybeUninit<u8>]; SEGMENTS]) {
assert!(
self.holes.iter().all(|h| h.bottom.is_null()),
"The heap has already been initialized."
);
for (index, region) in regions.iter_mut().enumerate() {
// SAFETY: All initialization requires the bottom address to be valid, which implies it
// must not be 0. Initially the address is 0. The assertion above ensures that no
// initialization had been called before.
// The given address and size is valid according to the safety invariants of the mutable
// reference handed to us by the caller.
self.holes[index] = unsafe { HoleList::new((*region).as_mut_ptr().cast(), region.len()) };
}
}
}
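// Example (editor's sketch, not part of this diff): initializing a two-segment heap from raw
// pointers via `init_from_pointers`. The region names and sizes below are illustrative.
use core::ptr::addr_of_mut;

static mut LOW_REGION: [MaybeUninit<u8>; 4096] = [MaybeUninit::uninit(); 4096];
static mut HIGH_REGION: [MaybeUninit<u8>; 16384] = [MaybeUninit::uninit(); 16384];

fn init_two_segments() -> SegmentedHeap<2> {
    let mut heap = SegmentedHeap::<2>::empty();
    // SAFETY: both regions are 'static, used for nothing else, and do not overlap.
    unsafe {
        heap.init_from_pointers([
            (addr_of_mut!(LOW_REGION).cast::<u8>(), 4096),
            (addr_of_mut!(HIGH_REGION).cast::<u8>(), 16384),
        ]);
    }
    heap
}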

impl Heap {
/// Initializes an empty heap
///
/// The `heap_bottom` pointer is automatically aligned, so the [`bottom()`][Self::bottom]
@@ -86,7 +230,7 @@ impl Heap {
/// The provided memory range must be valid for the `'static` lifetime.
pub unsafe fn init(&mut self, heap_bottom: *mut u8, heap_size: usize) {
self.used = 0;
self.holes = HoleList::new(heap_bottom, heap_size);
self.holes[0] = HoleList::new(heap_bottom, heap_size);
}

/// Initialize an empty heap with provided memory.
@@ -153,7 +297,7 @@ impl Heap {
pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> Heap {
Heap {
used: 0,
holes: HoleList::new(heap_bottom, heap_size),
holes: [HoleList::new(heap_bottom, heap_size)],
}
}

@@ -170,55 +314,12 @@ impl Heap {
unsafe { Self::new(address, size) }
}

/// Allocates a chunk of the given size with the given alignment. Returns a pointer to the
/// beginning of that chunk if it was successful. Else it returns `None`.
/// This function scans the list of free memory blocks and uses the first block that is big
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
/// reasonably fast for small allocations.
//
// NOTE: We could probably replace this with an `Option` instead of a `Result` in a later
// release to remove this clippy warning
#[allow(clippy::result_unit_err)]
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
match self.holes.allocate_first_fit(layout) {
Ok((ptr, aligned_layout)) => {
self.used += aligned_layout.size();
Ok(ptr)
}
Err(err) => Err(err),
}
}

/// Frees the given allocation. `ptr` must be a pointer returned
/// by a call to the `allocate_first_fit` function with identical size and alignment.
///
/// This function walks the list of free memory blocks and inserts the freed block at the
/// correct place. If the freed block is adjacent to another free block, the blocks are merged
/// again. This operation is in `O(n)` since the list needs to be sorted by address.
///
/// # Safety
///
/// `ptr` must be a pointer returned by a call to the [`allocate_first_fit`] function with
/// identical layout. Undefined behavior may occur for invalid arguments.
pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
self.used -= self.holes.deallocate(ptr, layout).size();
}

/// Returns the bottom address of the heap.
///
/// The bottom pointer is automatically aligned, so the returned pointer
/// might be larger than the bottom pointer used for initialization.
pub fn bottom(&self) -> *mut u8 {
self.holes.bottom
}

/// Returns the size of the heap.
///
/// This is the size the heap is using for allocations, not necessarily the
/// total amount of bytes given to the heap. To determine the exact memory
/// boundaries, use [`bottom`][Self::bottom] and [`top`][Self::top].
pub fn size(&self) -> usize {
unsafe { self.holes.top.offset_from(self.holes.bottom) as usize }
self.holes[0].bottom
}

/// Return the top address of the heap.
Expand All @@ -227,17 +328,7 @@ impl Heap {
/// until there is enough room for metadata, but it still retains ownership
/// over memory from [`bottom`][Self::bottom] to the address returned.
pub fn top(&self) -> *mut u8 {
unsafe { self.holes.top.add(self.holes.pending_extend as usize) }
}

/// Returns the size of the used part of the heap
pub fn used(&self) -> usize {
self.used
}

/// Returns the size of the free part of the heap
pub fn free(&self) -> usize {
self.size() - self.used
unsafe { self.holes[0].top.add(self.holes[0].pending_extend as usize) }
}

/// Extends the size of the heap by creating a new hole at the end.
Expand All @@ -259,7 +350,7 @@ impl Heap {
/// by exactly `by` bytes, those bytes are still owned by the Heap for
/// later use.
pub unsafe fn extend(&mut self, by: usize) {
self.holes.extend(by);
self.holes[0].extend(by);
}
}

Expand All @@ -270,8 +361,8 @@ unsafe impl Allocator for LockedHeap {
return Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0));
}
match self.0.lock().allocate_first_fit(layout) {
Ok(ptr) => Ok(NonNull::slice_from_raw_parts(ptr, layout.size())),
Err(()) => Err(AllocError),
Some(ptr) => Ok(NonNull::slice_from_raw_parts(ptr, layout.size())),
None => Err(AllocError),
}
}

@@ -312,7 +403,7 @@ impl LockedHeap {
pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> LockedHeap {
LockedHeap(Spinlock::new(Heap {
used: 0,
holes: HoleList::new(heap_bottom, heap_size),
holes: [HoleList::new(heap_bottom, heap_size)],
}))
}
}
@@ -332,7 +423,6 @@ unsafe impl GlobalAlloc for LockedHeap {
self.0
.lock()
.allocate_first_fit(layout)
.ok()
.map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
}

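For downstream users, `Heap::allocate_first_fit` now returns `Option<NonNull<u8>>` instead of `Result<NonNull<u8>, ()>`, mirroring the `HoleList` change. A hedged before/after sketch, assuming `Heap` (the single-segment alias) is in scope; the wrapper function is illustrative.

    use core::alloc::Layout;
    use core::ptr::NonNull;

    // Before this PR (Result-based API):
    // fn grab(heap: &mut Heap, layout: Layout) -> Result<NonNull<u8>, ()> {
    //     heap.allocate_first_fit(layout)
    // }

    // With this PR the call returns Option; keep a Result only where the old error type is still required.
    fn grab(heap: &mut Heap, layout: Layout) -> Result<NonNull<u8>, ()> {
        heap.allocate_first_fit(layout).ok_or(())
    }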