Update allocator code to also use `unsafe` blocks in unsafe functions

Required since the Rust 2024 edition, where the body of an `unsafe fn` is no longer treated as an implicit `unsafe` block, so each unsafe operation inside it must be wrapped explicitly.
Author: Philipp Oppermann
Date: 2025-03-27 15:32:15 +01:00
Parent: a26d6c1b54
Commit: 6de3aeaac3
2 changed files with 22 additions and 10 deletions
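To make the edition change concrete, here is a minimal standalone sketch (illustrative, not part of this commit):

// Illustrative example: under edition 2024, the body of an `unsafe fn` no
// longer counts as one implicit `unsafe` block, so each unsafe operation
// needs its own explicit block.
unsafe fn read_first(ptr: *const u8) -> u8 {
    // Editions up to 2021 accept a bare `ptr.read()` here; edition 2024
    // warns on it by default via the `unsafe_op_in_unsafe_fn` lint.
    unsafe { ptr.read() }
}

fn main() {
    let byte = 42u8;
    // Calling an `unsafe fn` still requires `unsafe` at the call site.
    let value = unsafe { read_first(&byte) };
    assert_eq!(value, 42);
}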


@@ -44,7 +44,9 @@ impl FixedSizeBlockAllocator {
     /// heap bounds are valid and that the heap is unused. This method must be
     /// called only once.
     pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
-        self.fallback_allocator.init(heap_start, heap_size);
+        unsafe {
+            self.fallback_allocator.init(heap_start, heap_size);
+        }
     }

     /// Allocates using the fallback allocator.
@@ -91,12 +93,16 @@ unsafe impl GlobalAlloc for Locked<FixedSizeBlockAllocator> {
                     assert!(mem::size_of::<ListNode>() <= BLOCK_SIZES[index]);
                     assert!(mem::align_of::<ListNode>() <= BLOCK_SIZES[index]);
                     let new_node_ptr = ptr as *mut ListNode;
-                    new_node_ptr.write(new_node);
-                    allocator.list_heads[index] = Some(&mut *new_node_ptr);
+                    unsafe {
+                        new_node_ptr.write(new_node);
+                        allocator.list_heads[index] = Some(&mut *new_node_ptr);
+                    }
                 }
                 None => {
                     let ptr = NonNull::new(ptr).unwrap();
-                    allocator.fallback_allocator.deallocate(ptr, layout);
+                    unsafe {
+                        allocator.fallback_allocator.deallocate(ptr, layout);
+                    }
                 }
             }
         }
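The explicit blocks above compile unchanged on older editions, so a crate that has not yet moved to edition 2024 can opt into the same requirement early. A minimal sketch, assuming a crate-root lint attribute (illustrative, not part of this commit):

// Enabling the 2024-edition behavior ahead of time on an older edition.
#![deny(unsafe_op_in_unsafe_fn)]

unsafe fn zero(ptr: *mut u8) {
    // With the lint denied, omitting this explicit block is a compile error.
    unsafe { ptr.write(0) }
}

fn main() {
    let mut byte = 1u8;
    unsafe { zero(&mut byte) };
    assert_eq!(byte, 0);
}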


@@ -1,4 +1,4 @@
-use super::{align_up, Locked};
+use super::{Locked, align_up};
 use alloc::alloc::{GlobalAlloc, Layout};
 use core::{mem, ptr};

@@ -39,7 +39,9 @@ impl LinkedListAllocator {
     /// heap bounds are valid and that the heap is unused. This method must be
     /// called only once.
    pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
-        self.add_free_region(heap_start, heap_size);
+        unsafe {
+            self.add_free_region(heap_start, heap_size);
+        }
     }

     /// Adds the given memory region to the front of the list.
@@ -52,8 +54,10 @@ impl LinkedListAllocator {
         let mut node = ListNode::new(size);
         node.next = self.head.next.take();
         let node_ptr = addr as *mut ListNode;
-        node_ptr.write(node);
-        self.head.next = Some(&mut *node_ptr)
+        unsafe {
+            node_ptr.write(node);
+            self.head.next = Some(&mut *node_ptr);
+        }
     }

     /// Looks for a free region with the given size and alignment and removes
@@ -128,7 +132,9 @@ unsafe impl GlobalAlloc for Locked<LinkedListAllocator> {
             let alloc_end = alloc_start.checked_add(size).expect("overflow");
             let excess_size = region.end_addr() - alloc_end;
             if excess_size > 0 {
-                allocator.add_free_region(alloc_end, excess_size);
+                unsafe {
+                    allocator.add_free_region(alloc_end, excess_size);
+                }
             }
             alloc_start as *mut u8
         } else {
@@ -140,6 +146,6 @@ unsafe impl GlobalAlloc for Locked<LinkedListAllocator> {
         // perform layout adjustments
         let (size, _) = LinkedListAllocator::size_align(layout);

-        self.lock().add_free_region(ptr as usize, size)
+        unsafe { self.lock().add_free_region(ptr as usize, size) }
     }
 }
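Migration note: the `unsafe_op_in_unsafe_fn` lint carries a suggested fix, so `cargo fix --edition` can typically insert these `unsafe { ... }` wrappers automatically before the edition field in Cargo.toml is bumped; the remaining manual work is reviewing that each wrapped operation's safety conditions actually hold.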