From 6de3aeaac3b055aac66c6d7cd872ce5202cf7588 Mon Sep 17 00:00:00 2001
From: Philipp Oppermann
Date: Thu, 27 Mar 2025 15:32:15 +0100
Subject: [PATCH] Update allocator code to use unsafe also in unsafe functions

Required since Rust 2024
---
 src/allocator/fixed_size_block.rs | 14 ++++++++++----
 src/allocator/linked_list.rs      | 18 ++++++++++++------
 2 files changed, 22 insertions(+), 10 deletions(-)

diff --git a/src/allocator/fixed_size_block.rs b/src/allocator/fixed_size_block.rs
index fa3985a0..2938703a 100644
--- a/src/allocator/fixed_size_block.rs
+++ b/src/allocator/fixed_size_block.rs
@@ -44,7 +44,9 @@ impl FixedSizeBlockAllocator {
     /// heap bounds are valid and that the heap is unused. This method must be
     /// called only once.
     pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
-        self.fallback_allocator.init(heap_start, heap_size);
+        unsafe {
+            self.fallback_allocator.init(heap_start, heap_size);
+        }
     }
 
     /// Allocates using the fallback allocator.
@@ -91,12 +93,16 @@ unsafe impl GlobalAlloc for Locked<FixedSizeBlockAllocator> {
                 assert!(mem::size_of::<ListNode>() <= BLOCK_SIZES[index]);
                 assert!(mem::align_of::<ListNode>() <= BLOCK_SIZES[index]);
                 let new_node_ptr = ptr as *mut ListNode;
-                new_node_ptr.write(new_node);
-                allocator.list_heads[index] = Some(&mut *new_node_ptr);
+                unsafe {
+                    new_node_ptr.write(new_node);
+                    allocator.list_heads[index] = Some(&mut *new_node_ptr);
+                }
             }
             None => {
                 let ptr = NonNull::new(ptr).unwrap();
-                allocator.fallback_allocator.deallocate(ptr, layout);
+                unsafe {
+                    allocator.fallback_allocator.deallocate(ptr, layout);
+                }
             }
         }
     }
diff --git a/src/allocator/linked_list.rs b/src/allocator/linked_list.rs
index f7164113..fc8f65f5 100644
--- a/src/allocator/linked_list.rs
+++ b/src/allocator/linked_list.rs
@@ -1,4 +1,4 @@
-use super::{align_up, Locked};
+use super::{Locked, align_up};
 use alloc::alloc::{GlobalAlloc, Layout};
 use core::{mem, ptr};
 
@@ -39,7 +39,9 @@ impl LinkedListAllocator {
     /// heap bounds are valid and that the heap is unused. This method must be
     /// called only once.
     pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
-        self.add_free_region(heap_start, heap_size);
+        unsafe {
+            self.add_free_region(heap_start, heap_size);
+        }
     }
 
     /// Adds the given memory region to the front of the list.
@@ -52,8 +54,10 @@ impl LinkedListAllocator {
         let mut node = ListNode::new(size);
         node.next = self.head.next.take();
         let node_ptr = addr as *mut ListNode;
-        node_ptr.write(node);
-        self.head.next = Some(&mut *node_ptr)
+        unsafe {
+            node_ptr.write(node);
+            self.head.next = Some(&mut *node_ptr);
+        }
     }
 
     /// Looks for a free region with the given size and alignment and removes
@@ -128,7 +132,9 @@ unsafe impl GlobalAlloc for Locked<LinkedListAllocator> {
             let alloc_end = alloc_start.checked_add(size).expect("overflow");
             let excess_size = region.end_addr() - alloc_end;
             if excess_size > 0 {
-                allocator.add_free_region(alloc_end, excess_size);
+                unsafe {
+                    allocator.add_free_region(alloc_end, excess_size);
+                }
             }
             alloc_start as *mut u8
         } else {
@@ -140,6 +146,6 @@ unsafe impl GlobalAlloc for Locked<LinkedListAllocator> {
     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
         // perform layout adjustments
         let (size, _) = LinkedListAllocator::size_align(layout);
-        self.lock().add_free_region(ptr as usize, size)
+        unsafe { self.lock().add_free_region(ptr as usize, size) }
     }
 }
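
Note (not part of the upstream patch): the change is needed because the Rust 2024
edition raises the unsafe_op_in_unsafe_fn lint to warn-by-default, so the body of an
unsafe fn no longer acts as an implicit unsafe block and each unsafe operation has to
be wrapped explicitly. A minimal sketch of the rule, using a hypothetical write_byte
helper rather than code from the patch:

    /// # Safety
    ///
    /// `ptr` must be valid for a one-byte write.
    unsafe fn write_byte(ptr: *mut u8, value: u8) {
        // In edition 2024, omitting this inner block triggers the
        // `unsafe_op_in_unsafe_fn` warning for the raw-pointer write.
        unsafe {
            ptr.write(value);
        }
    }

    fn main() {
        let mut byte = 0u8;
        // SAFETY: `byte` is a live local variable, so the pointer is valid.
        unsafe { write_byte(&mut byte, 42) };
        assert_eq!(byte, 42);
    }

The same pattern drives every hunk above: the existing unsafe fn signatures stay
unchanged, and only their bodies gain explicit unsafe { ... } blocks around the
raw-pointer writes and allocator calls.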