Mirror of https://github.com/phil-opp/blog_os.git
Update allocator code to use explicit `unsafe` blocks inside `unsafe` functions

Required since the Rust 2024 edition, where the `unsafe_op_in_unsafe_fn` lint warns by default.
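For context: the 2024 edition no longer treats the body of an `unsafe fn` as one implicit `unsafe` block, so every unsafe operation inside it must be wrapped explicitly. A minimal sketch (not from this repository) of the pattern the diff below applies:

/// # Safety
/// `ptr` must be valid for writes and properly aligned.
pub unsafe fn zero(ptr: *mut u8) {
    // Edition 2021: the whole function body was an implicit unsafe
    // block, so the raw-pointer write compiled as-is.
    // Edition 2024: `unsafe_op_in_unsafe_fn` warns by default, so the
    // write must be wrapped in an explicit unsafe block.
    unsafe {
        ptr.write(0);
    }
}

On older editions the same checking can be opted into with `#![warn(unsafe_op_in_unsafe_fn)]` at the crate root.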
@@ -44,7 +44,9 @@ impl FixedSizeBlockAllocator {
     /// heap bounds are valid and that the heap is unused. This method must be
     /// called only once.
     pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
-        self.fallback_allocator.init(heap_start, heap_size);
+        unsafe {
+            self.fallback_allocator.init(heap_start, heap_size);
+        }
     }
 
     /// Allocates using the fallback allocator.
@@ -91,12 +93,16 @@ unsafe impl GlobalAlloc for Locked<FixedSizeBlockAllocator> {
                 assert!(mem::size_of::<ListNode>() <= BLOCK_SIZES[index]);
                 assert!(mem::align_of::<ListNode>() <= BLOCK_SIZES[index]);
                 let new_node_ptr = ptr as *mut ListNode;
-                new_node_ptr.write(new_node);
-                allocator.list_heads[index] = Some(&mut *new_node_ptr);
+                unsafe {
+                    new_node_ptr.write(new_node);
+                    allocator.list_heads[index] = Some(&mut *new_node_ptr);
+                }
             }
             None => {
                 let ptr = NonNull::new(ptr).unwrap();
-                allocator.fallback_allocator.deallocate(ptr, layout);
+                unsafe {
+                    allocator.fallback_allocator.deallocate(ptr, layout);
+                }
             }
         }
     }
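The remaining hunks sit in a second file and apply the same change to the linked list allocator: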
@@ -1,4 +1,4 @@
-use super::{align_up, Locked};
+use super::{Locked, align_up};
 use alloc::alloc::{GlobalAlloc, Layout};
 use core::{mem, ptr};
 
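The swapped import order (`Locked` before `align_up`) is presumably a rustfmt artifact of the same edition bump: the 2024 style edition sorts imports ASCIIbetically, placing uppercase names before lowercase ones, where the older style sorted case-insensitively.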
@@ -39,7 +39,9 @@ impl LinkedListAllocator {
     /// heap bounds are valid and that the heap is unused. This method must be
     /// called only once.
     pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
-        self.add_free_region(heap_start, heap_size);
+        unsafe {
+            self.add_free_region(heap_start, heap_size);
+        }
     }
 
     /// Adds the given memory region to the front of the list.
@@ -52,8 +54,10 @@ impl LinkedListAllocator {
         let mut node = ListNode::new(size);
         node.next = self.head.next.take();
         let node_ptr = addr as *mut ListNode;
-        node_ptr.write(node);
-        self.head.next = Some(&mut *node_ptr)
+        unsafe {
+            node_ptr.write(node);
+            self.head.next = Some(&mut *node_ptr);
+        }
     }
 
     /// Looks for a free region with the given size and alignment and removes
@@ -128,7 +132,9 @@ unsafe impl GlobalAlloc for Locked<LinkedListAllocator> {
             let alloc_end = alloc_start.checked_add(size).expect("overflow");
             let excess_size = region.end_addr() - alloc_end;
             if excess_size > 0 {
-                allocator.add_free_region(alloc_end, excess_size);
+                unsafe {
+                    allocator.add_free_region(alloc_end, excess_size);
+                }
             }
             alloc_start as *mut u8
         } else {
@@ -140,6 +146,6 @@ unsafe impl GlobalAlloc for Locked<LinkedListAllocator> {
         // perform layout adjustments
         let (size, _) = LinkedListAllocator::size_align(layout);
 
-        self.lock().add_free_region(ptr as usize, size)
+        unsafe { self.lock().add_free_region(ptr as usize, size) }
     }
 }
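Note the single-expression form in the last hunk: `unsafe { ... }` is itself an expression, so the wrapped call remains the tail expression of `dealloc` and no semicolon is needed.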