diff --git a/src/allocator/bump.rs b/src/allocator/bump.rs
index 355daef3..7d104eff 100644
--- a/src/allocator/bump.rs
+++ b/src/allocator/bump.rs
@@ -26,7 +26,7 @@ impl BumpAllocator {
     /// memory range is unused. Also, this method must be called only once.
     pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
         self.heap_start = heap_start;
-        self.heap_end = heap_start + heap_size;
+        self.heap_end = heap_start.saturating_add(heap_size);
         self.next = heap_start;
     }
 }
@@ -36,7 +36,7 @@ unsafe impl GlobalAlloc for Locked<BumpAllocator> {
         let mut bump = self.lock(); // get a mutable reference
 
         let alloc_start = align_up(bump.next, layout.align());
-        let alloc_end = alloc_start + layout.size();
+        let alloc_end = alloc_start.checked_add(layout.size()).expect("overflow");
 
         if alloc_end > bump.heap_end {
             ptr::null_mut() // out of memory
diff --git a/src/allocator/linked_list.rs b/src/allocator/linked_list.rs
index 699efbdf..3753511e 100644
--- a/src/allocator/linked_list.rs
+++ b/src/allocator/linked_list.rs
@@ -86,7 +86,7 @@ impl LinkedListAllocator {
     /// Returns the allocation start address on success.
     fn alloc_from_region(region: &ListNode, size: usize, align: usize) -> Result<usize, ()> {
         let alloc_start = align_up(region.start_addr(), align);
-        let alloc_end = alloc_start + size;
+        let alloc_end = alloc_start.checked_add(size).expect("overflow");
 
         if alloc_end > region.end_addr() {
             // region too small
@@ -125,7 +125,7 @@ unsafe impl GlobalAlloc for Locked<LinkedListAllocator> {
         let mut allocator = self.inner.lock();
 
         if let Some((region, alloc_start)) = allocator.find_region(size, align) {
-            let alloc_end = alloc_start + size;
+            let alloc_end = alloc_start.checked_add(size).expect("overflow");
             let excess_size = region.end_addr() - alloc_end;
             if excess_size > 0 {
                 allocator.add_free_region(alloc_end, excess_size);
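
A minimal standalone sketch of the failure mode this patch closes (hypothetical demo code, not part of the crate; the `align_up` below is an assumption that mirrors the usual power-of-two contract of the helper referenced in the diff, and the heap addresses are contrived). With plain `+`, a large `size` can wrap `alloc_end` around the address space in release builds, so the `alloc_end > heap_end` bounds check passes and the allocator would hand out a bogus pointer; `checked_add` surfaces the overflow instead:

    // demo.rs -- hypothetical host-side demo, runnable with `rustc demo.rs`.

    /// Align `addr` upwards to `align` (must be a power of two); assumed to
    /// match the contract of the `align_up` helper used in the diff.
    fn align_up(addr: usize, align: usize) -> usize {
        (addr + align - 1) & !(align - 1)
    }

    fn main() {
        // Contrived heap placed at the very top of the address space.
        let heap_start = usize::MAX - 4096;
        // saturating_add clamps at usize::MAX instead of wrapping,
        // matching the new `init` behavior.
        let heap_end = heap_start.saturating_add(8192);

        let alloc_start = align_up(heap_start, 8);
        // An unchecked `alloc_start + 8192` would wrap in release mode and
        // sneak past the `alloc_end > heap_end` check; checked_add makes
        // the overflow explicit.
        match alloc_start.checked_add(8192) {
            Some(alloc_end) if alloc_end <= heap_end => {
                println!("fits: alloc_end = {alloc_end:#x}");
            }
            _ => println!("rejected: allocation would overflow the address space"),
        }
    }

One design note: `.expect("overflow")` turns the wrap into a deterministic panic at the allocation site. In `alloc_from_region`, which already returns a `Result`, a non-panicking variant such as `checked_add(size).ok_or(())?` would also fit the signature; the patch as written keeps all three call sites consistent.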