Update to use the new API for custom allocators (#348)

* Update to new allocator API

* Change linked_list_allocator dependency to link directly to git repository

* Add Cargo.lock to gitignore
This commit is contained in:
Will
2017-08-20 12:36:30 +00:00
committed by Philipp Oppermann
parent 2ebd4ed954
commit 0175e83387
5 changed files with 46 additions and 50 deletions

View File

@@ -4,7 +4,7 @@ name = "hole_list_allocator"
version = "0.1.0"
[dependencies]
linked_list_allocator = "0.2.0"
linked_list_allocator = { git = "https://github.com/phil-opp/linked-list-allocator.git"}
spin = "0.4.5"
[dependencies.lazy_static]

View File

@@ -7,62 +7,51 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(allocator)]
#![feature(const_fn)]
#![allocator]
#![feature(allocator_api)]
#![feature(alloc)]
#![feature(global_allocator)]
#![no_std]
#![deny(warnings)]
use spin::Mutex;
use linked_list_allocator::Heap;
extern crate alloc;
extern crate spin;
extern crate linked_list_allocator;
#[macro_use]
extern crate lazy_static;
use alloc::heap::{Alloc, AllocErr, Layout};
use spin::Mutex;
use linked_list_allocator::Heap;
/// Virtual address where the heap begins. Written in octal — presumably
/// chosen to fall on a page-table-entry boundary (TODO confirm against
/// the kernel's memory map).
pub const HEAP_START: usize = 0o_000_001_000_000_0000;
/// Total size of the heap in bytes.
pub const HEAP_SIZE: usize = 100 * 1024; // 100 KiB
lazy_static! {
static ref HEAP: Mutex<Heap> = Mutex::new(unsafe {
Heap::new(HEAP_START, HEAP_SIZE)
});
static HEAP: Mutex<Option<Heap>> = Mutex::new(None);
/// Set up the heap: stores a `Heap` covering `[offset, offset + size)`
/// into the global `HEAP` mutex, replacing any previous value.
///
/// # Safety
/// The caller must guarantee that the given range is valid, otherwise
/// unused memory — presumably already mapped and writable before the
/// first allocation (TODO confirm against `Heap::new`'s contract).
pub unsafe fn init(offset: usize, size: usize) {
    *HEAP.lock() = Some(Heap::new(offset, size));
}
#[no_mangle]
pub extern fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
HEAP.lock().allocate_first_fit(size, align).expect("out of memory")
/// Zero-sized handle type that delegates allocation requests to the
/// lazily-initialized global `HEAP`. Registered below via
/// `#[global_allocator]`.
pub struct Allocator;

unsafe impl<'a> Alloc for &'a Allocator {
    /// Allocate a block satisfying `layout` from the global heap using
    /// first-fit search.
    ///
    /// # Panics
    /// Panics if `init` has not been called yet (the heap is `None`).
    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
        if let Some(ref mut heap) = *HEAP.lock() {
            heap.allocate_first_fit(layout)
        } else {
            panic!("Heap not initialized!");
        }
    }

    /// Return a previously allocated block (described by the same
    /// `layout` it was allocated with) to the global heap.
    ///
    /// # Panics
    /// Panics if `init` has not been called yet (the heap is `None`).
    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
        if let Some(ref mut heap) = *HEAP.lock() {
            heap.deallocate(ptr, layout)
        } else {
            // Fixed typo ("initalized") and matched the message used in
            // `alloc` so both failure modes report identically.
            panic!("Heap not initialized!");
        }
    }
}
#[no_mangle]
/// Legacy (pre-`allocator_api`) deallocation entry point: returns the
/// `size`-byte block at `ptr` to the global heap.
/// NOTE(review): uses the old `deallocate(ptr, size, align)` Heap
/// signature, not the `Layout`-based one used by `Alloc::dealloc` —
/// this is the removed side of the diff.
pub extern fn __rust_deallocate(ptr: *mut u8, size: usize, align: usize) {
    unsafe { HEAP.lock().deallocate(ptr, size, align) };
}
/// Legacy (pre-`allocator_api`) hook reporting how many bytes of an
/// allocation are actually usable. This allocator never hands out more
/// than was asked for, so the requested size is returned unchanged.
#[no_mangle]
pub extern fn __rust_usable_size(size: usize, _align: usize) -> usize {
    // Exactly the requested number of bytes is usable.
    size
}
/// Legacy hook for in-place reallocation. This allocator cannot grow a
/// block in place, so it reports the unchanged original `size` —
/// presumably signalling the caller to fall back to `__rust_reallocate`
/// (allocate-copy-free).
#[no_mangle]
pub extern fn __rust_reallocate_inplace(
    _ptr: *mut u8,
    size: usize,
    _new_size: usize,
    _align: usize,
) -> usize {
    size
}
#[no_mangle]
/// Legacy (pre-`allocator_api`) reallocation entry point.
///
/// Resizes the allocation at `ptr` from `size` to `new_size` bytes by
/// allocating a fresh block, copying the overlapping prefix, and freeing
/// the old block — no in-place growth is attempted.
pub extern fn __rust_reallocate(ptr: *mut u8, size: usize, new_size: usize,
                                align: usize) -> *mut u8 {
    use core::{ptr, cmp};
    // Allocate-copy-free strategy, taken from liballoc_system:
    // from: https://github.com/rust-lang/rust/blob/
    // c66d2380a810c9a2b3dbb4f93a830b101ee49cc2/
    // src/liballoc_system/lib.rs#L98-L101
    let new_ptr = __rust_allocate(new_size, align);
    // Copy only the bytes that fit in both the old and the new block.
    unsafe { ptr::copy(ptr, new_ptr, cmp::min(size, new_size)) };
    __rust_deallocate(ptr, size, align);
    new_ptr
}
// Our allocator static: registering `Allocator` via `#[global_allocator]`
// routes every `alloc`-crate allocation through the hole-list heap.
#[global_allocator]
static GLOBAL_ALLOC: Allocator = Allocator;