Skip to content

Commit ccfd63d

Browse files
dancrossnyc authored and phil-opp committed
Update for the latest nightly alloc interface. (#12)
Update `linked_list_allocator` to work with the latest alloc::allocator interface from nightly Rust. Tested with `cargo test`. Signed-off-by: Dan Cross <[email protected]>
1 parent 4d6f892 commit ccfd63d

File tree

2 files changed

+16
-13
lines changed

2 files changed

+16
-13
lines changed

src/hole.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use core::alloc::{AllocErr, Layout, Opaque};
1+
use alloc::allocator::{AllocErr, Layout};
22
use core::mem::size_of;
33
use core::ptr::NonNull;
44

@@ -46,7 +46,7 @@ impl HoleList {
4646
/// block is returned.
4747
/// This function uses the “first fit” strategy, so it uses the first hole that is big
4848
/// enough. Thus the runtime is in O(n) but it should be reasonably fast for small allocations.
49-
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
49+
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
5050
assert!(layout.size() >= Self::min_size());
5151

5252
allocate_first_fit(&mut self.first, layout).map(|allocation| {
@@ -56,7 +56,7 @@ impl HoleList {
5656
if let Some(padding) = allocation.back_padding {
5757
deallocate(&mut self.first, padding.addr, padding.size);
5858
}
59-
NonNull::new(allocation.info.addr as *mut Opaque).unwrap()
59+
NonNull::new(allocation.info.addr as *mut u8).unwrap()
6060
})
6161
}
6262

@@ -66,7 +66,7 @@ impl HoleList {
6666
/// This function walks the list and inserts the given block at the correct place. If the freed
6767
/// block is adjacent to another free block, the blocks are merged again.
6868
/// This operation is in `O(n)` since the list needs to be sorted by address.
69-
pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
69+
pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
7070
deallocate(&mut self.first, ptr.as_ptr() as usize, layout.size())
7171
}
7272

src/lib.rs

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,10 @@ extern crate std;
1010
#[cfg(feature = "use_spin")]
1111
extern crate spin;
1212

13-
use core::alloc::{Alloc, AllocErr, GlobalAlloc, Layout, Opaque};
13+
extern crate alloc;
14+
15+
use alloc::allocator::{Alloc, AllocErr, Layout};
16+
use core::alloc::{GlobalAlloc};
1417
use core::mem;
1518
#[cfg(feature = "use_spin")]
1619
use core::ops::Deref;
@@ -69,7 +72,7 @@ impl Heap {
6972
/// This function scans the list of free memory blocks and uses the first block that is big
7073
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
7174
/// reasonably fast for small allocations.
72-
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
75+
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
7376
let mut size = layout.size();
7477
if size < HoleList::min_size() {
7578
size = HoleList::min_size();
@@ -87,7 +90,7 @@ impl Heap {
8790
/// This function walks the list of free memory blocks and inserts the freed block at the
8891
/// correct place. If the freed block is adjacent to another free block, the blocks are merged
8992
/// again. This operation is in `O(n)` since the list needs to be sorted by address.
90-
pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
93+
pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
9194
let mut size = layout.size();
9295
if size < HoleList::min_size() {
9396
size = HoleList::min_size();
@@ -122,17 +125,17 @@ impl Heap {
122125
let top = self.top();
123126
let layout = Layout::from_size_align(by, 1).unwrap();
124127
self.holes
125-
.deallocate(NonNull::new_unchecked(top as *mut Opaque), layout);
128+
.deallocate(NonNull::new_unchecked(top as *mut u8), layout);
126129
self.size += by;
127130
}
128131
}
129132

130133
unsafe impl Alloc for Heap {
131-
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
134+
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
132135
self.allocate_first_fit(layout)
133136
}
134137

135-
unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
138+
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
136139
self.deallocate(ptr, layout)
137140
}
138141
}
@@ -171,15 +174,15 @@ impl Deref for LockedHeap {
171174

172175
#[cfg(feature = "use_spin")]
173176
unsafe impl GlobalAlloc for LockedHeap {
174-
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
177+
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
175178
self.0
176179
.lock()
177180
.allocate_first_fit(layout)
178181
.ok()
179-
.map_or(0 as *mut Opaque, |allocation| allocation.as_ptr())
182+
.map_or(0 as *mut u8, |allocation| allocation.as_ptr())
180183
}
181184

182-
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
185+
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
183186
self.0
184187
.lock()
185188
.deallocate(NonNull::new_unchecked(ptr), layout)

0 commit comments

Comments (0)