Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions oscars/src/alloc/arena2/alloc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -342,9 +342,12 @@ impl<'arena> Arena<'arena> {
value_ref: &T,
) -> Result<ArenaAllocationData, ArenaAllocError> {
let size = core::mem::size_of::<ArenaHeapItem<T>>();
let alignment = core::mem::align_of_val(value_ref);
let alignment = core::mem::align_of::<ArenaHeapItem<T>>();

assert!(alignment <= self.layout.align());
// The arena's buffer must be at least as aligned as the value we are storing.
if alignment > self.layout.align() {
return Err(ArenaAllocError::AlignmentNotPossible);
}

// Safety: This is safe as `current_offset` must be less than the length
// of the buffer.
Expand Down
48 changes: 37 additions & 11 deletions oscars/src/alloc/arena2/mod.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
//! An Arena allocator that manages multiple backing arenas

use core::mem;

use rust_alloc::alloc::LayoutError;
use rust_alloc::collections::LinkedList;

Expand Down Expand Up @@ -48,13 +50,17 @@ const DEFAULT_ARENA_SIZE: usize = 4096;
/// Default upper limit of 2MB (2 ^ 21)
const DEFAULT_HEAP_THRESHOLD: usize = 2_097_152;

/// Minimum guaranteed alignment for every arena buffer.
const DEFAULT_MIN_ALIGNMENT: usize = 8;

/// Maximum number of idle arenas held (4 idle pages x 4KB = 16KB of OS memory pressure buffered)
const MAX_RECYCLED_ARENAS: usize = 4;

#[derive(Debug)]
pub struct ArenaAllocator<'alloc> {
heap_threshold: usize,
arena_size: usize,
min_alignment: usize,
arenas: LinkedList<Arena<'alloc>>,
// empty arenas kept alive to avoid OS reallocation on the next cycle
recycled_arenas: [Option<Arena<'alloc>>; MAX_RECYCLED_ARENAS],
Expand All @@ -67,6 +73,7 @@ impl<'alloc> Default for ArenaAllocator<'alloc> {
Self {
heap_threshold: DEFAULT_HEAP_THRESHOLD,
arena_size: DEFAULT_ARENA_SIZE,
min_alignment: DEFAULT_MIN_ALIGNMENT,
arenas: LinkedList::default(),
recycled_arenas: core::array::from_fn(|_| None),
recycled_count: 0,
Expand All @@ -83,6 +90,11 @@ impl<'alloc> ArenaAllocator<'alloc> {
self.heap_threshold = heap_threshold;
self
}
/// Override the baseline alignment for every new arena buffer.
pub fn with_min_alignment(mut self, min_alignment: usize) -> Self {
self.min_alignment = min_alignment;
self
}

pub fn arenas_len(&self) -> usize {
self.arenas.len()
Expand All @@ -105,22 +117,26 @@ impl<'alloc> ArenaAllocator<'alloc> {

impl<'alloc> ArenaAllocator<'alloc> {
pub fn try_alloc<T>(&mut self, value: T) -> Result<ArenaPointer<'alloc, T>, ArenaAllocError> {
// Determine the minimum alignment this type requires.
let required_alignment = mem::align_of::<alloc::ArenaHeapItem<T>>();

let active = match self.get_active_arena() {
Some(arena) => arena,
None => {
// TODO: don't hard code alignment
//
// TODO: also, we need a min-alignment
self.initialize_new_arena()?;
self.initialize_new_arena(required_alignment)?;
self.get_active_arena().expect("must exist, we just set it")
}
};

match active.get_allocation_data(&value) {
// SAFETY: TODO
Ok(data) => unsafe { Ok(active.alloc_unchecked::<T>(value, data)) },
Err(ArenaAllocError::OutOfMemory) => {
self.initialize_new_arena()?;
// The active arena is either full or was created with an alignment
// that is too small for this type. Either way, close it and spin up
// a fresh arena that satisfies the alignment requirement.
Err(ArenaAllocError::OutOfMemory | ArenaAllocError::AlignmentNotPossible) => {
active.close();
self.initialize_new_arena(required_alignment)?;
let new_active = self.get_active_arena().expect("must exist");
new_active.try_alloc(value)
}
Expand All @@ -138,18 +154,28 @@ impl<'alloc> ArenaAllocator<'alloc> {
.transpose()
}

pub fn initialize_new_arena(&mut self) -> Result<(), ArenaAllocError> {
/// Initialize a fresh arena, attempting to reuse a recycled one first.
pub fn initialize_new_arena(
&mut self,
required_alignment: usize,
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

issue: I think this should really just be a const parameter vs. a regular parameter.

Had you looked at using const parameters for this?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I looked into this, here's what I have found so far. One possible design would be to make MIN_ALIGN a const parameter on ArenaAllocator and compute max(MIN_ALIGN, ALIGN) when creating a new arena. Since both values would be const, the compiler could resolve max at compile time.

But I think this doesn’t really help here since arena_size is still a runtime value and Arena::try_init uses Layout::from_size_align(arena_size, alignment). Because the size is runtime, the layout cannot be fully computed at compile time anyway.

It would also mean many parts of the code would also need MIN_ALIGN as a generic parameter but that would involve a lot of changes without improving runtime performance. Also, instead of ArenaAllocator<'alloc>, we would need types like ArenaAllocator<'alloc, 8>, and allocators with different alignments would become incompatible types.

So I think the current design keeps the code simpler

) -> Result<(), ArenaAllocError> {
let alignment = self.min_alignment.max(required_alignment);

// Check the recycle list first to avoid an OS allocation.
if self.recycled_count > 0 {
self.recycled_count -= 1;
if let Some(recycled) = self.recycled_arenas[self.recycled_count].take() {
// arena.reset() was already called when it was parked
self.arenas.push_front(recycled);
return Ok(());
// arena.reset() was already called when it was parked.
// Only reuse if its original alignment satisfies the current requirement,
// otherwise drop it and fall through to a fresh OS allocation.
if recycled.layout.align() >= alignment {
self.arenas.push_front(recycled);
return Ok(());
}
}
}

let new_arena = Arena::try_init(self.arena_size, 16)?;
let new_arena = Arena::try_init(self.arena_size, alignment)?;
self.arenas.push_front(new_arena);
Ok(())
}
Expand Down
59 changes: 59 additions & 0 deletions oscars/src/alloc/arena2/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -184,3 +184,62 @@ fn as_ptr_clears_not_flips_tag_bit() {
ptr_a.as_mut().mark_dropped();
}
}

// === test for Dynamic Alignment === //

#[test]
fn test_over_aligned_type() {
    // A type whose alignment requirement (512) far exceeds the allocator's
    // default minimum alignment.
    #[repr(C, align(512))]
    struct HighlyAligned {
        _data: [u8; 128],
    }

    let mut alloc = ArenaAllocator::default().with_arena_size(4096);
    let handle = alloc.try_alloc(HighlyAligned { _data: [0; 128] }).unwrap();

    // The returned address must honour the 512-byte alignment, and the value
    // must fit inside the very first arena.
    let address = handle.as_ptr().as_ptr() as usize;
    assert_eq!(address % 512, 0);
    assert_eq!(alloc.arenas_len(), 1);
}

#[test]
fn test_alignment_upgrade_after_small_alloc() {
    #[repr(C, align(512))]
    struct BigAlign([u8; 16]);

    let mut alloc = ArenaAllocator::default().with_arena_size(4096);

    // A byte-sized allocation leaves the first arena at the default
    // (8-byte) alignment only.
    let _small = alloc.try_alloc(0u8).unwrap();
    assert_eq!(alloc.arenas_len(), 1);

    // The over-aligned value cannot live in that arena, so a second,
    // suitably aligned arena must be opened for it.
    let handle = alloc.try_alloc(BigAlign([0; 16])).unwrap();
    let address = handle.as_ptr().as_ptr() as usize;
    assert_eq!(address % 512, 0);
    assert_eq!(alloc.arenas_len(), 2);
}

#[test]
fn test_alignment_upgrade_on_full_arena() {
    #[repr(C, align(512))]
    struct BigAlign([u8; 16]);

    let mut alloc = ArenaAllocator::default().with_arena_size(4096);

    // Keep allocating u64s until the allocator is forced to open a second
    // arena; the guard bounds the loop so a regression cannot hang the test.
    let mut iterations = 0usize;
    while alloc.arenas_len() < 2 {
        let _ = alloc.try_alloc(0u64).unwrap();
        iterations += 1;
        assert!(iterations < 1024);
    }

    // The over-aligned request must land in a third, 512-byte-aligned arena.
    let handle = alloc.try_alloc(BigAlign([0; 16])).unwrap();
    let address = handle.as_ptr().as_ptr() as usize;
    assert_eq!(address % 512, 0);
    assert_eq!(alloc.arenas_len(), 3);
}