Skip to content

Commit 22b05c1

Browse files
committed
feat: implement dynamic alignment for ArenaAllocator
1 parent f37d70e commit 22b05c1

File tree

22 files changed

+768
-86
lines changed

22 files changed

+768
-86
lines changed

.github/workflows/rust.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ jobs:
6767
- name: Set up Miri
6868
run: cargo +nightly miri setup
6969
- name: Run tests under Miri
70-
run: cargo +nightly miri test -p oscars
70+
run: cargo +nightly miri test -p oscars --all-features
7171

7272
docs:
7373
name: Documentation

Cargo.lock

Lines changed: 7 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

oscars/Cargo.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ allocator-api2 = { version = "0.4.0", optional = true }
88
hashbrown = "0.16.1"
99
oscars_derive = { path = "../oscars_derive", version = "0.1.0" }
1010
rustc-hash = "2.1.1"
11+
thin-vec = { version = "0.2", optional = true }
1112

1213
[dev-dependencies]
1314
criterion = { version = "0.5", features = ["html_reports"] }
@@ -32,3 +33,4 @@ default = ["mark_sweep"]
3233
std = []
3334
mark_sweep = []
3435
gc_allocator = ["dep:allocator-api2", "mark_sweep"]
36+
thin-vec = ["dep:thin-vec", "mark_sweep"]

oscars/src/alloc/arena2/alloc.rs

Lines changed: 23 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -203,7 +203,8 @@ impl<'arena, T> ArenaPointer<'arena, T> {
203203
///
204204
/// safe because the gc collector owns the arena and keeps it alive
205205
pub(crate) unsafe fn extend_lifetime(self) -> ArenaPointer<'static, T> {
206-
ArenaPointer(self.0.extend_lifetime(), PhantomData)
206+
// SAFETY: upheld by caller
207+
ArenaPointer(unsafe { self.0.extend_lifetime() }, PhantomData)
207208
}
208209
}
209210

@@ -341,9 +342,12 @@ impl<'arena> Arena<'arena> {
341342
value_ref: &T,
342343
) -> Result<ArenaAllocationData, ArenaAllocError> {
343344
let size = core::mem::size_of::<ArenaHeapItem<T>>();
344-
let alignment = core::mem::align_of_val(value_ref);
345+
let alignment = core::mem::align_of::<ArenaHeapItem<T>>();
345346

346-
assert!(alignment <= self.layout.align());
347+
// The arena's buffer must be at least as aligned as the value we are storing.
348+
if alignment > self.layout.align() {
349+
return Err(ArenaAllocError::AlignmentNotPossible);
350+
}
347351

348352
// Safety: This is safe as `current_offset` must be less than the length
349353
// of the buffer.
@@ -396,6 +400,22 @@ impl<'arena> Arena<'arena> {
396400
}
397401
result
398402
}
403+
404+
/// Reset arena to its initial empty state, reusing the existing OS buffer.
405+
/// Must only be called when `run_drop_check()` is true (all items dropped).
406+
pub fn reset(&self) {
407+
debug_assert!(
408+
self.run_drop_check(),
409+
"reset() called on an arena with live items"
410+
);
411+
// Zero the buffer so stale object graphs are not observable after recycling.
412+
// SAFETY: buffer is valid for the full layout size and was allocated with
413+
// the same layout in try_init.
414+
unsafe { core::ptr::write_bytes(self.buffer.as_ptr(), 0, self.layout.size()) };
415+
self.flags.set(ArenaState::default());
416+
self.last_allocation.set(core::ptr::null_mut());
417+
self.current_offset.set(0);
418+
}
399419
}
400420

401421
impl<'arena> Drop for Arena<'arena> {

oscars/src/alloc/arena2/mod.rs

Lines changed: 64 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
//! An Arena allocator that manages multiple backing arenas
22
3+
use core::mem;
4+
35
use rust_alloc::alloc::LayoutError;
46
use rust_alloc::collections::LinkedList;
57

@@ -48,19 +50,33 @@ const DEFAULT_ARENA_SIZE: usize = 4096;
4850
/// Default upper limit of 2MB (2 ^ 21)
4951
const DEFAULT_HEAP_THRESHOLD: usize = 2_097_152;
5052

53+
/// Minimum guaranteed alignment for every arena buffer.
54+
const DEFAULT_MIN_ALIGNMENT: usize = 8;
55+
56+
/// Maximum number of idle arenas held (4 idle pages x 4KB = 16KB of OS memory pressure buffered)
57+
const MAX_RECYCLED_ARENAS: usize = 4;
58+
5159
#[derive(Debug)]
5260
pub struct ArenaAllocator<'alloc> {
5361
heap_threshold: usize,
5462
arena_size: usize,
63+
min_alignment: usize,
5564
arenas: LinkedList<Arena<'alloc>>,
65+
// empty arenas kept alive to avoid OS reallocation on the next cycle
66+
recycled_arenas: [Option<Arena<'alloc>>; MAX_RECYCLED_ARENAS],
67+
// number of idle arenas currently held
68+
recycled_count: usize,
5669
}
5770

5871
impl<'alloc> Default for ArenaAllocator<'alloc> {
5972
fn default() -> Self {
6073
Self {
6174
heap_threshold: DEFAULT_HEAP_THRESHOLD,
6275
arena_size: DEFAULT_ARENA_SIZE,
76+
min_alignment: DEFAULT_MIN_ALIGNMENT,
6377
arenas: LinkedList::default(),
78+
recycled_arenas: core::array::from_fn(|_| None),
79+
recycled_count: 0,
6480
}
6581
}
6682
}
@@ -74,17 +90,24 @@ impl<'alloc> ArenaAllocator<'alloc> {
7490
self.heap_threshold = heap_threshold;
7591
self
7692
}
93+
/// Override the baseline alignment for every new arena buffer.
94+
pub fn with_min_alignment(mut self, min_alignment: usize) -> Self {
95+
self.min_alignment = min_alignment;
96+
self
97+
}
7798

7899
pub fn arenas_len(&self) -> usize {
79100
self.arenas.len()
80101
}
81102

82103
pub fn heap_size(&self) -> usize {
104+
// recycled arenas hold no live objects, exclude them from GC pressure
83105
self.arenas_len() * self.arena_size
84106
}
85107

86108
pub fn is_below_threshold(&self) -> bool {
87-
self.heap_size() <= self.heap_threshold - self.arena_size
109+
// saturating_sub avoids underflow when heap_threshold < arena_size
110+
self.heap_size() <= self.heap_threshold.saturating_sub(self.arena_size)
88111
}
89112

90113
pub fn increase_threshold(&mut self) {
@@ -94,22 +117,26 @@ impl<'alloc> ArenaAllocator<'alloc> {
94117

95118
impl<'alloc> ArenaAllocator<'alloc> {
96119
pub fn try_alloc<T>(&mut self, value: T) -> Result<ArenaPointer<'alloc, T>, ArenaAllocError> {
120+
// Determine the minimum alignment this type requires.
121+
let required_alignment = mem::align_of::<alloc::ArenaHeapItem<T>>();
122+
97123
let active = match self.get_active_arena() {
98124
Some(arena) => arena,
99125
None => {
100-
// TODO: don't hard code alignment
101-
//
102-
// TODO: also, we need a min-alignment
103-
self.initialize_new_arena()?;
126+
self.initialize_new_arena(required_alignment)?;
104127
self.get_active_arena().expect("must exist, we just set it")
105128
}
106129
};
107130

108131
match active.get_allocation_data(&value) {
109132
// SAFETY: TODO
110133
Ok(data) => unsafe { Ok(active.alloc_unchecked::<T>(value, data)) },
111-
Err(ArenaAllocError::OutOfMemory) => {
112-
self.initialize_new_arena()?;
134+
// The active arena is either full or was created with an alignment
135+
// that is too small for this type. Either way, close it and spin up
136+
// a fresh arena that satisfies the alignment requirement.
137+
Err(ArenaAllocError::OutOfMemory | ArenaAllocError::AlignmentNotPossible) => {
138+
active.close();
139+
self.initialize_new_arena(required_alignment)?;
113140
let new_active = self.get_active_arena().expect("must exist");
114141
new_active.try_alloc(value)
115142
}
@@ -127,8 +154,28 @@ impl<'alloc> ArenaAllocator<'alloc> {
127154
.transpose()
128155
}
129156

130-
pub fn initialize_new_arena(&mut self) -> Result<(), ArenaAllocError> {
131-
let new_arena = Arena::try_init(self.arena_size, 16)?;
157+
/// Initialize a fresh arena, attempting to reuse a recycled one first.
158+
pub fn initialize_new_arena(
159+
&mut self,
160+
required_alignment: usize,
161+
) -> Result<(), ArenaAllocError> {
162+
let alignment = self.min_alignment.max(required_alignment);
163+
164+
// Check the recycle list first to avoid an OS allocation.
165+
if self.recycled_count > 0 {
166+
self.recycled_count -= 1;
167+
if let Some(recycled) = self.recycled_arenas[self.recycled_count].take() {
168+
// arena.reset() was already called when it was parked.
169+
// Only reuse if its original alignment satisfies the current requirement,
170+
// otherwise drop it and fall through to a fresh OS allocation.
171+
if recycled.layout.align() >= alignment {
172+
self.arenas.push_front(recycled);
173+
return Ok(());
174+
}
175+
}
176+
}
177+
178+
let new_arena = Arena::try_init(self.arena_size, alignment)?;
132179
self.arenas.push_front(new_arena);
133180
Ok(())
134181
}
@@ -138,8 +185,14 @@ impl<'alloc> ArenaAllocator<'alloc> {
138185
}
139186

140187
pub fn drop_dead_arenas(&mut self) {
141-
for dead_arenas in self.arenas.extract_if(|a| a.run_drop_check()) {
142-
drop(dead_arenas)
188+
for arena in self.arenas.extract_if(|a| a.run_drop_check()) {
189+
if self.recycled_count < MAX_RECYCLED_ARENAS {
190+
// Reset in place and park in the reserve.
191+
arena.reset();
192+
self.recycled_arenas[self.recycled_count] = Some(arena);
193+
self.recycled_count += 1;
194+
}
195+
// else: arena drops here, returning memory to the OS
143196
}
144197
}
145198

oscars/src/alloc/arena2/tests.rs

Lines changed: 124 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -89,6 +89,71 @@ fn arc_drop() {
8989
assert_eq!(allocator.arenas_len(), 0);
9090
}
9191

92+
#[test]
93+
fn recycled_arena_avoids_realloc() {
94+
let mut allocator = ArenaAllocator::default().with_arena_size(512);
95+
96+
let mut ptrs = Vec::new();
97+
for i in 0..16 {
98+
ptrs.push(allocator.try_alloc(i).unwrap().as_ptr());
99+
}
100+
assert_eq!(allocator.arenas_len(), 1);
101+
// heap_size counts only live arenas, so capture it while one is active.
102+
let heap_while_live = allocator.heap_size();
103+
assert_eq!(heap_while_live, 512);
104+
105+
for mut ptr in ptrs {
106+
unsafe { ptr.as_mut().mark_dropped() };
107+
}
108+
allocator.drop_dead_arenas();
109+
110+
// After recycling, the arena is parked, no live arenas, so heap_size is 0.
111+
assert_eq!(allocator.arenas_len(), 0);
112+
assert_eq!(allocator.heap_size(), 0);
113+
// recycled_count == 1 proves the arena was parked in the recycle slot, not freed to the OS.
114+
assert_eq!(allocator.recycled_count, 1);
115+
116+
// Allocate again, must reuse the recycled arena without growing OS footprint.
117+
// heap_size returns to the same value as when a live arena was present.
118+
for i in 16..32 {
119+
let _ = allocator.try_alloc(i).unwrap();
120+
}
121+
assert_eq!(allocator.arenas_len(), 1);
122+
assert_eq!(allocator.heap_size(), heap_while_live);
123+
// recycled_count == 0 proves the recycled slot was consumed rather than a new OS allocation.
124+
assert_eq!(allocator.recycled_count, 0);
125+
}
126+
127+
#[test]
128+
fn max_recycled_cap_respected() {
129+
let mut allocator = ArenaAllocator::default().with_arena_size(128);
130+
131+
let mut ptrs_per_arena: Vec<Vec<NonNull<ArenaHeapItem<u64>>>> = Vec::new();
132+
133+
for _ in 0..5 {
134+
let mut ptrs = Vec::new();
135+
let target_len = allocator.arenas_len() + 1;
136+
while allocator.arenas_len() < target_len {
137+
ptrs.push(allocator.try_alloc(0u64).unwrap().as_ptr());
138+
}
139+
ptrs_per_arena.push(ptrs);
140+
}
141+
assert_eq!(allocator.arenas_len(), 5);
142+
143+
for ptrs in ptrs_per_arena {
144+
for mut ptr in ptrs {
145+
unsafe { ptr.as_mut().mark_dropped() };
146+
}
147+
}
148+
149+
allocator.drop_dead_arenas();
150+
151+
assert_eq!(allocator.arenas_len(), 0);
152+
assert_eq!(allocator.heap_size(), 0);
153+
// The recycled list holds exactly max_recycled pages.
154+
assert_eq!(allocator.recycled_count, 4);
155+
}
156+
92157
// === test for TaggedPtr::as_ptr === //
93158

94159
// `TaggedPtr::as_ptr` must use `addr & !MASK` to unconditionally clear the high
@@ -119,3 +184,62 @@ fn as_ptr_clears_not_flips_tag_bit() {
119184
ptr_a.as_mut().mark_dropped();
120185
}
121186
}
187+
188+
// === test for Dynamic Alignment === //
189+
190+
#[test]
191+
fn test_over_aligned_type() {
192+
#[repr(C, align(512))]
193+
struct HighlyAligned {
194+
_data: [u8; 128],
195+
}
196+
197+
let mut allocator = ArenaAllocator::default().with_arena_size(4096);
198+
let ptr = allocator
199+
.try_alloc(HighlyAligned { _data: [0; 128] })
200+
.unwrap();
201+
202+
let addr = ptr.as_ptr().as_ptr() as usize;
203+
assert_eq!(addr % 512, 0);
204+
assert_eq!(allocator.arenas_len(), 1);
205+
}
206+
207+
#[test]
208+
fn test_alignment_upgrade_after_small_alloc() {
209+
#[repr(C, align(512))]
210+
struct BigAlign([u8; 16]);
211+
212+
let mut allocator = ArenaAllocator::default().with_arena_size(4096);
213+
214+
// force the first arena to use 8-byte alignment
215+
let _small = allocator.try_alloc(0u8).unwrap();
216+
assert_eq!(allocator.arenas_len(), 1);
217+
218+
let ptr = allocator.try_alloc(BigAlign([0; 16])).unwrap();
219+
220+
let addr = ptr.as_ptr().as_ptr() as usize;
221+
assert_eq!(addr % 512, 0);
222+
assert_eq!(allocator.arenas_len(), 2);
223+
}
224+
225+
#[test]
226+
fn test_alignment_upgrade_on_full_arena() {
227+
#[repr(C, align(512))]
228+
struct BigAlign([u8; 16]);
229+
230+
let mut allocator = ArenaAllocator::default().with_arena_size(4096);
231+
232+
// fill the first arena
233+
let mut count = 0usize;
234+
while allocator.arenas_len() < 2 {
235+
let _ = allocator.try_alloc(0u64).unwrap();
236+
count += 1;
237+
assert!(count < 1024);
238+
}
239+
240+
let ptr = allocator.try_alloc(BigAlign([0; 16])).unwrap();
241+
242+
let addr = ptr.as_ptr().as_ptr() as usize;
243+
assert_eq!(addr % 512, 0);
244+
assert_eq!(allocator.arenas_len(), 3);
245+
}

0 commit comments

Comments
 (0)