
Commit c526820

Fix GcArray SEGFAULTs with the standard allocator
Using the small-object-arenas allocator still causes a SEGFAULT, unfortunately. However, I've made significant progress ;)
1 parent: 8b5f68d

8 files changed: +356 -225 lines changed


Cargo.toml

Lines changed: 0 additions & 3 deletions
@@ -20,9 +20,6 @@ zerogc-derive = { path = "libs/derive", version = "0.2.0-alpha.3" }
 [workspace]
 members = ["libs/simple", "libs/derive", "libs/context"]
 
-[profile.dev]
-opt-level = 1
-
 [features]
 default = ["std"]
 # Depend on the standard library (optional)

libs/context/src/state/sync.rs

Lines changed: 2 additions & 2 deletions
@@ -429,13 +429,13 @@ pub(crate) trait SyncCollectorImpl: RawCollectorImpl<Manager=CollectionManager<S
                 unreachable!("cant free while collection is in progress")
             },
         }
-        // Now drop the Box
-        drop(Box::from_raw(raw));
         /*
          * Notify all threads waiting for contexts to be valid.
          * TODO: I think this is really only useful if we're waiting....
          */
         self.manager().valid_contexts_wait.notify_all();
+        // Now drop the Box
+        drop(Box::from_raw(raw));
     }
     /// Wait until the specified collection is finished
     ///
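The point of this reorder: threads woken through valid_contexts_wait may still examine state tied to the context being torn down, so the backing Box must outlive the notification. A minimal sketch of the pattern, assuming a hypothetical Registry type built on std's Mutex/Condvar (not zerogc's actual types):

use std::collections::HashSet;
use std::sync::{Condvar, Mutex};

struct Registry {
    valid: Mutex<HashSet<usize>>,
    valid_contexts_wait: Condvar,
}

impl Registry {
    fn free_context(&self, ctx: Box<usize>) {
        let id = *ctx;
        // 1. Remove the context from the shared state.
        self.valid.lock().unwrap().remove(&id);
        // 2. Wake everyone blocked waiting on the set of valid contexts.
        self.valid_contexts_wait.notify_all();
        // 3. Only now release the context's backing allocation.
        drop(ctx);
    }
}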

libs/simple/src/alloc.rs

Lines changed: 35 additions & 62 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,9 @@ pub const MAXIMUM_SMALL_WORDS: usize = 32;
 /// The alignment of elements in the arena
 pub const ARENA_ELEMENT_ALIGN: usize = std::mem::align_of::<GcHeader>();
 
-use crate::layout::{GcHeader};
+use crate::layout::{GcHeader, UnknownHeader};
 
-[inline]
+#[inline]
 pub const fn fits_small_object(layout: Layout) -> bool {
     layout.size() <= MAXIMUM_SMALL_WORDS * std::mem::size_of::<usize>()
         && layout.align() <= ARENA_ELEMENT_ALIGN
@@ -92,38 +92,11 @@ impl Drop for Chunk {
     }
 }
 
-/// We use zero as our marker value.
-///
-/// `GcHeader::type_info` is the first field of the header
-/// and it will never be null (its a reference).
-/// Therefore this marker will never conflict with a valid header.
-pub const FREE_SLOT_MARKER: usize = 0;
+/// A slot in the free list
 #[repr(C)]
 pub struct FreeSlot {
-    /// Marker for the slot, initialized to `FREE_SLOT_MARKER`
-    pub marker: usize,
     /// Pointer to the previous free slot
-    pub(crate) prev_free: Option<NonNull<MaybeFreeSlot>>,
-}
-#[repr(C)]
-pub(crate) union MaybeFreeSlot {
-    pub free: FreeSlot,
-    pub header: GcHeader,
-}
-
-impl MaybeFreeSlot {
-    #[inline]
-    pub unsafe fn is_free(&self) -> bool {
-        self.free.marker == FREE_SLOT_MARKER
-    }
-    #[inline]
-    pub unsafe fn mark_free(&mut self, prev: Option<NonNull<MaybeFreeSlot>>) {
-        debug_assert!(!self.is_free());
-        self.free = FreeSlot {
-            marker: FREE_SLOT_MARKER,
-            prev_free: prev
-        };
-    }
+    pub(crate) prev_free: Option<NonNull<FreeSlot>>,
 }
 pub const NUM_SMALL_ARENAS: usize = 15;
 const INITIAL_SIZE: usize = 512;
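For context on the deletion: the old MaybeFreeSlot union let sweeping code walk chunk memory in place and tell free slots apart from live objects by their first word, since a zero word can never be a valid type_info reference. A rough standalone reconstruction of that trick (GcHeader and TypeInfo are stand-ins, for illustration only):

use std::ptr::NonNull;

// Stand-ins: the real GcHeader begins with a non-null `type_info`
// reference, so a live header's first word is never zero.
#[derive(Clone, Copy)]
#[repr(C)]
struct TypeInfo { size: usize }

#[derive(Clone, Copy)]
#[repr(C)]
struct GcHeader { type_info: &'static TypeInfo }

/// Zero never collides with a valid `type_info` reference.
const FREE_SLOT_MARKER: usize = 0;

#[derive(Clone, Copy)]
#[repr(C)]
struct FreeSlot {
    marker: usize, // always FREE_SLOT_MARKER
    prev_free: Option<NonNull<MaybeFreeSlot>>,
}

#[repr(C)]
union MaybeFreeSlot {
    free: FreeSlot,
    header: GcHeader,
}

impl MaybeFreeSlot {
    unsafe fn is_free(&self) -> bool {
        // Both variants start at offset 0, so reading the first word
        // is meaningful regardless of which one is live.
        self.free.marker == FREE_SLOT_MARKER
    }
}

This commit removes the union entirely: freed objects are now pushed onto the free list explicitly via add_free (added below), which is consistent with the removal of the chunk-scanning for_each later in this diff, so nothing has to distinguish the two cases in place anymore.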
@@ -186,19 +159,19 @@ impl ArenaState {
         self.current_chunk.store(ptr);
     }
     #[inline]
-    fn alloc(&self, element_size: usize) -> NonNull<u8> {
+    fn alloc(&self, element_size: usize) -> NonNull<UnknownHeader> {
         unsafe {
             let chunk = &*self.current_chunk().as_ptr();
             match chunk.try_alloc(element_size) {
-                Some(header) => header,
+                Some(header) => header.cast(),
                 None => self.alloc_fallback(element_size)
             }
         }
     }
 
     #[cold]
     #[inline(never)]
-    fn alloc_fallback(&self, element_size: usize) -> NonNull<GcHeader> {
+    fn alloc_fallback(&self, element_size: usize) -> NonNull<UnknownHeader> {
         let mut chunks = self.lock_chunks();
         // Now that we hold the lock, check the current chunk again
         unsafe {
@@ -214,7 +187,7 @@ impl ArenaState {
             self.force_current_chunk(NonNull::from(&**chunks.last().unwrap()));
             self.current_chunk().as_ref()
                 .try_alloc(element_size).unwrap()
-                .cast::<u8>()
+                .cast::<UnknownHeader>()
         }
     }
 }
@@ -224,19 +197,32 @@ impl ArenaState {
 /// This is a lock-free linked list
 #[derive(Default)]
 pub(crate) struct FreeList {
-    next: AtomicCell<Option<NonNull<MaybeFreeSlot>>>
+    next: AtomicCell<Option<NonNull<FreeSlot>>>
 }
 impl FreeList {
+    pub(crate) unsafe fn add_free(&self, free: *mut UnknownHeader) {
+        let new_slot = free as *mut FreeSlot;
+        let mut next = self.next.load();
+        loop {
+            (*new_slot).prev_free = next;
+            match self.next.compare_exchange(next, Some(NonNull::new_unchecked(new_slot))) {
+                Ok(_) => break,
+                Err(actual_next) => {
+                    next = actual_next;
+                }
+            }
+        }
+    }
     #[inline]
-    pub(crate) fn next_free(&self) -> Option<NonNull<MaybeFreeSlot>> {
+    pub(crate) fn next_free(&self) -> Option<NonNull<FreeSlot>> {
         self.next.load()
     }
     #[inline]
-    pub(crate) unsafe fn set_next_free(&self, next: Option<NonNull<MaybeFreeSlot>>) {
+    pub(crate) unsafe fn set_next_free(&self, next: Option<NonNull<FreeSlot>>) {
         self.next.store(next)
     }
     #[inline]
-    fn take_free(&self) -> Option<NonNull<GcHeader>> {
+    fn take_free(&self) -> Option<NonNull<u8>> {
         loop {
             let next_free = match self.next.load() {
                 Some(free) => free,
@@ -246,13 +232,9 @@ impl FreeList {
         unsafe {
             if self.next.compare_exchange(
                 Some(next_free),
-                next_free.as_ref().free.prev_free
+                next_free.as_ref().prev_free
             ).is_err() { continue /* retry */ }
-            debug_assert_eq!(
-                next_free.as_ref().free.marker,
-                FREE_SLOT_MARKER
-            );
-            return Some(NonNull::from(&next_free.as_ref().header))
+            return Some(next_free.cast())
         }
     }
 }
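Taken together, add_free and take_free implement a Treiber-stack push and pop over the freed slots' own memory. A self-contained sketch of the same discipline, using std's AtomicPtr in place of crossbeam's AtomicCell (illustrative names, not the crate's code):

use std::sync::atomic::{AtomicPtr, Ordering};

// A freed object's own storage is reused as the list node.
#[repr(C)]
struct FreeSlot {
    prev_free: *mut FreeSlot, // null terminates the list
}

#[derive(Default)]
struct FreeList {
    next: AtomicPtr<FreeSlot>, // head of the stack; null when empty
}

impl FreeList {
    /// Push: point the new slot at the current head, then CAS it in.
    unsafe fn add_free(&self, slot: *mut FreeSlot) {
        let mut head = self.next.load(Ordering::Acquire);
        loop {
            (*slot).prev_free = head;
            match self.next.compare_exchange_weak(
                head, slot, Ordering::AcqRel, Ordering::Acquire,
            ) {
                Ok(_) => break,
                Err(actual) => head = actual, // lost the race: retry
            }
        }
    }

    /// Pop: swing the head to `head.prev_free`.
    unsafe fn take_free(&self) -> Option<*mut FreeSlot> {
        loop {
            let head = self.next.load(Ordering::Acquire);
            if head.is_null() {
                return None;
            }
            let prev = (*head).prev_free;
            if self.next.compare_exchange_weak(
                head, prev, Ordering::AcqRel, Ordering::Acquire,
            ).is_ok() {
                return Some(head);
            }
        }
    }
}

As with any such CAS loop, pop-then-repush patterns are ABA-prone in general; a production free list has to reason carefully about when slot memory may be reused.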
@@ -261,9 +243,13 @@ impl FreeList {
 pub struct SmallArena {
     pub(crate) element_size: usize,
     state: ArenaState,
-    pub(crate) free: FreeList
+    free: FreeList,
 }
+
 impl SmallArena {
+    pub(crate) unsafe fn add_free(&self, obj: *mut UnknownHeader) {
+        self.free.add_free(obj)
+    }
     #[cold] // Initialization is the slow path
     fn with_words(num_words: usize) -> SmallArena {
         assert!(num_words >= MINIMUM_WORDS);
@@ -276,27 +262,14 @@ impl SmallArena {
         }
     }
     #[inline]
-    pub(crate) fn alloc(&self) -> NonNull<u8> {
+    pub(crate) fn alloc(&self) -> NonNull<UnknownHeader> {
         // Check the free list
         if let Some(free) = self.free.take_free() {
-            let res = free.as_ptr().sub(match free.as_ref().type_info.layout {
-                Layout::new
-            })
+            free.cast()
         } else {
             self.state.alloc(self.element_size)
         }
     }
-    pub(crate) unsafe fn for_each<F: FnMut(*mut MaybeFreeSlot)>(&self, mut func: F) {
-        let chunks = self.state.lock_chunks();
-        for chunk in &*chunks {
-            let mut ptr = chunk.start;
-            let end = chunk.current();
-            while ptr < end {
-                func(ptr as *mut MaybeFreeSlot);
-                ptr = ptr.add(self.element_size);
-            }
-        }
-    }
 }
 macro_rules! arena_match {
     ($arenas:expr, $target:ident, max = $max:expr; $($size:pat => $num_words:literal @ $idx:expr),*) => {
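The rewritten alloc is a two-level policy: reuse a recycled slot when the free list has one, otherwise bump-allocate from the current chunk. Schematically (building on the FreeList/FreeSlot sketch above; bump_alloc is a hypothetical stand-in for ArenaState::alloc's chunk logic):

// Two-level small-object allocation, schematically.
unsafe fn arena_alloc(
    free: &FreeList,
    bump_alloc: impl FnOnce() -> *mut FreeSlot,
) -> *mut FreeSlot {
    match free.take_free() {
        Some(slot) => slot,   // fast path: recycled memory
        None => bump_alloc(), // slow path: fresh chunk memory
    }
}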
@@ -351,7 +324,7 @@ impl SmallArenaList {
         }
         // Divide round up
         let word_size = mem::size_of::<usize>();
-        let num_words = (small_object_size(layout) + (word_size - 1))
+        let num_words = (layout.size() + (word_size - 1))
             / word_size;
         self.find_raw(num_words)
     }
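Note the sizing change above: the lookup now rounds the layout's raw byte size up to whole words instead of going through small_object_size. The round-up itself, as a quick standalone illustration (words_for is a hypothetical helper, not in the crate):

/// Round a byte size up to whole machine words.
/// E.g. on a 64-bit target (word_size == 8), a 17-byte layout
/// needs (17 + 7) / 8 = 3 words.
fn words_for(size: usize) -> usize {
    let word_size = std::mem::size_of::<usize>();
    (size + (word_size - 1)) / word_size
}

fn main() {
    assert_eq!(words_for(17), 3); // assumes a 64-bit target
    assert_eq!(words_for(8), 1);
}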
