Skip to content

Commit 21a5e21

Browse files
committed
[simple] Add a "simple" single-threaded context implementation
This is much simpler, because it assumes there is only one active context. There is no need to lock because only a single active thread can use the garbage collector at a time. The most important outcome of this change is that it starts to separate the implementation of contexts from the mark/sweep collector internals. Eventually I'd like to separate out the context implementation for independent use. Ideally it would be shared alongside the generational collector (#14). There is no real performance change (for the single threaded benchmark): Standard "sync" contexts: binary_trees 21 46.5 sec, 385 MB New "simple" contexts: binary_trees 21 45.4 sec, 385 MB
1 parent 3bd9123 commit 21a5e21

File tree

8 files changed

+622
-290
lines changed

8 files changed

+622
-290
lines changed

libs/simple/Cargo.toml

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,9 @@ edition = "2018"
1212
zerogc = { path = "../..", version = "0.1.0" }
1313
once_cell = { version = "1.4.0", optional = true }
1414
# Concurrency
15+
# TODO: Make this optional for the single-threaded implementation
1516
parking_lot = { version = "0.10", features = ["nightly"] }
16-
crossbeam = "0.7"
17+
crossbeam = { version = "0.7" }
1718
# Logging
1819
slog = "2.5"
1920
# [Optional] Serde support
@@ -22,6 +23,7 @@ serde = { version = "1", optional = true }
2223
[features]
2324
default = [
2425
"small-object-arenas", # Without this, allocating small objects is slow
26+
"sync", # Thread-safety by default
2527
]
2628
# Use very fast dedicated arenas for small objects.
2729
# This makes allocation much faster
@@ -37,6 +39,11 @@ small-object-arenas = ["once_cell"]
3739
# This risks stack overflow at a possible performance gain
3840
# See commit 9a9634d68a4933d
3941
implicit-grey-stack = []
42+
# Allow multiple threads to access the garbage collector
43+
# by creating a separate context for each.
44+
#
45+
# This can increase overhead by requiring communication between threads.
46+
sync = []
4047

4148
[dev-dependencies]
4249
# Used for examples :)

libs/simple/examples/binary_trees.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ fn main() {
5858
o!("bench" => file!())
5959
);
6060
let collector = SimpleCollector::with_logger(logger);
61-
let mut gc = collector.create_context();
61+
let mut gc = collector.into_context();
6262
{
6363
let depth = max_depth + 1;
6464
let tree = bottom_up_tree(&gc, depth);

libs/simple/src/context/mod.rs

Lines changed: 242 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,242 @@
1+
//! The implementation of [::zerogc::CollectorContext] that is
2+
//! shared among both thread-safe and thread-unsafe code.
3+
4+
#[cfg(feature = "sync")]
5+
mod sync;
6+
#[cfg(not(feature = "sync"))]
7+
mod simple;
8+
#[cfg(feature = "sync")]
9+
pub use self::sync::*;
10+
#[cfg(not(feature = "sync"))]
11+
pub use self::simple::*;
12+
13+
use zerogc::prelude::*;
14+
use super::{SimpleCollector, RawSimpleCollector, DynTrace};
15+
use std::mem::ManuallyDrop;
16+
use std::ptr::NonNull;
17+
18+
19+
/// The current state of a context's interaction with the collector.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum ContextState {
    /// The context is active.
    ///
    /// Its contents are potentially being mutated,
    /// so the `shadow_stack` doesn't necessarily
    /// reflect the actual set of thread roots.
    ///
    /// New objects could be allocated that are not
    /// actually being tracked in the `shadow_stack`.
    Active,
    /// The context is waiting at a safepoint
    /// for a collection to complete.
    ///
    /// The mutating thread is blocked for the
    /// duration of the safepoint (until collection completes).
    ///
    /// Therefore, its `shadow_stack` is guaranteed to reflect
    /// the actual set of thread roots.
    SafePoint {
        /// The id of the collection we are waiting for
        collection_id: u64
    },
    /// The context is frozen.
    /// Allocation or mutation can't happen
    /// but the mutator thread isn't actually blocked.
    ///
    /// Unlike a safepoint, this is explicitly unfrozen at the
    /// user's discretion.
    ///
    /// Because no allocation or mutation can happen,
    /// its `shadow_stack` is guaranteed to
    /// accurately reflect the roots of the context.
    #[cfg_attr(not(feature = "sync"), allow(unused))] // TODO: Implement frozen for simple contexts?
    Frozen,
}
55+
impl ContextState {
56+
#[cfg_attr(not(feature = "sync"), allow(unused))] // TODO: Implement frozen for simple contexts?
57+
fn is_frozen(&self) -> bool {
58+
matches!(*self, ContextState::Frozen)
59+
}
60+
}
61+
62+
/*
63+
* These form a stack of contexts,
64+
 * which all share a pointer to the RawContext.
65+
* The raw context is implicitly bound to a single thread
66+
* and manages the state of all the contexts.
67+
*
68+
* https://llvm.org/docs/GarbageCollection.html#the-shadow-stack-gc
69+
* Essentially these objects maintain a shadow stack
70+
*
71+
* The pointer to the RawContext must be Arc, since the
72+
* collector maintains a weak reference to it.
73+
* I use double indirection with a `Rc` because I want
74+
* `recurse_context` to avoid the cost of atomic operations.
75+
*
76+
* SimpleCollectorContexts mirror the application stack.
77+
* They can be stack allocated inside `recurse_context`.
78+
* All we would need to do is internally track ownership of the original
79+
* context. The sub-collector in `recurse_context` is very clearly
80+
* restricted to the lifetime of the closure
81+
* which is a subset of the parent's lifetime.
82+
*
83+
 * We still couldn't be Send, since we use interior mutability
84+
* inside of RawContext that is not thread-safe.
85+
*/
86+
/// A handle to the collector, forming one link in the
/// stack of contexts described in the comment above.
pub struct SimpleCollectorContext {
    // Raw pointer (not a Box) because non-root handles created by
    // `recurse_context` share the same pointer without owning it.
    raw: *mut RawContext,
    /// Whether we are the root context
    ///
    /// Only the root actually owns the `Arc`
    /// and is responsible for dropping it
    root: bool
}
94+
impl SimpleCollectorContext {
    /// Create the sole context for a single-threaded ("simple") collector.
    ///
    /// ## Safety
    /// The caller must ensure this is the only context
    /// ever created for the collector.
    #[cfg(not(feature = "sync"))]
    pub(crate) unsafe fn from_collector(collector: &SimpleCollector) -> Self {
        SimpleCollectorContext {
            // Take ownership of the heap allocation out of the ManuallyDrop
            raw: Box::into_raw(ManuallyDrop::into_inner(
                RawContext::from_collector(collector.0.clone())
            )),
            root: true // We are the exclusive owner
        }
    }
    /// Register a new root context with a thread-safe ("sync") collector.
    ///
    /// ## Safety
    /// The caller must uphold whatever invariants
    /// `RawContext::register_new` requires.
    #[cfg(feature = "sync")]
    pub(crate) unsafe fn register_root(collector: &SimpleCollector) -> Self {
        SimpleCollectorContext {
            raw: Box::into_raw(ManuallyDrop::into_inner(
                RawContext::register_new(&collector.0)
            )),
            root: true, // We are responsible for unregistering
        }
    }
    /// Access the underlying collector this context belongs to.
    #[inline]
    pub(crate) fn collector(&self) -> &RawSimpleCollector {
        unsafe { &(*self.raw).collector }
    }
    /// Push `value` onto the shadow stack for the duration of `func`,
    /// popping it again before returning `func`'s result.
    ///
    /// The link itself lives on the *machine* stack (`new_link` is a local),
    /// which is sound because it is unlinked before this frame returns.
    /// NOTE(review): if `func` panics the link is never popped —
    /// presumably acceptable because collection can't run while
    /// unwinding, but worth confirming.
    ///
    /// ## Safety
    /// `value` must remain valid (and rooted) for the duration of `func`.
    #[inline(always)]
    unsafe fn with_shadow_stack<R, T: Trace>(
        &self, value: *mut &mut T, func: impl FnOnce() -> R
    ) -> R {
        let old_link = (*(*self.raw).shadow_stack.get()).last;
        let new_link = ShadowStackLink {
            element: NonNull::new_unchecked(
                // Erase the (borrowed) lifetime so the trait object
                // can be stored in the 'static-typed link
                std::mem::transmute::<
                    *mut dyn DynTrace,
                    *mut (dyn DynTrace + 'static)
                >(value as *mut dyn DynTrace)
            ),
            prev: old_link
        };
        (*(*self.raw).shadow_stack.get()).last = &new_link;
        let result = func();
        // Nothing should have pushed without popping in the meantime
        debug_assert_eq!(
            (*(*self.raw).shadow_stack.get()).last,
            &new_link
        );
        (*(*self.raw).shadow_stack.get()).last = new_link.prev;
        result
    }
    /// Slow path of `basic_safepoint`: root `element`,
    /// then actually wait for/perform the collection.
    #[cold]
    unsafe fn trigger_basic_safepoint<T: Trace>(&self, element: &mut &mut T) {
        self.with_shadow_stack(element, || {
            (*self.raw).trigger_safepoint();
        })
    }
}
147+
impl Drop for SimpleCollectorContext {
148+
#[inline]
149+
fn drop(&mut self) {
150+
if self.root {
151+
unsafe {
152+
self.collector().free_context(self.raw);
153+
}
154+
}
155+
}
156+
}
157+
unsafe impl GcContext for SimpleCollectorContext {
    type System = SimpleCollector;

    // Fast-path safepoint: only take the #[cold] slow path
    // if the collector actually wants to collect right now.
    #[inline]
    unsafe fn basic_safepoint<T: Trace>(&mut self, value: &mut &mut T) {
        debug_assert_eq!((*self.raw).state.get(), ContextState::Active);
        if (*self.raw).collector.should_collect() {
            self.trigger_basic_safepoint(value);
        }
        // Must be active again once the safepoint (if any) completes
        debug_assert_eq!((*self.raw).state.get(), ContextState::Active);
    }

    unsafe fn freeze(&mut self) {
        (*self.raw).collector.manager.freeze_context(&*self.raw);
    }

    unsafe fn unfreeze(&mut self) {
        (*self.raw).collector.manager.unfreeze_context(&*self.raw);
    }

    // Run `func` with `value` rooted on the shadow stack,
    // handing it a borrowed (non-owning) sub-context.
    #[inline]
    unsafe fn recurse_context<T, F, R>(&self, value: &mut &mut T, func: F) -> R
        where T: Trace, F: for<'gc> FnOnce(&'gc mut Self, &'gc mut T) -> R {
        debug_assert_eq!((*self.raw).state.get(), ContextState::Active);
        self.with_shadow_stack(value, || {
            let mut sub_context = ManuallyDrop::new(SimpleCollectorContext {
                /*
                 * safe to copy because we won't drop it
                 * Lifetime is guaranteed to be restricted to
                 * the closure.
                 */
                raw: self.raw,
                root: false /* don't drop our pointer!!! */
            });
            let result = func(&mut *sub_context, value);
            debug_assert!(!sub_context.root);
            // No need to run drop code on context.....
            std::mem::forget(sub_context);
            debug_assert_eq!((*self.raw).state.get(), ContextState::Active);
            result
        })
    }
}
200+
201+
/// It's not safe for a context to be sent across threads.
///
/// We use (thread-unsafe) interior mutability to maintain the
/// shadow stack. Since we could potentially be cloned via `safepoint_recurse!`,
/// implementing `Send` would allow another thread to obtain a
/// reference to our internal `&RefCell`. Further mutation/access
/// from both threads would be undefined behavior.
impl !Send for SimpleCollectorContext {}
209+
210+
//
211+
// Root tracking
212+
//
213+
214+
/// A single entry in the linked chain of shadow-stack roots.
// NOTE(review): #[repr(C)] presumably pins the field layout because
// these links are manipulated through raw pointers — confirm.
#[repr(C)]
#[derive(Debug)]
pub(crate) struct ShadowStackLink {
    /// The rooted object this entry keeps alive.
    pub element: NonNull<dyn DynTrace>,
    /// The previous link in the chain,
    /// or NULL if there isn't any
    pub prev: *const ShadowStackLink
}
222+
223+
/// The head of a context's shadow stack of GC roots.
///
/// See <https://llvm.org/docs/GarbageCollection.html#the-shadow-stack-gc>
#[derive(Clone, Debug)]
pub struct ShadowStack {
    /// The last element in the shadow stack,
    /// or NULL if it's empty
    pub(crate) last: *const ShadowStackLink
}
229+
impl ShadowStack {
230+
unsafe fn as_vec(&self) -> Vec<*mut dyn DynTrace> {
231+
let mut result: Vec<_> = self.reverse_iter().collect();
232+
result.reverse();
233+
result
234+
}
235+
#[inline]
236+
pub(crate) unsafe fn reverse_iter(&self) -> impl Iterator<Item=*mut dyn DynTrace> + '_ {
237+
std::iter::successors(
238+
self.last.as_ref(),
239+
|link| link.prev.as_ref()
240+
).map(|link| link.element.as_ptr())
241+
}
242+
}

0 commit comments

Comments
 (0)