Skip to content

Commit 3bd9123

Browse files
committed
Support #![no_std] for the zerogc API
An implementation doesn't necessarily require the stdlib or even a system allocator. However, as a matter of practice, "zerogc-simple" currently requires both.
1 parent 9e256b0 commit 3bd9123

File tree

7 files changed

+106
-77
lines changed

7 files changed

+106
-77
lines changed

Cargo.toml

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,4 +18,16 @@ indexmap = { version = "1.4", optional = true }
1818
members = ["libs/simple", "libs/derive"]
1919

2020
[profile.dev]
21-
opt-level = 1
21+
opt-level = 1
22+
23+
[features]
24+
default = ["std"]
25+
# Depend on the standard library (optional)
26+
#
27+
# This implements tracing
28+
std = []
29+
# Depend on `extern crate alloc` in addition to the Rust `core`
30+
# This is implied by using the standard library (feature="std")
31+
#
32+
# This implements `Trace` for `Box` and collections like `Vec`
33+
alloc = []

src/cell.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
//! for fields that are wrapped in a [GcCell].
1414
//! Just mark the field with `#[zerogc(mutable(public))]`
1515
//! and it'll generate a safe wrapper.
16-
use std::cell::Cell;
16+
use core::cell::Cell;
1717

1818
use crate::{GcSafe, Trace, GcVisitor, NullTrace, TraceImmutable, GcDirectBarrier,};
1919

src/lib.rs

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
const_panic, // RFC 2345 - Const asserts
44
)]
55
#![deny(missing_docs)]
6+
#![cfg_attr(not(feature = "std"), no_std)]
67
//! Zero overhead tracing garbage collection for rust,
78
//! by abusing the borrow checker.
89
//!
@@ -15,13 +16,16 @@
1516
//! 6. Collection can only happen with an explicit `safepoint` call and has no overhead between these calls,
1617
//! 7. API supports moving objects (allowing copying/generational GCs)
1718
19+
#![cfg(any(feature = "alloc", feature = "std"))]
20+
extern crate alloc;
21+
1822
/*
1923
* I want this library to use 'mostly' stable features,
2024
* unless there's good justification to use an unstable feature.
2125
*/
22-
use std::mem;
23-
use std::ops::{Deref, DerefMut};
24-
use std::fmt::Debug;
26+
use core::mem;
27+
use core::ops::{Deref, DerefMut};
28+
use core::fmt::Debug;
2529

2630
#[macro_use]
2731
mod manually_traced;
@@ -558,7 +562,7 @@ unsafe impl<T> TraceImmutable for AssumeNotTraced<T> {
558562
unsafe impl<T> NullTrace for AssumeNotTraced<T> {}
559563
/// No tracing implies GcSafe
560564
unsafe impl<T> GcSafe for AssumeNotTraced<T> {
561-
const NEEDS_DROP: bool = std::mem::needs_drop::<T>();
565+
const NEEDS_DROP: bool = core::mem::needs_drop::<T>();
562566
}
563567
unsafe_gc_brand!(AssumeNotTraced, T);
564568

src/manually_traced/core.rs

Lines changed: 60 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,10 @@
44
//!
55
//! `RefCell` and `Cell` are intentionally ignored and do not have implementations.
66
//! Some collectors may need write barriers to protect their internals.
7+
use core::num::Wrapping;
78

8-
use crate::{Trace, GcSafe, GcVisitor, NullTrace, GcBrand, GcSystem, TraceImmutable};
9+
use crate::prelude::*;
10+
use crate::GcDirectBarrier;
911

1012
macro_rules! trace_tuple {
1113
{ $($param:ident)* } => {
@@ -113,7 +115,7 @@ macro_rules! trace_array {
113115
}
114116
unsafe impl<T: $crate::NullTrace> $crate::NullTrace for [T; $size] {}
115117
unsafe impl<T: GcSafe> GcSafe for [T; $size] {
116-
const NEEDS_DROP: bool = std::mem::needs_drop::<T>();
118+
const NEEDS_DROP: bool = core::mem::needs_drop::<T>();
117119
}
118120
unsafe impl<'new_gc, S: GcSystem, T> $crate::GcBrand<'new_gc, S> for [T; $size]
119121
where S: GcSystem, T: GcBrand<'new_gc, S>,
@@ -203,5 +205,60 @@ unsafe impl<T: TraceImmutable> TraceImmutable for [T] {
203205
}
204206
unsafe impl<T: NullTrace> NullTrace for [T] {}
205207
unsafe impl<T: GcSafe> GcSafe for [T] {
206-
const NEEDS_DROP: bool = std::mem::needs_drop::<T>();
208+
const NEEDS_DROP: bool = core::mem::needs_drop::<T>();
207209
}
210+
211+
unsafe impl<T: Trace> Trace for Option<T> {
212+
const NEEDS_TRACE: bool = T::NEEDS_TRACE;
213+
214+
#[inline]
215+
fn visit<V: GcVisitor>(&mut self, visitor: &mut V) -> Result<(), V::Err> {
216+
match *self {
217+
None => Ok(()),
218+
Some(ref mut value) => visitor.visit(value),
219+
}
220+
}
221+
}
222+
unsafe impl<T: TraceImmutable> TraceImmutable for Option<T> {
223+
#[inline]
224+
fn visit_immutable<V: GcVisitor>(&self, visitor: &mut V) -> Result<(), <V as GcVisitor>::Err> {
225+
match *self {
226+
None => Ok(()),
227+
Some(ref value) => visitor.visit_immutable(value),
228+
}
229+
}
230+
}
231+
unsafe impl<T: NullTrace> NullTrace for Option<T> {}
232+
unsafe impl<T: GcSafe> GcSafe for Option<T> {
233+
const NEEDS_DROP: bool = T::NEEDS_DROP;
234+
}
235+
unsafe impl<'gc, OwningRef, V> GcDirectBarrier<'gc, OwningRef> for Option<V>
236+
where V: GcDirectBarrier<'gc, OwningRef> {
237+
#[inline]
238+
unsafe fn write_barrier(&self, owner: &OwningRef, start_offset: usize) {
239+
// Implementing direct write is safe because we store our value inline
240+
match *self {
241+
None => { /* Nothing to trigger the barrier for :) */ },
242+
Some(ref value) => {
243+
/*
244+
* We must manually compute the offset
245+
* Null pointer-optimized types will have offset of zero,
246+
* while other types may not
247+
*/
248+
let value_offset = (value as *const V as usize) -
249+
(self as *const Self as usize);
250+
value.write_barrier(owner, start_offset + value_offset)
251+
},
252+
}
253+
}
254+
}
255+
unsafe_gc_brand!(Option, T);
256+
257+
// We can trace `Wrapping` by simply tracing its interior
258+
unsafe_trace_deref!(Wrapping, T; immut = false; |wrapping| &mut wrapping.0);
259+
unsafe impl<T: TraceImmutable> TraceImmutable for Wrapping<T> {
260+
#[inline]
261+
fn visit_immutable<V: GcVisitor>(&self, visitor: &mut V) -> Result<(), V::Err> {
262+
visitor.visit_immutable(&self.0)
263+
}
264+
}

src/manually_traced/mod.rs

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -180,7 +180,7 @@ macro_rules! unsafe_trace_deref {
180180
/// We trust ourselves to not do anything bad as long as our paramaters don't
181181
unsafe impl<$($param),*> GcSafe for $target<$($param),*>
182182
where $($param: GcSafe + TraceImmutable),* {
183-
const NEEDS_DROP: bool = std::mem::needs_drop::<Self>();
183+
const NEEDS_DROP: bool = core::mem::needs_drop::<Self>();
184184
}
185185
};
186186
($target:ident, $($param:ident),*; immut = false; |$value:ident| $extract:expr) => {
@@ -202,7 +202,7 @@ macro_rules! unsafe_trace_deref {
202202
/// We trust ourselves to not do anything bad as long as our paramaters don't
203203
unsafe impl<$($param),*> GcSafe for $target<$($param),*>
204204
where $($param: GcSafe),* {
205-
const NEEDS_DROP: bool = std::mem::needs_drop::<Self>();
205+
const NEEDS_DROP: bool = core::mem::needs_drop::<Self>();
206206
}
207207
};
208208
}
@@ -304,7 +304,7 @@ macro_rules! unsafe_trace_primitive {
304304
unsafe impl $crate::NullTrace for $target {}
305305
/// No drop/custom behavior -> GcSafe
306306
unsafe impl GcSafe for $target {
307-
const NEEDS_DROP: bool = std::mem::needs_drop::<$target>();
307+
const NEEDS_DROP: bool = core::mem::needs_drop::<$target>();
308308
}
309309
unsafe impl<'gc, OwningRef> $crate::GcDirectBarrier<'gc, OwningRef> for $target {
310310
#[inline(always)]
@@ -363,6 +363,9 @@ macro_rules! unsafe_gc_brand {
363363
}
364364

365365
mod core;
366+
#[cfg(any(feature = "alloc", feature = "std"))]
367+
mod stdalloc;
368+
#[cfg(feature = "std")]
366369
mod stdlib;
367370
#[cfg(feature = "indexmap")]
368371
mod indexmap;

src/manually_traced/stdalloc.rs

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
//! Implementations for types in the standard `alloc` crate
2+
//!
3+
//! These can be used in `#![no_std]` crates without requiring
4+
//! the entire standard library.
5+
use alloc::rc::Rc;
6+
use alloc::sync::Arc;
7+
use alloc::vec::Vec;
8+
use alloc::boxed::Box;
9+
10+
use crate::prelude::*;
11+
12+
// NOTE: Delegate to slice to avoid code duplication
13+
unsafe_trace_deref!(Vec, target = { [T] }; T);
14+
unsafe_trace_deref!(Box, target = T);
15+
// We can only trace `Rc` and `Arc` if the inner type implements `TraceImmutable`
16+
unsafe_trace_deref!(Rc, T; immut = required; |rc| &**rc);
17+
unsafe_trace_deref!(Arc, T; immut = required; |arc| &**arc);

src/manually_traced/stdlib.rs

Lines changed: 1 addition & 65 deletions
Original file line numberDiff line numberDiff line change
@@ -3,27 +3,9 @@
33
//! Types that are in `libcore` and are `#![no_std]` should go in the core module,
44
//! but anything that requires the rest of the stdlib (including collections and allocations),
55
//! should go in this module.
6+
use crate::prelude::*;
67

7-
use crate::{Trace, GcSafe, GcVisitor, TraceImmutable, NullTrace, GcBrand, GcSystem, GcDirectBarrier};
88
use std::collections::{HashMap, HashSet};
9-
use std::rc::Rc;
10-
use std::sync::Arc;
11-
use std::num::Wrapping;
12-
13-
// NOTE: Delegate to slice to avoid code duplication
14-
unsafe_trace_deref!(Vec, target = { [T] }; T);
15-
unsafe_trace_deref!(Box, target = T);
16-
// We can only trace `Rc` and `Arc` if the inner type implements `TraceImmutable`
17-
unsafe_trace_deref!(Rc, T; immut = required; |rc| &**rc);
18-
unsafe_trace_deref!(Arc, T; immut = required; |arc| &**arc);
19-
// We can trace `Wrapping` by simply tracing its interior
20-
unsafe_trace_deref!(Wrapping, T; immut = false; |wrapping| &mut wrapping.0);
21-
unsafe impl<T: TraceImmutable> TraceImmutable for Wrapping<T> {
22-
#[inline]
23-
fn visit_immutable<V: GcVisitor>(&self, visitor: &mut V) -> Result<(), V::Err> {
24-
visitor.visit_immutable(&self.0)
25-
}
26-
}
279

2810
unsafe_immutable_trace_iterable!(HashMap<K, V>; element = { (&K, &V) });
2911
unsafe impl<K: TraceImmutable, V: Trace> Trace for HashMap<K, V> {
@@ -64,49 +46,3 @@ unsafe impl<V: TraceImmutable> Trace for HashSet<V> {
6446
}
6547
}
6648
unsafe_gc_brand!(HashSet, immut = required; V);
67-
68-
unsafe impl<T: Trace> Trace for Option<T> {
69-
const NEEDS_TRACE: bool = T::NEEDS_TRACE;
70-
71-
#[inline]
72-
fn visit<V: GcVisitor>(&mut self, visitor: &mut V) -> Result<(), V::Err> {
73-
match *self {
74-
None => Ok(()),
75-
Some(ref mut value) => visitor.visit(value),
76-
}
77-
}
78-
}
79-
unsafe impl<T: TraceImmutable> TraceImmutable for Option<T> {
80-
#[inline]
81-
fn visit_immutable<V: GcVisitor>(&self, visitor: &mut V) -> Result<(), <V as GcVisitor>::Err> {
82-
match *self {
83-
None => Ok(()),
84-
Some(ref value) => visitor.visit_immutable(value),
85-
}
86-
}
87-
}
88-
unsafe impl<T: NullTrace> NullTrace for Option<T> {}
89-
unsafe impl<T: GcSafe> GcSafe for Option<T> {
90-
const NEEDS_DROP: bool = T::NEEDS_DROP;
91-
}
92-
unsafe impl<'gc, OwningRef, V> GcDirectBarrier<'gc, OwningRef> for Option<V>
93-
where V: GcDirectBarrier<'gc, OwningRef> {
94-
#[inline]
95-
unsafe fn write_barrier(&self, owner: &OwningRef, start_offset: usize) {
96-
// Implementing direct write is safe because we store our value inline
97-
match *self {
98-
None => { /* Nothing to trigger the barrier for :) */ },
99-
Some(ref value) => {
100-
/*
101-
* We must manually compute the offset
102-
* Null pointer-optimized types will have offset of zero,
103-
* while other types may not
104-
*/
105-
let value_offset = (value as *const V as usize) -
106-
(self as *const Self as usize);
107-
value.write_barrier(owner, start_offset + value_offset)
108-
},
109-
}
110-
}
111-
}
112-
unsafe_gc_brand!(Option, T);

0 commit comments

Comments
 (0)