
Commit 6ade4db: Add RawRc type
1 parent: c45e644

3 files changed: +394 -0 lines

library/alloc/src/raw_rc/mod.rs (1 addition, 0 deletions)

```diff
@@ -66,6 +66,7 @@ use core::cell::UnsafeCell;
 use core::mem;
 use core::sync::atomic::Atomic;
 
+mod raw_rc;
 mod raw_weak;
 mod rc_alloc;
 mod rc_layout;
```
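Everything in the new `raw_rc.rs` file below is generic over a `RefCounter` trait defined in `raw_rc/mod.rs`, which is not part of this diff. For orientation, the call sites below suggest roughly the following shape; this is an inferred sketch, not the actual definition:

```rust
use core::cell::UnsafeCell;

// Inferred from the call sites in `raw_rc.rs` below; the real definitions
// live in `library/alloc/src/raw_rc/mod.rs` and may differ.
enum MakeMutStrategy {
    /// Sole strong owner, but weak references remain: move the value out.
    Move,
    /// Multiple strong owners: clone the value into a fresh allocation.
    Clone,
}

trait RefCounter {
    /// Views a raw counter as `Self` (e.g. a plain cell for `Rc`-style
    /// counting, an atomic for `Arc`-style counting).
    unsafe fn from_raw_counter(counter: &UnsafeCell<usize>) -> &Self;

    /// Increments the count.
    fn increment(&self);

    /// Decrements the count, returning `true` if it reached zero.
    fn decrement(&self) -> bool;

    /// Increments the weak count as part of creating a new weak reference.
    fn downgrade_increment_weak(&self);

    /// Whether `strong` and `weak` together indicate a unique owner.
    unsafe fn is_unique(strong: &Self, weak: &Self) -> bool;

    /// Decides how `make_mut` must obtain uniqueness; `None` means the
    /// value is already uniquely owned.
    fn make_mut(strong: &Self, weak: &Self) -> Option<MakeMutStrategy>;
}
```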

library/alloc/src/raw_rc/raw_rc.rs (new file: 357 additions, 0 deletions)

```rust
use core::alloc::Allocator;
use core::cell::UnsafeCell;
#[cfg(not(no_global_oom_handling))]
use core::clone::CloneToUninit;
use core::marker::PhantomData;
#[cfg(not(no_global_oom_handling))]
use core::mem::{self, DropGuard};
#[cfg(not(no_global_oom_handling))]
use core::ops::DerefMut;
#[cfg(not(no_global_oom_handling))]
use core::ptr;
use core::ptr::NonNull;

#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::MakeMutStrategy;
use crate::raw_rc::RefCounter;
#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::raw_weak;
use crate::raw_rc::raw_weak::RawWeak;
#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::rc_alloc;
#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::rc_layout::RcLayout;
use crate::raw_rc::rc_value_pointer::RcValuePointer;

/// Decrements the strong reference count in a reference-counted allocation whose value object is
/// pointed to by `value_ptr`.
#[inline]
unsafe fn decrement_strong_ref_count<R>(value_ptr: RcValuePointer) -> bool
where
    R: RefCounter,
{
    unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).decrement() }
}

/// Increments the strong reference count in a reference-counted allocation whose value object is
/// pointed to by `value_ptr`.
#[inline]
unsafe fn increment_strong_ref_count<R>(value_ptr: RcValuePointer)
where
    R: RefCounter,
{
    unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).increment() };
}

#[inline]
unsafe fn is_unique<R>(value_ptr: RcValuePointer) -> bool
where
    R: RefCounter,
{
    let ref_counts = unsafe { value_ptr.ref_counts_ptr().as_ref() };

    unsafe {
        R::is_unique(R::from_raw_counter(&ref_counts.strong), R::from_raw_counter(&ref_counts.weak))
    }
}

/// Base implementation of a strong pointer. `RawRc` does not implement `Drop`; users should call
/// `RawRc::drop` manually to drop this object.
#[repr(transparent)]
pub(crate) struct RawRc<T, A>
where
    T: ?Sized,
{
    /// A `RawRc` is just a `RawWeak` that additionally owns one strong reference count. The weak
    /// pointer is always non-dangling.
    weak: RawWeak<T, A>,

    // Defines the ownership of `T` for drop-check.
    _phantom_data: PhantomData<T>,
}

impl<T, A> RawRc<T, A>
where
    T: ?Sized,
{
    /// # Safety
    ///
    /// - `ptr` points to a value inside a reference-counted allocation.
    /// - The allocation can be freed by `A::default()`.
    pub(crate) unsafe fn from_raw(ptr: NonNull<T>) -> Self
    where
        A: Default,
    {
        unsafe { Self::from_raw_parts(ptr, A::default()) }
    }

    /// # Safety
    ///
    /// - `ptr` points to a value inside a reference-counted allocation.
    /// - The allocation can be freed by `alloc`.
    pub(crate) unsafe fn from_raw_parts(ptr: NonNull<T>, alloc: A) -> Self {
        unsafe { Self::from_weak(RawWeak::from_raw_parts(ptr, alloc)) }
    }

    /// # Safety
    ///
    /// `weak` must have at least one unowned strong reference count. The newly created `RawRc`
    /// takes ownership of exactly one strong reference count.
    pub(super) unsafe fn from_weak(weak: RawWeak<T, A>) -> Self {
        Self { weak, _phantom_data: PhantomData }
    }

    pub(crate) fn allocator(&self) -> &A {
        &self.weak.allocator()
    }

    pub(crate) fn as_ptr(&self) -> NonNull<T> {
        self.weak.as_ptr()
    }

    pub(crate) unsafe fn cast<U>(self) -> RawRc<U, A> {
        unsafe { RawRc::from_weak(self.weak.cast()) }
    }

    #[inline]
    pub(crate) unsafe fn cast_with<U, F>(self, f: F) -> RawRc<U, A>
    where
        U: ?Sized,
        F: FnOnce(NonNull<T>) -> NonNull<U>,
    {
        unsafe { RawRc::from_weak(self.weak.cast_with(f)) }
    }

    #[inline]
    pub(crate) unsafe fn clone<R>(&self) -> Self
    where
        A: Clone,
        R: RefCounter,
    {
        unsafe {
            increment_strong_ref_count::<R>(self.value_ptr());

            Self::from_raw_parts(self.weak.as_ptr(), self.allocator().clone())
        }
    }
```
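Nothing in `RawRc` runs automatically on scope exit: `clone` above bumps the strong count, and `drop` (below) releases it, but a higher-level pointer type has to call both explicitly. A minimal sketch of that calling convention, assuming the `Global` allocator and a hypothetical `Counter: RefCounter` implementation (the wrapper names are illustrative, not part of this commit):

```rust
use crate::alloc::Global;

// Illustrative wrapper; `Counter` stands in for a real `RefCounter` impl.
struct MyRc<T>(RawRc<T, Global>);

impl<T> Clone for MyRc<T> {
    fn clone(&self) -> Self {
        // SAFETY: `self.0` owns a strong count, so the allocation is live.
        MyRc(unsafe { self.0.clone::<Counter>() })
    }
}

impl<T> Drop for MyRc<T> {
    fn drop(&mut self) {
        // `RawRc` has no `Drop` impl; the wrapper must release its count.
        // SAFETY: each `MyRc` owns exactly one strong count.
        unsafe { self.0.drop::<Counter>() };
    }
}
```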
`raw_rc.rs`, continued:

```rust
    pub(crate) unsafe fn decrement_strong_count<R: RefCounter>(ptr: NonNull<T>)
    where
        A: Allocator + Default,
    {
        unsafe { Self::decrement_strong_count_in::<R>(ptr, A::default()) };
    }

    pub(crate) unsafe fn decrement_strong_count_in<R: RefCounter>(ptr: NonNull<T>, alloc: A)
    where
        A: Allocator,
    {
        unsafe { RawRc::from_raw_parts(ptr, alloc).drop::<R>() };
    }

    pub(crate) unsafe fn increment_strong_count<R: RefCounter>(ptr: NonNull<T>) {
        unsafe { increment_strong_ref_count::<R>(RcValuePointer::from_value_ptr(ptr.cast())) };
    }

    pub(crate) unsafe fn downgrade<R>(&self) -> RawWeak<T, A>
    where
        A: Clone,
        R: RefCounter,
    {
        unsafe fn inner<R>(value_ptr: RcValuePointer)
        where
            R: RefCounter,
        {
            unsafe {
                R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()).downgrade_increment_weak();
            }
        }

        unsafe {
            inner::<R>(self.value_ptr());

            RawWeak::from_raw_parts(self.weak.as_ptr(), self.allocator().clone())
        }
    }

    #[inline]
    pub(crate) unsafe fn drop<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };

        if is_last_strong_ref {
            unsafe { self.weak.assume_init_drop::<R>() }
        }
    }

    pub(crate) unsafe fn get_mut<R>(&mut self) -> Option<&mut T>
    where
        R: RefCounter,
    {
        unsafe fn inner<R>(value_ptr: RcValuePointer) -> Option<RcValuePointer>
        where
            R: RefCounter,
        {
            unsafe { is_unique::<R>(value_ptr) }.then_some(value_ptr)
        }

        let (ptr, metadata) = self.weak.as_ptr().to_raw_parts();

        unsafe { inner::<R>(RcValuePointer::from_value_ptr(ptr)) }
            .map(|ptr| unsafe { NonNull::from_raw_parts(ptr.as_ptr(), metadata).as_mut() })
    }

    /// Returns a mutable reference to the contained value.
    ///
    /// # Safety
    ///
    /// No other active references to the contained value may exist, and no new references to the
    /// contained value may be acquired for the duration of the returned borrow.
    pub(crate) unsafe fn get_mut_unchecked(&mut self) -> &mut T {
        // SAFETY: The caller guarantees that we can access the contained value exclusively. Note
        // that we can't create mutable references that have access to the reference counters,
        // because the caller only guarantees exclusive access to the contained value, not to the
        // reference counters.
        unsafe { self.weak.as_ptr().as_mut() }
    }
```
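`get_mut` above hands out a mutable borrow only when `is_unique` reports unique ownership across both counters, while `get_mut_unchecked` shifts that proof entirely to the caller. Continuing the illustrative wrapper sketch:

```rust
impl<T> MyRc<T> {
    /// Checked mutable access: `None` whenever another strong or weak
    /// reference could still observe the value.
    fn get_mut(&mut self) -> Option<&mut T> {
        // SAFETY: `Counter` matches the counter type of this allocation.
        unsafe { self.0.get_mut::<Counter>() }
    }
}

// Expected behavior, mirroring `Rc::get_mut`:
//
//     let mut a = /* unique MyRc */;
//     assert!(a.get_mut().is_some()); // unique -> Some
//     let b = a.clone();
//     assert!(a.get_mut().is_none()); // shared -> None
```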
`raw_rc.rs`, continued:

```rust
    pub(crate) fn into_raw(self) -> NonNull<T> {
        self.weak.into_raw()
    }

    pub(crate) fn into_raw_parts(self) -> (NonNull<T>, A) {
        self.weak.into_raw_parts()
    }

    #[cfg(not(no_global_oom_handling))]
    pub(crate) unsafe fn make_mut<R>(&mut self) -> &mut T
    where
        T: CloneToUninit,
        A: Allocator + Clone,
        R: RefCounter,
    {
        /// Returns a drop guard that sets the pointer in `rc` to `ptr` on drop.
        ///
        /// # Safety
        ///
        /// - `ptr` must point to a valid reference-counted value that can be deallocated with the
        ///   allocator associated with `rc`.
        /// - The value pointed to by `ptr` must have an unowned strong reference count that `rc`
        ///   can take ownership of.
        unsafe fn set_rc_ptr_on_drop<'a, T, A>(
            rc: &'a mut RawRc<T, A>,
            ptr: NonNull<T>,
        ) -> impl DerefMut<Target = &'a mut RawRc<T, A>>
        where
            T: ?Sized,
        {
            DropGuard::new(rc, move |rc| unsafe { rc.weak.set_ptr(ptr) })
        }

        unsafe {
            let ref_counts = self.ref_counts();

            if let Some(strategy) = R::make_mut(
                R::from_raw_counter(&ref_counts.strong),
                R::from_raw_counter(&ref_counts.weak),
            ) {
                let rc_layout = RcLayout::from_value_ptr_unchecked(self.weak.as_ptr());

                match strategy {
                    MakeMutStrategy::Move => {
                        // `R::make_mut` has set the strong reference count to zero, so the `RawRc`
                        // object is essentially a `RawWeak` object whose value is still
                        // initialized. This means we are the only owner of the value, and we can
                        // safely move the value into a new allocation.

                        // This guard ensures the old `RawRc` object is dropped even if the
                        // allocation panics.
                        let guard = raw_weak::new_weak_guard::<T, A, R>(&mut self.weak);

                        let new_ptr = rc_alloc::allocate_with_bytes_in::<A, 1>(
                            guard.as_ptr().cast(),
                            &guard.allocator(),
                            rc_layout,
                        );

                        // No panic happened; defuse the guard.
                        mem::forget(guard);

                        let new_ptr = NonNull::from_raw_parts(
                            new_ptr.as_ptr(),
                            ptr::metadata(self.weak.as_ptr().as_ptr()),
                        );

                        // Ensure the value pointer in `self` is updated to `new_ptr`.
                        let mut update_ptr_on_drop = set_rc_ptr_on_drop(self, new_ptr);

                        // `MakeMutStrategy::Move` guarantees that the strong count is zero, and we
                        // have copied the value into a new allocation, so the original `RawRc` is
                        // now essentially a `RawWeak` object; we can call the `RawWeak` destructor
                        // to finish the cleanup.
                        update_ptr_on_drop.weak.drop_unchecked::<R>();
                    }
                    MakeMutStrategy::Clone => {
                        // There are multiple owners of the value, so we need to clone the value
                        // into a new allocation.

                        let new_ptr = rc_alloc::allocate_with_in::<A, _, 1>(
                            &self.allocator(),
                            rc_layout,
                            |dst_ptr| {
                                T::clone_to_uninit(
                                    self.as_ptr().as_ref(),
                                    dst_ptr.as_ptr().as_ptr().cast(),
                                )
                            },
                        );

                        let new_ptr = NonNull::from_raw_parts(
                            new_ptr.as_ptr(),
                            ptr::metadata(self.weak.as_ptr().as_ptr()),
                        );

                        // Ensure the value pointer in `self` is updated to `new_ptr`.
                        let mut update_ptr_on_drop = set_rc_ptr_on_drop(self, new_ptr);

                        // Manually drop the old `RawRc`.
                        update_ptr_on_drop.drop::<R>();
                    }
                }
            }

            self.get_mut_unchecked()
        }
    }

    pub(crate) fn ptr_eq(&self, other: &Self) -> bool {
        RawWeak::ptr_eq(&self.weak, &other.weak)
    }

    pub(crate) fn ptr_ne(&self, other: &Self) -> bool {
        RawWeak::ptr_ne(&self.weak, &other.weak)
    }

    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn ref_counts(&self) -> &crate::raw_rc::RefCounts {
        unsafe { self.weak.ref_counts_unchecked() }
    }

    pub(crate) fn strong_count(&self) -> &UnsafeCell<usize> {
        unsafe { self.weak.strong_count_unchecked() }
    }

    pub(crate) fn weak_count(&self) -> &UnsafeCell<usize> {
        unsafe { self.weak.weak_count_unchecked() }
    }

    #[inline]
    fn value_ptr(&self) -> RcValuePointer {
        // SAFETY: `self.weak` is guaranteed to be non-dangling.
        unsafe { self.weak.value_ptr_unchecked() }
    }
}
```
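`make_mut` above is the clone-on-write path: `R::make_mut` inspects both counters and reports `None` when the value is already uniquely owned, `MakeMutStrategy::Move` when only this strong reference remains (the value is moved into a fresh allocation, detaching any surviving weak references), or `MakeMutStrategy::Clone` when other strong owners exist. A hedged sketch of how a wrapper could surface it, reusing the illustrative `MyRc`/`Counter` names:

```rust
impl<T: Clone> MyRc<T> {
    /// Clone-on-write access in the style of `Rc::make_mut`.
    fn make_mut(&mut self) -> &mut T {
        // SAFETY: we own a strong count, and `Counter` matches the counter
        // type this allocation was created with.
        unsafe { self.0.make_mut::<Counter>() }
    }
}
```

Separating the policy (`R::make_mut`) from the mechanics (allocate, move or clone, clean up the old allocation) is presumably what lets one `RawRc` body serve both atomic and non-atomic reference counting, with only the `RefCounter` implementation differing.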
