
Commit e273cb2

Add RawWeak type
1 parent c483f03 commit e273cb2

2 files changed: +344, -0 lines changed

library/alloc/src/raw_rc/mod.rs

Lines changed: 1 addition & 0 deletions
@@ -66,6 +66,7 @@ use core::cell::UnsafeCell;
 use core::mem;
 use core::sync::atomic::Atomic;

+mod raw_weak;
 mod rc_alloc;
 mod rc_layout;
 mod rc_value_pointer;
library/alloc/src/raw_rc/raw_weak.rs (new file; name inferred from the `mod raw_weak;` declaration above)

Lines changed: 343 additions & 0 deletions
@@ -0,0 +1,343 @@
use core::alloc::Allocator;
use core::cell::UnsafeCell;
use core::mem::{self, DropGuard};
use core::num::NonZeroUsize;
use core::ptr::{self, NonNull};

use crate::raw_rc::rc_layout::RcLayout;
use crate::raw_rc::rc_value_pointer::RcValuePointer;
use crate::raw_rc::{RefCounter, RefCounts, rc_alloc};

// We choose `NonZeroUsize::MAX` as the address for dangling weak pointers because:
//
// - It does not point to any object that is stored inside a reference-counted allocation:
//   otherwise, the corresponding `RefCounts` object would be placed at
//   `NonZeroUsize::MAX - size_of::<RefCounts>()`, which is an odd address that violates
//   `RefCounts`'s alignment requirement.
// - All bytes in the byte representation of `NonZeroUsize::MAX` are the same, which makes it
//   possible to utilize `memset` in certain situations like creating an array of dangling weak
//   pointers.
const DANGLING_WEAK_ADDRESS: NonZeroUsize = {
    let address = NonZeroUsize::MAX;

    // Verifies that `address` is not a valid address in a reference-counted allocation, so it
    // can be safely used as the dangling pointer address.
    assert!(address.get().wrapping_sub(size_of::<RefCounts>()) % align_of::<RefCounts>() != 0);

    address
};

#[inline]
fn is_dangling(value_ptr: NonNull<()>) -> bool {
    value_ptr.addr() == DANGLING_WEAK_ADDRESS
}

/// # Safety
///
/// Either `is_dangling(dangling_or_value_ptr)`, or `dangling_or_value_ptr` has a valid address for
/// the value location of a reference-counted allocation.
#[inline]
unsafe fn try_get_rc_value_ptr(dangling_or_value_ptr: NonNull<()>) -> Option<RcValuePointer> {
    if is_dangling(dangling_or_value_ptr) {
        None
    } else {
        // SAFETY: We have checked that `dangling_or_value_ptr` is not dangling, and the caller
        // guarantees the validity of `dangling_or_value_ptr`.

        Some(unsafe { RcValuePointer::from_value_ptr(dangling_or_value_ptr) })
    }
}

/// Decrements the weak reference count in a reference-counted allocation whose value object is
/// pointed to by `value_ptr`.
///
/// # Safety
///
/// - `value_ptr` must point to the value location within a valid reference-counted allocation.
/// - The corresponding weak count must not be zero.
#[inline]
unsafe fn decrement_weak_ref_count<R>(value_ptr: RcValuePointer) -> bool
where
    R: RefCounter,
{
    unsafe { R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()) }.decrement()
}

/// Increments the weak reference count in a reference-counted allocation whose value object is
/// pointed to by `value_ptr`.
///
/// # Safety
///
/// `value_ptr` must point to the value location within a valid reference-counted allocation.
#[inline]
unsafe fn increment_weak_ref_count<R>(value_ptr: RcValuePointer)
where
    R: RefCounter,
{
    unsafe { R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()) }.increment()
}

/// Creates a drop guard that calls `RawWeak::drop_unchecked` on drop.
///
/// # Safety
///
/// - `weak` is non-dangling.
/// - After the returned `DropGuard` has been dropped, the allocation pointed to by the weak
///   pointer must not be accessed anymore.
/// - All accesses to `weak` must use the same `R` for `RefCounter`.
pub(super) unsafe fn new_weak_guard<'a, T, A, R>(
    weak: &'a mut RawWeak<T, A>,
) -> DropGuard<&'a mut RawWeak<T, A>, impl FnOnce(&'a mut RawWeak<T, A>)>
where
    T: ?Sized,
    A: Allocator,
    R: RefCounter,
{
    // SAFETY: Caller guarantees that `weak` is non-dangling and the corresponding allocation will
    // not be accessed after dropping.
    DropGuard::new(weak, |weak| unsafe { weak.drop_unchecked::<R>() })
}

/// Base implementation of a weak pointer. `RawWeak` does not implement `Drop`; users should call
/// `RawWeak::drop` or `RawWeak::drop_unchecked` manually to drop this object.
///
/// A `RawWeak` can be either dangling or non-dangling. A dangling `RawWeak` does not point to a
/// valid value. A non-dangling `RawWeak` points to a valid reference-counted allocation. The value
/// pointed to by a `RawWeak` may be uninitialized.
pub(crate) struct RawWeak<T, A>
where
    T: ?Sized,
{
    /// Points to a (possibly uninitialized or dropped) `T` value inside of a reference-counted
    /// allocation.
    ptr: NonNull<T>,

    /// The allocator for `ptr`.
    alloc: A,
}

impl<T, A> RawWeak<T, A>
where
    T: ?Sized,
{
    pub(crate) const unsafe fn from_raw_parts(ptr: NonNull<T>, alloc: A) -> Self {
        Self { ptr, alloc }
    }

    pub(crate) unsafe fn from_raw(ptr: NonNull<T>) -> Self
    where
        A: Default,
    {
        unsafe { Self::from_raw_parts(ptr, A::default()) }
    }

    pub(crate) fn allocator(&self) -> &A {
        &self.alloc
    }

    pub(crate) fn as_ptr(&self) -> NonNull<T> {
        self.ptr
    }

    #[inline(never)]
    unsafe fn assume_init_drop_slow<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        let guard = unsafe { new_weak_guard::<T, A, R>(self) };

        unsafe { guard.ptr.drop_in_place() };
    }

    /// Drops the value along with the `RawWeak` object, assuming the value pointed to by `ptr` is
    /// initialized.
    #[inline]
    pub(super) unsafe fn assume_init_drop<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        if const { mem::needs_drop::<T>() } {
            unsafe { self.assume_init_drop_slow::<R>() };
        } else {
            unsafe { self.drop_unchecked::<R>() };
        }
    }

    pub(crate) unsafe fn cast<U>(self) -> RawWeak<U, A> {
        unsafe { self.cast_with(NonNull::cast) }
    }

    #[inline]
    pub(crate) unsafe fn cast_with<U, F>(self, f: F) -> RawWeak<U, A>
    where
        U: ?Sized,
        F: FnOnce(NonNull<T>) -> NonNull<U>,
    {
        unsafe { RawWeak::from_raw_parts(f(self.ptr), self.alloc) }
    }

    /// Increments the weak count, and returns the corresponding `RawWeak` object.
    ///
    /// # Safety
    ///
    /// - `self` must only be handled by the same `RefCounter` implementation.
    #[inline]
    pub(crate) unsafe fn clone<R>(&self) -> Self
    where
        A: Clone,
        R: RefCounter,
    {
        // For reducing monomorphization cost.
        unsafe fn inner<R>(ptr: NonNull<()>)
        where
            R: RefCounter,
        {
            if let Some(value_ptr) = unsafe { try_get_rc_value_ptr(ptr) } {
                unsafe { increment_weak_ref_count::<R>(value_ptr) }
            }
        }

        unsafe {
            inner::<R>(self.ptr.cast());

            Self::from_raw_parts(self.ptr, self.alloc.clone())
        }
    }

    /// Increments the weak count, and returns the corresponding `RawWeak` object, assuming `self`
    /// is non-dangling.
    ///
    /// # Safety
    ///
    /// - `self` must only be handled by the same `RefCounter` implementation.
    /// - `self` is non-dangling.
    pub(crate) unsafe fn clone_unchecked<R>(&self) -> Self
    where
        A: Clone,
        R: RefCounter,
    {
        unsafe {
            increment_weak_ref_count::<R>(self.value_ptr_unchecked());

            Self::from_raw_parts(self.ptr, self.alloc.clone())
        }
    }

    /// Drops this weak pointer.
    #[inline]
    pub(crate) unsafe fn drop<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        if !is_dangling(self.ptr.cast()) {
            unsafe { self.drop_unchecked::<R>() };
        }
    }

    /// Drops this weak pointer, assuming `self` is non-dangling.
    #[inline]
    pub(super) unsafe fn drop_unchecked<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        // SAFETY: Caller guarantees `self` is non-dangling, so `self.ptr` must point to the value
        // location in a valid reference-counted allocation.
        let value_ptr = unsafe { self.value_ptr_unchecked() };

        let is_last_weak_ref = unsafe { decrement_weak_ref_count::<R>(value_ptr) };

        if is_last_weak_ref {
            let rc_layout = unsafe { RcLayout::from_value_ptr_unchecked(self.ptr) };

            unsafe { rc_alloc::deallocate::<A>(value_ptr, &self.alloc, rc_layout) }
        }
    }

    pub(crate) fn into_raw(self) -> NonNull<T> {
        self.ptr
    }

    pub(crate) fn into_raw_parts(self) -> (NonNull<T>, A) {
        (self.ptr, self.alloc)
    }

    pub(crate) fn ptr_eq(&self, other: &Self) -> bool {
        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
    }

    pub(crate) fn ptr_ne(&self, other: &Self) -> bool {
        !ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
    }

    /// Returns the `RefCounts` object inside the reference-counted allocation, assuming `self` is
    /// non-dangling.
    ///
    /// # Safety
    ///
    /// `self` is non-dangling.
    #[cfg(not(no_global_oom_handling))]
    pub(super) unsafe fn ref_counts_unchecked(&self) -> &RefCounts {
        unsafe { self.value_ptr_unchecked().ref_counts_ptr().as_ref() }
    }

    /// Returns the strong reference count object inside the reference-counted allocation if `self`
    /// is non-dangling.
    pub(crate) fn strong_count(&self) -> Option<&UnsafeCell<usize>> {
        (!is_dangling(self.ptr.cast())).then(|| unsafe { self.strong_count_unchecked() })
    }

    /// Returns the strong reference count object inside the reference-counted allocation, assuming
    /// `self` is non-dangling.
    ///
    /// # Safety
    ///
    /// `self` is non-dangling.
    pub(super) unsafe fn strong_count_unchecked(&self) -> &UnsafeCell<usize> {
        unsafe { self.value_ptr_unchecked().strong_count_ptr().as_ref() }
    }

    /// Returns the weak reference count object inside the reference-counted allocation if `self`
    /// is non-dangling.
    pub(crate) fn weak_count(&self) -> Option<&UnsafeCell<usize>> {
        (!is_dangling(self.ptr.cast())).then(|| unsafe { self.weak_count_unchecked() })
    }

    /// Returns the weak reference count object inside the reference-counted allocation, assuming
    /// `self` is non-dangling.
    ///
    /// # Safety
    ///
    /// `self` is non-dangling.
    pub(super) unsafe fn weak_count_unchecked(&self) -> &UnsafeCell<usize> {
        unsafe { self.value_ptr_unchecked().weak_count_ptr().as_ref() }
    }

    /// Sets the contained pointer to a new value.
    ///
    /// # Safety
    ///
    /// - `ptr` must be a valid pointer to a value object that lives in a reference-counted
    ///   allocation.
    /// - The allocation can be deallocated with the associated allocator.
    #[cfg(not(no_global_oom_handling))]
    pub(super) unsafe fn set_ptr(&mut self, ptr: NonNull<T>) {
        self.ptr = ptr;
    }

    /// Returns a pointer to the value location of the reference-counted allocation, assuming
    /// `self` is non-dangling.
    ///
    /// # Safety
    ///
    /// `self` is non-dangling.
    #[inline]
    pub(super) unsafe fn value_ptr_unchecked(&self) -> RcValuePointer {
        // SAFETY: Caller guarantees `self` is non-dangling, so `self.ptr` must point to the value
        // location in a valid reference-counted allocation.
        unsafe { RcValuePointer::from_value_ptr(self.ptr.cast()) }
    }
}
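
The comment on `DANGLING_WEAK_ADDRESS` argues that `NonZeroUsize::MAX` can never be the address of a value inside a reference-counted allocation, because the `RefCounts` header would then have to start at the odd address `NonZeroUsize::MAX - size_of::<RefCounts>()`. Below is a minimal, standalone sketch of that argument; `FakeRefCounts` is a hypothetical stand-in for the real `RefCounts`, assumed here to be two machine-word counters.

use std::mem::{align_of, size_of};

// Hypothetical stand-in for `RefCounts`: two machine-word reference counters.
#[allow(dead_code)]
struct FakeRefCounts {
    strong: usize,
    weak: usize,
}

fn main() {
    let dangling = usize::MAX;

    // If a value lived at `usize::MAX`, its counters would have to start at
    // `usize::MAX - size_of::<FakeRefCounts>()`. `usize::MAX` is odd and the size is
    // even, so that address is odd and cannot satisfy the counters' word alignment.
    let would_be_header = dangling.wrapping_sub(size_of::<FakeRefCounts>());
    assert!(would_be_header % align_of::<FakeRefCounts>() != 0);

    // Every byte of the dangling address is the same (0xFF), so a buffer of dangling
    // weak pointers can be produced with a single `memset`-style fill.
    assert!(dangling.to_ne_bytes().iter().all(|&byte| byte == 0xFF));
}

In the commit itself the equivalent check runs at compile time, because the `assert!` sits inside the `const` initializer of `DANGLING_WEAK_ADDRESS`.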
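
`assume_init_drop_slow` constructs the guard returned by `new_weak_guard` before calling `drop_in_place` on the value, so the weak-count decrement (and possible deallocation) performed by `drop_unchecked` still runs if dropping the value panics. Here is a self-contained sketch of that drop-guard pattern; the names `Guard`, `release`, and `drop_value_then_release` are invented for the sketch and stand in for the unstable `core::mem::DropGuard` machinery used above.

// A guard that runs a cleanup closure when it goes out of scope, whether the scope
// exits normally or by unwinding.
struct Guard<F: FnOnce()>(Option<F>);

impl<F: FnOnce()> Drop for Guard<F> {
    fn drop(&mut self) {
        if let Some(release) = self.0.take() {
            release();
        }
    }
}

fn drop_value_then_release(drop_value: impl FnOnce(), release: impl FnOnce()) {
    // Create the guard first, mirroring how `new_weak_guard` is called before
    // `drop_in_place`; `release` therefore runs even if `drop_value` panics.
    let _guard = Guard(Some(release));
    drop_value();
}

fn main() {
    drop_value_then_release(
        || println!("value dropped in place"),
        || println!("weak count decremented; allocation freed if that was the last weak"),
    );
}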
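
`RawWeak::clone` routes the count update through a nested `inner` function that only sees a type-erased `NonNull<()>`, which the comment labels as being "for reducing monomorphization cost": the shell is instantiated per type, but the counting body is compiled once per `RefCounter`. A rough standalone sketch of that outlining pattern follows; every name in it (`bump_count`, `clone_handle`) is invented for illustration.

use std::ptr::NonNull;

// Non-generic helper: compiled once and shared by every instantiation of the shell.
fn bump_count(count: NonNull<u64>) {
    // Stand-in for the real weak-count increment.
    unsafe { *count.as_ptr() += 1 };
}

// Generic shell: one small copy per `T`, which immediately delegates to the
// non-generic helper instead of duplicating the counting logic.
fn clone_handle<T: ?Sized>(value: NonNull<T>, count: NonNull<u64>) -> NonNull<T> {
    bump_count(count);
    value
}

fn main() {
    let mut count = 0u64;
    let count_ptr = NonNull::from(&mut count);

    let a = 5u8;
    let b = [1u32, 2, 3];
    let _pa = clone_handle(NonNull::from(&a), count_ptr);
    let _pb = clone_handle(NonNull::from(&b[..]), count_ptr);

    assert_eq!(count, 2);
}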
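
`ptr_eq` and `ptr_ne` compare through `ptr::addr_eq`, which looks only at the address and ignores pointer metadata, so two weak pointers into the same allocation compare equal even when their `T: ?Sized` metadata (a slice length or a vtable pointer) differs. A small demonstration of that `addr_eq` behaviour with plain raw pointers:

use std::ptr;

fn main() {
    let bytes: [u8; 4] = [1, 2, 3, 4];

    // A thin pointer and a wide (slice) pointer to the same starting address,
    // carrying different metadata (the slice remembers a length of 2).
    let thin: *const u8 = bytes.as_ptr();
    let wide: *const [u8] = &bytes[..2];

    // `addr_eq` ignores the metadata and compares addresses only.
    assert!(ptr::addr_eq(thin, wide));
}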
