diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 87ad5b0ce30e6..e8e9cba15fd6f 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -109,6 +109,7 @@
 #![feature(deprecated_suggestion)]
 #![feature(deref_pure_trait)]
 #![feature(dispatch_from_dyn)]
+#![feature(drop_guard)]
 #![feature(ergonomic_clones)]
 #![feature(error_generic_member_access)]
 #![feature(exact_size_is_empty)]
@@ -199,6 +200,8 @@
 #[macro_use]
 mod macros;
 
+#[cfg(not(no_rc))]
+mod raw_rc;
 mod raw_vec;
 
 // Heaps provided for low-level allocation strategies
diff --git a/library/alloc/src/raw_rc/mod.rs b/library/alloc/src/raw_rc/mod.rs
new file mode 100644
index 0000000000000..5634603b14c9b
--- /dev/null
+++ b/library/alloc/src/raw_rc/mod.rs
@@ -0,0 +1,174 @@
+//! Base implementation for `rc::{Rc, UniqueRc, Weak}` and `sync::{Arc, UniqueArc, Weak}`.
+//!
+//! # Allocation Memory Layout
+//!
+//! The memory layout of a reference-counted allocation is designed so that the memory that stores
+//! the reference counts has a fixed offset from the memory that stores the value. In this way,
+//! operations that only rely on reference counts can ignore the actual type of the contained value
+//! and only care about the address of the contained value, which allows us to share code between
+//! reference-counting pointers that have different types of contained values. This can potentially
+//! reduce the binary size.
+//!
+//! Assuming the type of the stored value is `T`, the allocation memory layout is designed as
+//! follows:
+//!
+//! - We use a `RefCounts` type to store the reference counts.
+//! - The alignment of the allocation is `align_of::<RefCounts>().max(align_of::<T>())`.
+//! - The value is stored at offset `size_of::<RefCounts>().next_multiple_of(align_of::<T>())`.
+//! - The size of the allocation is
+//!   `size_of::<RefCounts>().next_multiple_of(align_of::<T>()) + size_of::<T>()`.
+//! - The `RefCounts` object is stored at offset
+//!   `size_of::<RefCounts>().next_multiple_of(align_of::<T>()) - size_of::<RefCounts>()`.
+//!
+//! Here is a table showing the order and size of each component in a reference-counted allocation
+//! of a `T` value:
+//!
+//! | Component   | Size                                                                                 |
+//! | ----------- | ------------------------------------------------------------------------------------ |
+//! | Padding     | `size_of::<RefCounts>().next_multiple_of(align_of::<T>()) - size_of::<RefCounts>()`   |
+//! | `RefCounts` | `size_of::<RefCounts>()`                                                               |
+//! | `T`         | `size_of::<T>()`                                                                       |
+//!
+//! This works because:
+//!
+//! - Both the `RefCounts` object and the value object are stored in the allocation without
+//!   overlapping.
+//! - The `RefCounts` object is stored at offset
+//!   `size_of::<RefCounts>().next_multiple_of(align_of::<T>()) - size_of::<RefCounts>()`, which
+//!   has a valid alignment for `RefCounts` because:
+//!   - If `align_of::<T>() <= align_of::<RefCounts>()`, the offset is 0, which has a valid
+//!     alignment for `RefCounts`.
+//!   - If `align_of::<T>() > align_of::<RefCounts>()`, then `align_of::<T>()` is a multiple of
+//!     `align_of::<RefCounts>()`; since `size_of::<RefCounts>()` is also a multiple of
+//!     `align_of::<RefCounts>()`, we conclude that the offset also has a valid alignment for
+//!     `RefCounts`.
+//! - The value is stored at offset `size_of::<RefCounts>().next_multiple_of(align_of::<T>())`,
+//!   which trivially satisfies the alignment requirement of `T`.
+//! - The distance between the `RefCounts` object and the value is `size_of::<RefCounts>()`, a
+//!   fixed value.
+//!
+//! So both the `RefCounts` object and the value object have their alignment and size requirements
+//! satisfied, and we get a fixed offset between the two objects.
+//!
+//! # Reference-counting Pointer Design
+//!
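As a quick sanity check on the arithmetic in the Allocation Memory Layout section above, here is a standalone sketch (not part of the patch) that recomputes the offsets with a stand-in two-counter `RefCounts` header; the concrete numbers assume a 64-bit target.

```rust
use std::alloc::Layout;
use std::mem::{align_of, size_of};

// Stand-in for the two-counter header described above.
#[repr(C, align(8))]
struct RefCounts {
    weak: usize,
    strong: usize,
}

// Computes (allocation layout, value offset) following the rules above.
fn rc_layout_for<T>() -> (Layout, usize) {
    let value_offset = size_of::<RefCounts>().next_multiple_of(align_of::<T>());
    let align = align_of::<RefCounts>().max(align_of::<T>());
    let size = value_offset + size_of::<T>();
    (Layout::from_size_align(size, align).unwrap(), value_offset)
}

fn main() {
    // Low-alignment value: no padding, header at offset 0, value right after it.
    let (layout, offset) = rc_layout_for::<u32>();
    assert_eq!((layout.size(), layout.align(), offset), (20, 8, 16));

    // Over-aligned value: the header is pushed forward, but the distance between
    // the header and the value stays `size_of::<RefCounts>()`.
    #[repr(align(32))]
    struct Big([u8; 32]);
    let (layout, offset) = rc_layout_for::<Big>();
    assert_eq!((layout.size(), layout.align(), offset), (64, 32, 32));
    assert_eq!(offset - size_of::<RefCounts>(), 16); // header offset, a multiple of 8
}
```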
Both strong and weak reference-counting pointers store a pointer that points to the value +//! object in a reference-counted allocation, instead of a pointer to the beginning of the +//! allocation. This is based on the assumption that users access the contained value more +//! frequently than the reference counters. Also, this possibly allows us to enable some +//! optimizations like: +//! +//! - Making reference-counting pointers have ABI-compatible representation as raw pointers so we +//! can use them directly in FFI interfaces. +//! - Converting `Option>` to `Option<&T>` without checking for `None` values. +//! - Converting `&[Rc]` to `&[&T]` with zero cost. + +use core::cell::UnsafeCell; +use core::mem; +use core::sync::atomic::Atomic; + +pub(crate) use crate::raw_rc::raw_rc::RawRc; +pub(crate) use crate::raw_rc::raw_unique_rc::RawUniqueRc; +pub(crate) use crate::raw_rc::raw_weak::RawWeak; + +mod raw_rc; +mod raw_unique_rc; +mod raw_weak; +mod rc_alloc; +mod rc_layout; +mod rc_value_pointer; + +/// Stores reference counts. +#[cfg_attr(target_pointer_width = "16", repr(C, align(2)))] +#[cfg_attr(target_pointer_width = "32", repr(C, align(4)))] +#[cfg_attr(target_pointer_width = "64", repr(C, align(8)))] +pub(crate) struct RefCounts { + /// Weak reference count (plus one if there are non-zero strong reference counts). + pub(crate) weak: UnsafeCell, + /// Strong reference count. + pub(crate) strong: UnsafeCell, +} + +impl RefCounts { + /// Creates a `RefCounts` with weak count of `1` and strong count of `strong_count`. + const fn new(strong_count: usize) -> Self { + Self { weak: UnsafeCell::new(1), strong: UnsafeCell::new(strong_count) } + } +} + +/// The return value type for `RefCounter::make_mut`. +#[cfg(not(no_global_oom_handling))] +pub(crate) enum MakeMutStrategy { + /// The strong reference count is 1, but weak reference count (including the one shared by all + /// strong reference count) is more than 1. Before returning, the strong reference count has + /// been set to zero to prevent new strong pointers from being created through upgrading from + /// weak pointers. + Move, + /// The strong count is more than 1. + Clone, +} + +/// A trait for `rc` and `sync` modules to define their reference-counting behaviors. +/// +/// # Safety +/// +/// - Each method must be implemented according to its description. +/// - `Self` must have transparent representation over `UnsafeCell` and every valid +/// `UnsafeCell` can also be reinterpreted as a valid `Self`. +/// - `Self` must have alignment no greater than `align_of::>()`. +pub(crate) unsafe trait RefCounter: Sized { + const VERIFY_LAYOUT: () = { + assert!(size_of::() == size_of::>()); + assert!(align_of::() <= align_of::>()); + }; + + /// Returns a reference to `Self` from a reference to `UnsafeCell`. + /// + /// # Safety + /// + /// - `count` must only be handled by the same `RefCounter` implementation. + /// - The location of `count` must have enough alignment for storing `Atomic`. + unsafe fn from_raw_counter(count: &UnsafeCell) -> &Self { + () = Self::VERIFY_LAYOUT; + + // SAFETY: The alignment requirement is guaranteed by both trait implementor and caller. + // Trait implementor guarantees the alignment of `Self` is not greater than the alignment of + // `Atomic`, and caller guarantees that the alignment of `count` is enough for + // storing `Atomic`. + unsafe { mem::transmute(count) } + } + + /// Increments the reference counter. The process will abort if overflow happens. 
+ fn increment(&self); + + /// Decrements the reference counter. Returns whether the reference count becomes zero after + /// decrementing. + fn decrement(&self) -> bool; + + /// Increments the reference counter if and only if the reference count is non-zero. Returns + /// whether incrementing is performed. + fn try_upgrade(&self) -> bool; + + /// Increments the reference counter. If `self` needs to be called with by both + /// `downgrade_increment_weak` and `is_unique` as the `weak_count` argument concurrently, both + /// operations will be performed atomically. + fn downgrade_increment_weak(&self); + + /// Decrements the reference counter if and only if the reference count is 1. Returns true if + /// decrementing is performed. + fn try_lock_strong_count(&self) -> bool; + + /// Sets the reference count to 1. + fn unlock_strong_count(&self); + + /// Returns whether both `strong_count` and `weak_count` are 1. If `weak_count` needs to be + /// called with by both `downgrade_increment_weak` and `is_unique` concurrently, both operations + /// will be performed atomically. + fn is_unique(strong_count: &Self, weak_count: &Self) -> bool; + + /// Determines how to make a mutable reference safely to a reference counted value. + /// + /// - If both strong count and weak count are 1, returns `None`. + /// - If strong count is 1 and weak count is greater than 1, returns + /// `Some(MakeMutStrategy::Move)`. + /// - If strong count is greater than 1, returns `Some(MakeMutStrategy::Clone)`. + #[cfg(not(no_global_oom_handling))] + fn make_mut(strong_count: &Self, weak_count: &Self) -> Option; +} diff --git a/library/alloc/src/raw_rc/raw_rc.rs b/library/alloc/src/raw_rc/raw_rc.rs new file mode 100644 index 0000000000000..1ba129e6d0d62 --- /dev/null +++ b/library/alloc/src/raw_rc/raw_rc.rs @@ -0,0 +1,1149 @@ +use core::alloc::{AllocError, Allocator}; +use core::any::Any; +use core::cell::UnsafeCell; +#[cfg(not(no_global_oom_handling))] +use core::clone::CloneToUninit; +use core::error::{Error, Request}; +use core::fmt::{self, Debug, Display, Formatter, Pointer}; +use core::hash::{Hash, Hasher}; +#[cfg(not(no_global_oom_handling))] +use core::iter::TrustedLen; +use core::marker::{PhantomData, Unsize}; +#[cfg(not(no_global_oom_handling))] +use core::mem::{self, ManuallyDrop}; +use core::mem::{DropGuard, MaybeUninit}; +#[cfg(not(no_global_oom_handling))] +use core::ops::DerefMut; +use core::ops::{CoerceUnsized, DispatchFromDyn}; +use core::pin::PinCoerceUnsized; +#[cfg(not(no_global_oom_handling))] +use core::ptr; +use core::ptr::NonNull; +#[cfg(not(no_global_oom_handling))] +use core::str; + +use crate::alloc::Global; +#[cfg(not(no_global_oom_handling))] +use crate::boxed::Box; +#[cfg(not(no_global_oom_handling))] +use crate::raw_rc::MakeMutStrategy; +use crate::raw_rc::RefCounter; +#[cfg(not(no_global_oom_handling))] +use crate::raw_rc::raw_unique_rc::RawUniqueRc; +#[cfg(not(no_global_oom_handling))] +use crate::raw_rc::raw_weak; +use crate::raw_rc::raw_weak::RawWeak; +#[cfg(not(no_global_oom_handling))] +use crate::raw_rc::rc_alloc; +#[cfg(not(no_global_oom_handling))] +use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt}; +use crate::raw_rc::rc_value_pointer::RcValuePointer; +#[cfg(not(no_global_oom_handling))] +use crate::string::String; +#[cfg(not(no_global_oom_handling))] +use crate::vec::Vec; + +/// Decrements strong reference count in a reference-counted allocation with a value object that is +/// pointed to by `value_ptr`. 
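To make the `RefCounter` contract above concrete, here is a hedged single-threaded sketch; `LocalCounter` is a hypothetical stand-in, not the counter type `rc` actually uses, but it shows why the transparent-over-`UnsafeCell<usize>` layout requirement lets `from_raw_counter` be a plain transmute.

```rust
use std::cell::{Cell, UnsafeCell};

// `Cell<usize>` is `#[repr(transparent)]` over `UnsafeCell<usize>`, so a
// transparent wrapper around it satisfies the layout requirement stated in the
// trait's safety section.
#[repr(transparent)]
struct LocalCounter(Cell<usize>);

impl LocalCounter {
    /// SAFETY: `count` must only ever be accessed through `LocalCounter`.
    unsafe fn from_raw_counter(count: &UnsafeCell<usize>) -> &Self {
        unsafe { std::mem::transmute(count) }
    }

    fn increment(&self) {
        // The real trait requires aborting on overflow; a panic stands in here.
        self.0.set(self.0.get().checked_add(1).expect("reference count overflow"));
    }

    /// Returns whether the count reached zero.
    fn decrement(&self) -> bool {
        let new = self.0.get() - 1;
        self.0.set(new);
        new == 0
    }

    /// Weak-to-strong upgrade: increment only if the count is non-zero.
    fn try_upgrade(&self) -> bool {
        match self.0.get() {
            0 => false,
            n => {
                self.0.set(n + 1);
                true
            }
        }
    }
}

fn main() {
    let raw = UnsafeCell::new(1usize);
    let counter = unsafe { LocalCounter::from_raw_counter(&raw) };
    counter.increment();
    assert!(!counter.decrement());
    assert!(counter.decrement()); // count is now zero
    assert!(!counter.try_upgrade()); // a dead counter cannot be revived
}
```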
+#[inline] +unsafe fn decrement_strong_ref_count(value_ptr: RcValuePointer) -> bool +where + R: RefCounter, +{ + unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).decrement() } +} + +/// Increments strong reference count in a reference-counted allocation with a value object that is +/// pointed to by `value_ptr`. +#[inline] +unsafe fn increment_strong_ref_count(value_ptr: RcValuePointer) +where + R: RefCounter, +{ + unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).increment() }; +} + +#[inline] +unsafe fn is_unique(value_ptr: RcValuePointer) -> bool +where + R: RefCounter, +{ + let ref_counts = unsafe { value_ptr.ref_counts_ptr().as_ref() }; + + unsafe { + R::is_unique(R::from_raw_counter(&ref_counts.strong), R::from_raw_counter(&ref_counts.weak)) + } +} + +/// Base implementation of a strong pointer. `RawRc` does not implement `Drop`, user should call +/// `RawRc::drop` manually to drop this object. +#[repr(transparent)] +pub(crate) struct RawRc +where + T: ?Sized, +{ + /// A `RawRc` is just a non-dangling `RawWeak` that has a strong reference count that is owned + /// by the `RawRc` object. The weak pointer is always non-dangling. + weak: RawWeak, + + // Defines the ownership of `T` for drop-check. + _phantom_data: PhantomData, +} + +impl RawRc +where + T: ?Sized, +{ + /// # Safety + /// + /// - `ptr` points to a value inside a reference-counted allocation. + /// - The allocation can be freed by `A::default()`. + pub(crate) unsafe fn from_raw(ptr: NonNull) -> Self + where + A: Default, + { + unsafe { Self::from_raw_parts(ptr, A::default()) } + } + + /// # Safety + /// + /// - `ptr` points to a value inside a reference-counted allocation. + /// - The allocation can be freed by `alloc`. + pub(crate) unsafe fn from_raw_parts(ptr: NonNull, alloc: A) -> Self { + unsafe { Self::from_weak(RawWeak::from_raw_parts(ptr, alloc)) } + } + + /// # Safety + /// + /// `weak` must have at least one unowned strong reference count. The newly created `RawRc` will + /// take the ownership of exactly one strong reference count. 
+ pub(super) unsafe fn from_weak(weak: RawWeak) -> Self { + Self { weak, _phantom_data: PhantomData } + } + + pub(crate) fn allocator(&self) -> &A { + &self.weak.allocator() + } + + pub(crate) fn as_ptr(&self) -> NonNull { + self.weak.as_ptr() + } + + pub(crate) unsafe fn cast(self) -> RawRc { + unsafe { RawRc::from_weak(self.weak.cast()) } + } + + #[inline] + pub(crate) unsafe fn cast_with(self, f: F) -> RawRc + where + U: ?Sized, + F: FnOnce(NonNull) -> NonNull, + { + unsafe { RawRc::from_weak(self.weak.cast_with(f)) } + } + + #[inline] + pub(crate) unsafe fn clone(&self) -> Self + where + A: Clone, + R: RefCounter, + { + unsafe { + increment_strong_ref_count::(self.value_ptr()); + + Self::from_raw_parts(self.weak.as_ptr(), self.allocator().clone()) + } + } + + pub(crate) unsafe fn decrement_strong_count(ptr: NonNull) + where + A: Allocator + Default, + { + unsafe { Self::decrement_strong_count_in::(ptr, A::default()) }; + } + + pub(crate) unsafe fn decrement_strong_count_in(ptr: NonNull, alloc: A) + where + A: Allocator, + { + unsafe { RawRc::from_raw_parts(ptr, alloc).drop::() }; + } + + pub(crate) unsafe fn increment_strong_count(ptr: NonNull) { + unsafe { increment_strong_ref_count::(RcValuePointer::from_value_ptr(ptr.cast())) }; + } + + pub(crate) unsafe fn downgrade(&self) -> RawWeak + where + A: Clone, + R: RefCounter, + { + unsafe fn inner(value_ptr: RcValuePointer) + where + R: RefCounter, + { + unsafe { + R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()).downgrade_increment_weak(); + } + } + + unsafe { + inner::(self.value_ptr()); + + RawWeak::from_raw_parts(self.weak.as_ptr(), self.allocator().clone()) + } + } + + #[inline] + pub(crate) unsafe fn drop(&mut self) + where + A: Allocator, + R: RefCounter, + { + let is_last_strong_ref = unsafe { decrement_strong_ref_count::(self.value_ptr()) }; + + if is_last_strong_ref { + unsafe { self.weak.assume_init_drop::() } + } + } + + pub(crate) unsafe fn get_mut(&mut self) -> Option<&mut T> + where + R: RefCounter, + { + unsafe fn inner(value_ptr: RcValuePointer) -> Option + where + R: RefCounter, + { + unsafe { is_unique::(value_ptr) }.then_some(value_ptr) + } + + let (ptr, metadata) = self.weak.as_ptr().to_raw_parts(); + + unsafe { inner::(RcValuePointer::from_value_ptr(ptr)) } + .map(|ptr| unsafe { NonNull::from_raw_parts(ptr.as_ptr(), metadata).as_mut() }) + } + + /// Returns a mutable reference to the contained value. + /// + /// # Safety + /// + /// No other active references to the contained value should exist, and no new references to the + /// contained value will be acquired for the duration of the returned borrow. + pub(crate) unsafe fn get_mut_unchecked(&mut self) -> &mut T { + // SAFETY: The caller guarantees that we can access the contained value exclusively. Note + // that we can't create mutable references that have access to reference counters, because + // the caller only guarantee exclusive access to the contained value, not the reference + // counters. + unsafe { self.weak.as_ptr().as_mut() } + } + + pub(crate) fn into_raw(self) -> NonNull { + self.weak.into_raw() + } + + pub(crate) fn into_raw_parts(self) -> (NonNull, A) { + self.weak.into_raw_parts() + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) unsafe fn make_mut(&mut self) -> &mut T + where + T: CloneToUninit, + A: Allocator + Clone, + R: RefCounter, + { + /// Returns a drop guard that sets the pointer in `rc` to `ptr` on drop. 
+ /// + /// # Safety + /// + /// - `ptr` must point to a valid reference counted value that can be deallocated with the + /// allocator associated with `rc`. + /// - The value pointed to by `ptr` must have an unowned strong reference count that can be + /// taken ownership by `rc`. + unsafe fn set_rc_ptr_on_drop<'a, T, A>( + rc: &'a mut RawRc, + ptr: NonNull, + ) -> impl DerefMut> + where + T: ?Sized, + { + DropGuard::new(rc, move |rc| unsafe { rc.weak.set_ptr(ptr) }) + } + + unsafe { + let ref_counts = self.ref_counts(); + + if let Some(strategy) = R::make_mut( + R::from_raw_counter(&ref_counts.strong), + R::from_raw_counter(&ref_counts.weak), + ) { + let rc_layout = RcLayout::from_value_ptr_unchecked(self.weak.as_ptr()); + + match strategy { + MakeMutStrategy::Move => { + // `R::make_mut` has made strong reference count to zero, so the `RawRc` + // object is essentially a `RawWeak` object but has its value initialized. + // This means we are the only owner of the value and we can safely move the + // value into a new allocation. + + // This guarantees to drop old `RawRc` object even if the allocation + // panics. + + let guard = raw_weak::new_weak_guard::(&mut self.weak); + + let new_ptr = rc_alloc::allocate_with_bytes_in::( + guard.as_ptr().cast(), + &guard.allocator(), + rc_layout, + ); + + // No panic happens, defuse the guard. + mem::forget(guard); + + let new_ptr = NonNull::from_raw_parts( + new_ptr.as_ptr(), + ptr::metadata(self.weak.as_ptr().as_ptr()), + ); + + // Ensure the value pointer in `self` is updated to `new_ptr`. + let mut update_ptr_on_drop = set_rc_ptr_on_drop(self, new_ptr); + + // `MakeMutStrategy::Move` guarantees that the strong count is zero, also we + // have copied the value to a new allocation, so we can pretend the original + // `RawRc` is now essentially an `RawWeak` object, we can call the `RawWeak` + // destructor to finish the cleanup. + update_ptr_on_drop.weak.drop_unchecked::(); + } + MakeMutStrategy::Clone => { + // There are multiple owners of the value, we need to clone the value into a + // new allocation. + + let new_ptr = rc_alloc::allocate_with_in::( + &self.allocator(), + rc_layout, + |dst_ptr| { + T::clone_to_uninit(self.as_ref(), dst_ptr.as_ptr().as_ptr().cast()) + }, + ); + + let new_ptr = NonNull::from_raw_parts( + new_ptr.as_ptr(), + ptr::metadata(self.weak.as_ptr().as_ptr()), + ); + + // Ensure the value pointer in `self` is updated to `new_ptr`. + let mut update_ptr_on_drop = set_rc_ptr_on_drop(self, new_ptr); + + // Manually drop old `RawRc`. + update_ptr_on_drop.drop::(); + } + } + } + + self.get_mut_unchecked() + } + } + + pub(crate) fn ptr_eq(&self, other: &Self) -> bool { + RawWeak::ptr_eq(&self.weak, &other.weak) + } + + pub(crate) fn ptr_ne(&self, other: &Self) -> bool { + RawWeak::ptr_ne(&self.weak, &other.weak) + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn ref_counts(&self) -> &crate::raw_rc::RefCounts { + unsafe { self.weak.ref_counts_unchecked() } + } + + pub(crate) fn strong_count(&self) -> &UnsafeCell { + unsafe { self.weak.strong_count_unchecked() } + } + + pub(crate) fn weak_count(&self) -> &UnsafeCell { + unsafe { self.weak.weak_count_unchecked() } + } + + #[inline] + fn value_ptr(&self) -> RcValuePointer { + // SAFETY: `self.weak` is guaranteed to be non-dangling. + unsafe { self.weak.value_ptr_unchecked() } + } +} + +impl RawRc { + /// # Safety + /// + /// `weak` must be non-dangling. 
+ unsafe fn from_weak_with_value(weak: RawWeak, value: T) -> Self { + unsafe { + weak.as_ptr().write(value); + + Self::from_weak(weak) + } + } + + #[inline] + pub(crate) fn try_new(value: T) -> Result + where + A: Allocator + Default, + { + RawWeak::try_new_uninit::<1>() + .map(|weak| unsafe { Self::from_weak_with_value(weak, value) }) + } + + #[inline] + pub(crate) fn try_new_in(value: T, alloc: A) -> Result + where + A: Allocator, + { + RawWeak::try_new_uninit_in::<1>(alloc) + .map(|weak| unsafe { Self::from_weak_with_value(weak, value) }) + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + pub(crate) fn new(value: T) -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) } + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + pub(crate) fn new_in(value: T, alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<1>(alloc), value) } + } + + #[cfg(not(no_global_oom_handling))] + fn new_with(f: F) -> Self + where + A: Allocator + Default, + F: FnOnce() -> T, + { + let (ptr, alloc) = rc_alloc::allocate_with::(T::RC_LAYOUT, |ptr| unsafe { + ptr.as_ptr().cast().write(f()) + }); + + unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) } + } + + #[cfg(not(no_global_oom_handling))] + unsafe fn new_cyclic_impl(mut weak: RawWeak, data_fn: F) -> Self + where + A: Allocator, + F: FnOnce(&RawWeak) -> T, + R: RefCounter, + { + let guard = unsafe { raw_weak::new_weak_guard::(&mut weak) }; + let data = data_fn(&guard); + + mem::forget(guard); + + unsafe { RawUniqueRc::from_weak_with_value(weak, data).into_rc::() } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) unsafe fn new_cyclic(data_fn: F) -> Self + where + A: Allocator + Default, + F: FnOnce(&RawWeak) -> T, + R: RefCounter, + { + let weak = RawWeak::new_uninit::<0>(); + + unsafe { Self::new_cyclic_impl::(weak, data_fn) } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) unsafe fn new_cyclic_in(data_fn: F, alloc: A) -> Self + where + A: Allocator, + F: FnOnce(&RawWeak) -> T, + R: RefCounter, + { + let weak = RawWeak::new_uninit_in::<0>(alloc); + + unsafe { Self::new_cyclic_impl::(weak, data_fn) } + } + + /// # Safety + /// + /// All accesses to `self` must use the same `RefCounter` implementation for `R`. + pub(crate) unsafe fn into_inner(self) -> Option + where + A: Allocator, + R: RefCounter, + { + let is_last_strong_ref = unsafe { decrement_strong_ref_count::(self.value_ptr()) }; + + is_last_strong_ref.then(|| unsafe { self.weak.assume_init_into_inner::() }) + } + + /// # Safety + /// + /// All accesses to `self` must use the same `RefCounter` implementation for `R`. + pub(crate) unsafe fn try_unwrap(self) -> Result> + where + A: Allocator, + R: RefCounter, + { + unsafe fn inner(value_ptr: RcValuePointer) -> bool + where + R: RefCounter, + { + unsafe { + R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).try_lock_strong_count() + } + } + + let is_last_strong_ref = unsafe { inner::(self.value_ptr()) }; + + if is_last_strong_ref { + Ok(unsafe { self.weak.assume_init_into_inner::() }) + } else { + Err(self) + } + } + + /// # Safety + /// + /// All accesses to `self` must use the same `RefCounter` implementation for `R`. + pub(crate) unsafe fn unwrap_or_clone(self) -> T + where + T: Clone, + A: Allocator, + R: RefCounter, + { + // SAFETY: Caller guarantees `rc` will only be accessed with the same `RefCounter` + // implementation. 
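The `new_cyclic*` constructors above back the public `Rc::new_cyclic` API; for reference, the user-facing pattern they enable looks like this (stable `std::rc`, not code from this patch):

```rust
use std::rc::{Rc, Weak};

// A node that holds a weak reference back to itself, built in one step.
struct Node {
    value: u32,
    me: Weak<Node>,
}

fn main() {
    let node = Rc::new_cyclic(|weak| Node { value: 42, me: weak.clone() });

    // The self-reference upgrades to the very same allocation.
    let again = node.me.upgrade().expect("node is still alive");
    assert!(Rc::ptr_eq(&node, &again));
    assert_eq!(again.value, 42);

    // Inside the closure the weak pointer cannot be upgraded yet, because the
    // strong count starts at zero until the value is fully constructed.
    let probe = Rc::new_cyclic(|weak: &Weak<u32>| {
        assert!(weak.upgrade().is_none());
        7u32
    });
    assert_eq!(*probe, 7);
}
```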
+ unsafe { self.try_unwrap::() }.unwrap_or_else(|rc| { + // SAFETY: Caller guarantees `rc` will only be accessed with the same `RefCounter` + // implementation, and the `rc` local variable will not be accessed again after the + // drop guard being triggered. + let guard = DropGuard::new(rc, |mut rc| unsafe { rc.drop::() }); + + T::clone(guard.as_ref()) + }) + } +} + +impl RawRc, A> { + pub(crate) fn try_new_uninit() -> Result + where + A: Allocator + Default, + { + RawWeak::try_new_uninit::<1>().map(|weak| unsafe { Self::from_weak(weak) }) + } + + pub(crate) fn try_new_uninit_in(alloc: A) -> Result + where + A: Allocator, + { + RawWeak::try_new_uninit_in::<1>(alloc).map(|weak| unsafe { Self::from_weak(weak) }) + } + + pub(crate) fn try_new_zeroed() -> Result + where + A: Allocator + Default, + { + RawWeak::try_new_zeroed::<1>().map(|weak| unsafe { Self::from_weak(weak) }) + } + + pub(crate) fn try_new_zeroed_in(alloc: A) -> Result + where + A: Allocator, + { + RawWeak::try_new_zeroed_in::<1>(alloc).map(|weak| unsafe { Self::from_weak(weak) }) + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_uninit() -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak(RawWeak::new_uninit::<1>()) } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_uninit_in(alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak(RawWeak::new_uninit_in::<1>(alloc)) } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_zeroed() -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak(RawWeak::new_zeroed::<1>()) } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_zeroed_in(alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak(RawWeak::new_zeroed_in::<1>(alloc)) } + } + + pub(crate) unsafe fn assume_init(self) -> RawRc { + unsafe { self.cast() } + } +} + +impl RawRc<[T], A> { + #[cfg(not(no_global_oom_handling))] + fn from_trusted_len_iter(iter: I) -> Self + where + A: Allocator + Default, + I: TrustedLen, + { + /// Returns a drop guard that calls the destructors of a slice of elements on drop. + /// + /// # Safety + /// + /// - `head..tail` must describe a valid consecutive slice of `T` values when the destructor + /// of the returned guard is called. + /// - After calling the returned function, the corresponding values should not be accessed + /// anymore. + unsafe fn drop_range_on_drop( + head: NonNull, + tail: NonNull, + ) -> impl DerefMut, NonNull)> { + // SAFETY: + DropGuard::new((head, tail), |(head, tail)| unsafe { + let length = tail.offset_from_unsigned(head); + + NonNull::<[T]>::slice_from_raw_parts(head, length).drop_in_place(); + }) + } + + let (length, Some(high)) = iter.size_hint() else { + // TrustedLen contract guarantees that `upper_bound == None` implies an iterator + // length exceeding `usize::MAX`. + // The default implementation would collect into a vec which would panic. + // Thus we panic here immediately without invoking `Vec` code. + panic!("capacity overflow"); + }; + + debug_assert_eq!( + length, + high, + "TrustedLen iterator's size hint is not exact: {:?}", + (length, high) + ); + + let rc_layout = RcLayout::new_array::(length); + + let (ptr, alloc) = rc_alloc::allocate_with::(rc_layout, |ptr| { + let ptr = ptr.as_ptr().cast::(); + let mut guard = unsafe { drop_range_on_drop::(ptr, ptr) }; + + // SAFETY: `iter` is `TrustedLen`, we can assume we will write correct number of + // elements to the buffer. 
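For reference, the observable behaviour that `into_inner`, `try_unwrap`, and `unwrap_or_clone` above provide through the public API (stable `std::rc::Rc`, not code from this patch):

```rust
use std::rc::Rc;

fn main() {
    // Sole owner: `try_unwrap` hands the value back without cloning.
    let sole = Rc::new(String::from("owned"));
    assert_eq!(Rc::try_unwrap(sole), Ok(String::from("owned")));

    // Shared: `try_unwrap` returns the pointer unchanged inside `Err`.
    let shared = Rc::new(String::from("shared"));
    let other = Rc::clone(&shared);
    let shared = Rc::try_unwrap(shared).unwrap_err();
    assert_eq!(*shared, "shared");

    // `unwrap_or_clone` never fails: it moves the value out when unique and
    // clones it otherwise (the clone-under-a-guard path implemented above).
    assert_eq!(Rc::unwrap_or_clone(shared), "shared");
    assert_eq!(Rc::unwrap_or_clone(other), "shared");

    // `into_inner` is the `Option`-returning variant of `try_unwrap`.
    assert_eq!(Rc::into_inner(Rc::new(3)), Some(3));
}
```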
+ iter.for_each(|value| unsafe { + guard.1.write(value); + guard.1 = guard.1.add(1); + }); + + mem::forget(guard); + }); + + // SAFETY: We have written `length` of `T` values to the buffer, the buffer is now + // initialized. + unsafe { + Self::from_raw_parts( + NonNull::slice_from_raw_parts(ptr.as_ptr().cast::(), length), + alloc, + ) + } + } + + pub(crate) unsafe fn into_array(self) -> Option> + where + A: Allocator, + R: RefCounter, + { + match RawRc::<[T; N], A>::try_from(self) { + Ok(result) => Some(result), + Err(mut raw_rc) => { + unsafe { raw_rc.drop::() }; + + None + } + } + } +} + +impl RawRc<[MaybeUninit], A> { + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_uninit_slice(length: usize) -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak(RawWeak::new_uninit_slice::<1>(length)) } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_uninit_slice_in(length: usize, alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak(RawWeak::new_uninit_slice_in::<1>(length, alloc)) } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_zeroed_slice(length: usize) -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak(RawWeak::new_zeroed_slice::<1>(length)) } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_zeroed_slice_in(length: usize, alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak(RawWeak::new_zeroed_slice_in::<1>(length, alloc)) } + } + + /// # Safety + /// + /// All `MaybeUninit`s values contained by `self` must be initialized. + pub(crate) unsafe fn assume_init(self) -> RawRc<[T], A> { + unsafe { self.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) } + } +} + +impl RawRc { + pub(crate) fn downcast(self) -> Result, Self> + where + T: Any, + { + if self.as_ref().is::() { Ok(unsafe { self.downcast_unchecked() }) } else { Err(self) } + } + + /// # Safety + /// + /// `self` must point to a valid `T` value. 
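The `downcast`/`downcast_unchecked` pair here backs `Rc<dyn Any>::downcast`; the user-facing behaviour, for reference (stable `std` API, not patch code):

```rust
use std::any::Any;
use std::rc::Rc;

fn main() {
    let erased: Rc<dyn Any> = Rc::new(String::from("hello"));

    // Wrong type: the original pointer is handed back untouched in `Err`.
    let erased = match erased.downcast::<u32>() {
        Ok(_) => unreachable!("the value is a String, not a u32"),
        Err(original) => original,
    };

    // Right type: ownership moves into a concretely typed `Rc<String>`.
    let string: Rc<String> = erased.downcast().expect("the value is a String");
    assert_eq!(string.as_str(), "hello");
}
```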
+ pub(crate) unsafe fn downcast_unchecked(self) -> RawRc + where + T: Any, + { + unsafe { self.cast() } + } +} + +impl AsRef for RawRc +where + T: ?Sized, +{ + fn as_ref(&self) -> &T { + unsafe { self.weak.as_ptr().as_ref() } + } +} + +impl CoerceUnsized> for RawRc +where + T: Unsize + ?Sized, + U: ?Sized, +{ +} + +impl Debug for RawRc +where + T: Debug + ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + ::fmt(self.as_ref(), f) + } +} + +impl Display for RawRc +where + T: Display + ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + ::fmt(self.as_ref(), f) + } +} + +impl DispatchFromDyn> for RawRc +where + T: Unsize + ?Sized, + U: ?Sized, +{ +} + +impl Error for RawRc +where + T: Error + ?Sized, +{ + fn source(&self) -> Option<&(dyn Error + 'static)> { + T::source(self.as_ref()) + } + + #[allow(deprecated)] + fn cause(&self) -> Option<&dyn Error> { + T::cause(self.as_ref()) + } + + fn provide<'a>(&'a self, request: &mut Request<'a>) { + T::provide(self.as_ref(), request); + } +} + +impl Pointer for RawRc +where + T: ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + <&T as Pointer>::fmt(&self.as_ref(), f) + } +} + +#[cfg(not(no_global_oom_handling))] +impl Default for RawRc +where + T: Default, + A: Allocator + Default, +{ + fn default() -> Self { + Self::new_with(T::default) + } +} + +#[cfg(not(no_global_oom_handling))] +impl Default for RawRc<[T], A> +where + A: Allocator + Default, +{ + fn default() -> Self { + RawRc::<[T; 0], A>::default() + } +} + +#[cfg(not(no_global_oom_handling))] +impl Default for RawRc +where + A: Allocator + Default, +{ + fn default() -> Self { + let empty_slice = RawRc::<[u8], A>::default(); + + // SAFETY: Empty slice is a valid `str`. + unsafe { empty_slice.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as *mut _)) } + } +} + +#[cfg(not(no_global_oom_handling))] +impl From for RawRc +where + A: Allocator + Default, +{ + fn from(value: T) -> Self { + Self::new(value) + } +} + +#[cfg(not(no_global_oom_handling))] +impl From> for RawRc +where + T: ?Sized, + A: Allocator, +{ + fn from(value: Box) -> Self { + let value_ref = &*value; + let alloc_ref = Box::allocator(&value); + + unsafe { + let value_ptr = rc_alloc::allocate_with_value_in::(value_ref, alloc_ref); + let (box_ptr, alloc) = Box::into_raw_with_allocator(value); + + drop(Box::from_raw_in(box_ptr as *mut ManuallyDrop, &alloc)); + + Self::from_raw_parts(value_ptr, alloc) + } + } +} + +#[cfg(not(no_global_oom_handling))] +trait SpecRawRcFromSlice { + fn spec_from_slice(slice: &[T]) -> Self; +} + +#[cfg(not(no_global_oom_handling))] +impl SpecRawRcFromSlice for RawRc<[T], A> +where + T: Clone, + A: Allocator + Default, +{ + default fn spec_from_slice(slice: &[T]) -> Self { + Self::from_trusted_len_iter(slice.iter().cloned()) + } +} + +#[cfg(not(no_global_oom_handling))] +impl SpecRawRcFromSlice for RawRc<[T], A> +where + T: Copy, + A: Allocator + Default, +{ + fn spec_from_slice(slice: &[T]) -> Self { + let (ptr, alloc) = rc_alloc::allocate_with_value::<[T], A, 1>(slice); + + unsafe { Self::from_raw_parts(ptr, alloc) } + } +} + +#[cfg(not(no_global_oom_handling))] +impl From<&[T]> for RawRc<[T], A> +where + T: Clone, + A: Allocator + Default, +{ + fn from(value: &[T]) -> Self { + Self::spec_from_slice(value) + } +} + +#[cfg(not(no_global_oom_handling))] +impl From<&mut [T]> for RawRc<[T], A> +where + T: Clone, + A: Allocator + Default, +{ + fn from(value: &mut [T]) -> Self { + Self::from(&*value) + } +} + +#[cfg(not(no_global_oom_handling))] +impl 
From<&str> for RawRc +where + A: Allocator + Default, +{ + #[inline] + fn from(value: &str) -> Self { + let rc_of_bytes = RawRc::<[u8], A>::from(value.as_bytes()); + + unsafe { rc_of_bytes.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) } + } +} + +#[cfg(not(no_global_oom_handling))] +impl From<&mut str> for RawRc +where + A: Allocator + Default, +{ + fn from(value: &mut str) -> Self { + Self::from(&*value) + } +} + +#[cfg(not(no_global_oom_handling))] +impl From for RawRc { + fn from(value: String) -> Self { + let rc_of_bytes = RawRc::<[u8], Global>::from(value.into_bytes()); + + unsafe { rc_of_bytes.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) } + } +} + +impl From> for RawRc<[u8], A> { + fn from(value: RawRc) -> Self { + unsafe { value.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) } + } +} + +#[cfg(not(no_global_oom_handling))] +impl From<[T; N]> for RawRc<[T], A> +where + A: Allocator + Default, +{ + fn from(value: [T; N]) -> Self { + RawRc::new(value) + } +} + +#[cfg(not(no_global_oom_handling))] +impl From> for RawRc<[T], A> +where + A: Allocator, +{ + fn from(value: Vec) -> Self { + let src = &*value; + let alloc = value.allocator(); + let value_ptr = rc_alloc::allocate_with_value_in::<[T], A, 1>(src, alloc); + let (vec_ptr, _length, capacity, alloc) = value.into_raw_parts_with_alloc(); + + unsafe { + drop(Vec::from_raw_parts_in(vec_ptr, 0, capacity, &alloc)); + + Self::from_raw_parts(value_ptr, alloc) + } + } +} + +impl TryFrom> for RawRc<[T; N], A> { + type Error = RawRc<[T], A>; + + fn try_from(value: RawRc<[T], A>) -> Result { + if value.as_ref().len() == N { Ok(unsafe { value.cast() }) } else { Err(value) } + } +} + +#[cfg(not(no_global_oom_handling))] +trait SpecRawRcFromIter { + fn spec_from_iter(iter: I) -> Self; +} + +#[cfg(not(no_global_oom_handling))] +impl SpecRawRcFromIter for RawRc<[I::Item], Global> +where + I: Iterator, +{ + default fn spec_from_iter(iter: I) -> Self { + Self::from(iter.collect::>()) + } +} + +#[cfg(not(no_global_oom_handling))] +impl SpecRawRcFromIter for RawRc<[I::Item], Global> +where + I: TrustedLen, +{ + fn spec_from_iter(iter: I) -> Self { + Self::from_trusted_len_iter(iter) + } +} + +#[cfg(not(no_global_oom_handling))] +impl FromIterator for RawRc<[T], Global> { + fn from_iter>(iter: I) -> Self { + Self::spec_from_iter(iter.into_iter()) + } +} + +impl Hash for RawRc +where + T: Hash + ?Sized, +{ + fn hash(&self, state: &mut H) { + T::hash(self.as_ref(), state); + } +} + +// Hack to allow specializing on `Eq` even though `Eq` has a method. +#[rustc_unsafe_specialization_marker] +trait MarkerEq: PartialEq {} + +impl MarkerEq for T where T: Eq {} + +trait SpecPartialEq { + fn spec_eq(&self, other: &Self) -> bool; + fn spec_ne(&self, other: &Self) -> bool; +} + +impl SpecPartialEq for RawRc +where + T: PartialEq + ?Sized, +{ + #[inline] + default fn spec_eq(&self, other: &Self) -> bool { + T::eq(self.as_ref(), other.as_ref()) + } + + #[inline] + default fn spec_ne(&self, other: &Self) -> bool { + T::ne(self.as_ref(), other.as_ref()) + } +} + +/// We're doing this specialization here, and not as a more general optimization on `&T`, because it +/// would otherwise add a cost to all equality checks on refs. We assume that `RawArc`s are used to +/// store large values, that are slow to clone, but also heavy to check for equality, causing this +/// cost to pay off more easily. It's also more likely to have two `RawArc` clones, that point to +/// the same value, than two `&T`s. 
+/// +/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. +impl SpecPartialEq for RawRc +where + T: MarkerEq + ?Sized, +{ + #[inline] + fn spec_eq(&self, other: &Self) -> bool { + Self::ptr_eq(self, other) || T::eq(self.as_ref(), other.as_ref()) + } + + #[inline] + fn spec_ne(&self, other: &Self) -> bool { + Self::ptr_ne(self, other) && T::ne(self.as_ref(), other.as_ref()) + } +} + +impl PartialEq for RawRc +where + T: PartialEq + ?Sized, +{ + fn eq(&self, other: &Self) -> bool { + Self::spec_eq(self, other) + } + + fn ne(&self, other: &Self) -> bool { + Self::spec_ne(self, other) + } +} + +impl Eq for RawRc where T: Eq + ?Sized {} + +impl PartialOrd for RawRc +where + T: PartialOrd + ?Sized, +{ + fn partial_cmp(&self, other: &Self) -> Option { + T::partial_cmp(self.as_ref(), other.as_ref()) + } + + fn lt(&self, other: &Self) -> bool { + T::lt(self.as_ref(), other.as_ref()) + } + + fn le(&self, other: &Self) -> bool { + T::le(self.as_ref(), other.as_ref()) + } + + fn gt(&self, other: &Self) -> bool { + T::gt(self.as_ref(), other.as_ref()) + } + + fn ge(&self, other: &Self) -> bool { + T::ge(self.as_ref(), other.as_ref()) + } +} + +impl Ord for RawRc +where + T: Ord + ?Sized, +{ + fn cmp(&self, other: &Self) -> core::cmp::Ordering { + T::cmp(self.as_ref(), other.as_ref()) + } +} + +unsafe impl PinCoerceUnsized for RawRc +where + T: ?Sized, + A: Allocator, +{ +} diff --git a/library/alloc/src/raw_rc/raw_unique_rc.rs b/library/alloc/src/raw_rc/raw_unique_rc.rs new file mode 100644 index 0000000000000..57297b0c5f8ad --- /dev/null +++ b/library/alloc/src/raw_rc/raw_unique_rc.rs @@ -0,0 +1,222 @@ +use core::alloc::Allocator; +use core::fmt::{self, Debug, Display, Formatter, Pointer}; +use core::hash::{Hash, Hasher}; +use core::marker::{PhantomData, Unsize}; +use core::ops::{CoerceUnsized, DispatchFromDyn}; + +use crate::alloc::Global; +use crate::raw_rc::RefCounter; +use crate::raw_rc::raw_rc::RawRc; +use crate::raw_rc::raw_weak::RawWeak; +use crate::raw_rc::rc_value_pointer::RcValuePointer; + +/// A uniquely owned `RawRc` that allows multiple weak references but only one strong reference. +/// `RawUniqueRc` does not implement `Drop`, user should call `RawUniqueRc::drop` manually to drop +/// this object. +#[repr(transparent)] +pub(crate) struct RawUniqueRc +where + T: ?Sized, +{ + // A `RawUniqueRc` is just a non-danging `RawWeak` that has zero strong count but with the value + // initialized. + weak: RawWeak, + + // Defines the ownership of `T` for drop-check. + _marker: PhantomData, + + // Invariance is necessary for soundness: once other `RawWeak` references exist, we already have + // a form of shared mutability! + _marker2: PhantomData<*mut T>, +} + +impl RawUniqueRc +where + T: ?Sized, +{ + /// Increments the weak count and returns the corresponding `RawWeak` object. + /// + /// # Safety + /// + /// - `self`, derived `RawWeak` or `RawRc` should only be handled by the same `RefCounter` + /// implementation. + pub(crate) unsafe fn downgrade(&self) -> RawWeak + where + A: Clone, + R: RefCounter, + { + // SAFETY: Caller guarantees we only use the same `Rc` implementation and `self.weak` + // is never dangling. 
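The `MarkerEq` specialization above matches the pointer-equality fast path that `std`'s `Rc` already ships; restricting it to `T: Eq` matters because `PartialEq` alone may be deliberately irreflexive, as floats show:

```rust
use std::rc::Rc;

fn main() {
    // `f64` is `PartialEq` but not `Eq`, so the `ptr_eq` fast path must not
    // apply: two handles to the same NaN still compare unequal.
    let nan = Rc::new(f64::NAN);
    let same_allocation = Rc::clone(&nan);
    assert!(Rc::ptr_eq(&nan, &same_allocation));
    assert!(nan != same_allocation);

    // `i32` is `Eq`, so equality of two clones may be answered by the pointer
    // comparison alone, without ever touching the values.
    let n = Rc::new(42);
    assert!(n == Rc::clone(&n));
}
```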
+ unsafe { self.weak.clone_unchecked::() } + } + + pub(crate) unsafe fn drop(&mut self) + where + A: Allocator, + R: RefCounter, + { + unsafe { self.weak.assume_init_drop::() }; + } + + pub(crate) unsafe fn into_rc(self) -> RawRc + where + R: RefCounter, + { + unsafe fn inner(value_ptr: RcValuePointer) + where + R: RefCounter, + { + unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()) } + .unlock_strong_count(); + } + + unsafe { + inner::(self.weak.value_ptr_unchecked()); + + RawRc::from_weak(self.weak) + } + } +} + +impl RawUniqueRc { + #[cfg(not(no_global_oom_handling))] + pub(super) unsafe fn from_weak_with_value(weak: RawWeak, value: T) -> Self { + unsafe { weak.as_ptr().write(value) }; + + Self { weak, _marker: PhantomData, _marker2: PhantomData } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new(value: T) -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<0>(), value) } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_in(value: T, alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<0>(alloc), value) } + } +} + +impl AsMut for RawUniqueRc +where + T: ?Sized, +{ + fn as_mut(&mut self) -> &mut T { + unsafe { self.weak.as_ptr().as_mut() } + } +} + +impl AsRef for RawUniqueRc +where + T: ?Sized, +{ + fn as_ref(&self) -> &T { + unsafe { self.weak.as_ptr().as_ref() } + } +} + +impl CoerceUnsized> for RawUniqueRc +where + T: Unsize + ?Sized, + U: ?Sized, + A: Allocator, +{ +} + +impl Debug for RawUniqueRc +where + T: Debug + ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + ::fmt(self.as_ref(), f) + } +} + +impl DispatchFromDyn> for RawUniqueRc +where + T: Unsize + ?Sized, + U: ?Sized, +{ +} + +impl Display for RawUniqueRc +where + T: Display + ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + ::fmt(self.as_ref(), f) + } +} + +impl Eq for RawUniqueRc where T: Eq + ?Sized {} + +impl Hash for RawUniqueRc +where + T: Hash + ?Sized, +{ + fn hash(&self, state: &mut H) { + T::hash(self.as_ref(), state); + } +} + +impl Ord for RawUniqueRc +where + T: Ord + ?Sized, +{ + fn cmp(&self, other: &Self) -> core::cmp::Ordering { + T::cmp(self.as_ref(), other.as_ref()) + } +} + +impl PartialEq for RawUniqueRc +where + T: PartialEq + ?Sized, +{ + fn eq(&self, other: &Self) -> bool { + T::eq(self.as_ref(), other.as_ref()) + } + + fn ne(&self, other: &Self) -> bool { + T::ne(self.as_ref(), other.as_ref()) + } +} + +impl PartialOrd for RawUniqueRc +where + T: PartialOrd + ?Sized, +{ + fn partial_cmp(&self, other: &Self) -> Option { + T::partial_cmp(self.as_ref(), other.as_ref()) + } + + fn lt(&self, other: &Self) -> bool { + T::lt(self.as_ref(), other.as_ref()) + } + + fn le(&self, other: &Self) -> bool { + T::le(self.as_ref(), other.as_ref()) + } + + fn gt(&self, other: &Self) -> bool { + T::gt(self.as_ref(), other.as_ref()) + } + + fn ge(&self, other: &Self) -> bool { + T::ge(self.as_ref(), other.as_ref()) + } +} + +impl Pointer for RawUniqueRc +where + T: ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + <&T as Pointer>::fmt(&self.as_ref(), f) + } +} diff --git a/library/alloc/src/raw_rc/raw_weak.rs b/library/alloc/src/raw_rc/raw_weak.rs new file mode 100644 index 0000000000000..63ea221d31e76 --- /dev/null +++ b/library/alloc/src/raw_rc/raw_weak.rs @@ -0,0 +1,594 @@ +use core::alloc::{AllocError, Allocator}; +use core::cell::UnsafeCell; +use core::fmt::{self, Debug, Formatter}; +use 
core::marker::Unsize; +use core::mem::{self, DropGuard}; +use core::num::NonZeroUsize; +use core::ops::{CoerceUnsized, DispatchFromDyn}; +use core::ptr::{self, NonNull}; + +use crate::alloc::Global; +use crate::raw_rc::raw_rc::RawRc; +use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt}; +use crate::raw_rc::rc_value_pointer::RcValuePointer; +use crate::raw_rc::{RefCounter, RefCounts, rc_alloc}; + +// We choose `NonZeroUsize::MAX` as the address for dangling weak pointers because: +// +// - It does not point to any object that is stored inside a reference counted allocation. Because +// otherwise the corresponding `RefCounts` object will be placed at +// `NonZeroUsize::MAX - size_of::()`, which is an odd number that violates +// `RefCounts`'s alignment requirement. +// - All bytes in the byte representation of `NonZeroUsize::MAX` are the same, which makes it +// possible to utilize `memset` in certain situations like creating an array of dangling weak +// pointers. +const DANGLING_WEAK_ADDRESS: NonZeroUsize = { + let address = NonZeroUsize::MAX; + + // Verifies that `address` must not be a valid address in a reference counted allocation so it + // can be safely used as the dangling pointer address. + assert!(address.get().wrapping_sub(size_of::()) % align_of::() != 0); + + address +}; + +#[inline] +fn is_dangling(value_ptr: NonNull<()>) -> bool { + value_ptr.addr() == DANGLING_WEAK_ADDRESS +} + +/// # Safety +/// +/// Either `is_dangling(dangling_or_value_ptr)`, or `dangling_or_value_ptr` has a valid address for +/// the value location of a reference counted allocation. +#[inline] +unsafe fn try_get_rc_value_ptr(dangling_or_value_ptr: NonNull<()>) -> Option { + if is_dangling(dangling_or_value_ptr) { + None + } else { + // SAFETY: We have checked `dangling_or_value_ptr` not being dangling, and caller guarantees + // the validity of `dangling_or_value_ptr`. + + Some(unsafe { RcValuePointer::from_value_ptr(dangling_or_value_ptr) }) + } +} + +/// Decrements weak reference count in a reference-counted allocation with a value object that is +/// pointed to by `value_ptr`. +/// +/// # Safety +/// +/// - `value_ptr` must point to the value location within a valid reference counted allocation. +/// - The corresponding weak count must not be zero. +#[inline] +unsafe fn decrement_weak_ref_count(value_ptr: RcValuePointer) -> bool +where + R: RefCounter, +{ + unsafe { R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()) }.decrement() +} + +/// Increments weak reference count in a reference-counted allocation with a value object that is +/// pointed to by `value_ptr`. +/// +/// # Safety +/// +/// `value_ptr` must point to the value location within a valid reference counted allocation. +#[inline] +unsafe fn increment_weak_ref_count(value_ptr: RcValuePointer) +where + R: RefCounter, +{ + unsafe { R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()) }.increment() +} + +/// Creates a drop guard that calls `RawWeak::drop_unchecked` on drop. +/// +/// # Safety +/// +/// - `weak` is non-dangling. +/// - After the returned `DropGuard` being dropped, the allocation pointed to by the weak pointer +/// must not be accessed anymore. +/// - All accesses to `weak` must use the same `R` for `RefCounter`. 
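Dangling weak pointers are what the public `Weak::new` hands out; the observable behaviour of the sentinel-address scheme above, for reference (stable `std::rc` API, not patch code):

```rust
use std::rc::{Rc, Weak};

fn main() {
    // `Weak::new` performs no allocation; it is exactly the dangling case the
    // sentinel address above encodes.
    let dangling: Weak<u64> = Weak::new();
    assert!(dangling.upgrade().is_none());
    assert_eq!((dangling.strong_count(), dangling.weak_count()), (0, 0));

    // A weak pointer obtained from a live `Rc` is non-dangling and reports the
    // real counts from the shared counter header.
    let strong = Rc::new(1u64);
    let weak = Rc::downgrade(&strong);
    assert_eq!((weak.strong_count(), weak.weak_count()), (1, 1));
}
```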
+pub(super) unsafe fn new_weak_guard<'a, T, A, R>( + weak: &'a mut RawWeak, +) -> DropGuard<&'a mut RawWeak, impl FnOnce(&'a mut RawWeak)> +where + T: ?Sized, + A: Allocator, + R: RefCounter, +{ + // SAFETY: Caller guarantees that `weak` is non-dangling and the corresponding allocation will + // not be accessed after dropping. + DropGuard::new(weak, |weak| unsafe { weak.drop_unchecked::() }) +} + +/// Base implementation of a weak pointer. `RawWeak` does not implement `Drop`, user should call +/// `RawWeak::drop` or `RawWeak::drop_unchecked` manually to drop this object. +/// +/// A `RawWeak` can be either dangling or non-dangling. A dangling `RawWeak` does not point to a +/// valid value. A non-dangling `RawWeak` points to a valid reference-counted allocation. The value +/// pointed to by a `RawWeak` may be uninitialized. +pub(crate) struct RawWeak +where + T: ?Sized, +{ + /// Points to a (possibly uninitialized or dropped) `T` value inside of a reference-counted + /// allocation. + ptr: NonNull, + + /// The allocator for `ptr`. + alloc: A, +} + +impl RawWeak +where + T: ?Sized, +{ + pub(crate) const unsafe fn from_raw_parts(ptr: NonNull, alloc: A) -> Self { + Self { ptr, alloc } + } + + pub(crate) unsafe fn from_raw(ptr: NonNull) -> Self + where + A: Default, + { + unsafe { Self::from_raw_parts(ptr, A::default()) } + } + + pub(crate) fn allocator(&self) -> &A { + &self.alloc + } + + pub(crate) fn as_ptr(&self) -> NonNull { + self.ptr + } + + #[inline(never)] + unsafe fn assume_init_drop_slow(&mut self) + where + A: Allocator, + R: RefCounter, + { + let guard = unsafe { new_weak_guard::(self) }; + + unsafe { guard.ptr.drop_in_place() }; + } + + /// Drops the value along with the `RawWeak` object, assuming the value pointed to by `ptr` is + /// initialized, + #[inline] + pub(super) unsafe fn assume_init_drop(&mut self) + where + A: Allocator, + R: RefCounter, + { + if const { mem::needs_drop::() } { + unsafe { self.assume_init_drop_slow::() }; + } else { + unsafe { self.drop_unchecked::() }; + } + } + + pub(crate) unsafe fn cast(self) -> RawWeak { + unsafe { self.cast_with(NonNull::cast) } + } + + #[inline] + pub(crate) unsafe fn cast_with(self, f: F) -> RawWeak + where + U: ?Sized, + F: FnOnce(NonNull) -> NonNull, + { + unsafe { RawWeak::from_raw_parts(f(self.ptr), self.alloc) } + } + + /// Increments the weak count, and returns the corresponding `RawWeak` object. + /// + /// # Safety + /// + /// - `self` must only be handled by the same `RefCounter` implementation. + #[inline] + pub(crate) unsafe fn clone(&self) -> Self + where + A: Clone, + R: RefCounter, + { + // For reducing monomorphization cost. + unsafe fn inner(ptr: NonNull<()>) + where + R: RefCounter, + { + if let Some(value_ptr) = unsafe { try_get_rc_value_ptr(ptr) } { + unsafe { increment_weak_ref_count::(value_ptr) } + } + } + + unsafe { + inner::(self.ptr.cast()); + + Self::from_raw_parts(self.ptr, self.alloc.clone()) + } + } + + /// Increments the weak count, and returns the corresponding `RawWeak` object, assuming `self` + /// is non-dangling. + /// + /// # Safety + /// + /// - `self` must only be handled by the same `RefCounter` implementation. + /// - `self` is non-dangling. + pub(crate) unsafe fn clone_unchecked(&self) -> Self + where + A: Clone, + R: RefCounter, + { + unsafe { + increment_weak_ref_count::(self.value_ptr_unchecked()); + + Self::from_raw_parts(self.ptr, self.alloc.clone()) + } + } + + /// Drops this weak pointer. 
+ #[inline] + pub(crate) unsafe fn drop(&mut self) + where + A: Allocator, + R: RefCounter, + { + if !is_dangling(self.ptr.cast()) { + unsafe { self.drop_unchecked::() }; + } + } + + /// Drops this weak pointer, assuming `self` is non-dangling. + /// + /// # Safety + /// + /// `self` is non-dangling. + #[inline] + pub(super) unsafe fn drop_unchecked(&mut self) + where + A: Allocator, + R: RefCounter, + { + // SAFETY: Caller guarantees `self` is non-dangling, so `self.ptr` must point to the value + // location in a valid reference-counted allocation. + let value_ptr = unsafe { self.value_ptr_unchecked() }; + + let is_last_weak_ref = unsafe { decrement_weak_ref_count::(value_ptr) }; + + if is_last_weak_ref { + let rc_layout = unsafe { RcLayout::from_value_ptr_unchecked(self.ptr) }; + + unsafe { rc_alloc::deallocate::(value_ptr, &self.alloc, rc_layout) } + } + } + + pub(crate) fn into_raw(self) -> NonNull { + self.ptr + } + + pub(crate) fn into_raw_parts(self) -> (NonNull, A) { + (self.ptr, self.alloc) + } + + pub(crate) fn ptr_eq(&self, other: &Self) -> bool { + ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) + } + + pub(crate) fn ptr_ne(&self, other: &Self) -> bool { + !ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) + } + + /// Returns the `RefCounts` object inside the reference-counted allocation, assume `self` is + /// non-dangling. + /// + /// # Safety + /// + /// `self` is non-dangling. + #[cfg(not(no_global_oom_handling))] + pub(super) unsafe fn ref_counts_unchecked(&self) -> &RefCounts { + unsafe { self.value_ptr_unchecked().ref_counts_ptr().as_ref() } + } + + /// Returns the strong reference count object inside the reference-counted allocation if `self` + /// is non-dangling. + pub(crate) fn strong_count(&self) -> Option<&UnsafeCell> { + (!is_dangling(self.ptr.cast())).then(|| unsafe { self.strong_count_unchecked() }) + } + + /// Returns the strong reference count object inside the reference-counted allocation, assume + /// `self` is non-dangling. + /// + /// # Safety + /// + /// `self` is non-dangling. + pub(super) unsafe fn strong_count_unchecked(&self) -> &UnsafeCell { + unsafe { self.value_ptr_unchecked().strong_count_ptr().as_ref() } + } + + /// Returns the weak reference count object inside the reference-counted allocation if `self` + /// is non-dangling. + pub(crate) fn weak_count(&self) -> Option<&UnsafeCell> { + (!is_dangling(self.ptr.cast())).then(|| unsafe { self.weak_count_unchecked() }) + } + + /// Returns the weak reference count object inside the reference-counted allocation, assume + /// `self` is non-dangling. + /// + /// # Safety + /// + /// `self` is non-dangling. + pub(super) unsafe fn weak_count_unchecked(&self) -> &UnsafeCell { + unsafe { self.value_ptr_unchecked().weak_count_ptr().as_ref() } + } + + /// Sets the contained pointer to a new value. + /// + /// # Safety + /// + /// - `ptr` must be a valid pointer to a value object that lives in a reference-counted + /// allocation. + /// - The allocation can be deallocated with the associated allocator. + #[cfg(not(no_global_oom_handling))] + pub(super) unsafe fn set_ptr(&mut self, ptr: NonNull) { + self.ptr = ptr; + } + + /// Creates a `RawRc` object if there are non-zero strong reference counts. + /// + /// # Safety + /// + /// `self` must only be handled by the same `RefCounter` implementation. + pub(crate) unsafe fn upgrade(&self) -> Option> + where + A: Clone, + R: RefCounter, + { + // For reducing monomorphization cost. 
+ unsafe fn inner(value_ptr: NonNull<()>) -> bool + where + R: RefCounter, + { + if let Some(value_ptr) = unsafe { try_get_rc_value_ptr(value_ptr) } { + unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()) }.try_upgrade() + } else { + false + } + } + + let upgraded = unsafe { inner::(self.ptr.cast()) }; + + if upgraded { + Some(unsafe { RawRc::from_raw_parts(self.ptr, self.alloc.clone()) }) + } else { + None + } + } + + /// Returns a pointer to the value location of the reference-counted allocation, assume `self` + /// is non-dangling. + /// + /// # Safety + /// + /// `self` is non-dangling. + #[inline] + pub(super) unsafe fn value_ptr_unchecked(&self) -> RcValuePointer { + // SAFETY: Caller guarantees `self` is non-dangling, so `self.ptr` must point to the value + // location in a valid reference-counted allocation. + unsafe { RcValuePointer::from_value_ptr(self.ptr.cast()) } + } +} + +impl RawWeak { + pub(crate) fn new_dangling() -> Self + where + A: Default, + { + Self::new_dangling_in(A::default()) + } + + pub(crate) const fn new_dangling_in(alloc: A) -> Self { + unsafe { Self::from_raw_parts(NonNull::without_provenance(DANGLING_WEAK_ADDRESS), alloc) } + } + + pub(crate) fn try_new_uninit() -> Result + where + A: Allocator + Default, + { + rc_alloc::try_allocate_uninit::(T::RC_LAYOUT) + .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }) + } + + pub(crate) fn try_new_uninit_in(alloc: A) -> Result + where + A: Allocator, + { + rc_alloc::try_allocate_uninit_in::(&alloc, T::RC_LAYOUT) + .map(|ptr| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }) + } + + pub(crate) fn try_new_zeroed() -> Result + where + A: Allocator + Default, + { + rc_alloc::try_allocate_zeroed::(T::RC_LAYOUT) + .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }) + } + + pub(crate) fn try_new_zeroed_in(alloc: A) -> Result + where + A: Allocator, + { + rc_alloc::try_allocate_zeroed_in::(&alloc, T::RC_LAYOUT) + .map(|ptr| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }) + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_uninit() -> Self + where + A: Allocator + Default, + { + let (ptr, alloc) = rc_alloc::allocate_uninit::(T::RC_LAYOUT); + + unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_uninit_in(alloc: A) -> Self + where + A: Allocator, + { + unsafe { + Self::from_raw_parts( + rc_alloc::allocate_uninit_in::(&alloc, T::RC_LAYOUT) + .as_ptr() + .cast(), + alloc, + ) + } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_zeroed() -> Self + where + A: Allocator + Default, + { + let (ptr, alloc) = rc_alloc::allocate_zeroed::(T::RC_LAYOUT); + + unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_zeroed_in(alloc: A) -> Self + where + A: Allocator, + { + unsafe { + Self::from_raw_parts( + rc_alloc::allocate_zeroed_in::(&alloc, T::RC_LAYOUT) + .as_ptr() + .cast(), + alloc, + ) + } + } + + /// Consumes the `RawWeak` object and returns the contained value, assuming the value is + /// initialized. + /// + /// # Safety + /// + /// - `self` is non-dangling. + /// - The value pointed to by `self` is initialized. + /// - The strong reference count is zero. 
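`upgrade` above is the primitive behind the public weak-pointer lifecycle; for reference (stable `std::rc` API, not patch code):

```rust
use std::rc::Rc;

fn main() {
    let strong = Rc::new(vec![1, 2, 3]);
    let weak = Rc::downgrade(&strong);

    // While a strong pointer exists, `upgrade` bumps the strong count and
    // returns a new `Rc` to the same allocation.
    let upgraded = weak.upgrade().expect("value is still alive");
    assert!(Rc::ptr_eq(&strong, &upgraded));
    assert_eq!(Rc::strong_count(&strong), 2);
    drop(upgraded);

    // Dropping the last strong pointer drops the value, but the allocation
    // (now holding only the counters) stays around until the last weak pointer
    // goes away; `upgrade` now fails.
    drop(strong);
    assert!(weak.upgrade().is_none());
    assert_eq!(weak.weak_count(), 0);
}
```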
+ pub(super) unsafe fn assume_init_into_inner(mut self) -> T + where + A: Allocator, + R: RefCounter, + { + unsafe { + let result = self.ptr.read(); + + self.drop_unchecked::(); + + result + } + } +} + +impl RawWeak<[T], A> { + #[cfg(not(no_global_oom_handling))] + fn allocate(length: usize, allocate_fn: F) -> Self + where + A: Allocator, + F: FnOnce(RcLayout) -> (RcValuePointer, A), + { + let rc_layout = RcLayout::new_array::(length); + let (ptr, alloc) = allocate_fn(rc_layout); + + unsafe { + Self::from_raw_parts(NonNull::slice_from_raw_parts(ptr.as_ptr().cast(), length), alloc) + } + } + + #[cfg(not(no_global_oom_handling))] + fn allocate_in(length: usize, alloc: A, allocate_fn: F) -> Self + where + A: Allocator, + F: FnOnce(&A, RcLayout) -> RcValuePointer, + { + let rc_layout = RcLayout::new_array::(length); + let ptr = allocate_fn(&alloc, rc_layout); + + unsafe { + Self::from_raw_parts(NonNull::slice_from_raw_parts(ptr.as_ptr().cast(), length), alloc) + } + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_uninit_slice(length: usize) -> Self + where + A: Allocator + Default, + { + Self::allocate(length, rc_alloc::allocate_uninit::) + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_uninit_slice_in(length: usize, alloc: A) -> Self + where + A: Allocator, + { + Self::allocate_in(length, alloc, rc_alloc::allocate_uninit_in::) + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_zeroed_slice(length: usize) -> Self + where + A: Allocator + Default, + { + Self::allocate(length, rc_alloc::allocate_zeroed::) + } + + #[cfg(not(no_global_oom_handling))] + pub(crate) fn new_zeroed_slice_in(length: usize, alloc: A) -> Self + where + A: Allocator, + { + Self::allocate_in(length, alloc, rc_alloc::allocate_zeroed_in::) + } +} + +impl CoerceUnsized> for RawWeak +where + T: Unsize + ?Sized, + U: ?Sized, +{ +} + +impl Debug for RawWeak +where + T: ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("(Weak)") + } +} + +impl Default for RawWeak +where + A: Default, +{ + fn default() -> Self { + Self::new_dangling() + } +} + +impl DispatchFromDyn> for RawWeak +where + T: Unsize + ?Sized, + U: ?Sized, +{ +} diff --git a/library/alloc/src/raw_rc/rc_alloc.rs b/library/alloc/src/raw_rc/rc_alloc.rs new file mode 100644 index 0000000000000..d9438d31c1619 --- /dev/null +++ b/library/alloc/src/raw_rc/rc_alloc.rs @@ -0,0 +1,370 @@ +use core::alloc::{AllocError, Allocator}; +#[cfg(not(no_global_oom_handling))] +use core::mem; +#[cfg(not(no_global_oom_handling))] +use core::mem::DropGuard; +#[cfg(not(no_global_oom_handling))] +use core::ptr::{self, NonNull}; + +#[cfg(not(no_global_oom_handling))] +use crate::alloc; +use crate::raw_rc::RefCounts; +use crate::raw_rc::rc_layout::RcLayout; +use crate::raw_rc::rc_value_pointer::RcValuePointer; + +/// Allocates uninitialized memory for a reference-counted allocation with allocator `alloc` and +/// layout `RcLayout`. Returns a pointer to the value location. +#[inline] +fn allocate_uninit_raw_bytes( + alloc: &A, + rc_layout: RcLayout, +) -> Result +where + A: Allocator, +{ + let allocation_result = alloc.allocate(rc_layout.get()); + + allocation_result.map(|allocation_ptr| { + // SAFETY: `allocation_ptr` is allocated with `rc_layout`, so the safety requirement of + // `RcValuePointer::from_allocation_ptr` is trivially satisfied. 
+ unsafe { RcValuePointer::from_allocation_ptr(allocation_ptr.cast(), rc_layout) } + }) +} + +/// Allocates zeroed memory for a reference-counted allocation with allocator `alloc` and layout +/// `RcLayout`. Returns a pointer to the value location. +#[inline] +fn allocate_zeroed_raw_bytes( + alloc: &A, + rc_layout: RcLayout, +) -> Result +where + A: Allocator, +{ + let allocation_result = alloc.allocate_zeroed(rc_layout.get()); + + allocation_result.map(|allocation_ptr| { + // SAFETY: `allocation_ptr` is allocated with `rc_layout`, so the safety requirement of + // `RcValuePointer::from_allocation_ptr` is trivially satisfied. + unsafe { RcValuePointer::from_allocation_ptr(allocation_ptr.cast(), rc_layout) } + }) +} + +/// Initializes reference counters in a reference-counted allocation pointed to by `value_ptr` +/// with strong count of `STRONG_COUNT` and weak count of 1. +/// +/// # Safety +/// +/// - `value_ptr` points to a valid reference-counted allocation. +#[inline] +unsafe fn init_rc_allocation(value_ptr: RcValuePointer) { + // SAFETY: Caller guarantees the `value_ptr` points to a valid reference-counted allocation, so + // we can write to the corresponding `RefCounts` object. + unsafe { value_ptr.ref_counts_ptr().write(const { RefCounts::new(STRONG_COUNT) }) }; +} + +/// Tries to allocate a chunk of reference-counted memory that is described by `rc_layout` with +/// `alloc`. The allocated memory has strong count of `STRONG_COUNT` and weak count of 1. +pub(crate) fn try_allocate_uninit_in( + alloc: &A, + rc_layout: RcLayout, +) -> Result +where + A: Allocator, +{ + let value_ptr = allocate_uninit_raw_bytes(alloc, rc_layout)?; + + // SAFETY: `value_ptr` is newly allocated, so it is guaranteed to be valid. + unsafe { init_rc_allocation::(value_ptr) }; + + Ok(value_ptr) +} + +/// Creates an allocator of type `A`, then tries to allocate a chunk of reference-counted memory +/// that is described by `rc_layout`. +pub(crate) fn try_allocate_uninit( + rc_layout: RcLayout, +) -> Result<(RcValuePointer, A), AllocError> +where + A: Allocator + Default, +{ + let alloc = A::default(); + + try_allocate_uninit_in::(&alloc, rc_layout).map(|value_ptr| (value_ptr, alloc)) +} + +/// Tries to allocate a reference-counted memory that is described by `rc_layout` with `alloc`. The +/// allocated memory has strong count of `STRONG_COUNT` and weak count of 1, and the value memory +/// is all zero bytes. +pub(crate) fn try_allocate_zeroed_in( + alloc: &A, + rc_layout: RcLayout, +) -> Result +where + A: Allocator, +{ + let value_ptr = allocate_zeroed_raw_bytes(alloc, rc_layout)?; + + // SAFETY: `value_ptr` is newly allocated, so it is guaranteed to be valid. + unsafe { init_rc_allocation::(value_ptr) }; + + Ok(value_ptr) +} + +/// Creates an allocator of type `A`, then tries to allocate a chunk of reference-counted memory +/// with all zero bytes memory that is described by `rc_layout`. +pub(crate) fn try_allocate_zeroed( + rc_layout: RcLayout, +) -> Result<(RcValuePointer, A), AllocError> +where + A: Allocator + Default, +{ + let alloc = A::default(); + + try_allocate_zeroed_in::(&alloc, rc_layout).map(|value_ptr| (value_ptr, alloc)) +} + +/// If `allocation_result` is `Ok`, initializes the reference counts with strong count +/// `STRONG_COUNT` and weak count of 1 and returns a pointer to the value object, otherwise panic +/// will be triggered by calling `alloc::handle_alloc_error`. 
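+///
+/// As an illustration (this mirrors the infallible helpers below; it is not additional API),
+/// `allocate_uninit_in` is essentially:
+///
+/// ```ignore (illustrative)
+/// let allocation_result = allocate_uninit_raw_bytes(alloc, rc_layout);
+///
+/// // SAFETY: `allocation_result` was produced from `rc_layout`.
+/// unsafe { handle_rc_allocation_result::<STRONG_COUNT>(allocation_result, rc_layout) }
+/// ```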
+/// +/// # Safety +/// +/// If `allocation_result` is `Ok`, the pointer it contains must point to a valid reference-counted +/// allocation that is allocated with `rc_layout`. +#[cfg(not(no_global_oom_handling))] +#[inline] +unsafe fn handle_rc_allocation_result( + allocation_result: Result, + rc_layout: RcLayout, +) -> RcValuePointer { + match allocation_result { + Ok(value_ptr) => { + // SAFETY: Caller guarantees the `value_ptr` points to a valid reference-counted` + // allocation. + unsafe { init_rc_allocation::(value_ptr) }; + + value_ptr + } + Err(AllocError) => alloc::handle_alloc_error(rc_layout.get()), + } +} + +/// Allocates reference-counted memory that is described by `rc_layout` with `alloc`. The allocated +/// memory has strong count of `STRONG_COUNT` and weak count of 1. If the allocation fails, panic +/// will be triggered by calling `alloc::handle_alloc_error`. +#[cfg(not(no_global_oom_handling))] +#[inline] +pub(crate) fn allocate_uninit_in( + alloc: &A, + rc_layout: RcLayout, +) -> RcValuePointer +where + A: Allocator, +{ + let allocation_result = allocate_uninit_raw_bytes(alloc, rc_layout); + + // SAFETY: `allocation_result` is the allocation result using `rc_layout`, which satisfies the + // safety requirement of `handle_rc_allocation_result`. + unsafe { handle_rc_allocation_result::(allocation_result, rc_layout) } +} + +/// Creates an allocator of type `A`, then allocate a chunk of reference-counted memory that is +/// described by `rc_layout`. +#[cfg(not(no_global_oom_handling))] +#[inline] +pub(crate) fn allocate_uninit( + rc_layout: RcLayout, +) -> (RcValuePointer, A) +where + A: Allocator + Default, +{ + let alloc = A::default(); + let value_ptr = allocate_uninit_in::(&alloc, rc_layout); + + (value_ptr, alloc) +} + +/// Allocates reference-counted memory that is described by `rc_layout` with `alloc`. The allocated +/// memory has strong count of `STRONG_COUNT` and weak count of 1, and the value memory is all zero +/// bytes. If the allocation fails, panic will be triggered by calling `alloc::handle_alloc_error`. +#[cfg(not(no_global_oom_handling))] +pub(crate) fn allocate_zeroed_in( + alloc: &A, + rc_layout: RcLayout, +) -> RcValuePointer +where + A: Allocator, +{ + let allocation_result = allocate_zeroed_raw_bytes(alloc, rc_layout); + + // SAFETY: `allocation_result` is the allocation result using `rc_layout`, which satisfies the + // safety requirement of `handle_rc_allocation_result`. + unsafe { handle_rc_allocation_result::(allocation_result, rc_layout) } +} + +/// Creates an allocator of type `A`, then allocate a chunk of reference-counted memory with all +/// zero bytes that is described by `rc_layout`. +#[cfg(not(no_global_oom_handling))] +pub(crate) fn allocate_zeroed( + rc_layout: RcLayout, +) -> (RcValuePointer, A) +where + A: Allocator + Default, +{ + let alloc = A::default(); + let value_ptr = allocate_zeroed_in::(&alloc, rc_layout); + + (value_ptr, alloc) +} + +/// Allocates a reference-counted memory chunk for storing a value according to `rc_layout`, then +/// initialize the value with `f`. If `f` panics, the allocated memory will be deallocated. +#[cfg(not(no_global_oom_handling))] +#[inline] +pub(crate) fn allocate_with_in( + alloc: &A, + rc_layout: RcLayout, + f: F, +) -> RcValuePointer +where + A: Allocator, + F: FnOnce(RcValuePointer), +{ + /// # Safety + /// + /// - `value_ptr` points to a valid value location within a reference counted allocation + /// that can be described with `rc_layout` and can be deallocated with `alloc`. 
+ /// - No access to the allocation can happen if the destructor of the returned guard get called. + unsafe fn deallocate_on_drop<'a, A>( + value_ptr: RcValuePointer, + alloc: &'a A, + rc_layout: RcLayout, + ) -> impl Drop + use<'a, A> + where + A: Allocator, + { + // SAFETY: Caller guarantees the validity of all arguments. + DropGuard::new((), move |()| unsafe { + deallocate::(value_ptr, alloc, rc_layout); + }) + } + + let value_ptr = allocate_uninit_in::(alloc, rc_layout); + let guard = unsafe { deallocate_on_drop(value_ptr, alloc, rc_layout) }; + + f(value_ptr); + + mem::forget(guard); + + value_ptr +} + +/// Creates an allocator of type `A`, then allocate a chunk of reference-counted memory that is +/// described by `rc_layout`. `f` will be called with a pointer that points the value storage to +/// initialize the allocated memory. If `f` panics, the allocated memory will be deallocated. +#[cfg(not(no_global_oom_handling))] +#[inline] +pub(crate) fn allocate_with( + rc_layout: RcLayout, + f: F, +) -> (RcValuePointer, A) +where + A: Allocator + Default, + F: FnOnce(RcValuePointer), +{ + let alloc = A::default(); + let value_ptr = allocate_with_in::(&alloc, rc_layout, f); + + (value_ptr, alloc) +} + +/// Allocates reference-counted memory that has strong count of `STRONG_COUNT` and weak count of 1. +/// The value will be initialized with data pointed to by `src_ptr`. +/// +/// # Safety +/// +/// - Memory pointed to by `src_ptr` has enough data to read for filling the value in an allocation +/// that is described by `rc_layout`. +#[cfg(not(no_global_oom_handling))] +#[inline] +pub(crate) unsafe fn allocate_with_bytes_in( + src_ptr: NonNull<()>, + alloc: &A, + rc_layout: RcLayout, +) -> RcValuePointer +where + A: Allocator, +{ + let value_ptr = allocate_uninit_in::(alloc, rc_layout); + let value_size = rc_layout.value_size(); + + unsafe { + ptr::copy_nonoverlapping::( + src_ptr.as_ptr().cast(), + value_ptr.as_ptr().as_ptr().cast(), + value_size, + ); + } + + value_ptr +} + +/// Allocates a chunk of reference-counted memory with a value that is copied from `value`. This is +/// safe because the return value is a pointer, which will not cause double unless caller calls the +/// destructor manually, which requires `unsafe` codes. +#[cfg(not(no_global_oom_handling))] +#[inline] +pub(crate) fn allocate_with_value_in( + src: &T, + alloc: &A, +) -> NonNull +where + T: ?Sized, + A: Allocator, +{ + let src_ptr = NonNull::from(src); + + // SAFETY: `src_ptr` is created from a reference, so it has correct metadata. + let rc_layout = unsafe { RcLayout::from_value_ptr(src_ptr) }; + + let (src_ptr, metadata) = src_ptr.to_raw_parts(); + + // SAFETY: `src_ptr` comes from a reference to `T`, so it is guaranteed to have enough data to + // fill the value in an allocation that is described by `rc_layout`. + let value_ptr = unsafe { allocate_with_bytes_in::(src_ptr, alloc, rc_layout) }; + + NonNull::from_raw_parts(value_ptr.as_ptr(), metadata) +} + +/// Creates an allocator of type `A`, then allocates a chunk of reference-counted memory with value +/// copied from `value`. +#[cfg(not(no_global_oom_handling))] +#[inline] +pub(crate) fn allocate_with_value(value: &T) -> (NonNull, A) +where + T: ?Sized, + A: Allocator + Default, +{ + let alloc = A::default(); + let value_ptr = allocate_with_value_in::(value, &alloc); + + (value_ptr, alloc) +} + +/// Deallocates a reference-counted allocation with a value object pointed to by `value_ptr`. 
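+///
+/// For illustration (assuming a 64-bit target, where `RefCounts` occupies 16 bytes): for a `u64`
+/// value the value offset is 16, so the allocation pointer is recovered by stepping 16 bytes back
+/// from `value_ptr`; for a value type with alignment 32, the offset would be 32 instead.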
+///
+/// # Safety
+///
+/// - `value_ptr` points to a valid reference-counted allocation that is allocated using
+///   `rc_layout`.
+#[inline]
+pub(crate) unsafe fn deallocate<A>(value_ptr: RcValuePointer, alloc: &A, rc_layout: RcLayout)
+where
+    A: Allocator,
+{
+    let value_offset = rc_layout.value_offset();
+    let allocation_ptr = unsafe { value_ptr.as_ptr().byte_sub(value_offset) };
+
+    unsafe { alloc.deallocate(allocation_ptr.cast(), rc_layout.get()) }
+}
diff --git a/library/alloc/src/raw_rc/rc_layout.rs b/library/alloc/src/raw_rc/rc_layout.rs
new file mode 100644
index 0000000000000..58be83074b3e9
--- /dev/null
+++ b/library/alloc/src/raw_rc/rc_layout.rs
@@ -0,0 +1,171 @@
+use core::alloc::{Layout, LayoutError};
+use core::mem::SizedTypeProperties;
+use core::ptr::NonNull;
+
+use crate::raw_rc::RefCounts;
+
+/// A `Layout` that describes a reference-counted allocation.
+#[derive(Clone, Copy)]
+pub(crate) struct RcLayout(Layout);
+
+impl RcLayout {
+    /// Tries to create an `RcLayout` to store a value with layout `value_layout`. Returns `Err` if
+    /// `value_layout` is too big to store in a reference-counted allocation.
+    #[inline]
+    pub(crate) const fn try_from_value_layout(value_layout: Layout) -> Result<Self, LayoutError> {
+        match RefCounts::LAYOUT.extend(value_layout) {
+            Ok((rc_layout, _)) => Ok(Self(rc_layout)),
+            Err(error) => Err(error),
+        }
+    }
+
+    /// Creates an `RcLayout` to store a value with layout `value_layout`. Panics if `value_layout`
+    /// is too big to store in a reference-counted allocation.
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn from_value_layout(value_layout: Layout) -> Self {
+        Self::try_from_value_layout(value_layout).unwrap()
+    }
+
+    /// Creates an `RcLayout` to store a value with layout `value_layout`.
+    ///
+    /// # Safety
+    ///
+    /// `RcLayout::try_from_value_layout(value_layout)` must return `Ok`.
+    #[inline]
+    pub(crate) unsafe fn from_value_layout_unchecked(value_layout: Layout) -> Self {
+        unsafe { Self::try_from_value_layout(value_layout).unwrap_unchecked() }
+    }
+
+    /// Creates an `RcLayout` to store an array of `length` elements of type `T`. Panics if the
+    /// array is too big to store in a reference-counted allocation.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_array<T>(length: usize) -> Self {
+        /// For minimizing monomorphization cost.
+        #[inline]
+        fn inner(value_layout: Layout, length: usize) -> RcLayout {
+            // We can use `repeat_packed` here because the outer function passes `T::LAYOUT` as the
+            // `value_layout`, which is already padded to a multiple of its alignment.
+            value_layout.repeat_packed(length).and_then(RcLayout::try_from_value_layout).unwrap()
+        }
+
+        inner(T::LAYOUT, length)
+    }
+
+    /// Returns the `Layout` object that describes the reference-counted allocation.
+    pub(crate) fn get(&self) -> Layout {
+        self.0
+    }
+
+    /// Returns the byte offset of the value stored in a reference-counted allocation that is
+    /// described by `self`.
+    #[inline]
+    pub(crate) fn value_offset(&self) -> usize {
+        // SAFETY:
+        //
+        // This essentially calculates `size_of::<RefCounts>().next_multiple_of(self.0.align())`.
+        //
+        // See comments in `Layout::size_rounded_up_to_custom_align` for detailed explanation.
+        unsafe {
+            let align_m1 = self.0.align().unchecked_sub(1);
+
+            size_of::<RefCounts>().unchecked_add(align_m1) & !align_m1
+        }
+    }
+
+    /// Returns the byte size of the value stored in a reference-counted allocation that is
+    /// described by `self`.
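+    ///
+    /// For illustration (assuming a 64-bit target, where `RefCounts` occupies 16 bytes): for a
+    /// `[u8; 5]` value the allocation layout has size 21 (the `RefCounts` followed directly by
+    /// the value bytes), so this returns `21 - 16 = 5`.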
+ #[cfg(not(no_global_oom_handling))] + #[inline] + pub(crate) fn value_size(&self) -> usize { + unsafe { self.0.size().unchecked_sub(self.value_offset()) } + } + + /// Creates an `RcLayout` for storing a value that is pointed to by `value_ptr`. + /// + /// # Safety + /// + /// `value_ptr` has correct metadata of `T`. + #[cfg(not(no_global_oom_handling))] + pub(crate) unsafe fn from_value_ptr(value_ptr: NonNull) -> Self + where + T: ?Sized, + { + /// A helper trait for computing `RcLayout` to store a `Self` object. If `Self` is + /// `Sized`, the `RcLayout` value is computed at compile time. + trait SpecRcLayout { + unsafe fn spec_rc_layout(value_ptr: NonNull) -> RcLayout; + } + + impl SpecRcLayout for T + where + T: ?Sized, + { + #[inline] + default unsafe fn spec_rc_layout(value_ptr: NonNull) -> RcLayout { + RcLayout::from_value_layout(unsafe { Layout::for_value_raw(value_ptr.as_ptr()) }) + } + } + + impl SpecRcLayout for T { + #[inline] + unsafe fn spec_rc_layout(_: NonNull) -> RcLayout { + Self::RC_LAYOUT + } + } + + unsafe { T::spec_rc_layout(value_ptr) } + } + + /// Creates an `RcLayout` for storing a value that is pointed to by `value_ptr`, assuming the + /// value is small enough to fit inside a reference-counted allocation. + /// + /// # Safety + /// + /// - `value_ptr` has correct metadata for a `T` object. + /// - It is known that the memory layout described by `value_ptr` can be used to create an + /// `RcLayout` successfully. + pub(crate) unsafe fn from_value_ptr_unchecked(value_ptr: NonNull) -> Self + where + T: ?Sized, + { + /// A helper trait for computing `RcLayout` to store a `Self` object. If `Self` is + /// `Sized`, the `RcLayout` value is computed at compile time. + trait SpecRcLayoutUnchecked { + unsafe fn spec_rc_layout_unchecked(value_ptr: NonNull) -> RcLayout; + } + + impl SpecRcLayoutUnchecked for T + where + T: ?Sized, + { + #[inline] + default unsafe fn spec_rc_layout_unchecked(value_ptr: NonNull) -> RcLayout { + unsafe { + RcLayout::from_value_layout_unchecked(Layout::for_value_raw(value_ptr.as_ptr())) + } + } + } + + impl SpecRcLayoutUnchecked for T { + #[inline] + unsafe fn spec_rc_layout_unchecked(_: NonNull) -> RcLayout { + Self::RC_LAYOUT + } + } + + unsafe { T::spec_rc_layout_unchecked(value_ptr) } + } +} + +pub(crate) trait RcLayoutExt { + /// Computes `RcLayout` at compile time if `Self` is `Sized`. + const RC_LAYOUT: RcLayout; +} + +impl RcLayoutExt for T { + const RC_LAYOUT: RcLayout = match RcLayout::try_from_value_layout(T::LAYOUT) { + Ok(rc_layout) => rc_layout, + Err(_) => panic!("value is too big to store in a reference-counted allocation"), + }; +} diff --git a/library/alloc/src/raw_rc/rc_value_pointer.rs b/library/alloc/src/raw_rc/rc_value_pointer.rs new file mode 100644 index 0000000000000..5afc401ef8808 --- /dev/null +++ b/library/alloc/src/raw_rc/rc_value_pointer.rs @@ -0,0 +1,77 @@ +use core::cell::UnsafeCell; +use core::ptr::NonNull; + +use crate::raw_rc::RefCounts; +use crate::raw_rc::rc_layout::RcLayout; + +/// A pointer to the value location in a reference-counted allocation. The reference-counted +/// allocation may be deallocated, and the value may be uninitialized. This type provides stronger +/// pointer semantics, reducing the risk of misuse. The guarantees this type provides can also +/// reduce the amount of unsafe code. 
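+///
+/// Conceptually (an illustrative sketch of the accessors below, not additional API), the
+/// reference counts always sit immediately before the address held here, so they are reachable
+/// through a fixed negative offset:
+///
+/// ```ignore (illustrative)
+/// // `value_ptr` is the `NonNull<()>` stored in `inner`.
+/// let ref_counts_ptr = unsafe { value_ptr.cast::<RefCounts>().sub(1) };
+/// ```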
+#[derive(Clone, Copy)]
+#[repr(transparent)]
+pub(crate) struct RcValuePointer {
+    inner: NonNull<()>,
+}
+
+impl RcValuePointer {
+    /// Creates a new `RcValuePointer` from a raw pointer to a reference-counted allocation.
+    ///
+    /// # Safety
+    ///
+    /// Caller must ensure that `allocation_ptr` is a pointer to some valid reference-counted
+    /// allocation that can be described by `rc_layout`.
+    #[inline]
+    pub(crate) unsafe fn from_allocation_ptr(
+        allocation_ptr: NonNull<()>,
+        rc_layout: RcLayout,
+    ) -> Self {
+        // SAFETY: Caller guarantees that `allocation_ptr` points to some reference-counted
+        // allocation that can be described by `rc_layout`, so we can acquire the corresponding
+        // value pointer safely.
+        unsafe { Self::from_value_ptr(allocation_ptr.byte_add(rc_layout.value_offset())) }
+    }
+
+    /// Creates a new `RcValuePointer` from a raw pointer to the value location in a
+    /// reference-counted allocation.
+    ///
+    /// # Safety
+    ///
+    /// Caller must ensure that `value_ptr` is a valid pointer to the value location inside some
+    /// valid reference-counted allocation.
+    #[inline]
+    pub(crate) unsafe fn from_value_ptr(value_ptr: NonNull<()>) -> Self {
+        Self { inner: value_ptr }
+    }
+
+    #[inline]
+    pub(crate) fn as_ptr(self) -> NonNull<()> {
+        self.inner
+    }
+
+    #[inline]
+    pub(crate) fn ref_counts_ptr(self) -> NonNull<RefCounts> {
+        // SAFETY: `self.inner` is guaranteed to have a valid address inside a reference-counted
+        // allocation, so we are safe to assume we can get a proper pointer to the corresponding
+        // `RefCounts` object.
+        unsafe { self.inner.cast::<RefCounts>().sub(1) }
+    }
+
+    #[inline]
+    pub(crate) fn strong_count_ptr(self) -> NonNull<UnsafeCell<usize>> {
+        let ref_counts_ptr = self.ref_counts_ptr();
+
+        // SAFETY: `ref_counts_ptr` is guaranteed to be a valid pointer to a `RefCounts` object, so
+        // we can safely acquire the pointer to the corresponding strong counter object.
+        unsafe { NonNull::new_unchecked(&raw mut (*ref_counts_ptr.as_ptr()).strong) }
+    }
+
+    #[inline]
+    pub(crate) fn weak_count_ptr(self) -> NonNull<UnsafeCell<usize>> {
+        let ref_counts_ptr = self.ref_counts_ptr();
+
+        // SAFETY: `ref_counts_ptr` is guaranteed to be a valid pointer to a `RefCounts` object, so
+        // we can safely acquire the pointer to the corresponding weak counter object.
+ unsafe { NonNull::new_unchecked(&raw mut (*ref_counts_ptr.as_ptr()).weak) } + } +} diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 2b62b92d43886..8de3b1f84cc84 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -248,51 +248,137 @@ use core::clone::CloneToUninit; use core::clone::UseCloned; use core::cmp::Ordering; use core::hash::{Hash, Hasher}; -use core::intrinsics::abort; -#[cfg(not(no_global_oom_handling))] -use core::iter; -use core::marker::{PhantomData, Unsize}; -use core::mem::{self, ManuallyDrop, align_of_val_raw}; -use core::num::NonZeroUsize; +use core::marker::Unsize; +use core::mem::{self, ManuallyDrop}; use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver}; use core::panic::{RefUnwindSafe, UnwindSafe}; #[cfg(not(no_global_oom_handling))] use core::pin::Pin; use core::pin::PinCoerceUnsized; -use core::ptr::{self, NonNull, drop_in_place}; -#[cfg(not(no_global_oom_handling))] -use core::slice::from_raw_parts_mut; -use core::{borrow, fmt, hint}; +use core::ptr::{self, NonNull}; +use core::{borrow, fmt, hint, intrinsics}; -#[cfg(not(no_global_oom_handling))] -use crate::alloc::handle_alloc_error; -use crate::alloc::{AllocError, Allocator, Global, Layout}; +use crate::alloc::{AllocError, Allocator, Global}; use crate::borrow::{Cow, ToOwned}; +#[cfg(not(no_global_oom_handling))] use crate::boxed::Box; #[cfg(not(no_global_oom_handling))] +use crate::raw_rc::MakeMutStrategy; +use crate::raw_rc::{self, RawRc, RawUniqueRc, RawWeak}; +#[cfg(not(no_global_oom_handling))] use crate::string::String; #[cfg(not(no_global_oom_handling))] use crate::vec::Vec; -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -// repr(align(2)) (forcing alignment to at least 2) is required because usize -// has 1-byte alignment on AVR. -#[repr(C, align(2))] -struct RcInner { - strong: Cell, - weak: Cell, - value: T, +type RefCounter = Cell; + +unsafe impl raw_rc::RefCounter for RefCounter { + #[inline] + fn increment(&self) { + // NOTE: If you `mem::forget` `Rc`s (or `Weak`s), drop is skipped and the ref-count + // is not decremented, meaning the ref-count can overflow, and then you can + // free the allocation while outstanding `Rc`s (or `Weak`s) exist, which would be + // unsound. We abort because this is such a degenerate scenario that we don't + // care about what happens -- no real program should ever experience this. + // + // This should have negligible overhead since you don't actually need to + // clone these much in Rust thanks to ownership and move-semantics. + + let count = self.get(); + + // We insert an `assume` here to hint LLVM at an otherwise + // missed optimization. + // SAFETY: The reference count will never be zero when this is + // called. + unsafe { hint::assert_unchecked(count != 0) }; + + let (new_count, overflowed) = count.overflowing_add(1); + + self.set(new_count); + + // We want to abort on overflow instead of dropping the value. + // Checking for overflow after the store instead of before + // allows for slightly better code generation. 
+ if intrinsics::unlikely(overflowed) { + intrinsics::abort(); + } + } + + #[inline] + fn decrement(&self) -> bool { + let count = self.get(); + + self.set(count.wrapping_sub(1)); + + self.get() == 0 + } + + #[inline] + fn try_upgrade(&self) -> bool { + let count = self.get(); + + if count == 0 { + false + } else { + self.set(count + 1); + + true + } + } + + #[inline] + fn downgrade_increment_weak(&self) { + self.increment(); + } + + #[inline] + fn try_lock_strong_count(&self) -> bool { + let count = self.get(); + + if count == 1 { + self.set(0); + + true + } else { + false + } + } + + #[inline] + fn unlock_strong_count(&self) { + self.set(1); + } + + #[inline] + fn is_unique(strong_count: &Self, weak_count: &Self) -> bool { + strong_count.get() == 1 && weak_count.get() == 1 + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn make_mut(strong_count: &Self, weak_count: &Self) -> Option { + if strong_count.get() == 1 { + if weak_count.get() == 1 { + None + } else { + strong_count.set(0); + + Some(MakeMutStrategy::Move) + } + } else { + Some(MakeMutStrategy::Clone) + } + } } -/// Calculate layout for `RcInner` using the inner value's layout -fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const RcInner)`, but this created a misaligned - // reference (see #54908). - Layout::new::>().extend(layout).unwrap().0.pad_to_align() +#[cfg(not(no_global_oom_handling))] +#[inline] +fn weak_fn_to_raw_weak_fn(f: F) -> impl FnOnce(&RawWeak) -> T +where + F: FnOnce(&Weak) -> T, + A: Allocator, +{ + move |raw_weak: &RawWeak| f(Weak::ref_from_raw_weak(raw_weak)) } /// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference @@ -309,13 +395,12 @@ fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout { #[rustc_diagnostic_item = "Rc"] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_insignificant_dtor] +#[repr(transparent)] pub struct Rc< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - ptr: NonNull>, - phantom: PhantomData>, - alloc: A, + raw_rc: RawRc, } #[stable(feature = "rust1", since = "1.0.0")] @@ -344,58 +429,6 @@ impl, U: ?Sized> DispatchFromDyn> for Rc {} #[unstable(feature = "cell_get_cloned", issue = "145329")] unsafe impl CloneFromCell for Rc {} -impl Rc { - #[inline] - unsafe fn from_inner(ptr: NonNull>) -> Self { - unsafe { Self::from_inner_in(ptr, Global) } - } - - #[inline] - unsafe fn from_ptr(ptr: *mut RcInner) -> Self { - unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } - } -} - -impl Rc { - #[inline(always)] - fn inner(&self) -> &RcInner { - // This unsafety is ok because while this Rc is alive we're guaranteed - // that the inner pointer is valid. - unsafe { self.ptr.as_ref() } - } - - #[inline] - fn into_inner_with_allocator(this: Self) -> (NonNull>, A) { - let this = mem::ManuallyDrop::new(this); - (this.ptr, unsafe { ptr::read(&this.alloc) }) - } - - #[inline] - unsafe fn from_inner_in(ptr: NonNull>, alloc: A) -> Self { - Self { ptr, phantom: PhantomData, alloc } - } - - #[inline] - unsafe fn from_ptr_in(ptr: *mut RcInner, alloc: A) -> Self { - unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) } - } - - // Non-inlined part of `drop`. - #[inline(never)] - unsafe fn drop_slow(&mut self) { - // Reconstruct the "strong weak" pointer and drop it when this - // variable goes out of scope. 
This ensures that the memory is - // deallocated even if the destructor of `T` panics. - let _weak = Weak { ptr: self.ptr, alloc: &self.alloc }; - - // Destroy the contained object. - // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed. - unsafe { - ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value); - } - } -} - impl Rc { /// Constructs a new `Rc`. /// @@ -409,16 +442,7 @@ impl Rc { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - unsafe { - Self::from_inner( - Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value })) - .into(), - ) - } + Self { raw_rc: RawRc::new(value) } } /// Constructs a new `Rc` while giving you a `Weak` to the allocation, @@ -478,7 +502,10 @@ impl Rc { where F: FnOnce(&Weak) -> T, { - Self::new_cyclic_in(data_fn, Global) + let data_fn = weak_fn_to_raw_weak_fn(data_fn); + let raw_rc = unsafe { RawRc::new_cyclic::<_, RefCounter>(data_fn) }; + + Self { raw_rc } } /// Constructs a new `Rc` with uninitialized contents. @@ -501,13 +528,7 @@ impl Rc { #[stable(feature = "new_uninit", since = "1.82.0")] #[must_use] pub fn new_uninit() -> Rc> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - <*mut u8>::cast, - )) - } + Rc { raw_rc: RawRc::new_uninit() } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -532,13 +553,7 @@ impl Rc { #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")] #[must_use] pub fn new_zeroed() -> Rc> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - <*mut u8>::cast, - )) - } + Rc { raw_rc: RawRc::new_zeroed() } } /// Constructs a new `Rc`, returning an error if the allocation fails @@ -554,20 +569,7 @@ impl Rc { /// ``` #[unstable(feature = "allocator_api", issue = "32838")] pub fn try_new(value: T) -> Result, AllocError> { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - unsafe { - Ok(Self::from_inner( - Box::leak(Box::try_new(RcInner { - strong: Cell::new(1), - weak: Cell::new(1), - value, - })?) 
- .into(), - )) - } + RawRc::try_new(value).map(|raw_rc| Self { raw_rc }) } /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails @@ -591,13 +593,7 @@ impl Rc { /// ``` #[unstable(feature = "allocator_api", issue = "32838")] pub fn try_new_uninit() -> Result>, AllocError> { - unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - <*mut u8>::cast, - )?)) - } + RawRc::try_new_uninit().map(|raw_rc| Rc { raw_rc }) } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -623,13 +619,7 @@ impl Rc { /// [zeroed]: mem::MaybeUninit::zeroed #[unstable(feature = "allocator_api", issue = "32838")] pub fn try_new_zeroed() -> Result>, AllocError> { - unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - <*mut u8>::cast, - )?)) - } + RawRc::try_new_zeroed().map(|raw_rc| Rc { raw_rc }) } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `value` will be pinned in memory and unable to be moved. @@ -657,12 +647,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_in(value: T, alloc: A) -> Rc { - // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable. - // That would make code size bigger. - match Self::try_new_in(value, alloc) { - Ok(m) => m, - Err(_) => handle_alloc_error(Layout::new::>()), - } + Self { raw_rc: RawRc::new_in(value, alloc) } } /// Constructs a new `Rc` with uninitialized contents in the provided allocator. @@ -691,16 +676,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_uninit_in(alloc: A) -> Rc, A> { - unsafe { - Rc::from_ptr_in( - Rc::allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate(layout), - <*mut u8>::cast, - ), - alloc, - ) - } + Rc { raw_rc: RawRc::new_uninit_in(alloc) } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -728,16 +704,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_zeroed_in(alloc: A) -> Rc, A> { - unsafe { - Rc::from_ptr_in( - Rc::allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate_zeroed(layout), - <*mut u8>::cast, - ), - alloc, - ) - } + Rc { raw_rc: RawRc::new_zeroed_in(alloc) } } /// Constructs a new `Rc` in the given allocator while giving you a `Weak` to the allocation, @@ -775,47 +742,10 @@ impl Rc { where F: FnOnce(&Weak) -> T, { - // Construct the inner in the "uninitialized" state with a single - // weak reference. - let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in( - RcInner { - strong: Cell::new(0), - weak: Cell::new(1), - value: mem::MaybeUninit::::uninit(), - }, - alloc, - )); - let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into(); - let init_ptr: NonNull> = uninit_ptr.cast(); - - let weak = Weak { ptr: init_ptr, alloc }; + let data_fn = weak_fn_to_raw_weak_fn(data_fn); + let raw_rc = unsafe { RawRc::new_cyclic_in::<_, RefCounter>(data_fn, alloc) }; - // It's important we don't give up ownership of the weak pointer, or - // else the memory might be freed by the time `data_fn` returns. If - // we really wanted to pass ownership, we could create an additional - // weak pointer for ourselves, but this would result in additional - // updates to the weak reference count which might not be necessary - // otherwise. 
- let data = data_fn(&weak); - - let strong = unsafe { - let inner = init_ptr.as_ptr(); - ptr::write(&raw mut (*inner).value, data); - - let prev_value = (*inner).strong.get(); - debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - (*inner).strong.set(1); - - // Strong references should collectively own a shared weak reference, - // so don't run the destructor for our old weak reference. - // Calling into_raw_with_allocator has the double effect of giving us back the allocator, - // and forgetting the weak reference. - let alloc = weak.into_raw_with_allocator().1; - - Rc::from_inner_in(init_ptr, alloc) - }; - - strong + Self { raw_rc } } /// Constructs a new `Rc` in the provided allocator, returning an error if the allocation @@ -834,15 +764,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new_in(value: T, alloc: A) -> Result { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - let (ptr, alloc) = Box::into_unique(Box::try_new_in( - RcInner { strong: Cell::new(1), weak: Cell::new(1), value }, - alloc, - )?); - Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) }) + RawRc::try_new_in(value, alloc).map(|raw_rc| Self { raw_rc }) } /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an @@ -872,16 +794,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new_uninit_in(alloc: A) -> Result, A>, AllocError> { - unsafe { - Ok(Rc::from_ptr_in( - Rc::try_allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate(layout), - <*mut u8>::cast, - )?, - alloc, - )) - } + RawRc::try_new_uninit_in(alloc).map(|raw_rc| Rc { raw_rc }) } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -910,16 +823,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new_zeroed_in(alloc: A) -> Result, A>, AllocError> { - unsafe { - Ok(Rc::from_ptr_in( - Rc::try_allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate_zeroed(layout), - <*mut u8>::cast, - )?, - alloc, - )) - } + RawRc::try_new_zeroed_in(alloc).map(|raw_rc| Rc { raw_rc }) } /// Constructs a new `Pin>` in the provided allocator. If `T` does not implement `Unpin`, then @@ -956,22 +860,10 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { - if Rc::strong_count(&this) == 1 { - let this = ManuallyDrop::new(this); - - let val: T = unsafe { ptr::read(&**this) }; // copy the contained object - let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator - - // Indicate to Weaks that they can't be promoted by decrementing - // the strong count, and then remove the implicit "strong weak" - // pointer while also handling drop logic by just crafting a - // fake Weak. - this.inner().dec_strong(); - let _weak = Weak { ptr: this.ptr, alloc }; - Ok(val) - } else { - Err(this) - } + let raw_rc = Self::into_raw_rc(this); + let result = unsafe { raw_rc.try_unwrap::() }; + + result.map_err(|raw_rc| Self { raw_rc }) } /// Returns the inner value, if the `Rc` has exactly one strong reference. 
@@ -1007,7 +899,9 @@ impl Rc { #[inline] #[stable(feature = "rc_into_inner", since = "1.70.0")] pub fn into_inner(this: Self) -> Option { - Rc::try_unwrap(this).ok() + let raw_rc = Self::into_raw_rc(this); + + unsafe { raw_rc.into_inner::() } } } @@ -1035,7 +929,7 @@ impl Rc<[T]> { #[stable(feature = "new_uninit", since = "1.82.0")] #[must_use] pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) } + Rc { raw_rc: RawRc::new_uninit_slice(len) } } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -1060,16 +954,7 @@ impl Rc<[T]> { #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")] #[must_use] pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem.cast::(), len) - as *mut RcInner<[mem::MaybeUninit]> - }, - )) - } + Rc { raw_rc: RawRc::new_zeroed_slice(len) } } /// Converts the reference-counted slice into a reference-counted array. @@ -1081,15 +966,10 @@ impl Rc<[T]> { #[inline] #[must_use] pub fn into_array(self) -> Option> { - if self.len() == N { - let ptr = Self::into_raw(self) as *const [T; N]; + let raw_rc = Self::into_raw_rc(self); + let result = unsafe { raw_rc.into_array::() }; - // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length. - let me = unsafe { Rc::from_raw(ptr) }; - Some(me) - } else { - None - } + result.map(|raw_rc| Rc { raw_rc }) } } @@ -1122,7 +1002,7 @@ impl Rc<[T], A> { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit], A> { - unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) } + Rc { raw_rc: RawRc::new_uninit_slice_in(len, alloc) } } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -1150,19 +1030,7 @@ impl Rc<[T], A> { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit], A> { - unsafe { - Rc::from_ptr_in( - Rc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| alloc.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem.cast::(), len) - as *mut RcInner<[mem::MaybeUninit]> - }, - ), - alloc, - ) - } + Rc { raw_rc: RawRc::new_zeroed_slice_in(len, alloc) } } } @@ -1196,8 +1064,10 @@ impl Rc, A> { #[stable(feature = "new_uninit", since = "1.82.0")] #[inline] pub unsafe fn assume_init(self) -> Rc { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - unsafe { Rc::from_inner_in(ptr.cast(), alloc) } + let raw_rc = Self::into_raw_rc(self); + let raw_rc = unsafe { raw_rc.assume_init() }; + + Rc { raw_rc } } } @@ -1234,8 +1104,10 @@ impl Rc<[mem::MaybeUninit], A> { #[stable(feature = "new_uninit", since = "1.82.0")] #[inline] pub unsafe fn assume_init(self) -> Rc<[T], A> { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) } + let raw_rc = Self::into_raw_rc(self); + let raw_rc = unsafe { raw_rc.assume_init() }; + + Rc { raw_rc } } } @@ -1304,7 +1176,7 @@ impl Rc { #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Self::from_raw_in(ptr, Global) } + Self { raw_rc: unsafe { 
RawRc::from_raw(NonNull::new_unchecked(ptr.cast_mut())) } } } /// Consumes the `Rc`, returning the wrapped pointer. @@ -1327,8 +1199,7 @@ impl Rc { #[stable(feature = "rc_raw", since = "1.17.0")] #[rustc_never_returns_null_ptr] pub fn into_raw(this: Self) -> *const T { - let this = ManuallyDrop::new(this); - Self::as_ptr(&*this) + Self::into_raw_rc(this).into_raw().as_ptr() } /// Increments the strong reference count on the `Rc` associated with the @@ -1364,7 +1235,11 @@ impl Rc { #[inline] #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] pub unsafe fn increment_strong_count(ptr: *const T) { - unsafe { Self::increment_strong_count_in(ptr, Global) } + unsafe { + RawRc::::increment_strong_count::(NonNull::new_unchecked( + ptr.cast_mut(), + )); + } } /// Decrements the strong reference count on the `Rc` associated with the @@ -1401,11 +1276,22 @@ impl Rc { #[inline] #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] pub unsafe fn decrement_strong_count(ptr: *const T) { - unsafe { Self::decrement_strong_count_in(ptr, Global) } + unsafe { + RawRc::::decrement_strong_count::(NonNull::new_unchecked( + ptr.cast_mut(), + )) + } } } impl Rc { + #[inline] + fn into_raw_rc(this: Self) -> RawRc { + let this = ManuallyDrop::new(this); + + unsafe { ptr::read(&this.raw_rc) } + } + /// Returns a reference to the underlying allocator. /// /// Note: this is an associated function, which means that you have @@ -1414,7 +1300,7 @@ impl Rc { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn allocator(this: &Self) -> &A { - &this.alloc + this.raw_rc.allocator() } /// Consumes the `Rc`, returning the wrapped pointer and allocator. @@ -1438,11 +1324,9 @@ impl Rc { #[must_use = "losing the pointer will leak memory"] #[unstable(feature = "allocator_api", issue = "32838")] pub fn into_raw_with_allocator(this: Self) -> (*const T, A) { - let this = mem::ManuallyDrop::new(this); - let ptr = Self::as_ptr(&this); - // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped - let alloc = unsafe { ptr::read(&this.alloc) }; - (ptr, alloc) + let (ptr, alloc) = Self::into_raw_rc(this).into_raw_parts(); + + (ptr.as_ptr(), alloc) } /// Provides a raw pointer to the data. @@ -1464,12 +1348,7 @@ impl Rc { #[stable(feature = "weak_into_raw", since = "1.45.0")] #[rustc_never_returns_null_ptr] pub fn as_ptr(this: &Self) -> *const T { - let ptr: *mut RcInner = NonNull::as_ptr(this.ptr); - - // SAFETY: This cannot go through Deref::deref or Rc::inner because - // this is required to retain raw/mut provenance such that e.g. `get_mut` can - // write through the pointer after the Rc is recovered through `from_raw`. - unsafe { &raw mut (*ptr).value } + this.raw_rc.as_ptr().as_ptr() } /// Constructs an `Rc` from a raw pointer in the provided allocator. @@ -1541,12 +1420,9 @@ impl Rc { /// ``` #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - let offset = unsafe { data_offset(ptr) }; - - // Reverse the offset to find the original RcInner. - let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner }; - - unsafe { Self::from_ptr_in(rc_ptr, alloc) } + unsafe { + Self { raw_rc: RawRc::from_raw_parts(NonNull::new_unchecked(ptr.cast_mut()), alloc) } + } } /// Creates a new [`Weak`] pointer to this allocation. 
@@ -1567,10 +1443,7 @@ impl Rc { where A: Clone, { - this.inner().inc_weak(); - // Make sure we do not create a dangling Weak - debug_assert!(!is_dangling(this.ptr.as_ptr())); - Weak { ptr: this.ptr, alloc: this.alloc.clone() } + Weak { raw_weak: unsafe { this.raw_rc.downgrade::() } } } /// Gets the number of [`Weak`] pointers to this allocation. @@ -1588,7 +1461,7 @@ impl Rc { #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { - this.inner().weak() - 1 + unsafe { *this.raw_rc.weak_count().get() - 1 } } /// Gets the number of strong (`Rc`) pointers to this allocation. @@ -1606,7 +1479,7 @@ impl Rc { #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { - this.inner().strong() + unsafe { *this.raw_rc.strong_count().get() } } /// Increments the strong reference count on the `Rc` associated with the @@ -1648,10 +1521,13 @@ impl Rc { where A: Clone, { - // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop - let rc = unsafe { mem::ManuallyDrop::new(Rc::::from_raw_in(ptr, alloc)) }; - // Now increase refcount, but don't drop new refcount either - let _rc_clone: mem::ManuallyDrop<_> = rc.clone(); + unsafe { + RawRc::::increment_strong_count::(NonNull::new_unchecked( + ptr.cast_mut(), + )); + } + + drop(alloc); } /// Decrements the strong reference count on the `Rc` associated with the @@ -1691,14 +1567,12 @@ impl Rc { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) { - unsafe { drop(Rc::from_raw_in(ptr, alloc)) }; - } - - /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to - /// this allocation. - #[inline] - fn is_unique(this: &Self) -> bool { - Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1 + unsafe { + RawRc::::decrement_strong_count_in::( + NonNull::new_unchecked(ptr.cast_mut()), + alloc, + ); + } } /// Returns a mutable reference into the given `Rc`, if there are @@ -1728,7 +1602,7 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn get_mut(this: &mut Self) -> Option<&mut T> { - if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None } + unsafe { this.raw_rc.get_mut::() } } /// Returns a mutable reference into the given `Rc`, @@ -1794,9 +1668,7 @@ impl Rc { #[inline] #[unstable(feature = "get_mut_unchecked", issue = "63292")] pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { - // We are careful to *not* create a reference covering the "count" fields, as - // this would conflict with accesses to the reference counts (e.g. by `Weak`). - unsafe { &mut (*this.ptr.as_ptr()).value } + unsafe { this.raw_rc.get_mut_unchecked() } } #[inline] @@ -1817,7 +1689,7 @@ impl Rc { /// assert!(!Rc::ptr_eq(&five, &other_five)); /// ``` pub fn ptr_eq(this: &Self, other: &Self) -> bool { - ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr()) + RawRc::ptr_eq(&this.raw_rc, &other.raw_rc) } } @@ -1876,57 +1748,7 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn make_mut(this: &mut Self) -> &mut T { - let size_of_val = size_of_val::(&**this); - - if Rc::strong_count(this) != 1 { - // Gotta clone the data, there are other Rcs. - - let this_data_ref: &T = &**this; - // `in_progress` drops the allocation if we panic before finishing initializing it. - let mut in_progress: UniqueRcUninit = - UniqueRcUninit::new(this_data_ref, this.alloc.clone()); - - // Initialize with clone of this. 
- let initialized_clone = unsafe { - // Clone. If the clone panics, `in_progress` will be dropped and clean up. - this_data_ref.clone_to_uninit(in_progress.data_ptr().cast()); - // Cast type of pointer, now that it is initialized. - in_progress.into_rc() - }; - - // Replace `this` with newly constructed Rc. - *this = initialized_clone; - } else if Rc::weak_count(this) != 0 { - // Can just steal the data, all that's left is Weaks - - // We don't need panic-protection like the above branch does, but we might as well - // use the same mechanism. - let mut in_progress: UniqueRcUninit = - UniqueRcUninit::new(&**this, this.alloc.clone()); - unsafe { - // Initialize `in_progress` with move of **this. - // We have to express this in terms of bytes because `T: ?Sized`; there is no - // operation that just copies a value based on its `size_of_val()`. - ptr::copy_nonoverlapping( - ptr::from_ref(&**this).cast::(), - in_progress.data_ptr().cast::(), - size_of_val, - ); - - this.inner().dec_strong(); - // Remove implicit strong-weak ref (no need to craft a fake - // Weak here -- we know other Weaks can clean up for us) - this.inner().dec_weak(); - // Replace `this` with newly constructed Rc that has the moved data. - ptr::write(this, in_progress.into_rc()); - } - } - // This unsafety is ok because we're guaranteed that the pointer - // returned is the *only* pointer that will ever be returned to T. Our - // reference count is guaranteed to be 1 at this point, and we required - // the `Rc` itself to be `mut`, so we're returning the only possible - // reference to the allocation. - unsafe { &mut this.ptr.as_mut().value } + unsafe { this.raw_rc.make_mut::() } } } @@ -1962,7 +1784,9 @@ impl Rc { #[inline] #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")] pub fn unwrap_or_clone(this: Self) -> T { - Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone()) + let raw_rc = Self::into_raw_rc(this); + + unsafe { raw_rc.unwrap_or_clone::() } } } @@ -1988,13 +1812,9 @@ impl Rc { #[inline] #[stable(feature = "rc_downcast", since = "1.29.0")] pub fn downcast(self) -> Result, Self> { - if (*self).is::() { - unsafe { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - Ok(Rc::from_inner_in(ptr.cast(), alloc)) - } - } else { - Err(self) + match Self::into_raw_rc(self).downcast::() { + Ok(raw_rc) => Ok(Rc { raw_rc }), + Err(raw_rc) => Err(Self { raw_rc }), } } @@ -2027,208 +1847,10 @@ impl Rc { #[inline] #[unstable(feature = "downcast_unchecked", issue = "90850")] pub unsafe fn downcast_unchecked(self) -> Rc { - unsafe { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - Rc::from_inner_in(ptr.cast(), alloc) - } - } -} - -impl Rc { - /// Allocates an `RcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided. - /// - /// The function `mem_to_rc_inner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcInner`. 
- #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner, - ) -> *mut RcInner { - let layout = rc_inner_layout_for_value_layout(value_layout); - unsafe { - Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner) - .unwrap_or_else(|_| handle_alloc_error(layout)) - } - } - - /// Allocates an `RcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided, - /// returning an error if allocation fails. - /// - /// The function `mem_to_rc_inner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcInner`. - #[inline] - unsafe fn try_allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner, - ) -> Result<*mut RcInner, AllocError> { - let layout = rc_inner_layout_for_value_layout(value_layout); - - // Allocate for the layout. - let ptr = allocate(layout)?; - - // Initialize the RcInner - let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr()); - unsafe { - debug_assert_eq!(Layout::for_value_raw(inner), layout); - - (&raw mut (*inner).strong).write(Cell::new(1)); - (&raw mut (*inner).weak).write(Cell::new(1)); - } - - Ok(inner) - } -} - -impl Rc { - /// Allocates an `RcInner` with sufficient space for an unsized inner value - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner { - // Allocate for the `RcInner` using the given value. - unsafe { - Rc::::allocate_for_layout( - Layout::for_value_raw(ptr), - |layout| alloc.allocate(layout), - |mem| mem.with_metadata_of(ptr as *const RcInner), - ) - } - } + let raw_rc = Self::into_raw_rc(self); + let raw_rc = unsafe { raw_rc.downcast_unchecked() }; - #[cfg(not(no_global_oom_handling))] - fn from_box_in(src: Box) -> Rc { - unsafe { - let value_size = size_of_val(&*src); - let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src)); - - // Copy value as bytes - ptr::copy_nonoverlapping( - (&raw const *src) as *const u8, - (&raw mut (*ptr).value) as *mut u8, - value_size, - ); - - // Free the allocation without dropping its contents - let (bptr, alloc) = Box::into_raw_with_allocator(src); - let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop, alloc.by_ref()); - drop(src); - - Self::from_ptr_in(ptr, alloc) - } - } -} - -impl Rc<[T]> { - /// Allocates an `RcInner<[T]>` with the given length. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> { - unsafe { - Self::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut RcInner<[T]>, - ) - } - } - - /// Copy elements from slice into newly allocated `Rc<[T]>` - /// - /// Unsafe because the caller must either take ownership or bind `T: Copy` - #[cfg(not(no_global_oom_handling))] - unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { - unsafe { - let ptr = Self::allocate_for_slice(v.len()); - ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len()); - Self::from_ptr(ptr) - } - } - - /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size. - /// - /// Behavior is undefined should the size be wrong. 
- #[cfg(not(no_global_oom_handling))] - unsafe fn from_iter_exact(iter: impl Iterator, len: usize) -> Rc<[T]> { - // Panic guard while cloning T elements. - // In the event of a panic, elements that have been written - // into the new RcInner will be dropped, then the memory freed. - struct Guard { - mem: NonNull, - elems: *mut T, - layout: Layout, - n_elems: usize, - } - - impl Drop for Guard { - fn drop(&mut self) { - unsafe { - let slice = from_raw_parts_mut(self.elems, self.n_elems); - ptr::drop_in_place(slice); - - Global.deallocate(self.mem, self.layout); - } - } - } - - unsafe { - let ptr = Self::allocate_for_slice(len); - - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value_raw(ptr); - - // Pointer to first element - let elems = (&raw mut (*ptr).value) as *mut T; - - let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; - - for (i, item) in iter.enumerate() { - ptr::write(elems.add(i), item); - guard.n_elems += 1; - } - - // All clear. Forget the guard so it doesn't free the new RcInner. - mem::forget(guard); - - Self::from_ptr(ptr) - } - } -} - -impl Rc<[T], A> { - /// Allocates an `RcInner<[T]>` with the given length. - #[inline] - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> { - unsafe { - Rc::<[T]>::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| alloc.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut RcInner<[T]>, - ) - } - } -} - -#[cfg(not(no_global_oom_handling))] -/// Specialization trait used for `From<&[T]>`. -trait RcFromSlice { - fn from_slice(slice: &[T]) -> Self; -} - -#[cfg(not(no_global_oom_handling))] -impl RcFromSlice for Rc<[T]> { - #[inline] - default fn from_slice(v: &[T]) -> Self { - unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) } - } -} - -#[cfg(not(no_global_oom_handling))] -impl RcFromSlice for Rc<[T]> { - #[inline] - fn from_slice(v: &[T]) -> Self { - unsafe { Rc::copy_from_slice(v) } + Rc { raw_rc } } } @@ -2238,7 +1860,7 @@ impl Deref for Rc { #[inline(always)] fn deref(&self) -> &T { - &self.inner().value + self.raw_rc.as_ref() } } @@ -2291,12 +1913,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc { /// ``` #[inline] fn drop(&mut self) { - unsafe { - self.inner().dec_strong(); - if self.inner().strong() == 0 { - self.drop_slow(); - } - } + unsafe { self.raw_rc.drop::() }; } } @@ -2318,10 +1935,7 @@ impl Clone for Rc { /// ``` #[inline] fn clone(&self) -> Self { - unsafe { - self.inner().inc_strong(); - Self::from_inner_in(self.ptr, self.alloc.clone()) - } + Self { raw_rc: unsafe { self.raw_rc.clone::() } } } } @@ -2343,15 +1957,7 @@ impl Default for Rc { /// ``` #[inline] fn default() -> Self { - unsafe { - Self::from_inner( - Box::leak(Box::write( - Box::new_uninit(), - RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() }, - )) - .into(), - ) - } + Self { raw_rc: RawRc::default() } } } @@ -2363,9 +1969,7 @@ impl Default for Rc { /// This may or may not share an allocation with other Rcs on the same thread. #[inline] fn default() -> Self { - let rc = Rc::<[u8]>::default(); - // `[u8]` has the same layout as `str`. - unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) } + Self { raw_rc: RawRc::default() } } } @@ -2377,8 +1981,7 @@ impl Default for Rc<[T]> { /// This may or may not share an allocation with other Rcs on the same thread. 
#[inline] fn default() -> Self { - let arr: [T; 0] = []; - Rc::from(arr) + Self { raw_rc: RawRc::default() } } } @@ -2395,51 +1998,6 @@ where } } -#[stable(feature = "rust1", since = "1.0.0")] -trait RcEqIdent { - fn eq(&self, other: &Rc) -> bool; - fn ne(&self, other: &Rc) -> bool; -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl RcEqIdent for Rc { - #[inline] - default fn eq(&self, other: &Rc) -> bool { - **self == **other - } - - #[inline] - default fn ne(&self, other: &Rc) -> bool { - **self != **other - } -} - -// Hack to allow specializing on `Eq` even though `Eq` has a method. -#[rustc_unsafe_specialization_marker] -pub(crate) trait MarkerEq: PartialEq {} - -impl MarkerEq for T {} - -/// We're doing this specialization here, and not as a more general optimization on `&T`, because it -/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to -/// store large values, that are slow to clone, but also heavy to check for equality, causing this -/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to -/// the same value, than two `&T`s. -/// -/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. -#[stable(feature = "rust1", since = "1.0.0")] -impl RcEqIdent for Rc { - #[inline] - fn eq(&self, other: &Rc) -> bool { - Rc::ptr_eq(self, other) || **self == **other - } - - #[inline] - fn ne(&self, other: &Rc) -> bool { - !Rc::ptr_eq(self, other) && **self != **other - } -} - #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for Rc { /// Equality for two `Rc`s. @@ -2462,7 +2020,7 @@ impl PartialEq for Rc { /// ``` #[inline] fn eq(&self, other: &Rc) -> bool { - RcEqIdent::eq(self, other) + RawRc::eq(&self.raw_rc, &other.raw_rc) } /// Inequality for two `Rc`s. @@ -2484,7 +2042,7 @@ impl PartialEq for Rc { /// ``` #[inline] fn ne(&self, other: &Rc) -> bool { - RcEqIdent::ne(self, other) + RawRc::ne(&self.raw_rc, &other.raw_rc) } } @@ -2509,7 +2067,7 @@ impl PartialOrd for Rc { /// ``` #[inline(always)] fn partial_cmp(&self, other: &Rc) -> Option { - (**self).partial_cmp(&**other) + RawRc::partial_cmp(&self.raw_rc, &other.raw_rc) } /// Less-than comparison for two `Rc`s. @@ -2527,7 +2085,7 @@ impl PartialOrd for Rc { /// ``` #[inline(always)] fn lt(&self, other: &Rc) -> bool { - **self < **other + RawRc::lt(&self.raw_rc, &other.raw_rc) } /// 'Less than or equal to' comparison for two `Rc`s. @@ -2545,7 +2103,7 @@ impl PartialOrd for Rc { /// ``` #[inline(always)] fn le(&self, other: &Rc) -> bool { - **self <= **other + RawRc::le(&self.raw_rc, &other.raw_rc) } /// Greater-than comparison for two `Rc`s. @@ -2563,7 +2121,7 @@ impl PartialOrd for Rc { /// ``` #[inline(always)] fn gt(&self, other: &Rc) -> bool { - **self > **other + RawRc::gt(&self.raw_rc, &other.raw_rc) } /// 'Greater than or equal to' comparison for two `Rc`s. 
@@ -2581,7 +2139,7 @@ impl PartialOrd for Rc { /// ``` #[inline(always)] fn ge(&self, other: &Rc) -> bool { - **self >= **other + RawRc::ge(&self.raw_rc, &other.raw_rc) } } @@ -2603,35 +2161,35 @@ impl Ord for Rc { /// ``` #[inline] fn cmp(&self, other: &Rc) -> Ordering { - (**self).cmp(&**other) + RawRc::cmp(&self.raw_rc, &other.raw_rc) } } #[stable(feature = "rust1", since = "1.0.0")] impl Hash for Rc { fn hash(&self, state: &mut H) { - (**self).hash(state); + RawRc::hash(&self.raw_rc, state) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Display for Rc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&**self, f) + as fmt::Display>::fmt(&self.raw_rc, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for Rc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&**self, f) + as fmt::Debug>::fmt(&self.raw_rc, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Pointer for Rc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Pointer::fmt(&(&raw const **self), f) + as fmt::Pointer>::fmt(&self.raw_rc, f) } } @@ -2652,7 +2210,7 @@ impl From for Rc { /// assert_eq!(Rc::from(x), rc); /// ``` fn from(t: T) -> Self { - Rc::new(t) + Self { raw_rc: RawRc::from(t) } } } @@ -2673,7 +2231,7 @@ impl From<[T; N]> for Rc<[T]> { /// ``` #[inline] fn from(v: [T; N]) -> Rc<[T]> { - Rc::<[T; N]>::from(v) + Self { raw_rc: RawRc::from(v) } } } @@ -2692,7 +2250,7 @@ impl From<&[T]> for Rc<[T]> { /// ``` #[inline] fn from(v: &[T]) -> Rc<[T]> { - >::from_slice(v) + Self { raw_rc: RawRc::from(v) } } } @@ -2712,7 +2270,7 @@ impl From<&mut [T]> for Rc<[T]> { /// ``` #[inline] fn from(v: &mut [T]) -> Rc<[T]> { - Rc::from(&*v) + Self { raw_rc: RawRc::from(v) } } } @@ -2730,8 +2288,7 @@ impl From<&str> for Rc { /// ``` #[inline] fn from(v: &str) -> Rc { - let rc = Rc::<[u8]>::from(v.as_bytes()); - unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) } + Self { raw_rc: RawRc::from(v) } } } @@ -2751,7 +2308,7 @@ impl From<&mut str> for Rc { /// ``` #[inline] fn from(v: &mut str) -> Rc { - Rc::from(&*v) + Self { raw_rc: RawRc::from(v) } } } @@ -2770,7 +2327,7 @@ impl From for Rc { /// ``` #[inline] fn from(v: String) -> Rc { - Rc::from(&v[..]) + Self { raw_rc: RawRc::from(v) } } } @@ -2789,7 +2346,7 @@ impl From> for Rc { /// ``` #[inline] fn from(v: Box) -> Rc { - Rc::from_box_in(v) + Self { raw_rc: RawRc::from(v) } } } @@ -2808,18 +2365,7 @@ impl From> for Rc<[T], A> { /// ``` #[inline] fn from(v: Vec) -> Rc<[T], A> { - unsafe { - let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); - - let rc_ptr = Self::allocate_for_slice_in(len, &alloc); - ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len); - - // Create a `Vec` with length 0, to deallocate the buffer - // without dropping its contents or the allocator - let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc); - - Self::from_ptr_in(rc_ptr, alloc) - } + Self { raw_rc: RawRc::from(v) } } } @@ -2864,8 +2410,7 @@ impl From> for Rc<[u8]> { /// ``` #[inline] fn from(rc: Rc) -> Self { - // SAFETY: `str` has the same layout as `[u8]`. 
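// The `From` conversions rewritten above to go through `RawRc` keep their
// public behaviour; a quick exercise of a few of the stable impls that appear
// in these hunks:
use std::rc::Rc;

fn main() {
    let s: Rc<str> = Rc::from("hello");
    let v: Rc<[i32]> = Rc::from(vec![1, 2, 3]);
    let a: Rc<[i32]> = Rc::from([4, 5, 6]);
    let owned: Rc<str> = Rc::from(String::from("world"));

    assert_eq!(&*s, "hello");
    assert_eq!(&*v, &[1, 2, 3]);
    assert_eq!(&*a, &[4, 5, 6]);
    assert_eq!(&*owned, "world");
}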
- unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) } + Self { raw_rc: RawRc::from(Rc::into_raw_rc(rc)) } } } @@ -2874,11 +2419,9 @@ impl TryFrom> for Rc<[T; N], A> { type Error = Rc<[T], A>; fn try_from(boxed_slice: Rc<[T], A>) -> Result { - if boxed_slice.len() == N { - let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice); - Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) }) - } else { - Err(boxed_slice) + match RawRc::try_from(Rc::into_raw_rc(boxed_slice)) { + Ok(raw_rc) => Ok(Self { raw_rc }), + Err(raw_rc) => Err(Rc { raw_rc }), } } } @@ -2925,47 +2468,7 @@ impl FromIterator for Rc<[T]> { /// # assert_eq!(&*evens, &*(0..10).collect::>()); /// ``` fn from_iter>(iter: I) -> Self { - ToRcSlice::to_rc_slice(iter.into_iter()) - } -} - -/// Specialization trait used for collecting into `Rc<[T]>`. -#[cfg(not(no_global_oom_handling))] -trait ToRcSlice: Iterator + Sized { - fn to_rc_slice(self) -> Rc<[T]>; -} - -#[cfg(not(no_global_oom_handling))] -impl> ToRcSlice for I { - default fn to_rc_slice(self) -> Rc<[T]> { - self.collect::>().into() - } -} - -#[cfg(not(no_global_oom_handling))] -impl> ToRcSlice for I { - fn to_rc_slice(self) -> Rc<[T]> { - // This is the case for a `TrustedLen` iterator. - let (low, high) = self.size_hint(); - if let Some(high) = high { - debug_assert_eq!( - low, - high, - "TrustedLen iterator's size hint is not exact: {:?}", - (low, high) - ); - - unsafe { - // SAFETY: We need to ensure that the iterator has an exact length and we have. - Rc::from_iter_exact(self, low) - } - } else { - // TrustedLen contract guarantees that `upper_bound == None` implies an iterator - // length exceeding `usize::MAX`. - // The default implementation would collect into a vec which would panic. - // Thus we panic here immediately without invoking `Vec` code. - panic!("capacity overflow"); - } + Self { raw_rc: RawRc::from_iter(iter) } } } @@ -2993,17 +2496,12 @@ impl> ToRcSlice for I { /// [`upgrade`]: Weak::upgrade #[stable(feature = "rc_weak", since = "1.4.0")] #[rustc_diagnostic_item = "RcWeak"] +#[repr(transparent)] pub struct Weak< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - // This is a `NonNull` to allow optimizing the size of this type in enums, - // but it is not necessarily a valid pointer. - // `Weak::new` sets this to `usize::MAX` so that it doesn’t need - // to allocate space on the heap. That's not a value a real pointer - // will ever have because RcInner has alignment at least 2. - ptr: NonNull>, - alloc: A, + raw_weak: RawWeak, } #[stable(feature = "rc_weak", since = "1.4.0")] @@ -3040,7 +2538,7 @@ impl Weak { #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")] #[must_use] pub const fn new() -> Weak { - Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global } + Self { raw_weak: RawWeak::new_dangling_in(Global) } } } @@ -3062,21 +2560,10 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn new_in(alloc: A) -> Weak { - Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc } + Self { raw_weak: RawWeak::new_dangling_in(alloc) } } } -pub(crate) fn is_dangling(ptr: *const T) -> bool { - (ptr.cast::<()>()).addr() == usize::MAX -} - -/// Helper type to allow accessing the reference counts without -/// making any assertions about the data field. -struct WeakInner<'a> { - weak: &'a Cell, - strong: &'a Cell, -} - impl Weak { /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. 
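// Why the `#[repr(transparent)]` attribute added to `Weak` above (and the
// `mem::transmute` inside `ref_from_raw_weak`) is sound: a transparent wrapper
// has exactly the layout and ABI of its single non-zero-sized field, so a
// reference to the field may be reinterpreted as a reference to the wrapper.
// `Inner`/`Outer` are illustrative types, not the ones in this PR.

#[repr(transparent)]
struct Inner(u64);

#[repr(transparent)]
struct Outer {
    inner: Inner,
}

fn outer_from_inner(inner: &Inner) -> &Outer {
    // SAFETY: `Outer` is `repr(transparent)` over `Inner`, so size, alignment
    // and pointee layout are identical.
    unsafe { &*(inner as *const Inner as *const Outer) }
}

fn main() {
    let inner = Inner(5);
    assert_eq!(outer_from_inner(&inner).inner.0, 5);
}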
/// @@ -3123,7 +2610,7 @@ impl Weak { #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Self::from_raw_in(ptr, Global) } + Self { raw_weak: unsafe { RawWeak::from_raw(NonNull::new_unchecked(ptr.cast_mut())) } } } /// Consumes the `Weak` and turns it into a raw pointer. @@ -3156,16 +2643,30 @@ impl Weak { #[must_use = "losing the pointer will leak memory"] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn into_raw(self) -> *const T { - mem::ManuallyDrop::new(self).as_ptr() + self.into_raw_weak().into_raw().as_ptr() } } impl Weak { + #[cfg(not(no_global_oom_handling))] + #[inline] + fn ref_from_raw_weak(raw_weak: &RawWeak) -> &Self { + // SAFETY: This is safe because `Weak` has transparent representation of `RawWeak`. + unsafe { mem::transmute(raw_weak) } + } + + #[inline] + fn into_raw_weak(self) -> RawWeak { + let this = ManuallyDrop::new(self); + + unsafe { ptr::read(&this.raw_weak) } + } + /// Returns a reference to the underlying allocator. #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn allocator(&self) -> &A { - &self.alloc + self.raw_weak.allocator() } /// Returns a raw pointer to the object `T` pointed to by this `Weak`. @@ -3196,18 +2697,7 @@ impl Weak { #[must_use] #[stable(feature = "rc_as_ptr", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { - let ptr: *mut RcInner = NonNull::as_ptr(self.ptr); - - if is_dangling(ptr) { - // If the pointer is dangling, we return the sentinel directly. This cannot be - // a valid payload address, as the payload is at least as aligned as RcInner (usize). - ptr as *const T - } else { - // SAFETY: if is_dangling returns false, then the pointer is dereferenceable. - // The payload may be dropped at this point, and we have to maintain provenance, - // so use raw pointer manipulation. - unsafe { &raw mut (*ptr).value } - } + self.raw_weak.as_ptr().as_ptr() } /// Consumes the `Weak`, returning the wrapped pointer and allocator. @@ -3243,11 +2733,9 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn into_raw_with_allocator(self) -> (*const T, A) { - let this = mem::ManuallyDrop::new(self); - let result = this.as_ptr(); - // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped - let alloc = unsafe { ptr::read(&this.alloc) }; - (result, alloc) + let (ptr, alloc) = self.into_raw_weak().into_raw_parts(); + + (ptr.as_ptr(), alloc) } /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. @@ -3295,22 +2783,11 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - // See Weak::as_ptr for context on how the input pointer is derived. - - let ptr = if is_dangling(ptr) { - // This is a dangling Weak. - ptr as *mut RcInner - } else { - // Otherwise, we're guaranteed the pointer came from a nondangling Weak. - // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. - let offset = unsafe { data_offset(ptr) }; - // Thus, we reverse the offset to get the whole RcInner. - // SAFETY: the pointer originated from a Weak, so this offset is safe. - unsafe { ptr.byte_sub(offset) as *mut RcInner } - }; - - // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. 
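// Behaviour the raw-pointer round trip rewritten above has to preserve,
// exercised through the stable `Weak` surface: `into_raw` leaks the weak
// reference (counts untouched) and `from_raw` reclaims exactly that reference.
use std::rc::{Rc, Weak};

fn main() {
    let strong = Rc::new(String::from("hello"));
    let weak = Rc::downgrade(&strong);

    let raw: *const String = weak.into_raw();
    // The weak count is not decremented while the pointer is "leaked".
    assert_eq!(Rc::weak_count(&strong), 1);

    // SAFETY: `raw` came from `Weak::into_raw` and is reclaimed exactly once.
    let weak: Weak<String> = unsafe { Weak::from_raw(raw) };
    assert_eq!(weak.upgrade().as_deref().map(String::as_str), Some("hello"));
}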
- Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc } + Self { + raw_weak: unsafe { + RawWeak::from_raw_parts(NonNull::new_unchecked(ptr.cast_mut()), alloc) + }, + } } /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying @@ -3343,16 +2820,7 @@ impl Weak { where A: Clone, { - let inner = self.inner()?; - - if inner.strong() == 0 { - None - } else { - unsafe { - inner.inc_strong(); - Some(Rc::from_inner_in(self.ptr, self.alloc.clone())) - } - } + unsafe { self.raw_weak.upgrade::() }.map(|raw_rc| Rc { raw_rc }) } /// Gets the number of strong (`Rc`) pointers pointing to this allocation. @@ -3361,7 +2829,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn strong_count(&self) -> usize { - if let Some(inner) = self.inner() { inner.strong() } else { 0 } + self.raw_weak.strong_count().map_or(0, |count| unsafe { *count.get() }) } /// Gets the number of `Weak` pointers pointing to this allocation. @@ -3370,32 +2838,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - if let Some(inner) = self.inner() { - if inner.strong() > 0 { - inner.weak() - 1 // subtract the implicit weak ptr - } else { - 0 - } - } else { - 0 - } - } - - /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`, - /// (i.e., when this `Weak` was created by `Weak::new`). - #[inline] - fn inner(&self) -> Option> { - if is_dangling(self.ptr.as_ptr()) { - None - } else { - // We are careful to *not* create a reference covering the "data" field, as - // the field may be mutated concurrently (for example, if the last `Rc` - // is dropped, the data field will be dropped in-place). - Some(unsafe { - let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } - }) - } + self.raw_weak.weak_count().map_or(0, |count| unsafe { *count.get() } - 1) } /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if @@ -3441,7 +2884,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) + RawWeak::ptr_eq(&self.raw_weak, &other.raw_weak) } } @@ -3472,16 +2915,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak { /// assert!(other_weak_foo.upgrade().is_none()); /// ``` fn drop(&mut self) { - let inner = if let Some(inner) = self.inner() { inner } else { return }; - - inner.dec_weak(); - // the weak count starts at 1, and will only go to zero if all - // the strong pointers have disappeared. 
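// The documented semantics the rewritten `upgrade`/`strong_count`/`weak_count`
// above must keep: once the last strong pointer is gone, upgrading fails and
// both counts observed through a `Weak` report zero.
use std::rc::Rc;

fn main() {
    let strong = Rc::new(1u8);
    let weak = Rc::downgrade(&strong);
    assert_eq!((weak.strong_count(), weak.weak_count()), (1, 1));

    let strong2 = Rc::clone(&strong);
    assert_eq!(weak.strong_count(), 2);
    assert_eq!(weak.upgrade().as_deref(), Some(&1));

    drop(strong);
    drop(strong2);
    assert!(weak.upgrade().is_none());
    assert_eq!((weak.strong_count(), weak.weak_count()), (0, 0));
}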
- if inner.weak() == 0 { - unsafe { - self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); - } - } + unsafe { self.raw_weak.drop::() }; } } @@ -3500,10 +2934,7 @@ impl Clone for Weak { /// ``` #[inline] fn clone(&self) -> Weak { - if let Some(inner) = self.inner() { - inner.inc_weak() - } - Weak { ptr: self.ptr, alloc: self.alloc.clone() } + Self { raw_weak: unsafe { self.raw_weak.clone::() } } } } @@ -3513,7 +2944,7 @@ impl UseCloned for Weak {} #[stable(feature = "rc_weak", since = "1.4.0")] impl fmt::Debug for Weak { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "(Weak)") + as fmt::Debug>::fmt(&self.raw_weak, f) } } @@ -3533,154 +2964,27 @@ impl Default for Weak { /// assert!(empty.upgrade().is_none()); /// ``` fn default() -> Weak { - Weak::new() - } -} - -// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count -// is not decremented, meaning the ref-count can overflow, and then you can -// free the allocation while outstanding Rcs (or Weaks) exist, which would be -// unsound. We abort because this is such a degenerate scenario that we don't -// care about what happens -- no real program should ever experience this. -// -// This should have negligible overhead since you don't actually need to -// clone these much in Rust thanks to ownership and move-semantics. - -#[doc(hidden)] -trait RcInnerPtr { - fn weak_ref(&self) -> &Cell; - fn strong_ref(&self) -> &Cell; - - #[inline] - fn strong(&self) -> usize { - self.strong_ref().get() - } - - #[inline] - fn inc_strong(&self) { - let strong = self.strong(); - - // We insert an `assume` here to hint LLVM at an otherwise - // missed optimization. - // SAFETY: The reference count will never be zero when this is - // called. - unsafe { - hint::assert_unchecked(strong != 0); - } - - let strong = strong.wrapping_add(1); - self.strong_ref().set(strong); - - // We want to abort on overflow instead of dropping the value. - // Checking for overflow after the store instead of before - // allows for slightly better code generation. - if core::intrinsics::unlikely(strong == 0) { - abort(); - } - } - - #[inline] - fn dec_strong(&self) { - self.strong_ref().set(self.strong() - 1); - } - - #[inline] - fn weak(&self) -> usize { - self.weak_ref().get() - } - - #[inline] - fn inc_weak(&self) { - let weak = self.weak(); - - // We insert an `assume` here to hint LLVM at an otherwise - // missed optimization. - // SAFETY: The reference count will never be zero when this is - // called. - unsafe { - hint::assert_unchecked(weak != 0); - } - - let weak = weak.wrapping_add(1); - self.weak_ref().set(weak); - - // We want to abort on overflow instead of dropping the value. - // Checking for overflow after the store instead of before - // allows for slightly better code generation. 
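// A standalone sketch of the overflow policy implemented by the removed
// `RcInnerPtr::inc_strong`/`inc_weak` helpers: an increment never wraps
// silently; the process aborts instead. `inc_ref_count` is an illustrative
// free function, not a std API, and it omits the `assert_unchecked` hint.
use std::cell::Cell;
use std::process::abort;

fn inc_ref_count(count: &Cell<usize>) {
    // A live counter is never zero: the caller holds at least one reference.
    let old = count.get();
    debug_assert_ne!(old, 0);

    // Store first, check afterwards; the removed code ordered it this way to
    // keep the common non-overflowing path cheap.
    let new = old.wrapping_add(1);
    count.set(new);
    if new == 0 {
        abort();
    }
}

fn main() {
    let count = Cell::new(1usize);
    inc_ref_count(&count);
    assert_eq!(count.get(), 2);
}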
- if core::intrinsics::unlikely(weak == 0) { - abort(); - } - } - - #[inline] - fn dec_weak(&self) { - self.weak_ref().set(self.weak() - 1); - } -} - -impl RcInnerPtr for RcInner { - #[inline(always)] - fn weak_ref(&self) -> &Cell { - &self.weak - } - - #[inline(always)] - fn strong_ref(&self) -> &Cell { - &self.strong - } -} - -impl<'a> RcInnerPtr for WeakInner<'a> { - #[inline(always)] - fn weak_ref(&self) -> &Cell { - self.weak - } - - #[inline(always)] - fn strong_ref(&self) -> &Cell { - self.strong + Self { raw_weak: RawWeak::default() } } } #[stable(feature = "rust1", since = "1.0.0")] impl borrow::Borrow for Rc { fn borrow(&self) -> &T { - &**self + self.raw_rc.as_ref() } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] impl AsRef for Rc { fn as_ref(&self) -> &T { - &**self + self.raw_rc.as_ref() } } #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Rc {} -/// Gets the offset within an `RcInner` for the payload behind a pointer. -/// -/// # Safety -/// -/// The pointer must point to (and have valid metadata for) a previously -/// valid instance of T, but the T is allowed to be dropped. -unsafe fn data_offset(ptr: *const T) -> usize { - // Align the unsized value to the end of the RcInner. - // Because RcInner is repr(C), it will always be the last field in memory. - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of align_of_val_raw; this is an implementation - // detail of the language that must not be relied upon outside of std. - unsafe { data_offset_align(align_of_val_raw(ptr)) } -} - -#[inline] -fn data_offset_align(align: usize) -> usize { - let layout = Layout::new::>(); - layout.size() + layout.padding_needed_for(align) -} - /// A uniquely owned [`Rc`]. /// /// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong @@ -3718,17 +3022,12 @@ fn data_offset_align(align: usize) -> usize { /// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data, /// including fallible or async constructors. #[unstable(feature = "unique_rc_arc", issue = "112566")] +#[repr(transparent)] pub struct UniqueRc< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - ptr: NonNull>, - // Define the ownership of `RcInner` for drop-check - _marker: PhantomData>, - // Invariance is necessary for soundness: once other `Weak` - // references exist, we already have a form of shared mutability! 
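// The deleted `data_offset`/`data_offset_align` computed where the payload
// started after the old `RcInner` header. The same arithmetic with stable
// `Layout` APIs; `Header` is an illustrative two-word stand-in, not the real
// `RcInner`.
use std::alloc::Layout;

#[repr(C)]
struct Header {
    strong: usize,
    weak: usize,
}

fn payload_offset(value_layout: Layout) -> usize {
    let (_, offset) = Layout::new::<Header>().extend(value_layout).unwrap();
    offset
}

fn main() {
    // The header size is already a multiple of a `u64`'s alignment, so the
    // payload starts immediately after the header.
    assert_eq!(payload_offset(Layout::new::<u64>()), Layout::new::<Header>().size());
    // An over-aligned payload forces padding between the header and the value.
    assert_eq!(payload_offset(Layout::from_size_align(4, 64).unwrap()), 64);
}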
- _marker2: PhantomData<*mut T>, - alloc: A, + raw_unique_rc: RawUniqueRc, } // Not necessary for correctness since `UniqueRc` contains `NonNull`, @@ -3756,49 +3055,49 @@ impl, U: ?Sized> DispatchFromDyn> for UniqueRc #[unstable(feature = "unique_rc_arc", issue = "112566")] impl fmt::Display for UniqueRc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&**self, f) + as fmt::Display>::fmt(&self.raw_unique_rc, f) } } #[unstable(feature = "unique_rc_arc", issue = "112566")] impl fmt::Debug for UniqueRc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&**self, f) + as fmt::Debug>::fmt(&self.raw_unique_rc, f) } } #[unstable(feature = "unique_rc_arc", issue = "112566")] impl fmt::Pointer for UniqueRc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Pointer::fmt(&(&raw const **self), f) + as fmt::Pointer>::fmt(&self.raw_unique_rc, f) } } #[unstable(feature = "unique_rc_arc", issue = "112566")] impl borrow::Borrow for UniqueRc { fn borrow(&self) -> &T { - &**self + self.raw_unique_rc.as_ref() } } #[unstable(feature = "unique_rc_arc", issue = "112566")] impl borrow::BorrowMut for UniqueRc { fn borrow_mut(&mut self) -> &mut T { - &mut **self + self.raw_unique_rc.as_mut() } } #[unstable(feature = "unique_rc_arc", issue = "112566")] impl AsRef for UniqueRc { fn as_ref(&self) -> &T { - &**self + self.raw_unique_rc.as_ref() } } #[unstable(feature = "unique_rc_arc", issue = "112566")] impl AsMut for UniqueRc { fn as_mut(&mut self) -> &mut T { - &mut **self + self.raw_unique_rc.as_mut() } } @@ -3823,7 +3122,7 @@ impl PartialEq for UniqueRc { /// ``` #[inline] fn eq(&self, other: &Self) -> bool { - PartialEq::eq(&**self, &**other) + RawUniqueRc::eq(&self.raw_unique_rc, &other.raw_unique_rc) } /// Inequality for two `UniqueRc`s. @@ -3842,7 +3141,7 @@ impl PartialEq for UniqueRc { /// ``` #[inline] fn ne(&self, other: &Self) -> bool { - PartialEq::ne(&**self, &**other) + RawUniqueRc::ne(&self.raw_unique_rc, &other.raw_unique_rc) } } @@ -3865,7 +3164,7 @@ impl PartialOrd for UniqueRc { /// ``` #[inline(always)] fn partial_cmp(&self, other: &UniqueRc) -> Option { - (**self).partial_cmp(&**other) + RawUniqueRc::partial_cmp(&self.raw_unique_rc, &other.raw_unique_rc) } /// Less-than comparison for two `UniqueRc`s. @@ -3884,7 +3183,7 @@ impl PartialOrd for UniqueRc { /// ``` #[inline(always)] fn lt(&self, other: &UniqueRc) -> bool { - **self < **other + RawUniqueRc::lt(&self.raw_unique_rc, &other.raw_unique_rc) } /// 'Less than or equal to' comparison for two `UniqueRc`s. @@ -3903,7 +3202,7 @@ impl PartialOrd for UniqueRc { /// ``` #[inline(always)] fn le(&self, other: &UniqueRc) -> bool { - **self <= **other + RawUniqueRc::le(&self.raw_unique_rc, &other.raw_unique_rc) } /// Greater-than comparison for two `UniqueRc`s. @@ -3922,7 +3221,7 @@ impl PartialOrd for UniqueRc { /// ``` #[inline(always)] fn gt(&self, other: &UniqueRc) -> bool { - **self > **other + RawUniqueRc::gt(&self.raw_unique_rc, &other.raw_unique_rc) } /// 'Greater than or equal to' comparison for two `UniqueRc`s. 
@@ -3941,7 +3240,7 @@ impl PartialOrd for UniqueRc { /// ``` #[inline(always)] fn ge(&self, other: &UniqueRc) -> bool { - **self >= **other + RawUniqueRc::ge(&self.raw_unique_rc, &other.raw_unique_rc) } } @@ -3964,7 +3263,7 @@ impl Ord for UniqueRc { /// ``` #[inline] fn cmp(&self, other: &UniqueRc) -> Ordering { - (**self).cmp(&**other) + RawUniqueRc::cmp(&self.raw_unique_rc, &other.raw_unique_rc) } } @@ -3974,7 +3273,7 @@ impl Eq for UniqueRc {} #[unstable(feature = "unique_rc_arc", issue = "112566")] impl Hash for UniqueRc { fn hash(&self, state: &mut H) { - (**self).hash(state); + RawUniqueRc::hash(&self.raw_unique_rc, state); } } @@ -3989,7 +3288,7 @@ impl UniqueRc { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn new(value: T) -> Self { - Self::new_in(value, Global) + Self { raw_unique_rc: RawUniqueRc::new(value) } } } @@ -4003,17 +3302,7 @@ impl UniqueRc { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn new_in(value: T, alloc: A) -> Self { - let (ptr, alloc) = Box::into_unique(Box::new_in( - RcInner { - strong: Cell::new(0), - // keep one weak reference so if all the weak pointers that are created are dropped - // the UniqueRc still stays valid. - weak: Cell::new(1), - value, - }, - alloc, - )); - Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc } + Self { raw_unique_rc: RawUniqueRc::new_in(value, alloc) } } } @@ -4027,19 +3316,10 @@ impl UniqueRc { /// references. #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn into_rc(this: Self) -> Rc { - let mut this = ManuallyDrop::new(this); - - // Move the allocator out. - // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in - // a `ManuallyDrop`. - let alloc: A = unsafe { ptr::read(&this.alloc) }; + let this = ManuallyDrop::new(this); + let raw_rc = unsafe { ptr::read(&this.raw_unique_rc).into_rc::() }; - // SAFETY: This pointer was allocated at creation time so we know it is valid. - unsafe { - // Convert our weak reference into a strong reference - this.ptr.as_mut().strong.set(1); - Rc::from_inner_in(this.ptr, alloc) - } + Rc { raw_rc } } } @@ -4050,12 +3330,9 @@ impl UniqueRc { /// to a [`Rc`] using [`UniqueRc::into_rc`]. #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn downgrade(this: &Self) -> Weak { - // SAFETY: This pointer was allocated at creation time and we guarantee that we only have - // one strong reference before converting to a regular Rc. - unsafe { - this.ptr.as_ref().inc_weak(); - } - Weak { ptr: this.ptr, alloc: this.alloc.clone() } + let raw_weak = unsafe { this.raw_unique_rc.downgrade::() }; + + Weak { raw_weak } } } @@ -4064,99 +3341,20 @@ impl Deref for UniqueRc { type Target = T; fn deref(&self) -> &T { - // SAFETY: This pointer was allocated at creation time so we know it is valid. - unsafe { &self.ptr.as_ref().value } + self.raw_unique_rc.as_ref() } } #[unstable(feature = "unique_rc_arc", issue = "112566")] impl DerefMut for UniqueRc { fn deref_mut(&mut self) -> &mut T { - // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we - // have unique ownership and therefore it's safe to make a mutable reference because - // `UniqueRc` owns the only strong reference to itself. 
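// A nightly-only usage sketch of the `UniqueRc` API whose constructors and
// conversions are rewritten above (gated on the unstable `unique_rc_arc`
// feature); the self-referential `Node` type is illustrative.
#![feature(unique_rc_arc)]
use std::rc::{Rc, UniqueRc, Weak};

struct Node {
    parent: Weak<Node>,
    value: u32,
}

fn main() {
    let mut node = UniqueRc::new(Node { parent: Weak::new(), value: 7 });
    // Weak pointers can be handed out while ownership is still unique...
    node.parent = UniqueRc::downgrade(&node);
    // ...and become upgradeable once the `UniqueRc` is converted into an `Rc`.
    let node: Rc<Node> = UniqueRc::into_rc(node);
    assert_eq!(node.parent.upgrade().unwrap().value, 7);
}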
- unsafe { &mut (*self.ptr.as_ptr()).value } + self.raw_unique_rc.as_mut() } } #[unstable(feature = "unique_rc_arc", issue = "112566")] unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc { fn drop(&mut self) { - unsafe { - // destroy the contained object - drop_in_place(DerefMut::deref_mut(self)); - - // remove the implicit "strong weak" pointer now that we've destroyed the contents. - self.ptr.as_ref().dec_weak(); - - if self.ptr.as_ref().weak() == 0 { - self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); - } - } - } -} - -/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,** -/// but will deallocate it (without dropping the value) when dropped. -/// -/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic. -/// It is nearly a duplicate of `UniqueRc, A>` except that it allows `T: !Sized`, -/// which `MaybeUninit` does not. -#[cfg(not(no_global_oom_handling))] -struct UniqueRcUninit { - ptr: NonNull>, - layout_for_value: Layout, - alloc: Option, -} - -#[cfg(not(no_global_oom_handling))] -impl UniqueRcUninit { - /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it. - fn new(for_value: &T, alloc: A) -> UniqueRcUninit { - let layout = Layout::for_value(for_value); - let ptr = unsafe { - Rc::allocate_for_layout( - layout, - |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner), - |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner), - ) - }; - Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) } - } - - /// Returns the pointer to be written into to initialize the [`Rc`]. - fn data_ptr(&mut self) -> *mut T { - let offset = data_offset_align(self.layout_for_value.align()); - unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T } - } - - /// Upgrade this into a normal [`Rc`]. - /// - /// # Safety - /// - /// The data must have been initialized (by writing to [`Self::data_ptr()`]). - unsafe fn into_rc(self) -> Rc { - let mut this = ManuallyDrop::new(self); - let ptr = this.ptr; - let alloc = this.alloc.take().unwrap(); - - // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible - // for having initialized the data. - unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) } - } -} - -#[cfg(not(no_global_oom_handling))] -impl Drop for UniqueRcUninit { - fn drop(&mut self) { - // SAFETY: - // * new() produced a pointer safe to deallocate. - // * We own the pointer unless into_rc() was called, which forgets us. - unsafe { - self.alloc.take().unwrap().deallocate( - self.ptr.cast(), - rc_inner_layout_for_value_layout(self.layout_for_value), - ); - } + unsafe { self.raw_unique_rc.drop::() }; } } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 5927d03646928..3376c6791cb19 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -36,12 +36,15 @@ use crate::alloc::handle_alloc_error; use crate::alloc::{AllocError, Allocator, Global, Layout}; use crate::borrow::{Cow, ToOwned}; use crate::boxed::Box; -use crate::rc::is_dangling; #[cfg(not(no_global_oom_handling))] use crate::string::String; #[cfg(not(no_global_oom_handling))] use crate::vec::Vec; +fn is_dangling(ptr: *const T) -> bool { + (ptr.cast::<()>()).addr() == usize::MAX +} + /// A soft limit on the amount of references that may be made to an `Arc`. 
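// The deleted `UniqueRcUninit` helper was "an allocation that is not yet
// initialized, cleaned up if initialization never finishes", used by
// `Rc::make_mut`. The closest public, stable analogue for sized values is
// `Rc::new_uninit`; shown here only to illustrate the pattern, not the
// internals this PR replaces it with.
use std::rc::Rc;

fn main() {
    let mut uninit = Rc::<u32>::new_uninit();
    // The allocation is still unique, so we can write through `get_mut`.
    Rc::get_mut(&mut uninit).unwrap().write(42);
    // SAFETY: the value was initialized just above.
    let rc: Rc<u32> = unsafe { uninit.assume_init() };
    assert_eq!(*rc, 42);
}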
/// /// Going above this limit will abort your program (although not @@ -3317,6 +3320,12 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak { } } +// Hack to allow specializing on `Eq` even though `Eq` has a method. +#[rustc_unsafe_specialization_marker] +trait MarkerEq: PartialEq {} + +impl MarkerEq for T {} + #[stable(feature = "rust1", since = "1.0.0")] trait ArcEqIdent { fn eq(&self, other: &Arc) -> bool; @@ -3343,7 +3352,7 @@ impl ArcEqIdent for Arc { /// /// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. #[stable(feature = "rust1", since = "1.0.0")] -impl ArcEqIdent for Arc { +impl ArcEqIdent for Arc { #[inline] fn eq(&self, other: &Arc) -> bool { Arc::ptr_eq(self, other) || **self == **other diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index c8f4a32cb17e2..2d10ca532690b 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -182,14 +182,36 @@ def display_hint(): return "array" +_REF_COUNTS_PTR_TYPE = None + + +def _get_ref_counts_ptr_type(): + global _REF_COUNTS_PTR_TYPE + + if _REF_COUNTS_PTR_TYPE is None: + _REF_COUNTS_PTR_TYPE = gdb.lookup_type("alloc::raw_rc::RefCounts").pointer() + + return _REF_COUNTS_PTR_TYPE + + class StdRcProvider(printer_base): def __init__(self, valobj, is_atomic=False): self._valobj = valobj self._is_atomic = is_atomic - self._ptr = unwrap_unique_or_non_null(valobj["ptr"]) - self._value = self._ptr["data" if is_atomic else "value"] - self._strong = self._ptr["strong"]["v" if is_atomic else "value"]["value"] - self._weak = self._ptr["weak"]["v" if is_atomic else "value"]["value"] - 1 + + if is_atomic: + self._ptr = unwrap_unique_or_non_null(valobj["ptr"]) + self._value = self._ptr["data"] + self._strong = self._ptr["strong"]["v"]["value"] + self._weak = self._ptr["weak"]["v"]["value"] - 1 + else: + self._ptr = unwrap_unique_or_non_null(valobj["raw_rc"]["weak"]["ptr"]) + self._value = self._ptr.dereference() + + ref_counts_ptr = self._ptr.reinterpret_cast(_get_ref_counts_ptr_type()) - 1 + + self._strong = ref_counts_ptr["strong"]["value"] + self._weak = ref_counts_ptr["weak"]["value"] - 1 def to_string(self): if self._is_atomic: diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index 3eb964d2fbab9..66bac43744c64 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -4,6 +4,7 @@ from typing import List, TYPE_CHECKING from lldb import ( + SBAddress, SBData, SBError, eBasicTypeLong, @@ -1157,6 +1158,18 @@ def StdRcSummaryProvider(valobj: SBValue, _dict: LLDBOpaque) -> str: return "strong={}, weak={}".format(strong, weak) +_REF_COUNTS_TYPE = None + + +def _get_or_init_ref_counts_type(target): + global _REF_COUNTS_TYPE + + if _REF_COUNTS_TYPE is None: + _REF_COUNTS_TYPE = target.FindFirstType("alloc::raw_rc::RefCounts") + + return _REF_COUNTS_TYPE + + class StdRcSyntheticProvider: """Pretty-printer for alloc::rc::Rc and alloc::sync::Arc @@ -1176,20 +1189,50 @@ class StdRcSyntheticProvider: def __init__(self, valobj: SBValue, _dict: LLDBOpaque, is_atomic: bool = False): self.valobj = valobj - self.ptr = unwrap_unique_or_non_null(self.valobj.GetChildMemberWithName("ptr")) + if is_atomic: + self.ptr = unwrap_unique_or_non_null( + self.valobj.GetChildMemberWithName("ptr") + ) - self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value") + self.value = self.ptr.GetChildMemberWithName("data") - self.strong = ( - self.ptr.GetChildMemberWithName("strong") - .GetChildAtIndex(0) - .GetChildMemberWithName("value") - ) - 
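// What the updated gdb provider above computes, mirrored in Rust: with the
// raw_rc layout the counters sit immediately before the value, so stepping
// back by one `RefCounts` from the value pointer lands on them. The
// `RefCounts`/`Allocation` types here are illustrative stand-ins with an
// assumed field order, not the real std internals.
#[repr(C)]
struct RefCounts {
    weak: usize,
    strong: usize,
}

#[repr(C)]
struct Allocation<T> {
    ref_counts: RefCounts,
    value: T,
}

fn main() {
    let allocation = Allocation { ref_counts: RefCounts { weak: 2, strong: 11 }, value: 111u64 };
    let base: *const Allocation<u64> = &allocation;

    // The pointer a reference-counting pointer would store: the value itself.
    let value_ptr = unsafe { &raw const (*base).value };

    // SAFETY: in this layout `RefCounts` immediately precedes `value`, and
    // `value_ptr` is derived from a pointer to the whole allocation.
    let counts = unsafe { &*value_ptr.cast::<RefCounts>().sub(1) };
    assert_eq!((counts.strong, counts.weak), (11, 2));
}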
self.weak = ( - self.ptr.GetChildMemberWithName("weak") - .GetChildAtIndex(0) - .GetChildMemberWithName("value") - ) + self.strong = ( + self.ptr.GetChildMemberWithName("strong") + .GetChildAtIndex(0) + .GetChildMemberWithName("value") + ) + self.weak = ( + self.ptr.GetChildMemberWithName("weak") + .GetChildAtIndex(0) + .GetChildMemberWithName("value") + ) + else: + ptr = ( + self.valobj.GetChildMemberWithName("raw_rc") + .GetChildMemberWithName("weak") + .GetChildMemberWithName("ptr") + .GetChildMemberWithName("pointer") + ) + + self.value = ptr.deref.Clone("value") + + target = valobj.GetTarget() + ref_counts_type = _get_or_init_ref_counts_type(target) + ref_counts_address = ptr.GetValueAsUnsigned() - ref_counts_type.size + + ref_counts_value = target.CreateValueFromAddress( + "ref_counts", + SBAddress(ref_counts_address, target), + ref_counts_type, + ) + + self.strong = ref_counts_value.GetChildMemberWithName( + "strong" + ).GetChildMemberWithName("value") + + self.weak = ref_counts_value.GetChildMemberWithName( + "weak" + ).GetChildMemberWithName("value") self.value_builder = ValueBuilder(valobj) diff --git a/src/etc/natvis/liballoc.natvis b/src/etc/natvis/liballoc.natvis index 1528a8b1226ca..9e86fd8bd7c85 100644 --- a/src/etc/natvis/liballoc.natvis +++ b/src/etc/natvis/liballoc.natvis @@ -73,59 +73,58 @@ --> - {ptr.pointer->value} + {*raw_rc.weak.ptr.pointer} - ptr.pointer->value - ptr.pointer->strong - ptr.pointer->weak + *raw_rc.weak.ptr.pointer + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer - 1)->weak - ptr.pointer.pointer->strong - ptr.pointer.pointer->weak + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.pointer - 1)->weak - {{ len={ptr.pointer.length} }} + {{ len={raw_rc.weak.ptr.pointer.length} }} - ptr.pointer.length - ptr.pointer.data_ptr->strong - ptr.pointer.data_ptr->weak + raw_rc.weak.ptr.pointer.length + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.data_ptr - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.data_ptr - 1)->weak - ptr.pointer.length - - ($T1*)(((size_t*)ptr.pointer.data_ptr) + 2) + raw_rc.weak.ptr.pointer.length + ($T1*)raw_rc.weak.ptr.pointer.data_ptr - {ptr.pointer->value} + {*raw_weak.ptr.pointer} - ptr.pointer->value - ptr.pointer->strong - ptr.pointer->weak + *raw_weak.ptr.pointer + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer - 1)->weak - ptr.pointer.pointer->strong - ptr.pointer.pointer->weak + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.pointer - 1)->weak - {{ len={ptr.pointer.length} }} + {{ len={raw_weak.ptr.pointer.length} }} - ptr.pointer.length - ptr.pointer.data_ptr->strong - ptr.pointer.data_ptr->weak + raw_weak.ptr.pointer.length + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.data_ptr - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.data_ptr - 1)->weak - ptr.pointer.length - ($T1*)(((size_t*)ptr.pointer.data_ptr) + 2) + raw_weak.ptr.pointer.length + ($T1*)raw_weak.ptr.pointer.data_ptr diff --git a/src/tools/miri/tests/fail/memleak_rc.stderr b/src/tools/miri/tests/fail/memleak_rc.stderr index df12eeed6ac64..f5da354de937d 100644 --- a/src/tools/miri/tests/fail/memleak_rc.stderr +++ b/src/tools/miri/tests/fail/memleak_rc.stderr @@ -1,10 +1,15 @@ error: memory leaked: ALLOC (Rust heap, SIZE, 
ALIGN), allocated here: - --> RUSTLIB/alloc/src/rc.rs:LL:CC + --> RUSTLIB/alloc/src/raw_rc/rc_alloc.rs:LL:CC | -LL | Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value })) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | let allocation_result = alloc.allocate(rc_layout.get()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: BACKTRACE: + = note: inside `alloc::raw_rc::rc_alloc::allocate_uninit_raw_bytes::` at RUSTLIB/alloc/src/raw_rc/rc_alloc.rs:LL:CC + = note: inside `alloc::raw_rc::rc_alloc::allocate_uninit_in::` at RUSTLIB/alloc/src/raw_rc/rc_alloc.rs:LL:CC + = note: inside `alloc::raw_rc::rc_alloc::allocate_uninit::` at RUSTLIB/alloc/src/raw_rc/rc_alloc.rs:LL:CC + = note: inside `alloc::raw_rc::raw_weak::RawWeak::>, std::alloc::Global>::new_uninit::` at RUSTLIB/alloc/src/raw_rc/raw_weak.rs:LL:CC + = note: inside `alloc::raw_rc::raw_rc::RawRc::>, std::alloc::Global>::new` at RUSTLIB/alloc/src/raw_rc/raw_rc.rs:LL:CC = note: inside `std::rc::Rc::>>::new` at RUSTLIB/alloc/src/rc.rs:LL:CC note: inside `main` --> tests/fail/memleak_rc.rs:LL:CC diff --git a/tests/codegen-llvm/lib-optimizations/rc-arc-optimizations.rs b/tests/codegen-llvm/lib-optimizations/rc-arc-optimizations.rs new file mode 100644 index 0000000000000..544d27a44d5e0 --- /dev/null +++ b/tests/codegen-llvm/lib-optimizations/rc-arc-optimizations.rs @@ -0,0 +1,27 @@ +//@ compile-flags: -O -Z merge-functions=disabled + +#![crate_type = "lib"] + +use std::rc::{self, Rc}; + +// Ensures that we can create array of `Weak`s using `memset`. + +#[no_mangle] +pub fn array_of_rc_weak() -> [rc::Weak; 100] { + // CHECK-LABEL: @array_of_rc_weak( + // CHECK-NEXT: start: + // CHECK-NEXT: call void @llvm.memset. + // CHECK-NEXT: ret void + [(); 100].map(|()| rc::Weak::new()) +} + +// Ensures that we convert `&Option>` to `Option<&T>` without checking for `None`. 
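// The new codegen test above (together with the `option_rc_as_deref_no_cmp`
// case that follows) targets optimizations the pointer-to-value representation
// is meant to enable. Their user-visible counterparts can be checked from
// stable code; note that the size equality below reflects today's niche
// layout rather than a documented guarantee.
use std::mem::size_of;
use std::rc::{Rc, Weak};

// `Weak::new` is `const` and allocation-free, so every array element is the
// same dangling-pointer bit pattern, which is what lets LLVM lower the
// construction to a single `memset`.
const EMPTY: Weak<u32> = Weak::new();

fn main() {
    let weaks = [EMPTY; 100];
    assert!(weaks.iter().all(|w| w.upgrade().is_none()));

    // `Option<Rc<T>>` occupies the pointer niche, so `as_deref` can be a plain
    // pointer copy with no discriminant check.
    assert_eq!(size_of::<Option<Rc<u32>>>(), size_of::<*const u32>());

    let some: Option<Rc<u32>> = Some(Rc::new(7));
    let none: Option<Rc<u32>> = None;
    assert_eq!(some.as_deref(), Some(&7));
    assert_eq!(none.as_deref(), None);
}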
+ +#[no_mangle] +pub fn option_rc_as_deref_no_cmp(rc: &Option>) -> Option<&u32> { + // CHECK-LABEL: @option_rc_as_deref_no_cmp(ptr + // CHECK-NEXT: start: + // CHECK-NEXT: %[[RC:.+]] = load ptr, ptr %rc + // CHECK-NEXT: ret ptr %[[RC]] + rc.as_deref() +} diff --git a/tests/codegen-llvm/placement-new.rs b/tests/codegen-llvm/placement-new.rs index 7f7f0033bece3..5022af8f52f19 100644 --- a/tests/codegen-llvm/placement-new.rs +++ b/tests/codegen-llvm/placement-new.rs @@ -22,9 +22,11 @@ pub fn box_default_inplace() -> Box<(String, String)> { #[no_mangle] pub fn rc_default_inplace() -> Rc<(String, String)> { // CHECK-NOT: alloca - // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc( + // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc(i[[#BITS:]] // CHECK-NOT: call void @llvm.memcpy - // CHECK: ret ptr [[RC]] + // CHECK: [[DATA:%.*]] = getelementptr inbounds{{( nuw)?}} i8, ptr [[RC]], i[[#BITS]] [[#div(BITS,4)]] + // CHECK-NOT: call void @llvm.memcpy + // CHECK: ret ptr [[DATA]] Rc::default() } diff --git a/tests/debuginfo/rc_arc.rs b/tests/debuginfo/rc_arc.rs index f636c60702cde..6da8338f0ebba 100644 --- a/tests/debuginfo/rc_arc.rs +++ b/tests/debuginfo/rc_arc.rs @@ -27,13 +27,13 @@ // cdb-command:dx rc,d // cdb-check:rc,d : 111 [Type: alloc::rc::Rc] -// cdb-check: [Reference count] : 11 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 11 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx weak_rc,d // cdb-check:weak_rc,d : 111 [Type: alloc::rc::Weak] -// cdb-check: [Reference count] : 11 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 11 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx arc,d // cdb-check:arc,d : 222 [Type: alloc::sync::Arc] @@ -47,19 +47,19 @@ // cdb-command:dx dyn_rc,d // cdb-check:dyn_rc,d [Type: alloc::rc::Rc,alloc::alloc::Global>] -// cdb-check: [Reference count] : 31 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 31 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx dyn_rc_weak,d // cdb-check:dyn_rc_weak,d [Type: alloc::rc::Weak,alloc::alloc::Global>] -// cdb-check: [Reference count] : 31 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 31 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx slice_rc,d // cdb-check:slice_rc,d : { len=3 } [Type: alloc::rc::Rc,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] -// cdb-check: [Reference count] : 41 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 41 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-check: [0] : 1 [Type: u32] // cdb-check: [1] : 2 [Type: u32] // cdb-check: [2] : 3 [Type: u32] @@ -67,8 +67,8 @@ // cdb-command:dx slice_rc_weak,d // cdb-check:slice_rc_weak,d : { len=3 } [Type: alloc::rc::Weak,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] -// cdb-check: [Reference count] : 41 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: 
core::cell::Cell] +// cdb-check: [Reference count] : 41 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-check: [0] : 1 [Type: u32] // cdb-check: [1] : 2 [Type: u32] // cdb-check: [2] : 3 [Type: u32] diff --git a/tests/debuginfo/strings-and-strs.rs b/tests/debuginfo/strings-and-strs.rs index 392cf697e110b..bbb062ee948b4 100644 --- a/tests/debuginfo/strings-and-strs.rs +++ b/tests/debuginfo/strings-and-strs.rs @@ -20,7 +20,7 @@ // gdb-check:$4 = ("Hello", "World") // gdb-command:print str_in_rc -// gdb-check:$5 = alloc::rc::Rc<&str, alloc::alloc::Global> {ptr: core::ptr::non_null::NonNull> {pointer: 0x[...]}, phantom: core::marker::PhantomData>, alloc: alloc::alloc::Global} +// gdb-check:$5 = alloc::rc::Rc<&str, alloc::alloc::Global> {raw_rc: alloc::raw_rc::raw_rc::RawRc<&str, alloc::alloc::Global> {weak: alloc::raw_rc::raw_weak::RawWeak<&str, alloc::alloc::Global> {ptr: core::ptr::non_null::NonNull<&str> {pointer: 0x[...]}, alloc: alloc::alloc::Global}, _phantom_data: core::marker::PhantomData<&str>}} // === LLDB TESTS ================================================================================== // lldb-command:run @@ -39,7 +39,6 @@ // lldb-command:v str_in_rc // lldb-check:(alloc::rc::Rc<&str, alloc::alloc::Global>) str_in_rc = strong=1, weak=0 { value = "Hello" { [0] = 'H' [1] = 'e' [2] = 'l' [3] = 'l' [4] = 'o' } } - #![allow(unused_variables)] pub struct Foo<'a> {