Commit b2a7431

util: Sync Arc with upstream
1 parent 285498a commit b2a7431

File tree

2 files changed (+143, −104 lines)

portable-atomic-util/src/arc.rs

Lines changed: 104 additions & 104 deletions
@@ -4,20 +4,16 @@
 //
 // The code has been adjusted to work with stable Rust (and optionally support some unstable features).
 //
-// Source: https://github.com/rust-lang/rust/blob/1.84.0/library/alloc/src/sync.rs.
+// Source: https://github.com/rust-lang/rust/blob/1.93.0/library/alloc/src/sync.rs
 //
 // Copyright & License of the original code:
-// - https://github.com/rust-lang/rust/blob/1.84.0/COPYRIGHT
-// - https://github.com/rust-lang/rust/blob/1.84.0/LICENSE-APACHE
-// - https://github.com/rust-lang/rust/blob/1.84.0/LICENSE-MIT
+// - https://github.com/rust-lang/rust/blob/1.93.0/COPYRIGHT
+// - https://github.com/rust-lang/rust/blob/1.93.0/LICENSE-APACHE
+// - https://github.com/rust-lang/rust/blob/1.93.0/LICENSE-MIT
 
 #![allow(clippy::must_use_candidate)] // align to alloc::sync::Arc
 #![allow(clippy::undocumented_unsafe_blocks)] // TODO: most of the unsafe codes were inherited from alloc::sync::Arc
 
-// TODO:
-// - https://github.com/rust-lang/rust/pull/132231
-// - https://github.com/rust-lang/rust/pull/131460 / https://github.com/rust-lang/rust/pull/132031
-
 use alloc::{
     alloc::handle_alloc_error,
     borrow::{Cow, ToOwned},
@@ -30,7 +26,9 @@ use core::convert::TryFrom;
 use core::{
     alloc::Layout,
     any::Any,
-    borrow, cmp, fmt,
+    borrow,
+    cmp::Ordering,
+    fmt,
     hash::{Hash, Hasher},
     isize,
     marker::PhantomData,
@@ -207,7 +205,9 @@ impl<T: ?Sized> fmt::Debug for Weak<T> {
 // This is repr(C) to future-proof against possible field-reordering, which
 // would interfere with otherwise safe [into|from]_raw() of transmutable
 // inner types.
-#[repr(C)]
+// Unlike RcInner, repr(align(2)) is not strictly required because atomic types
+// have the same alignment as their size, but we use it for consistency and clarity.
+#[repr(C, align(2))]
 struct ArcInner<T: ?Sized> {
     strong: atomic::AtomicUsize,
 
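The comment added here relies on a documented guarantee: Rust's atomic integer types always have alignment equal to their size, even on targets where the corresponding plain integer type is less strictly aligned, so the leading `strong: AtomicUsize` field already gives `ArcInner` at least 2-byte alignment. A minimal standalone check (not part of the diff):

    use core::mem::{align_of, size_of};
    use core::sync::atomic::AtomicUsize;

    fn main() {
        // Atomic types have alignment equal to their size, so any struct that
        // starts with an AtomicUsize is at least pointer-size aligned.
        assert_eq!(align_of::<AtomicUsize>(), size_of::<AtomicUsize>());
        assert!(align_of::<AtomicUsize>() >= 2);
    }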
@@ -702,6 +702,23 @@ impl<T> Arc<mem::MaybeUninit<T>> {
     }
 }
 
+impl<T: ?Sized + CloneToUninit> Arc<T> {
+    fn clone_from_ref(value: &T) -> Self {
+        // `in_progress` drops the allocation if we panic before finishing initializing it.
+        let mut in_progress: UniqueArcUninit<T> = UniqueArcUninit::new(value);
+
+        // Initialize with clone of value.
+        let initialized_clone = unsafe {
+            // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+            value.clone_to_uninit(in_progress.data_ptr() as *mut u8);
+            // Cast type of pointer, now that it is initialized.
+            in_progress.into_arc()
+        };
+
+        initialized_clone
+    }
+}
+
 #[cfg(not(portable_atomic_no_maybe_uninit))]
 impl<T> Arc<[mem::MaybeUninit<T>]> {
     /// Converts to `Arc<[T]>`.
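The new `clone_from_ref` helper factors out the panic-safe "clone into an uninitialized allocation" sequence so it can be shared (see the `make_mut` hunk below). The crate's `UniqueArcUninit` acts as a drop guard: if `clone_to_uninit` panics, the guard's destructor frees the allocation, and `into_arc` defuses the guard once initialization has finished. The same idea in isolation, using a hypothetical `AllocGuard` rather than the crate's type:

    use std::alloc::{alloc, dealloc, Layout};

    // Hypothetical stand-in for UniqueArcUninit: owns an uninitialized
    // allocation and frees it unless `finish` is called.
    struct AllocGuard {
        ptr: *mut u8,
        layout: Layout,
    }

    impl AllocGuard {
        fn new(layout: Layout) -> Self {
            let ptr = unsafe { alloc(layout) };
            assert!(!ptr.is_null(), "allocation failed");
            AllocGuard { ptr, layout }
        }
        // Defuse the guard once the memory is initialized; the caller now owns it.
        fn finish(self) -> *mut u8 {
            let ptr = self.ptr;
            std::mem::forget(self);
            ptr
        }
    }

    impl Drop for AllocGuard {
        fn drop(&mut self) {
            // Runs only on the panic path: reclaim the allocation instead of leaking it.
            unsafe { dealloc(self.ptr, self.layout) }
        }
    }

    fn main() {
        let guard = AllocGuard::new(Layout::new::<u64>());
        // ... initialization that might panic would go here ...
        let ptr = guard.finish();
        unsafe { dealloc(ptr, Layout::new::<u64>()) }
    }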
@@ -818,6 +835,28 @@ impl<T: ?Sized> Arc<T> {
         }
     }
 
+    /// Consumes the `Arc`, returning the wrapped pointer.
+    ///
+    /// To avoid a memory leak the pointer must be converted back to an `Arc` using
+    /// [`Arc::from_raw`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use portable_atomic_util::Arc;
+    ///
+    /// let x = Arc::new("hello".to_owned());
+    /// let x_ptr = Arc::into_raw(x);
+    /// assert_eq!(unsafe { &*x_ptr }, "hello");
+    /// # // Prevent leaks for Miri.
+    /// # drop(unsafe { Arc::from_raw(x_ptr) });
+    /// ```
+    #[must_use = "losing the pointer will leak memory"]
+    pub fn into_raw(this: Self) -> *const T {
+        let this = ManuallyDrop::new(this);
+        Self::as_ptr(&*this)
+    }
+
     /// Increments the strong reference count on the `Arc<T>` associated with the
     /// provided pointer by one.
     ///
@@ -893,30 +932,6 @@ impl<T: ?Sized> Arc<T> {
         // SAFETY: the caller must uphold the safety contract.
         unsafe { drop(Self::from_raw(ptr)) }
     }
-}
-
-impl<T: ?Sized> Arc<T> {
-    /// Consumes the `Arc`, returning the wrapped pointer.
-    ///
-    /// To avoid a memory leak the pointer must be converted back to an `Arc` using
-    /// [`Arc::from_raw`].
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use portable_atomic_util::Arc;
-    ///
-    /// let x = Arc::new("hello".to_owned());
-    /// let x_ptr = Arc::into_raw(x);
-    /// assert_eq!(unsafe { &*x_ptr }, "hello");
-    /// # // Prevent leaks for Miri.
-    /// # drop(unsafe { Arc::from_raw(x_ptr) });
-    /// ```
-    #[must_use = "losing the pointer will leak memory"]
-    pub fn into_raw(this: Self) -> *const T {
-        let this = ManuallyDrop::new(this);
-        Self::as_ptr(&*this)
-    }
 
     /// Provides a raw pointer to the data.
     ///
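This hunk only moves `into_raw` so it sits next to `from_raw` and `as_ptr`; behavior is unchanged. The difference between the borrowed and consuming accessors, as a small usage sketch against the crate's public API:

    use portable_atomic_util::Arc;

    fn main() {
        // as_ptr borrows: the Arc remains live and is dropped normally.
        let x = Arc::new(5u32);
        let p = Arc::as_ptr(&x);
        assert_eq!(unsafe { *p }, 5);
        drop(x); // `p` must not be dereferenced past this point

        // into_raw consumes: one strong count is transferred to the pointer
        // and is only released by converting back with from_raw.
        let y = Arc::new(6u32);
        let q = Arc::into_raw(y);
        drop(unsafe { Arc::from_raw(q) });
    }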
@@ -936,7 +951,7 @@ impl<T: ?Sized> Arc<T> {
     /// ```
     #[must_use]
     pub fn as_ptr(this: &Self) -> *const T {
-        let ptr: *mut ArcInner<T> = this.ptr.as_ptr();
+        let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);
 
         // SAFETY: This cannot go through Deref::deref or ArcInnerPtr::inner because
         // this is required to retain raw/mut provenance such that e.g. `get_mut` can
@@ -1060,15 +1075,17 @@
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
+        // Drop the weak ref collectively held by all strong references when this
+        // variable goes out of scope. This ensures that the memory is deallocated
+        // even if the destructor of `T` panics.
+        // Take a reference to `self.alloc` instead of cloning because 1. it'll last long
+        // enough, and 2. you should be able to drop `Arc`s with unclonable allocators
+        let _weak = Weak { ptr: self.ptr };
+
         // Destroy the data at this time, even though we must not free the box
         // allocation itself (there might still be weak pointers lying around).
-        unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) }
-
-        // Drop the weak ref collectively held by all strong references
-        // Take a reference to `self.alloc` instead of cloning because 1. it'll
-        // last long enough, and 2. you should be able to drop `Arc`s with
-        // unclonable allocators
-        drop(Weak { ptr: self.ptr });
+        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
+        unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) };
     }
 
     /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to
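The reordering in `drop_slow` is a drop-guard pattern: the `Weak` bound to `_weak` is created first and therefore dropped last, so the implicit weak reference (and with it the allocation) is released even if `T`'s destructor unwinds. The shape of that pattern in isolation, with hypothetical guard and payload types:

    struct CleanupGuard;
    impl Drop for CleanupGuard {
        fn drop(&mut self) {
            // Stands in for dropping `_weak`, which releases the allocation.
            println!("cleanup ran");
        }
    }

    struct Panicky;
    impl Drop for Panicky {
        fn drop(&mut self) {
            panic!("destructor panicked");
        }
    }

    fn drop_slow_like() {
        let _guard = CleanupGuard; // created first, dropped last
        drop(Panicky); // panics, yet _guard's Drop still runs during unwinding
    }

    fn main() {
        // The panic propagates, but "cleanup ran" is printed on the way out.
        assert!(std::panic::catch_unwind(drop_slow_like).is_err());
    }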
@@ -1378,18 +1395,7 @@ impl<T: ?Sized + CloneToUninit> Arc<T> {
         // deallocated.
         if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
-
-            let this_data_ref: &T = this;
-            // `in_progress` drops the allocation if we panic before finishing initializing it.
-            let mut in_progress: UniqueArcUninit<T> = UniqueArcUninit::new(this_data_ref);
-
-            let initialized_clone = unsafe {
-                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
-                this_data_ref.clone_to_uninit(in_progress.data_ptr() as *mut u8);
-                // Cast type of pointer, now that it is initialized.
-                in_progress.into_arc()
-            };
-            *this = initialized_clone;
+            *this = Arc::clone_from_ref(&**this);
         } else if this.inner().weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
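This hunk sits inside what is `make_mut` upstream, and the refactor leaves its behavior unchanged: with other strong references alive it takes the clone path (now via the new `clone_from_ref`), and when unique it reuses the allocation. A usage sketch of that clone-on-write contract:

    use portable_atomic_util::Arc;

    fn main() {
        let mut a = Arc::new(String::from("hello"));
        let b = Arc::clone(&a);

        // Two strong refs: make_mut clones, so `b` keeps the old value.
        Arc::make_mut(&mut a).push_str(" world");
        assert_eq!(*a, "hello world");
        assert_eq!(*b, "hello");

        // `a` is now the sole reference: make_mut mutates in place.
        drop(b);
        Arc::make_mut(&mut a).push('!');
        assert_eq!(*a, "hello world!");
    }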
@@ -1501,7 +1507,7 @@ impl<T: ?Sized> Arc<T> {
     /// ```
     #[inline]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
-        if this.is_unique() {
+        if Self::is_unique(this) {
             // This unsafety is ok because we're guaranteed that the pointer
             // returned is the *only* pointer that will ever be returned to T. Our
             // reference count is guaranteed to be 1 at this point, and we required
@@ -1520,28 +1526,25 @@ impl<T: ?Sized> Arc<T> {
         unsafe { &mut (*this.ptr.as_ptr()).data }
     }
 
-    /// Determine whether this is the unique reference (including weak refs) to
-    /// the underlying data.
-    ///
-    /// Note that this requires locking the weak ref count.
-    fn is_unique(&mut self) -> bool {
+    #[inline]
+    fn is_unique(this: &Self) -> bool {
         // lock the weak pointer count if we appear to be the sole weak pointer
         // holder.
         //
         // The acquire label here ensures a happens-before relationship with any
         // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
         // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded
         // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
-        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
+        if this.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
             // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
             // counter in `drop` -- the only access that happens when any but the last reference
             // is being dropped.
-            let unique = self.inner().strong.load(Acquire) == 1;
+            let unique = this.inner().strong.load(Acquire) == 1;
 
             // The release write here synchronizes with a read in `downgrade`,
             // effectively preventing the above read of `strong` from happening
             // after the write.
-            self.inner().weak.store(1, Release); // release the lock
+            this.inner().weak.store(1, Release); // release the lock
             unique
         } else {
             false
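`is_unique` is what `get_mut` consults: uniqueness means one strong reference and no outstanding weak references, checked while briefly locking the weak count with the `usize::MAX` sentinel. The observable behavior:

    use portable_atomic_util::Arc;

    fn main() {
        let mut a = Arc::new(10);
        // Sole strong ref, no weak refs: unique, so get_mut succeeds.
        *Arc::get_mut(&mut a).unwrap() += 1;

        // A live Weak also counts against uniqueness: get_mut returns None.
        let w = Arc::downgrade(&a);
        assert!(Arc::get_mut(&mut a).is_none());

        drop(w);
        assert_eq!(*Arc::get_mut(&mut a).unwrap(), 11);
    }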
@@ -1769,7 +1772,39 @@ impl<T /*: ?Sized */> Weak<T> {
         };
 
         // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
-        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
+        Self { ptr: unsafe { NonNull::new_unchecked(ptr) } }
+    }
+
+    /// Consumes the `Weak<T>` and turns it into a raw pointer.
+    ///
+    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
+    /// one weak reference (the weak count is not modified by this operation). It can be turned
+    /// back into the `Weak<T>` with [`from_raw`].
+    ///
+    /// The same restrictions of accessing the target of the pointer as with
+    /// [`as_ptr`] apply.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use portable_atomic_util::{Arc, Weak};
+    ///
+    /// let strong = Arc::new("hello".to_owned());
+    /// let weak = Arc::downgrade(&strong);
+    /// let raw = weak.into_raw();
+    ///
+    /// assert_eq!(1, Arc::weak_count(&strong));
+    /// assert_eq!("hello", unsafe { &*raw });
+    ///
+    /// drop(unsafe { Weak::from_raw(raw) });
+    /// assert_eq!(0, Arc::weak_count(&strong));
+    /// ```
+    ///
+    /// [`from_raw`]: Weak::from_raw
+    /// [`as_ptr`]: Weak::as_ptr
+    #[must_use = "losing the pointer will leak memory"]
+    pub fn into_raw(self) -> *const T {
+        ManuallyDrop::new(self).as_ptr()
     }
 }
 
@@ -1803,7 +1838,7 @@ impl<T /*: ?Sized */> Weak<T> {
     /// [`null`]: core::ptr::null "ptr::null"
     #[must_use]
     pub fn as_ptr(&self) -> *const T {
-        let ptr: *mut ArcInner<T> = self.ptr.as_ptr();
+        let ptr: *mut ArcInner<T> = NonNull::as_ptr(self.ptr);
 
         if is_dangling(ptr) {
             // If the pointer is dangling, we return the sentinel directly. This cannot be
@@ -1821,38 +1856,6 @@ impl<T /*: ?Sized */> Weak<T> {
             }
         }
     }
-
-    /// Consumes the `Weak<T>` and turns it into a raw pointer.
-    ///
-    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
-    /// one weak reference (the weak count is not modified by this operation). It can be turned
-    /// back into the `Weak<T>` with [`from_raw`].
-    ///
-    /// The same restrictions of accessing the target of the pointer as with
-    /// [`as_ptr`] apply.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use portable_atomic_util::{Arc, Weak};
-    ///
-    /// let strong = Arc::new("hello".to_owned());
-    /// let weak = Arc::downgrade(&strong);
-    /// let raw = weak.into_raw();
-    ///
-    /// assert_eq!(1, Arc::weak_count(&strong));
-    /// assert_eq!("hello", unsafe { &*raw });
-    ///
-    /// drop(unsafe { Weak::from_raw(raw) });
-    /// assert_eq!(0, Arc::weak_count(&strong));
-    /// ```
-    ///
-    /// [`from_raw`]: Weak::from_raw
-    /// [`as_ptr`]: Weak::as_ptr
-    #[must_use = "losing the pointer will leak memory"]
-    pub fn into_raw(self) -> *const T {
-        ManuallyDrop::new(self).as_ptr()
-    }
 }
 
 impl<T: ?Sized> Weak<T> {
@@ -2211,7 +2214,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
     ///
     /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
     /// ```
-    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
         (**self).partial_cmp(&**other)
     }
 
@@ -2299,7 +2302,7 @@ impl<T: ?Sized + Ord> Ord for Arc<T> {
     ///
     /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
     /// ```
-    fn cmp(&self, other: &Self) -> cmp::Ordering {
+    fn cmp(&self, other: &Self) -> Ordering {
         (**self).cmp(&**other)
     }
 }
@@ -2335,6 +2338,7 @@ impl<T: Default> Default for Arc<T> {
     /// assert_eq!(*x, 0);
     /// ```
     fn default() -> Self {
+        // TODO: https://github.com/rust-lang/rust/pull/131460 / https://github.com/rust-lang/rust/pull/132031
         Self::new(T::default())
     }
 }
@@ -2360,7 +2364,7 @@ impl<T> Default for Arc<[T]> {
     /// This may or may not share an allocation with other Arcs.
     #[inline]
     fn default() -> Self {
-        // TODO: we cannot use non-allocation optimization (https://github.com/rust-lang/rust/blob/1.84.0/library/alloc/src/sync.rs#L3532)
+        // TODO: we cannot use non-allocation optimization (https://github.com/rust-lang/rust/blob/1.93.0/library/alloc/src/sync.rs#L3807)
         // for now since casting Arc<[T; N]> -> Arc<[T]> requires unstable CoerceUnsized.
         let arr: [T; 0] = [];
         Arc::from(arr)
@@ -2794,10 +2798,6 @@ use core::error;
 use std::error;
 #[cfg(any(not(portable_atomic_no_error_in_core), feature = "std"))]
 impl<T: ?Sized + error::Error> error::Error for Arc<T> {
-    #[allow(deprecated)]
-    fn description(&self) -> &str {
-        error::Error::description(&**self)
-    }
     #[allow(deprecated)]
     fn cause(&self) -> Option<&dyn error::Error> {
         error::Error::cause(&**self)
