Commit e481f46

util: Sync Arc with upstream
1 parent e4e0588 commit e481f46

File tree

2 files changed: +141 -104 lines changed

portable-atomic-util/src/arc.rs

Lines changed: 102 additions & 104 deletions
@@ -4,20 +4,16 @@
 //
 // The code has been adjusted to work with stable Rust (and optionally support some unstable features).
 //
-// Source: https://github.com/rust-lang/rust/blob/1.84.0/library/alloc/src/sync.rs.
+// Source: https://github.com/rust-lang/rust/blob/1.93.0/library/alloc/src/sync.rs
 //
 // Copyright & License of the original code:
-// - https://github.com/rust-lang/rust/blob/1.84.0/COPYRIGHT
-// - https://github.com/rust-lang/rust/blob/1.84.0/LICENSE-APACHE
-// - https://github.com/rust-lang/rust/blob/1.84.0/LICENSE-MIT
+// - https://github.com/rust-lang/rust/blob/1.93.0/COPYRIGHT
+// - https://github.com/rust-lang/rust/blob/1.93.0/LICENSE-APACHE
+// - https://github.com/rust-lang/rust/blob/1.93.0/LICENSE-MIT

 #![allow(clippy::must_use_candidate)] // align to alloc::sync::Arc
 #![allow(clippy::undocumented_unsafe_blocks)] // TODO: most of the unsafe codes were inherited from alloc::sync::Arc

-// TODO:
-// - https://github.com/rust-lang/rust/pull/132231
-// - https://github.com/rust-lang/rust/pull/131460 / https://github.com/rust-lang/rust/pull/132031
-
 use alloc::{
     alloc::handle_alloc_error,
     borrow::{Cow, ToOwned},
@@ -30,7 +26,9 @@ use core::convert::TryFrom;
 use core::{
     alloc::Layout,
     any::Any,
-    borrow, cmp, fmt,
+    borrow,
+    cmp::Ordering,
+    fmt,
     hash::{Hash, Hasher},
     isize,
     marker::PhantomData,
@@ -207,7 +205,9 @@ impl<T: ?Sized> fmt::Debug for Weak<T> {
 // This is repr(C) to future-proof against possible field-reordering, which
 // would interfere with otherwise safe [into|from]_raw() of transmutable
 // inner types.
-#[repr(C)]
+// Unlike RcInner, repr(align(2)) is not strictly required because atomic types
+// have the alignment same as its size, but we use it for consistency and clarity.
+#[repr(C, align(2))]
 struct ArcInner<T: ?Sized> {
     strong: atomic::AtomicUsize,

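The new `align(2)` comment relies on atomic integer types being aligned to their size. A standalone sanity check of that fact (plain `core`, not part of this diff):

```rust
use core::mem::{align_of, size_of};
use core::sync::atomic::AtomicUsize;

fn main() {
    // Atomic integers are aligned to their size, so ArcInner's leading
    // `strong: AtomicUsize` field already forces alignment >= 2.
    assert_eq!(align_of::<AtomicUsize>(), size_of::<AtomicUsize>());
    assert!(align_of::<AtomicUsize>() >= 2);
}
```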
@@ -702,6 +702,21 @@ impl<T> Arc<mem::MaybeUninit<T>> {
     }
 }

+impl<T: ?Sized + CloneToUninit> Arc<T> {
+    fn clone_from_ref(value: &T) -> Self {
+        // `in_progress` drops the allocation if we panic before finishing initializing it.
+        let mut in_progress: UniqueArcUninit<T> = UniqueArcUninit::new(value);
+
+        // Initialize with clone of value.
+        unsafe {
+            // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+            value.clone_to_uninit(in_progress.data_ptr() as *mut u8);
+            // Cast type of pointer, now that it is initialized.
+            in_progress.into_arc()
+        }
+    }
+}
+
 #[cfg(not(portable_atomic_no_maybe_uninit))]
 impl<T> Arc<[mem::MaybeUninit<T>]> {
     /// Converts to `Arc<[T]>`.
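The `UniqueArcUninit` guard above is what makes the clone panic-safe: if `clone_to_uninit` unwinds, the half-built allocation is freed without running `T`'s destructor. A simplified sketch of the same pattern using `Box` instead of `Arc` (hypothetical helper, not from this crate):

```rust
use std::mem::MaybeUninit;

// Hypothetical simplified analogue of `UniqueArcUninit`, using `Box`
// to sidestep reference counting.
fn clone_into_box<T: Clone>(value: &T) -> Box<T> {
    // The `Box<MaybeUninit<T>>` owns the allocation; if `clone` panics below,
    // dropping it frees the memory without running `T`'s destructor.
    let mut in_progress: Box<MaybeUninit<T>> = Box::new(MaybeUninit::uninit());
    in_progress.write(value.clone());
    // SAFETY: the value was fully initialized by `write` above.
    unsafe { Box::from_raw(Box::into_raw(in_progress).cast::<T>()) }
}

fn main() {
    let b = clone_into_box(&String::from("hello"));
    assert_eq!(*b, "hello");
}
```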
@@ -818,6 +833,28 @@ impl<T: ?Sized> Arc<T> {
         }
     }

+    /// Consumes the `Arc`, returning the wrapped pointer.
+    ///
+    /// To avoid a memory leak the pointer must be converted back to an `Arc` using
+    /// [`Arc::from_raw`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use portable_atomic_util::Arc;
+    ///
+    /// let x = Arc::new("hello".to_owned());
+    /// let x_ptr = Arc::into_raw(x);
+    /// assert_eq!(unsafe { &*x_ptr }, "hello");
+    /// # // Prevent leaks for Miri.
+    /// # drop(unsafe { Arc::from_raw(x_ptr) });
+    /// ```
+    #[must_use = "losing the pointer will leak memory"]
+    pub fn into_raw(this: Self) -> *const T {
+        let this = ManuallyDrop::new(this);
+        Self::as_ptr(&*this)
+    }
+
     /// Increments the strong reference count on the `Arc<T>` associated with the
     /// provided pointer by one.
     ///
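`into_raw` wraps the `Arc` in `ManuallyDrop` so the strong count is not decremented when `this` goes out of scope. A minimal illustration of that effect with a plain type (illustrative only, not from this diff):

```rust
use std::mem::ManuallyDrop;

struct Noisy;
impl Drop for Noisy {
    fn drop(&mut self) {
        println!("destructor ran");
    }
}

fn main() {
    // Wrapping in ManuallyDrop suppresses the destructor -- the effect
    // into_raw needs: the pointer escapes without the strong count dropping.
    let _leaked = ManuallyDrop::new(Noisy);
    // Nothing is printed when `_leaked` goes out of scope.
}
```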
@@ -893,30 +930,6 @@ impl<T: ?Sized> Arc<T> {
         // SAFETY: the caller must uphold the safety contract.
         unsafe { drop(Self::from_raw(ptr)) }
     }
-}
-
-impl<T: ?Sized> Arc<T> {
-    /// Consumes the `Arc`, returning the wrapped pointer.
-    ///
-    /// To avoid a memory leak the pointer must be converted back to an `Arc` using
-    /// [`Arc::from_raw`].
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use portable_atomic_util::Arc;
-    ///
-    /// let x = Arc::new("hello".to_owned());
-    /// let x_ptr = Arc::into_raw(x);
-    /// assert_eq!(unsafe { &*x_ptr }, "hello");
-    /// # // Prevent leaks for Miri.
-    /// # drop(unsafe { Arc::from_raw(x_ptr) });
-    /// ```
-    #[must_use = "losing the pointer will leak memory"]
-    pub fn into_raw(this: Self) -> *const T {
-        let this = ManuallyDrop::new(this);
-        Self::as_ptr(&*this)
-    }

     /// Provides a raw pointer to the data.
     ///
@@ -936,7 +949,7 @@
     /// ```
     #[must_use]
     pub fn as_ptr(this: &Self) -> *const T {
-        let ptr: *mut ArcInner<T> = this.ptr.as_ptr();
+        let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);

         // SAFETY: This cannot go through Deref::deref or ArcInnerPtr::inner because
         // this is required to retain raw/mut provenance such that e.g. `get_mut` can
@@ -1060,15 +1073,17 @@
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
+        // Drop the weak ref collectively held by all strong references when this
+        // variable goes out of scope. This ensures that the memory is deallocated
+        // even if the destructor of `T` panics.
+        // Take a reference to `self.alloc` instead of cloning because 1. it'll last long
+        // enough, and 2. you should be able to drop `Arc`s with unclonable allocators
+        let _weak = Weak { ptr: self.ptr };
+
         // Destroy the data at this time, even though we must not free the box
         // allocation itself (there might still be weak pointers lying around).
-        unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) }
-
-        // Drop the weak ref collectively held by all strong references
-        // Take a reference to `self.alloc` instead of cloning because 1. it'll
-        // last long enough, and 2. you should be able to drop `Arc`s with
-        // unclonable allocators
-        drop(Weak { ptr: self.ptr });
+        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
+        unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) };
     }

     /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to
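The rewritten `drop_slow` leans on Rust's drop order: locals drop in reverse declaration order, so `_weak`, declared first, is dropped last -- including during unwinding if `drop_in_place` panics. A small demonstration of that ordering (illustrative only):

```rust
struct Guard(&'static str);
impl Drop for Guard {
    fn drop(&mut self) {
        println!("dropped: {}", self.0);
    }
}

fn main() {
    // Declared first, dropped last -- and still dropped if a later
    // destructor panics and the stack unwinds.
    let _weak = Guard("deallocate (weak ref)");
    let _data = Guard("destroy data");
    // Prints "dropped: destroy data" then "dropped: deallocate (weak ref)".
}
```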
@@ -1378,18 +1393,7 @@ impl<T: ?Sized + CloneToUninit> Arc<T> {
         // deallocated.
         if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
-
-            let this_data_ref: &T = this;
-            // `in_progress` drops the allocation if we panic before finishing initializing it.
-            let mut in_progress: UniqueArcUninit<T> = UniqueArcUninit::new(this_data_ref);
-
-            let initialized_clone = unsafe {
-                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
-                this_data_ref.clone_to_uninit(in_progress.data_ptr() as *mut u8);
-                // Cast type of pointer, now that it is initialized.
-                in_progress.into_arc()
-            };
-            *this = initialized_clone;
+            *this = Arc::clone_from_ref(&**this);
         } else if this.inner().weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
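This hunk is the slow path of `Arc::make_mut`: clone only when another strong reference exists, now routed through the extracted `clone_from_ref` helper. A usage sketch, assuming the crate's `make_mut` mirrors `std::sync::Arc::make_mut`:

```rust
use portable_atomic_util::Arc;

fn main() {
    let mut data = Arc::new(5);
    *Arc::make_mut(&mut data) += 1; // unique: mutates in place, no clone

    let other = Arc::clone(&data);
    *Arc::make_mut(&mut data) += 1; // shared: clones (via clone_from_ref) first

    assert_eq!(*data, 7);
    assert_eq!(*other, 6); // `other` kept the pre-clone value
}
```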
@@ -1501,7 +1505,7 @@ impl<T: ?Sized> Arc<T> {
     /// ```
     #[inline]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
-        if this.is_unique() {
+        if Self::is_unique(this) {
             // This unsafety is ok because we're guaranteed that the pointer
             // returned is the *only* pointer that will ever be returned to T. Our
             // reference count is guaranteed to be 1 at this point, and we required
@@ -1520,28 +1524,25 @@
         unsafe { &mut (*this.ptr.as_ptr()).data }
     }

-    /// Determine whether this is the unique reference (including weak refs) to
-    /// the underlying data.
-    ///
-    /// Note that this requires locking the weak ref count.
-    fn is_unique(&mut self) -> bool {
+    #[inline]
+    fn is_unique(this: &Self) -> bool {
         // lock the weak pointer count if we appear to be the sole weak pointer
         // holder.
         //
         // The acquire label here ensures a happens-before relationship with any
         // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
         // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded
         // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
-        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
+        if this.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
             // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
             // counter in `drop` -- the only access that happens when any but the last reference
             // is being dropped.
-            let unique = self.inner().strong.load(Acquire) == 1;
+            let unique = this.inner().strong.load(Acquire) == 1;

             // The release write here synchronizes with a read in `downgrade`,
             // effectively preventing the above read of `strong` from happening
             // after the write.
-            self.inner().weak.store(1, Release); // release the lock
+            this.inner().weak.store(1, Release); // release the lock
             unique
         } else {
             false
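`is_unique` briefly "locks" the weak count by CAS-ing it from 1 to `usize::MAX` so no `downgrade` can race with the `strong` check; `get_mut` builds on it. The observable behavior, assuming the usual `get_mut`/`downgrade` semantics:

```rust
use portable_atomic_util::Arc;

fn main() {
    let mut x = Arc::new(3);
    // Unique (strong == 1, implicit weak == 1): get_mut succeeds.
    *Arc::get_mut(&mut x).unwrap() += 1;

    // Any other strong or weak reference makes is_unique return false.
    let w = Arc::downgrade(&x);
    assert!(Arc::get_mut(&mut x).is_none());

    drop(w);
    assert_eq!(*Arc::get_mut(&mut x).unwrap(), 4);
}
```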
@@ -1769,7 +1770,39 @@ impl<T /*: ?Sized */> Weak<T> {
         };

         // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
-        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
+        Self { ptr: unsafe { NonNull::new_unchecked(ptr) } }
+    }
+
+    /// Consumes the `Weak<T>` and turns it into a raw pointer.
+    ///
+    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
+    /// one weak reference (the weak count is not modified by this operation). It can be turned
+    /// back into the `Weak<T>` with [`from_raw`].
+    ///
+    /// The same restrictions of accessing the target of the pointer as with
+    /// [`as_ptr`] apply.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use portable_atomic_util::{Arc, Weak};
+    ///
+    /// let strong = Arc::new("hello".to_owned());
+    /// let weak = Arc::downgrade(&strong);
+    /// let raw = weak.into_raw();
+    ///
+    /// assert_eq!(1, Arc::weak_count(&strong));
+    /// assert_eq!("hello", unsafe { &*raw });
+    ///
+    /// drop(unsafe { Weak::from_raw(raw) });
+    /// assert_eq!(0, Arc::weak_count(&strong));
+    /// ```
+    ///
+    /// [`from_raw`]: Weak::from_raw
+    /// [`as_ptr`]: Weak::as_ptr
+    #[must_use = "losing the pointer will leak memory"]
+    pub fn into_raw(self) -> *const T {
+        ManuallyDrop::new(self).as_ptr()
     }
 }

@@ -1803,7 +1836,7 @@
     /// [`null`]: core::ptr::null "ptr::null"
     #[must_use]
     pub fn as_ptr(&self) -> *const T {
-        let ptr: *mut ArcInner<T> = self.ptr.as_ptr();
+        let ptr: *mut ArcInner<T> = NonNull::as_ptr(self.ptr);

         if is_dangling(ptr) {
             // If the pointer is dangling, we return the sentinel directly. This cannot be
@@ -1821,38 +1854,6 @@
             }
         }
     }
-
-    /// Consumes the `Weak<T>` and turns it into a raw pointer.
-    ///
-    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
-    /// one weak reference (the weak count is not modified by this operation). It can be turned
-    /// back into the `Weak<T>` with [`from_raw`].
-    ///
-    /// The same restrictions of accessing the target of the pointer as with
-    /// [`as_ptr`] apply.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use portable_atomic_util::{Arc, Weak};
-    ///
-    /// let strong = Arc::new("hello".to_owned());
-    /// let weak = Arc::downgrade(&strong);
-    /// let raw = weak.into_raw();
-    ///
-    /// assert_eq!(1, Arc::weak_count(&strong));
-    /// assert_eq!("hello", unsafe { &*raw });
-    ///
-    /// drop(unsafe { Weak::from_raw(raw) });
-    /// assert_eq!(0, Arc::weak_count(&strong));
-    /// ```
-    ///
-    /// [`from_raw`]: Weak::from_raw
-    /// [`as_ptr`]: Weak::as_ptr
-    #[must_use = "losing the pointer will leak memory"]
-    pub fn into_raw(self) -> *const T {
-        ManuallyDrop::new(self).as_ptr()
-    }
 }

 impl<T: ?Sized> Weak<T> {
@@ -2211,7 +2212,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
     ///
    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
     /// ```
-    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
         (**self).partial_cmp(&**other)
     }

@@ -2299,7 +2300,7 @@ impl<T: ?Sized + Ord> Ord for Arc<T> {
     ///
     /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
     /// ```
-    fn cmp(&self, other: &Self) -> cmp::Ordering {
+    fn cmp(&self, other: &Self) -> Ordering {
         (**self).cmp(&**other)
     }
 }
@@ -2335,6 +2336,7 @@ impl<T: Default> Default for Arc<T> {
     /// assert_eq!(*x, 0);
     /// ```
     fn default() -> Self {
+        // TODO: https://github.com/rust-lang/rust/pull/131460 / https://github.com/rust-lang/rust/pull/132031
         Self::new(T::default())
     }
 }
@@ -2360,7 +2362,7 @@ impl<T> Default for Arc<[T]> {
     /// This may or may not share an allocation with other Arcs.
     #[inline]
     fn default() -> Self {
-        // TODO: we cannot use non-allocation optimization (https://github.com/rust-lang/rust/blob/1.84.0/library/alloc/src/sync.rs#L3532)
+        // TODO: we cannot use non-allocation optimization (https://github.com/rust-lang/rust/blob/1.93.0/library/alloc/src/sync.rs#L3807)
         // for now since casting Arc<[T; N]> -> Arc<[T]> requires unstable CoerceUnsized.
         let arr: [T; 0] = [];
         Arc::from(arr)
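Usage sketch for the empty-slice `Default` (assuming it mirrors `std`'s behavior apart from the missing non-allocation optimization the TODO notes):

```rust
use portable_atomic_util::Arc;

fn main() {
    // Unlike upstream, this currently allocates a zero-length slice.
    let empty: Arc<[i32]> = Arc::default();
    assert!(empty.is_empty());
    assert_eq!(empty.len(), 0);
}
```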
@@ -2794,10 +2796,6 @@ use core::error;
 use std::error;
 #[cfg(any(not(portable_atomic_no_error_in_core), feature = "std"))]
 impl<T: ?Sized + error::Error> error::Error for Arc<T> {
-    #[allow(deprecated)]
-    fn description(&self) -> &str {
-        error::Error::description(&**self)
-    }
     #[allow(deprecated)]
     fn cause(&self) -> Option<&dyn error::Error> {
         error::Error::cause(&**self)
