
Commit 9c5be67

add {Box, (Unique){Rc, Arc}}::(try_)map
1 parent 4a54b26 commit 9c5be67

File tree: 9 files changed (+599 −13 lines changed)


library/alloc/src/boxed.rs

Lines changed: 80 additions & 0 deletions
@@ -192,11 +192,15 @@ use core::fmt;
 use core::future::Future;
 use core::hash::{Hash, Hasher};
 use core::marker::{Tuple, Unsize};
+#[cfg(not(no_global_oom_handling))]
+use core::mem::MaybeUninit;
 use core::mem::{self, SizedTypeProperties};
 use core::ops::{
     AsyncFn, AsyncFnMut, AsyncFnOnce, CoerceUnsized, Coroutine, CoroutineState, Deref, DerefMut,
     DerefPure, DispatchFromDyn, LegacyReceiver,
 };
+#[cfg(not(no_global_oom_handling))]
+use core::ops::{Residual, Try};
 use core::pin::{Pin, PinCoerceUnsized};
 use core::ptr::{self, NonNull, Unique};
 use core::task::{Context, Poll};
@@ -385,6 +389,82 @@ impl<T> Box<T> {
     pub fn try_new_zeroed() -> Result<Box<mem::MaybeUninit<T>>, AllocError> {
         Box::try_new_zeroed_in(Global)
     }
+
+    /// Maps the value in a box, reusing the allocation if possible.
+    ///
+    /// `f` is called on the value in the box, and the result is returned, also boxed.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `Box::map(b, f)` instead of `b.map(f)`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(smart_pointer_try_map)]
+    ///
+    /// let b = Box::new(7);
+    /// let new = Box::map(b, |i| i + 7);
+    /// assert_eq!(*new, 14);
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
+    pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> Box<U> {
+        if size_of::<T>() == size_of::<U>() && align_of::<T>() == align_of::<U>() {
+            let (value, allocation) = Box::take(this);
+            Box::write(
+                unsafe { mem::transmute::<Box<MaybeUninit<T>>, Box<MaybeUninit<U>>>(allocation) },
+                f(value),
+            )
+        } else {
+            Box::new(f(*this))
+        }
+    }
+
+    /// Attempts to map the value in a box, reusing the allocation if possible.
+    ///
+    /// `f` is called on the value in the box, and if the operation succeeds, the result is
+    /// returned, also boxed.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `Box::try_map(b, f)` instead of `b.try_map(f)`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(smart_pointer_try_map)]
+    ///
+    /// let b = Box::new(7);
+    /// let new = Box::try_map(b, u32::try_from).unwrap();
+    /// assert_eq!(*new, 7);
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
+    pub fn try_map<R>(
+        this: Self,
+        f: impl FnOnce(T) -> R,
+    ) -> <R::Residual as Residual<Box<R::Output>>>::TryType
+    where
+        R: Try,
+        R::Residual: Residual<Box<R::Output>>,
+    {
+        if size_of::<T>() == size_of::<R::Output>() && align_of::<T>() == align_of::<R::Output>() {
+            let (value, allocation) = Box::take(this);
+            try {
+                Box::write(
+                    unsafe {
+                        mem::transmute::<Box<MaybeUninit<T>>, Box<MaybeUninit<R::Output>>>(
+                            allocation,
+                        )
+                    },
+                    f(value)?,
+                )
+            }
+        } else {
+            try { Box::new(f(*this)?) }
+        }
+    }
 }

 impl<T, A: Allocator> Box<T, A> {
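Reviewer note: a minimal usage sketch of the two new `Box` functions from the caller's side, assuming a nightly toolchain with `smart_pointer_try_map` enabled. This sketch is illustrative only and not part of the diff.

```rust
#![feature(smart_pointer_try_map)]

fn main() {
    // Infallible mapping: i32 and u32 have the same size and alignment, so this
    // hits the path that reuses the box's existing allocation.
    let b = Box::new(-3_i32);
    let magnitude: Box<u32> = Box::map(b, |n| n.unsigned_abs());
    assert_eq!(*magnitude, 3);

    // Fallible mapping: the closure's error short-circuits out of try_map.
    let negative = Box::new(-3_i32);
    let converted: Result<Box<u32>, _> = Box::try_map(negative, u32::try_from);
    assert!(converted.is_err());
}
```

When source and target layouts match, `map` takes the fast path that reuses the existing allocation; otherwise it falls back to a fresh `Box::new`.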

library/alloc/src/lib.rs

Lines changed: 2 additions & 0 deletions
@@ -146,7 +146,9 @@
 #![feature(trusted_fused)]
 #![feature(trusted_len)]
 #![feature(trusted_random_access)]
+#![feature(try_blocks)]
 #![feature(try_trait_v2)]
+#![feature(try_trait_v2_residual)]
 #![feature(try_with_capacity)]
 #![feature(tuple_trait)]
 #![feature(ub_checks)]
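Reviewer note: the two newly enabled crate features are what the `try_map` bodies rely on. `try_blocks` lets `?` short-circuit to the end of a `try { ... }` block instead of the enclosing function, and `try_trait_v2_residual` provides the `Residual` trait used to name the return types. A minimal, nightly-only sketch of the `try` block behaviour (not part of the diff):

```rust
#![feature(try_blocks)]

fn main() {
    // Inside a `try` block, `?` jumps to the end of the block on error, and the
    // block's success value is wrapped back into the carrier type (Result here).
    let sum: Result<i32, std::num::ParseIntError> = try {
        "1".parse::<i32>()? + "2".parse::<i32>()?
    };
    assert_eq!(sum.unwrap(), 3);
}
```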

library/alloc/src/rc.rs

Lines changed: 253 additions & 0 deletions
@@ -255,6 +255,8 @@ use core::marker::{PhantomData, Unsize};
 use core::mem::{self, ManuallyDrop, align_of_val_raw};
 use core::num::NonZeroUsize;
 use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
+#[cfg(not(no_global_oom_handling))]
+use core::ops::{Residual, Try};
 use core::panic::{RefUnwindSafe, UnwindSafe};
 #[cfg(not(no_global_oom_handling))]
 use core::pin::Pin;
@@ -639,6 +641,93 @@ impl<T> Rc<T> {
     pub fn pin(value: T) -> Pin<Rc<T>> {
         unsafe { Pin::new_unchecked(Rc::new(value)) }
     }
+
+    /// Maps the value in an `Rc`, reusing the allocation if possible.
+    ///
+    /// `f` is called on a reference to the value in the `Rc`, and the result is returned, also in
+    /// an `Rc`.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `Rc::map(r, f)` instead of `r.map(f)`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(smart_pointer_try_map)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let r = Rc::new(7);
+    /// let new = Rc::map(r, |i| i + 7);
+    /// assert_eq!(*new, 14);
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
+    pub fn map<U>(this: Self, f: impl FnOnce(&T) -> U) -> Rc<U> {
+        if size_of::<T>() == size_of::<U>()
+            && align_of::<T>() == align_of::<U>()
+            && Rc::is_unique(&this)
+        {
+            unsafe {
+                let ptr = Rc::into_raw(this);
+                let value = ptr.read();
+                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
+
+                Rc::get_mut_unchecked(&mut allocation).write(f(&value));
+                allocation.assume_init()
+            }
+        } else {
+            Rc::new(f(&*this))
+        }
+    }
+
+    /// Attempts to map the value in an `Rc`, reusing the allocation if possible.
+    ///
+    /// `f` is called on a reference to the value in the `Rc`, and if the operation succeeds, the
+    /// result is returned, also in an `Rc`.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `Rc::try_map(r, f)` instead of `r.try_map(f)`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(smart_pointer_try_map)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let b = Rc::new(7);
+    /// let new = Rc::try_map(b, |&i| u32::try_from(i)).unwrap();
+    /// assert_eq!(*new, 7);
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
+    pub fn try_map<R>(
+        this: Self,
+        f: impl FnOnce(&T) -> R,
+    ) -> <R::Residual as Residual<Rc<R::Output>>>::TryType
+    where
+        R: Try,
+        R::Residual: Residual<Rc<R::Output>>,
+    {
+        if size_of::<T>() == size_of::<R::Output>()
+            && align_of::<T>() == align_of::<R::Output>()
+            && Rc::is_unique(&this)
+        {
+            unsafe {
+                let ptr = Rc::into_raw(this);
+                let value = ptr.read();
+                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
+
+                Rc::get_mut_unchecked(&mut allocation).write(f(&value)?);
+                try { allocation.assume_init() }
+            }
+        } else {
+            try { Rc::new(f(&*this)?) }
+        }
+    }
 }

 impl<T, A: Allocator> Rc<T, A> {
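Reviewer note: a hedged, nightly-only usage sketch of the new `Rc` functions (not part of the diff). Because other `Rc` or `Weak` handles may exist, the closure only receives a shared reference; the allocation is reused only when `Rc::is_unique` holds and the layouts match.

```rust
#![feature(smart_pointer_try_map)]

use std::rc::Rc;

fn main() {
    // Infallible mapping: the closure gets `&T`, the result comes back in an Rc.
    let r = Rc::new(21_i32);
    let doubled = Rc::map(r, |n| n * 2);
    assert_eq!(*doubled, 42);

    // Fallible mapping: the closure's error propagates out of try_map unchanged.
    let negative = Rc::new(-1_i32);
    let converted: Result<Rc<u32>, _> = Rc::try_map(negative, |&n| u32::try_from(n));
    assert!(converted.is_err());
}
```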
@@ -3991,6 +4080,128 @@ impl<T> UniqueRc<T> {
     pub fn new(value: T) -> Self {
         Self::new_in(value, Global)
     }
+
+    /// Maps the value in a `UniqueRc`, reusing the allocation if possible.
+    ///
+    /// `f` is called on the value in the `UniqueRc`, and the result is returned,
+    /// also in a `UniqueRc`.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `UniqueRc::map(u, f)` instead of `u.map(f)`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(smart_pointer_try_map)]
+    /// #![feature(unique_rc_arc)]
+    ///
+    /// use std::rc::UniqueRc;
+    ///
+    /// let r = UniqueRc::new(7);
+    /// let new = UniqueRc::map(r, |i| i + 7);
+    /// assert_eq!(*new, 14);
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
+    pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> UniqueRc<U> {
+        if size_of::<T>() == size_of::<U>()
+            && align_of::<T>() == align_of::<U>()
+            && UniqueRc::weak_count(&this) == 0
+        {
+            unsafe {
+                let ptr = UniqueRc::into_raw(this);
+                let value = ptr.read();
+                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
+
+                allocation.write(f(value));
+                allocation.assume_init()
+            }
+        } else {
+            UniqueRc::new(f(UniqueRc::unwrap(this)))
+        }
+    }
+
+    /// Attempts to map the value in a `UniqueRc`, reusing the allocation if possible.
+    ///
+    /// `f` is called on the value in the `UniqueRc`, and if the operation succeeds,
+    /// the result is returned, also in a `UniqueRc`.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `UniqueRc::try_map(u, f)` instead of `u.try_map(f)`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(smart_pointer_try_map)]
+    /// #![feature(unique_rc_arc)]
+    ///
+    /// use std::rc::UniqueRc;
+    ///
+    /// let b = UniqueRc::new(7);
+    /// let new = UniqueRc::try_map(b, u32::try_from).unwrap();
+    /// assert_eq!(*new, 7);
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
+    pub fn try_map<R>(
+        this: Self,
+        f: impl FnOnce(T) -> R,
+    ) -> <R::Residual as Residual<UniqueRc<R::Output>>>::TryType
+    where
+        R: Try,
+        R::Residual: Residual<UniqueRc<R::Output>>,
+    {
+        if size_of::<T>() == size_of::<R::Output>()
+            && align_of::<T>() == align_of::<R::Output>()
+            && UniqueRc::weak_count(&this) == 0
+        {
+            unsafe {
+                let ptr = UniqueRc::into_raw(this);
+                let value = ptr.read();
+                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
+
+                allocation.write(f(value)?);
+                try { allocation.assume_init() }
+            }
+        } else {
+            try { UniqueRc::new(f(UniqueRc::unwrap(this))?) }
+        }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn unwrap(this: Self) -> T {
+        let this = ManuallyDrop::new(this);
+        let val: T = unsafe { ptr::read(&**this) };
+
+        let _weak = Weak { ptr: this.ptr, alloc: Global };
+
+        val
+    }
+}
+
+impl<T: ?Sized> UniqueRc<T> {
+    #[cfg(not(no_global_oom_handling))]
+    unsafe fn from_raw(ptr: *const T) -> Self {
+        let offset = unsafe { data_offset(ptr) };
+
+        // Reverse the offset to find the original RcInner.
+        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };
+
+        Self {
+            ptr: unsafe { NonNull::new_unchecked(rc_ptr) },
+            _marker: PhantomData,
+            _marker2: PhantomData,
+            alloc: Global,
+        }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn into_raw(this: Self) -> *const T {
+        let this = ManuallyDrop::new(this);
+        Self::as_ptr(&*this)
+    }
 }

 impl<T, A: Allocator> UniqueRc<T, A> {
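Reviewer note: a similar sketch for the `UniqueRc` variants (nightly-only, not part of the diff). A `UniqueRc` is the only strong handle, so `map` and `try_map` pass the value itself to the closure rather than a reference; the allocation is reused only when there are no outstanding weak references and the layouts match.

```rust
#![feature(smart_pointer_try_map)]
#![feature(unique_rc_arc)]

use std::rc::UniqueRc;

fn main() {
    // The closure consumes the String and produces a value of a different type;
    // the result comes back in a UniqueRc.
    let u = UniqueRc::new(String::from("hello"));
    let len = UniqueRc::map(u, |s| s.len());
    assert_eq!(*len, 5);

    // Fallible version: the error from the closure is passed through.
    let n = UniqueRc::new(300_i32);
    let byte: Result<UniqueRc<u8>, _> = UniqueRc::try_map(n, u8::try_from);
    assert!(byte.is_err());
}
```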
@@ -4041,6 +4252,40 @@ impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
             Rc::from_inner_in(this.ptr, alloc)
         }
     }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn weak_count(this: &Self) -> usize {
+        this.inner().weak() - 1
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn inner(&self) -> &RcInner<T> {
+        // SAFETY: while this UniqueRc is alive we're guaranteed that the inner pointer is valid.
+        unsafe { self.ptr.as_ref() }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn as_ptr(this: &Self) -> *const T {
+        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);
+
+        // SAFETY: This cannot go through Deref::deref or UniqueRc::inner because
+        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
+        // write through the pointer after the Rc is recovered through `from_raw`.
+        unsafe { &raw mut (*ptr).value }
+    }
+
+    #[inline]
+    #[cfg(not(no_global_oom_handling))]
+    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
+        let this = mem::ManuallyDrop::new(this);
+        (this.ptr, unsafe { ptr::read(&this.alloc) })
+    }
+
+    #[inline]
+    #[cfg(not(no_global_oom_handling))]
+    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
+        Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc }
+    }
 }

 impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
@@ -4059,6 +4304,14 @@ impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
     }
 }

+#[cfg(not(no_global_oom_handling))]
+impl<T, A: Allocator> UniqueRc<mem::MaybeUninit<T>, A> {
+    unsafe fn assume_init(self) -> UniqueRc<T, A> {
+        let (ptr, alloc) = UniqueRc::into_inner_with_allocator(self);
+        unsafe { UniqueRc::from_inner_in(ptr.cast(), alloc) }
+    }
+}
+
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
     type Target = T;
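Reviewer note: the private `UniqueRc::<MaybeUninit<T>>::assume_init` helper added above mirrors the stable `Box::<MaybeUninit<T>>::assume_init`. The write-then-assume_init shape it enables is shown here with the stable `Box` API (not part of the diff):

```rust
use std::mem::MaybeUninit;

fn main() {
    // Allocate uninitialized storage, write a value of the target type into it,
    // then assert that it is initialized: the same shape the new map/try_map
    // helpers use when reusing an allocation for a differently-typed value.
    let mut storage: Box<MaybeUninit<u32>> = Box::new_uninit();
    storage.write(42);
    // SAFETY: the value was written just above.
    let value: Box<u32> = unsafe { storage.assume_init() };
    assert_eq!(*value, 42);
}
```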
