-use core::alloc::Allocator;
+use core::alloc::{AllocError, Allocator};
use core::cell::UnsafeCell;
#[cfg(not(no_global_oom_handling))]
use core::clone::CloneToUninit;
use core::marker::PhantomData;
#[cfg(not(no_global_oom_handling))]
-use core::mem::{self, DropGuard};
+use core::mem;
+use core::mem::DropGuard;
#[cfg(not(no_global_oom_handling))]
use core::ops::DerefMut;
#[cfg(not(no_global_oom_handling))]
@@ -20,7 +21,7 @@ use crate::raw_rc::raw_weak::RawWeak;
#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::rc_alloc;
#[cfg(not(no_global_oom_handling))]
-use crate::raw_rc::rc_layout::RcLayout;
+use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
use crate::raw_rc::rc_value_pointer::RcValuePointer;

/// Decrements strong reference count in a reference-counted allocation with a value object that is
@@ -353,3 +354,126 @@ where
        unsafe { self.weak.value_ptr_unchecked() }
    }
}
+
+impl<T, A> RawRc<T, A> {
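+    /// Writes `value` into the value slot of `weak`'s allocation and takes strong ownership of
+    /// that allocation.
+    ///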
+    /// # Safety
+    ///
+    /// `weak` must be non-dangling.
+    unsafe fn from_weak_with_value(weak: RawWeak<T, A>, value: T) -> Self {
+        unsafe {
+            weak.as_ptr().write(value);
+
+            Self::from_weak(weak)
+        }
+    }
+
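+    /// Attempts to allocate a new reference-counted allocation containing `value`, using a
+    /// default-constructed allocator; returns `AllocError` if allocation fails.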
+    #[inline]
+    pub(crate) fn try_new(value: T) -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        RawWeak::try_new_uninit::<1>()
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
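+    /// Attempts to allocate a new reference-counted allocation containing `value` in the supplied
+    /// allocator; returns `AllocError` if allocation fails.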
+    #[inline]
+    pub(crate) fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        RawWeak::try_new_uninit_in::<1>(alloc)
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
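+    /// Counterpart of `try_new` that does not return an error; gated on global OOM handling
+    /// being available.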
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new(value: T) -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) }
+    }
+
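+    /// Counterpart of `try_new_in` that does not return an error; gated on global OOM handling
+    /// being available.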
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new_in(value: T, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<1>(alloc), value) }
+    }
+
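+    /// Allocates a new reference-counted allocation and initializes its value in place with the
+    /// result of calling `f`.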
+    #[cfg(not(no_global_oom_handling))]
+    fn new_with<F>(f: F) -> Self
+    where
+        A: Allocator + Default,
+        F: FnOnce() -> T,
+    {
+        let (ptr, alloc) = rc_alloc::allocate_with::<A, _, 1>(T::RC_LAYOUT, |ptr| unsafe {
+            ptr.as_ptr().cast().write(f())
+        });
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
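+    /// Decrements the strong reference count and, if this was the last strong reference, returns
+    /// the contained value.
+    ///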
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn into_inner<R>(self) -> Option<T>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };
+
+        is_last_strong_ref.then(|| unsafe { self.weak.assume_init_into_inner::<R>() })
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn try_unwrap<R>(self) -> Result<T, RawRc<T, A>>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        unsafe fn inner<R>(value_ptr: RcValuePointer) -> bool
+        where
+            R: RefCounter,
+        {
+            unsafe {
+                R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).try_lock_strong_count()
+            }
+        }
+
+        let is_last_strong_ref = unsafe { inner::<R>(self.value_ptr()) };
+
+        if is_last_strong_ref {
+            Ok(unsafe { self.weak.assume_init_into_inner::<R>() })
+        } else {
+            Err(self)
+        }
+    }
+
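+    /// Returns the contained value if `self` holds the only strong reference; otherwise returns a
+    /// clone of the value.
+    ///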
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn unwrap_or_clone<R>(self) -> T
+    where
+        T: Clone,
+        A: Allocator,
+        R: RefCounter,
+    {
+        // SAFETY: Caller guarantees `rc` will only be accessed with the same `RefCounter`
+        // implementation.
+        unsafe { self.try_unwrap::<R>() }.unwrap_or_else(|rc| {
+            // SAFETY: Caller guarantees `rc` will only be accessed with the same `RefCounter`
+            // implementation, and the `rc` local variable will not be accessed again after the
+            // drop guard is triggered.
+            let guard = DropGuard::new(rc, |mut rc| unsafe { rc.drop::<R>() });
+
+            // SAFETY: `RawRc<T, A>` is guaranteed to contain a valid `T` value.
+            T::clone(unsafe { guard.as_ptr().as_ref() })
+        })
+    }
+}