@@ -1,10 +1,10 @@
-use core::alloc::Allocator;
+use core::alloc::{AllocError, Allocator};
 use core::cell::UnsafeCell;
 use core::mem::{self, DropGuard};
 use core::num::NonZeroUsize;
 use core::ptr::{self, NonNull};
 
-use crate::raw_rc::rc_layout::RcLayout;
+use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
 use crate::raw_rc::rc_value_pointer::RcValuePointer;
 use crate::raw_rc::{RefCounter, RefCounts, rc_alloc};
 
@@ -341,3 +341,120 @@ where
         unsafe { RcValuePointer::from_value_ptr(self.ptr.cast()) }
     }
 }
+
+impl<T, A> RawWeak<T, A> {
+    pub(crate) fn new_dangling() -> Self
+    where
+        A: Default,
+    {
+        Self::new_dangling_in(A::default())
+    }
+
+    pub(crate) const fn new_dangling_in(alloc: A) -> Self {
+        unsafe { Self::from_raw_parts(NonNull::without_provenance(DANGLING_WEAK_ADDRESS), alloc) }
+    }
+
+    pub(crate) fn try_new_uninit<const STRONG_COUNT: usize>() -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        rc_alloc::try_allocate_uninit::<A, STRONG_COUNT>(T::RC_LAYOUT)
+            .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
+    pub(crate) fn try_new_uninit_in<const STRONG_COUNT: usize>(alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        rc_alloc::try_allocate_uninit_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+            .map(|ptr| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
+    pub(crate) fn try_new_zeroed<const STRONG_COUNT: usize>() -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        rc_alloc::try_allocate_zeroed::<A, STRONG_COUNT>(T::RC_LAYOUT)
+            .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
+    pub(crate) fn try_new_zeroed_in<const STRONG_COUNT: usize>(alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        rc_alloc::try_allocate_zeroed_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+            .map(|ptr| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit<const STRONG_COUNT: usize>() -> Self
+    where
+        A: Allocator + Default,
+    {
+        let (ptr, alloc) = rc_alloc::allocate_uninit::<A, STRONG_COUNT>(T::RC_LAYOUT);
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit_in<const STRONG_COUNT: usize>(alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe {
+            Self::from_raw_parts(
+                rc_alloc::allocate_uninit_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+                    .as_ptr()
+                    .cast(),
+                alloc,
+            )
+        }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed<const STRONG_COUNT: usize>() -> Self
+    where
+        A: Allocator + Default,
+    {
+        let (ptr, alloc) = rc_alloc::allocate_zeroed::<A, STRONG_COUNT>(T::RC_LAYOUT);
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed_in<const STRONG_COUNT: usize>(alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe {
+            Self::from_raw_parts(
+                rc_alloc::allocate_zeroed_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+                    .as_ptr()
+                    .cast(),
+                alloc,
+            )
+        }
+    }
+
+    /// Consumes the `RawWeak` object and returns the contained value, assuming the value is
+    /// initialized.
+    ///
+    /// # Safety
+    ///
+    /// - `self` is non-dangling.
+    /// - The value pointed to by `self` is initialized.
+    /// - The strong reference count is zero.
+    pub(super) unsafe fn assume_init_into_inner<R>(mut self) -> T
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        unsafe {
+            let result = self.ptr.read();
+
+            self.drop_unchecked::<R>();
+
+            result
+        }
+    }
+}
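`assume_init_into_inner` pairs a raw pointer read of the value with `drop_unchecked`, so the value is moved out exactly once and only the weak-side bookkeeping runs afterwards. A hedged sketch of that move-out step follows; `take_value` is a hypothetical free function standing in for the method, and the weak-count release that `drop_unchecked::<R>()` performs is elided.

use std::ptr::NonNull;

/// Sketch of the move-out step.
///
/// # Safety
///
/// Mirrors the doc comment in the diff: `value_ptr` must be non-dangling,
/// point to an initialized `T`, and no strong references may remain, so
/// nothing else can observe or double-drop the value.
unsafe fn take_value<T>(value_ptr: NonNull<T>) -> T {
    // Moves `T` out bitwise; the slot counts as uninitialized afterwards.
    let value = unsafe { value_ptr.read() };

    // The real method now calls `self.drop_unchecked::<R>()`, which
    // decrements the weak count and frees the block when it reaches zero.

    value
}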