Skip to content

Commit 18a0866

Browse files
committed
Refactor
1 parent 6f19acc commit 18a0866

File tree

3 files changed

+142
-174
lines changed

3 files changed

+142
-174
lines changed

library/alloc/src/raw_rc.rs

Lines changed: 77 additions & 77 deletions
Original file line numberDiff line numberDiff line change
@@ -27,11 +27,8 @@ use crate::string::String;
2727
use crate::vec::Vec;
2828

2929
pub unsafe trait RcOps {
30-
unsafe fn inc_strong(strong_count: &UnsafeCell<usize>);
31-
unsafe fn dec_strong(strong_count: &UnsafeCell<usize>) -> bool;
32-
33-
unsafe fn inc_weak(weak_count: &UnsafeCell<usize>);
34-
unsafe fn dec_weak(weak_count: &UnsafeCell<usize>) -> bool;
30+
unsafe fn increment_ref_count(count: &UnsafeCell<usize>);
31+
unsafe fn decrement_ref_count(count: &UnsafeCell<usize>) -> bool;
3532

3633
unsafe fn upgrade(strong_count: &UnsafeCell<usize>) -> bool;
3734
unsafe fn downgrade(weak_count: &UnsafeCell<usize>);
@@ -129,11 +126,11 @@ unsafe fn weak_count_ptr_from_value_ptr(value_ptr: NonNull<()>) -> NonNull<Unsaf
129126
unsafe { value_ptr.byte_sub(WEAK_COUNT_OFFSET_BYTES).cast() }
130127
}
131128

132-
unsafe fn write_rc_allocation<const STRONG_COUNT: usize>(
133-
ptr: NonNull<[u8]>,
129+
unsafe fn init_rc_allocation<const STRONG_COUNT: usize>(
130+
allocation_ptr: NonNull<[u8]>,
134131
rc_layout: &RcLayout,
135132
) -> NonNull<()> {
136-
let allocation_ptr = ptr.cast::<()>();
133+
let allocation_ptr = allocation_ptr.cast::<()>();
137134
let value_ptr = unsafe { allocation_ptr.byte_add(rc_layout.allocation_offset_bytes) };
138135
let ref_counts = const { RefCounts::new(STRONG_COUNT) };
139136

@@ -142,100 +139,112 @@ unsafe fn write_rc_allocation<const STRONG_COUNT: usize>(
142139
value_ptr
143140
}
144141

145-
fn try_allocate_for_rc<A, F, const STRONG_COUNT: usize>(
146-
alloc: &A,
147-
allocate_fn: F,
142+
unsafe fn try_handle_rc_allocation<const STRONG_COUNT: usize>(
143+
allocation_result: Result<NonNull<[u8]>, AllocError>,
148144
rc_layout: &RcLayout,
149-
) -> Result<NonNull<()>, AllocError>
150-
where
151-
F: FnOnce(&A, Layout) -> Result<NonNull<[u8]>, AllocError>,
152-
{
153-
allocate_fn(alloc, rc_layout.allocation_layout)
154-
.map(|ptr| unsafe { write_rc_allocation::<STRONG_COUNT>(ptr, rc_layout) })
145+
) -> Result<NonNull<()>, AllocError> {
146+
allocation_result.map(|allocation_ptr| unsafe {
147+
init_rc_allocation::<STRONG_COUNT>(allocation_ptr, rc_layout)
148+
})
155149
}
156150

157-
#[cfg(not(no_global_oom_handling))]
158-
#[track_caller]
159-
fn allocate_for_rc<A, F, const STRONG_COUNT: usize>(
151+
fn try_allocate_uninit_for_rc<A, const STRONG_COUNT: usize>(
160152
alloc: &A,
161-
allocate_fn: F,
162153
rc_layout: &RcLayout,
163-
) -> NonNull<()>
154+
) -> Result<NonNull<()>, AllocError>
164155
where
165-
F: FnOnce(&A, Layout) -> Result<NonNull<[u8]>, AllocError>,
156+
A: Allocator,
166157
{
167-
match allocate_fn(alloc, rc_layout.allocation_layout) {
168-
Ok(ptr) => unsafe { write_rc_allocation::<STRONG_COUNT>(ptr, rc_layout) },
169-
Err(AllocError) => alloc::handle_alloc_error(rc_layout.allocation_layout),
158+
unsafe {
159+
try_handle_rc_allocation::<STRONG_COUNT>(
160+
alloc.allocate(rc_layout.allocation_layout),
161+
rc_layout,
162+
)
170163
}
171164
}
172165

173-
#[cfg(not(no_global_oom_handling))]
174-
#[track_caller]
175-
unsafe fn allocate_for_rc_with_value<T, A, const STRONG_COUNT: usize>(
176-
value: &T,
166+
fn try_allocate_zeroed_for_rc<A, const STRONG_COUNT: usize>(
177167
alloc: &A,
178-
) -> NonNull<T>
168+
rc_layout: &RcLayout,
169+
) -> Result<NonNull<()>, AllocError>
179170
where
180171
A: Allocator,
181-
T: ?Sized,
182172
{
183173
unsafe {
184-
let rc_layout = RcLayout::from_value_ptr(NonNull::from(value));
185-
let ptr = allocate_for_rc::<A, _, STRONG_COUNT>(alloc, A::allocate, &rc_layout);
186-
187-
ptr::copy_nonoverlapping::<u8>(
188-
ptr::from_ref(value).cast(),
189-
ptr.as_ptr().cast(),
190-
mem::size_of_val(value),
191-
);
192-
193-
NonNull::new_unchecked(ptr.as_ptr().with_metadata_of(value))
174+
try_handle_rc_allocation::<STRONG_COUNT>(
175+
alloc.allocate_zeroed(rc_layout.allocation_layout),
176+
rc_layout,
177+
)
194178
}
195179
}
196180

197-
fn try_allocate_uninit_for_rc<A, const STRONG_COUNT: usize>(
198-
alloc: &A,
181+
#[cfg(not(no_global_oom_handling))]
182+
#[track_caller]
183+
unsafe fn handle_rc_allocation<const STRONG_COUNT: usize>(
184+
allocation_result: Result<NonNull<[u8]>, AllocError>,
199185
rc_layout: &RcLayout,
200-
) -> Result<NonNull<()>, AllocError>
201-
where
202-
A: Allocator,
203-
{
204-
try_allocate_for_rc::<A, _, STRONG_COUNT>(alloc, A::allocate, rc_layout)
186+
) -> NonNull<()> {
187+
match allocation_result {
188+
Ok(allocation_ptr) => unsafe {
189+
init_rc_allocation::<STRONG_COUNT>(allocation_ptr, rc_layout)
190+
},
191+
Err(AllocError) => alloc::handle_alloc_error(rc_layout.allocation_layout),
192+
}
205193
}
206194

207-
fn try_allocate_zeroed_for_rc<A, const STRONG_COUNT: usize>(
195+
#[cfg(not(no_global_oom_handling))]
196+
#[track_caller]
197+
fn allocate_uninit_for_rc<A, const STRONG_COUNT: usize>(
208198
alloc: &A,
209199
rc_layout: &RcLayout,
210-
) -> Result<NonNull<()>, AllocError>
200+
) -> NonNull<()>
211201
where
212202
A: Allocator,
213203
{
214-
try_allocate_for_rc::<_, _, STRONG_COUNT>(alloc, A::allocate_zeroed, rc_layout)
204+
unsafe {
205+
handle_rc_allocation::<STRONG_COUNT>(alloc.allocate(rc_layout.allocation_layout), rc_layout)
206+
}
215207
}
216208

217209
#[cfg(not(no_global_oom_handling))]
218210
#[track_caller]
219-
fn allocate_uninit_for_rc<A, const STRONG_COUNT: usize>(
211+
fn allocate_zeroed_for_rc<A, const STRONG_COUNT: usize>(
220212
alloc: &A,
221213
rc_layout: &RcLayout,
222214
) -> NonNull<()>
223215
where
224216
A: Allocator,
225217
{
226-
allocate_for_rc::<A, _, STRONG_COUNT>(alloc, A::allocate, rc_layout)
218+
unsafe {
219+
handle_rc_allocation::<STRONG_COUNT>(
220+
alloc.allocate_zeroed(rc_layout.allocation_layout),
221+
rc_layout,
222+
)
223+
}
227224
}
228225

229226
#[cfg(not(no_global_oom_handling))]
230227
#[track_caller]
231-
fn allocate_zeroed_for_rc<A, const STRONG_COUNT: usize>(
228+
unsafe fn allocate_for_rc_with_value<T, A, const STRONG_COUNT: usize>(
229+
value: &T,
232230
alloc: &A,
233-
rc_layout: &RcLayout,
234-
) -> NonNull<()>
231+
) -> NonNull<T>
235232
where
236233
A: Allocator,
234+
T: ?Sized,
237235
{
238-
allocate_for_rc::<A, _, STRONG_COUNT>(alloc, A::allocate_zeroed, rc_layout)
236+
unsafe {
237+
let rc_layout = RcLayout::from_value_ptr(NonNull::from(value));
238+
let ptr = allocate_uninit_for_rc::<A, STRONG_COUNT>(alloc, &rc_layout);
239+
240+
ptr::copy_nonoverlapping::<u8>(
241+
ptr::from_ref(value).cast(),
242+
ptr.as_ptr().cast(),
243+
mem::size_of_val(value),
244+
);
245+
246+
NonNull::new_unchecked(ptr.as_ptr().with_metadata_of(value))
247+
}
239248
}
240249

241250
struct GuardedWeak<'a, T, A, R>
@@ -370,7 +379,7 @@ where
370379
{
371380
unsafe {
372381
if !self.is_dangling() {
373-
R::inc_weak(self.weak_count_unchecked());
382+
R::increment_ref_count(self.weak_count_unchecked());
374383
}
375384

376385
self.clone_without_inc_ref()
@@ -397,7 +406,7 @@ where
397406
R: RcOps,
398407
{
399408
unsafe {
400-
if R::dec_weak(self.weak_count_unchecked()) {
409+
if R::decrement_ref_count(self.weak_count_unchecked()) {
401410
self.deallocate();
402411
}
403412
};
@@ -750,7 +759,7 @@ where
750759
R: RcOps,
751760
{
752761
unsafe {
753-
R::inc_strong(self.strong_count());
762+
R::increment_ref_count(self.strong_count());
754763

755764
Self::from_weak(self.weak.clone_without_inc_ref())
756765
}
@@ -771,7 +780,7 @@ where
771780
}
772781

773782
pub unsafe fn increment_strong_count<R: RcOps>(ptr: NonNull<T>) {
774-
unsafe { R::inc_strong(strong_count_ptr_from_value_ptr(ptr.cast()).as_ref()) };
783+
unsafe { R::increment_ref_count(strong_count_ptr_from_value_ptr(ptr.cast()).as_ref()) };
775784
}
776785

777786
#[inline(never)]
@@ -789,7 +798,7 @@ where
789798
R: RcOps,
790799
{
791800
unsafe {
792-
if R::dec_strong(self.strong_count()) {
801+
if R::decrement_ref_count(self.strong_count()) {
793802
self.drop_slow::<R>();
794803
}
795804
};
@@ -991,11 +1000,7 @@ impl<T, A> RawRc<T, A> {
9911000

9921001
mem::forget(guard);
9931002

994-
unsafe {
995-
weak.as_ptr().write(data);
996-
997-
RawUniqueRc::from_weak(weak).into_rc::<R>()
998-
}
1003+
unsafe { RawUniqueRc::from_weak_with_value(weak, data).into_rc::<R>() }
9991004
}
10001005

10011006
#[cfg(not(no_global_oom_handling))]
@@ -1015,7 +1020,8 @@ impl<T, A> RawRc<T, A> {
10151020
R: RcOps,
10161021
{
10171022
unsafe {
1018-
R::dec_strong(&self.strong_count()).then(|| self.weak.assume_init_into_inner::<R>())
1023+
R::decrement_ref_count(&self.strong_count())
1024+
.then(|| self.weak.assume_init_into_inner::<R>())
10191025
}
10201026
}
10211027

@@ -1841,10 +1847,6 @@ impl<T, A> RawUniqueRc<T, A>
18411847
where
18421848
T: ?Sized,
18431849
{
1844-
unsafe fn from_weak(weak: RawWeak<T, A>) -> Self {
1845-
Self { weak }
1846-
}
1847-
18481850
pub unsafe fn downgrade<R>(&self) -> RawWeak<T, A>
18491851
where
18501852
A: Clone,
@@ -1876,11 +1878,9 @@ where
18761878
impl<T, A> RawUniqueRc<T, A> {
18771879
#[cfg(not(no_global_oom_handling))]
18781880
unsafe fn from_weak_with_value(weak: RawWeak<T, A>, value: T) -> Self {
1879-
unsafe {
1880-
weak.as_ptr().write(value);
1881+
unsafe { weak.as_ptr().write(value) };
18811882

1882-
Self::from_weak(weak)
1883-
}
1883+
Self { weak }
18841884
}
18851885

18861886
#[cfg(not(no_global_oom_handling))]

library/alloc/src/rc.rs

Lines changed: 23 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -279,53 +279,37 @@ where
279279
move |raw_weak: &RawWeak<T, A>| f(unsafe { mem::transmute(raw_weak) })
280280
}
281281

282-
unsafe fn inc_ref(count: &UnsafeCell<usize>) {
283-
let count = unsafe { &mut *count.get() };
284-
let strong = *count;
285-
286-
// We insert an `assume` here to hint LLVM at an otherwise
287-
// missed optimization.
288-
// SAFETY: The reference count will never be zero when this is
289-
// called.
290-
unsafe { hint::assert_unchecked(strong != 0) };
291-
292-
let strong = count.wrapping_add(1);
293-
294-
*count = strong;
295-
296-
// We want to abort on overflow instead of dropping the value.
297-
// Checking for overflow after the store instead of before
298-
// allows for slightly better code generation.
299-
if intrinsics::unlikely(strong == 0) {
300-
intrinsics::abort();
301-
}
302-
}
282+
enum RcOps {}
303283

304-
unsafe fn dec_ref(count: &UnsafeCell<usize>) -> bool {
305-
let count = unsafe { &mut *count.get() };
284+
unsafe impl raw_rc::RcOps for RcOps {
285+
unsafe fn increment_ref_count(count: &UnsafeCell<usize>) {
286+
let count = unsafe { &mut *count.get() };
287+
let strong = *count;
306288

307-
*count -= 1;
289+
// We insert an `assume` here to hint LLVM at an otherwise
290+
// missed optimization.
291+
// SAFETY: The reference count will never be zero when this is
292+
// called.
293+
unsafe { hint::assert_unchecked(strong != 0) };
308294

309-
*count == 0
310-
}
295+
let strong = count.wrapping_add(1);
311296

312-
enum RcOps {}
297+
*count = strong;
313298

314-
unsafe impl raw_rc::RcOps for RcOps {
315-
unsafe fn inc_strong(strong_count: &UnsafeCell<usize>) {
316-
unsafe { inc_ref(strong_count) };
299+
// We want to abort on overflow instead of dropping the value.
300+
// Checking for overflow after the store instead of before
301+
// allows for slightly better code generation.
302+
if intrinsics::unlikely(strong == 0) {
303+
intrinsics::abort();
304+
}
317305
}
318306

319-
unsafe fn dec_strong(strong_count: &UnsafeCell<usize>) -> bool {
320-
unsafe { dec_ref(strong_count) }
321-
}
307+
unsafe fn decrement_ref_count(count: &UnsafeCell<usize>) -> bool {
308+
let count = unsafe { &mut *count.get() };
322309

323-
unsafe fn inc_weak(weak_count: &UnsafeCell<usize>) {
324-
unsafe { inc_ref(weak_count) };
325-
}
310+
*count -= 1;
326311

327-
unsafe fn dec_weak(weak_count: &UnsafeCell<usize>) -> bool {
328-
unsafe { dec_ref(weak_count) }
312+
*count == 0
329313
}
330314

331315
unsafe fn upgrade(strong_count: &UnsafeCell<usize>) -> bool {
@@ -341,7 +325,7 @@ unsafe impl raw_rc::RcOps for RcOps {
341325
}
342326

343327
unsafe fn downgrade(weak_count: &UnsafeCell<usize>) {
344-
unsafe { inc_ref(weak_count) };
328+
unsafe { Self::increment_ref_count(weak_count) };
345329
}
346330

347331
unsafe fn lock_strong_count(strong_count: &UnsafeCell<usize>) -> bool {

0 commit comments

Comments (0)