@@ -6658,7 +6658,7 @@ mod test_map {
     use allocator_api2::alloc::{AllocError, Allocator, Global};
     use core::alloc::Layout;
     use core::ptr::NonNull;
-    use core::sync::atomic::{AtomicBool, Ordering};
+    use core::sync::atomic::{AtomicI8, Ordering};
     use rand::{rngs::SmallRng, Rng, SeedableRng};
     use std::borrow::ToOwned;
     use std::cell::RefCell;
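Why the import changes: an `AtomicBool` can only record that the allocator's state was dropped at least once, so it can neither count several allocator clones nor detect a double drop. An `AtomicI8` that starts at the number of expected owners and is decremented in `Drop` catches both failure modes: a leak leaves it positive, a double drop drives it negative, and only "each owner dropped exactly once" lands on zero. A minimal sketch of the idiom (illustration only, not part of the commit; the names are made up):

use std::sync::atomic::{AtomicI8, Ordering};
use std::sync::Arc;

struct Counted {
    drop_count: Arc<AtomicI8>,
}

impl Drop for Counted {
    fn drop(&mut self) {
        self.drop_count.fetch_sub(1, Ordering::SeqCst);
    }
}

fn main() {
    // Two owners are expected to be dropped, so start the counter at 2.
    let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
    let a = Counted { drop_count: dropped.clone() };
    let b = Counted { drop_count: dropped.clone() };
    drop(a);
    drop(b);
    // Both owners dropped exactly once: 2 - 1 - 1 == 0.
    assert_eq!(dropped.load(Ordering::SeqCst), 0);
}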
@@ -8510,47 +8510,44 @@ mod test_map {
         let _map2 = map1.clone();
     }
 
-    #[test]
-    fn test_hashmap_into_iter_bug() {
-        let dropped: Arc<AtomicBool> = Arc::new(AtomicBool::new(false));
+    struct MyAllocInner {
+        drop_count: Arc<AtomicI8>,
+    }
 
-        {
-            struct MyAllocInner {
-                drop_flag: Arc<AtomicBool>,
-            }
+    #[derive(Clone)]
+    struct MyAlloc {
+        _inner: Arc<MyAllocInner>,
+    }
 
-            #[derive(Clone)]
-            struct MyAlloc {
-                _inner: Arc<MyAllocInner>,
-            }
+    impl Drop for MyAllocInner {
+        fn drop(&mut self) {
+            println!("MyAlloc freed.");
+            self.drop_count.fetch_sub(1, Ordering::SeqCst);
+        }
+    }
 
-            impl Drop for MyAllocInner {
-                fn drop(&mut self) {
-                    println!("MyAlloc freed.");
-                    self.drop_flag.store(true, Ordering::SeqCst);
-                }
-            }
+    unsafe impl Allocator for MyAlloc {
+        fn allocate(&self, layout: Layout) -> std::result::Result<NonNull<[u8]>, AllocError> {
+            let g = Global;
+            g.allocate(layout)
+        }
 
-            unsafe impl Allocator for MyAlloc {
-                fn allocate(
-                    &self,
-                    layout: Layout,
-                ) -> std::result::Result<NonNull<[u8]>, AllocError> {
-                    let g = Global;
-                    g.allocate(layout)
-                }
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            let g = Global;
+            g.deallocate(ptr, layout)
+        }
+    }
 
-                unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-                    let g = Global;
-                    g.deallocate(ptr, layout)
-                }
-            }
+    #[test]
+    fn test_hashmap_into_iter_bug() {
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(1));
 
+        {
             let mut map = crate::HashMap::with_capacity_in(
                 10,
                 MyAlloc {
                     _inner: Arc::new(MyAllocInner {
-                        drop_flag: dropped.clone(),
+                        drop_count: dropped.clone(),
                     }),
                 },
             );
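The hoisted `MyAlloc` is now shared by both tests. It forwards all real work to `Global`; the `Arc<MyAllocInner>` exists only so that the inner value's `Drop` impl runs exactly once, when the last clone of the allocator held by the map (or by anything cloned from it) goes away. As a standalone sketch of that shape (illustration only; `CountingAlloc` and `Inner` are made-up names, the trait calls match the `allocator_api2` imports above):

use allocator_api2::alloc::{AllocError, Allocator, Global};
use core::alloc::Layout;
use core::ptr::NonNull;
use std::sync::atomic::{AtomicI8, Ordering};
use std::sync::Arc;

// Every call is delegated to Global; the Arc only exists so that
// Inner's Drop fires once, when the last allocator clone is gone.
struct Inner {
    drop_count: Arc<AtomicI8>,
}

impl Drop for Inner {
    fn drop(&mut self) {
        self.drop_count.fetch_sub(1, Ordering::SeqCst);
    }
}

#[derive(Clone)]
struct CountingAlloc {
    _inner: Arc<Inner>,
}

unsafe impl Allocator for CountingAlloc {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Global.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        Global.deallocate(ptr, layout)
    }
}

fn main() -> Result<(), AllocError> {
    let dropped = Arc::new(AtomicI8::new(1));
    {
        let alloc = CountingAlloc {
            _inner: Arc::new(Inner { drop_count: dropped.clone() }),
        };
        let layout = Layout::array::<u8>(64).unwrap();
        let block = alloc.allocate(layout)?;
        // SAFETY: `block` was allocated by `alloc` with this exact layout.
        unsafe { alloc.deallocate(block.cast(), layout) };
    } // the only allocator handle drops here
    assert_eq!(dropped.load(Ordering::SeqCst), 0);
    Ok(())
}

Because `Clone` on the allocator just bumps the `Arc` refcount, cloning the map cannot by itself change when the counter fires; only dropping the last handle does.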
@@ -8563,6 +8560,96 @@ mod test_map {
             }
         }
 
-        assert!(dropped.load(Ordering::SeqCst));
+        // All allocator clones should already be dropped.
+        assert_eq!(dropped.load(Ordering::SeqCst), 0);
+    }
+
+    #[test]
+    #[should_panic = "panic in clone"]
+    fn test_clone_memory_leaks_and_double_drop() {
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
+
+        {
+            let mut map = crate::HashMap::with_capacity_in(
+                10,
+                MyAlloc {
+                    _inner: Arc::new(MyAllocInner {
+                        drop_count: dropped.clone(),
+                    }),
+                },
+            );
+
+            const DISARMED: bool = false;
+            const ARMED: bool = true;
+
+            struct CheckedCloneDrop {
+                panic_in_clone: bool,
+                dropped: bool,
+                need_drop: Vec<i32>,
+            }
+
+            impl Clone for CheckedCloneDrop {
+                fn clone(&self) -> Self {
+                    if self.panic_in_clone {
+                        panic!("panic in clone")
+                    }
+                    Self {
+                        panic_in_clone: self.panic_in_clone,
+                        dropped: self.dropped,
+                        need_drop: self.need_drop.clone(),
+                    }
+                }
+            }
+
+            impl Drop for CheckedCloneDrop {
+                fn drop(&mut self) {
+                    if self.dropped {
+                        panic!("double drop");
+                    }
+                    self.dropped = true;
+                }
+            }
+
+            let armed_flags = [
+                DISARMED, DISARMED, ARMED, DISARMED, DISARMED, DISARMED, DISARMED,
+            ];
+
+            // Hash and key must be equal to each other to control element placement,
+            // so that we can be sure the elements that don't panic are cloned first.
+            for (idx, &panic_in_clone) in armed_flags.iter().enumerate() {
+                let idx = idx as u64;
+                map.table.insert(
+                    idx,
+                    (
+                        idx,
+                        CheckedCloneDrop {
+                            panic_in_clone,
+                            dropped: false,
+                            need_drop: vec![0, 1, 2, 3],
+                        },
+                    ),
+                    |(k, _)| *k,
+                );
+            }
+
+            let mut count = 0;
+            // Let's check that all elements are located as we wanted.
+            //
+            // SAFETY: We know for sure that `RawTable` will outlive
+            // the returned `RawIter` iterator.
+            for ((key, value), panic_in_clone) in map.iter().zip(armed_flags) {
+                assert_eq!(*key, count);
+                assert_eq!(value.panic_in_clone, panic_in_clone);
+                count += 1;
+            }
+            assert_eq!(map.len(), 7);
+            assert_eq!(count, 7);
+
+            // Clone should normally clone a few elements and then, when the
+            // clone function panics, deallocate its own memory, the memory of
+            // `dropped: Arc<AtomicI8>`, and the memory of the already cloned
+            // elements (the Vec<i32> buffers inside CheckedCloneDrop).
+            let _table2 = map.clone();
+        }
+    }
 }
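The new test relies on a guarantee that also holds for std's collections: if `Clone` panics part way through cloning a collection, unwinding must drop the elements cloned so far exactly once and free the partially built storage. The `should_panic` attribute consumes the expected panic, while the allocator's drop counter and the `dropped` flag in `CheckedCloneDrop` turn any leak or double drop into a test failure. A sketch of the same invariant observed on `Vec` (illustration only, not part of the commit; the names are made up):

use std::panic::{self, AssertUnwindSafe};
use std::sync::atomic::{AtomicI8, Ordering};

// Counts instances created by `clone` that are currently alive.
static LIVE_CLONES: AtomicI8 = AtomicI8::new(0);

struct Explosive {
    armed: bool,
    cloned: bool,
}

impl Clone for Explosive {
    fn clone(&self) -> Self {
        if self.armed {
            panic!("panic in clone");
        }
        LIVE_CLONES.fetch_add(1, Ordering::SeqCst);
        Explosive { armed: false, cloned: true }
    }
}

impl Drop for Explosive {
    fn drop(&mut self) {
        // Only clone-created instances are counted, so the originals
        // in `src` below never touch the counter.
        if self.cloned {
            LIVE_CLONES.fetch_sub(1, Ordering::SeqCst);
        }
    }
}

fn main() {
    let src = vec![
        Explosive { armed: false, cloned: false },
        Explosive { armed: false, cloned: false },
        Explosive { armed: true, cloned: false }, // the third clone panics
    ];

    // Cloning panics on the third element; the two clones made so far
    // must be dropped during unwinding, exactly once each.
    let result = panic::catch_unwind(AssertUnwindSafe(|| src.clone()));
    assert!(result.is_err());
    assert_eq!(LIVE_CLONES.load(Ordering::SeqCst), 0);
}

`Vec` builds the new buffer behind a drop guard, so the successful clones are dropped during unwinding; hashbrown's map clone is expected to behave the same way, which is exactly what this commit's tests pin down.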