@@ -3,17 +3,20 @@
 //! Eventually this should probably be replaced with salsa-based interning.
 
 use std::{
+    collections::HashMap,
     fmt::{self, Debug},
     hash::{BuildHasherDefault, Hash},
     ops::Deref,
     sync::Arc,
 };
 
-use dashmap::{DashMap, SharedValue};
+use dashmap::{lock::RwLockWriteGuard, DashMap, SharedValue};
 use once_cell::sync::OnceCell;
 use rustc_hash::FxHasher;
 
 type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
+type Guard<T> =
+    RwLockWriteGuard<'static, HashMap<Arc<T>, SharedValue<()>, BuildHasherDefault<FxHasher>>>;
 
 #[derive(Hash)]
 pub struct Interned<T: Internable + ?Sized> {
@@ -22,10 +25,22 @@ pub struct Interned<T: Internable + ?Sized> {
 
 impl<T: Internable> Interned<T> {
     pub fn new(obj: T) -> Self {
+        match Interned::lookup(&obj) {
+            Ok(this) => this,
+            Err(shard) => {
+                let arc = Arc::new(obj);
+                Self::alloc(arc, shard)
+            }
+        }
+    }
+}
+
+impl<T: Internable + ?Sized> Interned<T> {
+    fn lookup(obj: &T) -> Result<Self, Guard<T>> {
         let storage = T::storage().get();
-        let shard_idx = storage.determine_map(&obj);
+        let shard_idx = storage.determine_map(obj);
         let shard = &storage.shards()[shard_idx];
-        let mut shard = shard.write();
+        let shard = shard.write();
 
         // Atomically,
         // - check if `obj` is already in the map
@@ -34,13 +49,15 @@ impl<T: Internable> Interned<T> {
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
 
-        // FIXME: avoid double lookup by using raw entry API (once stable, or when hashbrown can be
-        // plugged into dashmap)
-        if let Some((arc, _)) = shard.get_key_value(&obj) {
-            return Self { arc: arc.clone() };
+        // FIXME: avoid double lookup/hashing by using raw entry API (once stable, or when
+        // hashbrown can be plugged into dashmap)
+        match shard.get_key_value(obj) {
+            Some((arc, _)) => Ok(Self { arc: arc.clone() }),
+            None => Err(shard),
         }
+    }
 
-        let arc = Arc::new(obj);
+    fn alloc(arc: Arc<T>, mut shard: Guard<T>) -> Self {
         let arc2 = arc.clone();
 
         shard.insert(arc2, SharedValue::new(()));
@@ -49,6 +66,18 @@ impl<T: Internable> Interned<T> {
     }
 }
 
+impl Interned<str> {
+    pub fn new_str(s: &str) -> Self {
+        match Interned::lookup(s) {
+            Ok(this) => this,
+            Err(shard) => {
+                let arc = Arc::<str>::from(s);
+                Self::alloc(arc, shard)
+            }
+        }
+    }
+}
+
 impl<T: Internable + ?Sized> Drop for Interned<T> {
     #[inline]
     fn drop(&mut self) {
@@ -98,6 +127,14 @@ impl<T: Internable> PartialEq for Interned<T> {
 
 impl<T: Internable> Eq for Interned<T> {}
 
+impl PartialEq for Interned<str> {
+    fn eq(&self, other: &Self) -> bool {
+        Arc::ptr_eq(&self.arc, &other.arc)
+    }
+}
+
+impl Eq for Interned<str> {}
+
 impl<T: Internable + ?Sized> AsRef<T> for Interned<T> {
     #[inline]
     fn as_ref(&self) -> &T {
@@ -157,4 +194,4 @@ macro_rules! impl_internable {
     )+ };
 }
 
-impl_internable!(crate::type_ref::TypeRef, crate::type_ref::TraitRef, crate::path::ModPath);
+impl_internable!(crate::type_ref::TypeRef, crate::type_ref::TraitRef, crate::path::ModPath, str);
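
The core of this change is splitting `Interned::new` into `lookup` and `alloc`: on a miss, `lookup` returns the still-held write lock on the shard (the new `Guard<T>` alias) instead of dropping it, and `alloc` inserts under that same lock, so no other thread can intern the same value in between. This also lets unsized types share the insertion path, since each caller builds the `Arc` its own way (`Arc::new` for sized `T`, `Arc::<str>::from` for `str`). Below is a minimal single-map sketch of the same lock-handoff pattern; the `POOL` name and the plain `Mutex<HashSet<…>>` are illustrative stand-ins for the sharded `DashMap`, not the crate's actual API:

    use std::collections::HashSet;
    use std::sync::{Arc, Mutex, MutexGuard};

    use once_cell::sync::Lazy;

    // Hypothetical un-sharded pool; the real code picks a DashMap shard via
    // determine_map() and the guard is dashmap's RwLockWriteGuard.
    static POOL: Lazy<Mutex<HashSet<Arc<str>>>> = Lazy::new(|| Mutex::new(HashSet::new()));

    // Hit: clone the existing Arc. Miss: hand the still-held lock back to the
    // caller so the later insert cannot race with another thread.
    fn lookup(s: &str) -> Result<Arc<str>, MutexGuard<'static, HashSet<Arc<str>>>> {
        let guard = POOL.lock().unwrap();
        match guard.get(s) {
            Some(existing) => Ok(existing.clone()),
            None => Err(guard),
        }
    }

    // The caller constructs the Arc however the type allows, then inserts it
    // while the lock from the failed lookup is still held.
    fn alloc(arc: Arc<str>, mut guard: MutexGuard<'static, HashSet<Arc<str>>>) -> Arc<str> {
        guard.insert(arc.clone());
        arc
    }

    fn intern(s: &str) -> Arc<str> {
        match lookup(s) {
            Ok(existing) => existing,
            Err(guard) => alloc(Arc::from(s), guard),
        }
    }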
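And a short usage sketch of the new `str` support (hypothetical call sites; `Interned`, `new_str`, and the `Internable` impl for `str` all come from the patched module). Because interning guarantees one `Arc<str>` per distinct live string, the added `PartialEq for Interned<str>` can compare by `Arc::ptr_eq` rather than by string contents:

    let a = Interned::new_str("foo");
    let b = Interned::new_str("foo");

    // Equal strings resolve to the same Arc<str>, so `==` is a pointer compare.
    assert!(a == b);
    // AsRef (shown unchanged above) still yields the underlying str.
    assert_eq!(a.as_ref(), "foo");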