@@ -87,9 +87,26 @@ impl<T> Stack<T> {
87
87
}
88
88
89
89
/// Returns the process-wide anchor address for `T`, aligned *down* to
/// `align_of::<T>()`.
///
/// `Ptr<T>` values are stored as signed 32-bit offsets from this address,
/// so every call for a given `T` must observe the same anchor.
///
/// * `init = Some(p)` — first caller wins: `p` is latched into
///   `LAZY_ANCHOR` and becomes the anchor from then on (later `Some`
///   values are ignored by `Once::call_once`).
/// * `init = None` — returns the latched anchor, or — if nothing was ever
///   latched — falls back to the address of a static in `.bss` (see the
///   ZST comment below).
#[cfg(target_arch = "x86_64")]
fn anchor<T>(init: Option<*mut T>) -> *mut T {
    use core::sync::atomic::AtomicU8;

    use spin::Once;

    // Latched exactly once with the first pointer handed in via `Some(_)`.
    static LAZY_ANCHOR: Once<usize> = Once::new();

    let likely_unaligned_address = if let Some(init) = init {
        // `call_once` runs the closure only on the first call; every
        // subsequent call (even with a different `init`) returns the value
        // latched by that first call.
        *LAZY_ANCHOR.call_once(|| init as usize)
    } else {
        LAZY_ANCHOR.get().copied().unwrap_or_else(|| {
            // we may hit this branch with Pool of ZSTs where `grow` does not need to be called
            static BSS_ANCHOR: AtomicU8 = AtomicU8::new(0);
            &BSS_ANCHOR as *const _ as usize
        })
    };

    // Round the address down to `T`'s alignment. Alignments are always
    // powers of two in Rust, so `!(align - 1)` is a valid mask.
    let alignment_mask = !(core::mem::align_of::<T>() - 1);
    let well_aligned_address = likely_unaligned_address & alignment_mask;
    well_aligned_address as *mut T
}
94
111
95
112
/// On x86_64, anchored pointer. This is a (signed) 32-bit offset from `anchor` plus a 32-bit tag
@@ -116,7 +133,7 @@ impl<T> Ptr<T> {
116
133
    /// Creates an anchored pointer from the raw pointer `p`.
    ///
    /// Passes `Some(p)` to `anchor`, so the very first pointer ever handed
    /// to `new` is latched as the process-wide anchor; `p` is then encoded
    /// as a signed 32-bit offset from that anchor.
    ///
    /// Returns `None` when the offset does not fit in an `i32` (i.e. `p`
    /// lies more than ±2 GiB from the anchor).
    pub fn new(p: *mut T) -> Option<Self> {
        use core::convert::TryFrom;

        // `wrapping_sub` keeps the subtraction well-defined for any pair of
        // addresses; `try_from` then rejects out-of-range offsets.
        i32::try_from((p as isize).wrapping_sub(anchor::<T>(Some(p)) as isize))
            .ok()
            .map(|offset| unsafe { Ptr::from_parts(initial_tag_value(), offset) })
    }
@@ -166,7 +183,7 @@ impl<T> Ptr<T> {
166
183
    /// Decodes the stored 32-bit offset back into a raw pointer by adding
    /// it to the anchor. `None` queries the latched anchor without
    /// attempting to (re)initialize it.
    fn as_raw(&self) -> NonNull<T> {
        unsafe {
            NonNull::new_unchecked(
                // Byte-wise offset arithmetic, hence the `*mut u8` cast
                // before `offset`, then back to `*mut T`.
                // SAFETY: the offset was computed by `Ptr::new` relative to
                // the same latched anchor, from a pointer that was non-null
                // at that time. NOTE(review): soundness relies on the anchor
                // never changing between `new` and `as_raw` — confirm the
                // `Once` latch guarantees this on all call paths.
                (anchor::<T>(None) as *mut u8).offset(self.offset() as isize) as *mut T,
            )
        }
    }
0 commit comments