@@ -1259,21 +1259,45 @@ impl ThreadId {
                 }
             }
             _ => {
-                use crate::sync::{Mutex, PoisonError};
-
-                static COUNTER: Mutex<u64> = Mutex::new(0);
+                use crate::cell::SyncUnsafeCell;
+                use crate::hint::spin_loop;
+                use crate::sync::atomic::{Atomic, AtomicBool};
+                use crate::thread::yield_now;
+
+                // If we don't have a 64-bit atomic we use a small spinlock. We don't use Mutex
+                // here as we might be trying to get the current thread id in the global allocator,
+                // and on some platforms Mutex requires allocation.
+                static COUNTER_LOCKED: Atomic<bool> = AtomicBool::new(false);
+                static COUNTER: SyncUnsafeCell<u64> = SyncUnsafeCell::new(0);
+
+                // Acquire lock.
+                let mut spin = 0;
+                while COUNTER_LOCKED.compare_exchange_weak(false, true, Ordering::Acquire, Ordering::Relaxed).is_err() {
+                    if spin <= 3 {
+                        for _ in 0..(1 << spin) {
+                            spin_loop();
+                        }
+                    } else {
+                        yield_now();
+                    }
+                    spin += 1;
+                }

-                let mut counter = COUNTER.lock().unwrap_or_else(PoisonError::into_inner);
-                let Some(id) = counter.checked_add(1) else {
-                    // in case the panic handler ends up calling `ThreadId::new()`,
-                    // avoid reentrant lock acquire.
-                    drop(counter);
-                    exhausted();
+                let id;
+                // SAFETY: we have an exclusive lock on the counter.
+                unsafe {
+                    id = (*COUNTER.get()).saturating_add(1);
+                    (*COUNTER.get()) = id;
                 };

-                *counter = id;
-                drop(counter);
-                ThreadId(NonZero::new(id).unwrap())
+                // Release the lock.
+                COUNTER_LOCKED.store(false, Ordering::Release);
+
+                if id == u64::MAX {
+                    exhausted()
+                } else {
+                    ThreadId(NonZero::new(id).unwrap())
+                }
             }
         }
     }
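For reference, a minimal standalone sketch of the same spin-then-yield lock pattern, built only on stable APIs: `AtomicBool` and a hand-rolled `Sync` wrapper stand in for the std-internal `Atomic<bool>` and `SyncUnsafeCell`, and the `SyncCell`/`next_id` names are purely illustrative, not part of this change.

```rust
use std::cell::UnsafeCell;
use std::hint::spin_loop;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread::yield_now;

// Illustrative stand-in for the unstable `SyncUnsafeCell` used in std.
struct SyncCell(UnsafeCell<u64>);
unsafe impl Sync for SyncCell {}

static LOCKED: AtomicBool = AtomicBool::new(false);
static COUNTER: SyncCell = SyncCell(UnsafeCell::new(0));

fn next_id() -> u64 {
    // Acquire: spin with exponential backoff (1, 2, 4, 8 pauses), then fall back to yielding.
    let mut spin = 0;
    while LOCKED
        .compare_exchange_weak(false, true, Ordering::Acquire, Ordering::Relaxed)
        .is_err()
    {
        if spin <= 3 {
            for _ in 0..(1u32 << spin) {
                spin_loop();
            }
        } else {
            yield_now();
        }
        spin += 1;
    }

    // SAFETY: the spinlock above gives us exclusive access to the counter.
    let id = unsafe {
        let id = (*COUNTER.0.get()).saturating_add(1);
        *COUNTER.0.get() = id;
        id
    };

    // Release: a plain Release store is enough once we hold the lock.
    LOCKED.store(false, Ordering::Release);
    id
}

fn main() {
    assert_eq!(next_id(), 1);
    assert_eq!(next_id(), 2);
}
```

As in the old Mutex version, the overflow path stays outside the critical section: the new code releases the lock (the `Release` store) before `exhausted()` can panic, so a panic handler that calls `ThreadId::new()` again cannot deadlock on a lock it already holds.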