
Commit 9c1ab28

Lazy-chunk Symbol interner
This fixes unsoundness in Symbol::as_str by leaking the chunks (via static memory).
1 parent d2f335d commit 9c1ab28
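
The soundness angle, in brief: the interned bytes now live in chunks owned by a static, which is never deallocated, so handing out &'static str into them no longer needs the lifetime-laundering transmute that as_str used before. A minimal standalone sketch of that idea (illustrative code, not part of the commit):

use std::sync::LazyLock;

// A `static` is never dropped: its contents are effectively leaked for the
// lifetime of the program, so references into it can soundly be `'static`.
static STORAGE: LazyLock<String> = LazyLock::new(|| String::from("interned"));

fn as_static_str() -> &'static str {
    // Deref coercion from `&'static LazyLock<String>` down to `&'static str`;
    // no `unsafe` or `transmute` required, unlike the old arena-based interner.
    &STORAGE
}

fn main() {
    assert_eq!(as_static_str(), "interned");
}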

File tree

7 files changed: +212 -51 lines changed

Cargo.lock
compiler/rustc_data_structures/Cargo.toml
compiler/rustc_data_structures/src/marker.rs
compiler/rustc_span/Cargo.toml
compiler/rustc_span/src/lib.rs
compiler/rustc_span/src/symbol.rs
compiler/rustc_span/src/symbol/tests.rs

Cargo.lock

Lines changed: 2 additions & 0 deletions

@@ -3671,6 +3671,7 @@ dependencies = [
  "either",
  "elsa",
  "ena",
+ "hashbrown 0.15.2",
  "indexmap",
  "jobserver",
  "libc",
@@ -4568,6 +4569,7 @@ version = "0.0.0"
 dependencies = [
  "blake3",
  "derive-where",
+ "hashbrown 0.15.2",
  "indexmap",
  "itoa",
  "md-5",

compiler/rustc_data_structures/Cargo.toml

Lines changed: 1 addition & 0 deletions

@@ -10,6 +10,7 @@ bitflags = "2.4.1"
 either = "1.0"
 elsa = "1.11.0"
 ena = "0.14.3"
+hashbrown = { version = "0.15.2", default-features = false }
 indexmap = "2.4.0"
 jobserver_crate = { version = "0.1.28", package = "jobserver" }
 measureme = "11"

compiler/rustc_data_structures/src/marker.rs

Lines changed: 2 additions & 0 deletions

@@ -68,6 +68,7 @@ impl_dyn_send!(
     [std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
     [std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
     [std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
+    [hashbrown::HashTable<T> where T: DynSend]
     [std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
     [Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]
     [Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend]
@@ -142,6 +143,7 @@ impl_dyn_sync!(
     [std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
     [std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
     [std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
+    [hashbrown::HashTable<T> where T: DynSync]
     [std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
     [Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
     [Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]

compiler/rustc_span/Cargo.toml

Lines changed: 1 addition & 0 deletions

@@ -7,6 +7,7 @@ edition = "2021"
 # tidy-alphabetical-start
 blake3 = "1.5.2"
 derive-where = "1.2.7"
+hashbrown = { version = "0.15.2", default-features = false }
 indexmap = { version = "2.0.0" }
 itoa = "1.0"
 md5 = { package = "md-5", version = "0.10.0" }

compiler/rustc_span/src/lib.rs

Lines changed: 3 additions & 0 deletions

@@ -27,10 +27,13 @@
 #![feature(let_chains)]
 #![feature(map_try_insert)]
 #![feature(negative_impls)]
+#![feature(new_zeroed_alloc)]
 #![feature(read_buf)]
 #![feature(round_char_boundary)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
+#![feature(str_from_raw_parts)]
+#![feature(sync_unsafe_cell)]
 #![warn(unreachable_pub)]
 // tidy-alphabetical-end

compiler/rustc_span/src/symbol.rs

Lines changed: 196 additions & 46 deletions

@@ -2,11 +2,13 @@
 //! allows bidirectional lookup; i.e., given a value, one can easily find the
 //! type, and vice versa.
 
-use std::hash::{Hash, Hasher};
+use std::cell::SyncUnsafeCell;
+use std::hash::{BuildHasher, BuildHasherDefault, Hash, Hasher};
+use std::sync::atomic::{AtomicU32, Ordering};
+use std::sync::{LazyLock, Mutex};
 use std::{fmt, str};
 
-use rustc_arena::DroplessArena;
-use rustc_data_structures::fx::FxIndexSet;
+use rustc_data_structures::fx::FxHasher;
 use rustc_data_structures::stable_hasher::{
     HashStable, StableCompare, StableHasher, ToStableHashKey,
 };
@@ -2475,18 +2477,9 @@ impl Symbol {
         with_session_globals(|session_globals| session_globals.symbol_interner.intern(string))
     }
 
-    /// Access the underlying string. This is a slowish operation because it
-    /// requires locking the symbol interner.
-    ///
-    /// Note that the lifetime of the return value is a lie. It's not the same
-    /// as `&self`, but actually tied to the lifetime of the underlying
-    /// interner. Interners are long-lived, and there are very few of them, and
-    /// this function is typically used for short-lived things, so in practice
-    /// it works out ok.
+    /// Access the underlying string.
     pub fn as_str(&self) -> &str {
-        with_session_globals(|session_globals| unsafe {
-            std::mem::transmute::<&str, &str>(session_globals.symbol_interner.get(*self))
-        })
+        with_session_globals(|session_globals| session_globals.symbol_interner.get(*self))
     }
 
     pub fn as_u32(self) -> u32 {
@@ -2541,53 +2534,210 @@ impl StableCompare for Symbol {
     }
 }
 
-pub(crate) struct Interner(Lock<InternerInner>);
+// This is never de-initialized and stores interned &str in static storage.
+// Each str is stored length-prefixed (u32), and we allow random-access indexing with a u32 index
+// by direct lookup in the arena. Indices below u16::MAX are stored in a separate structure (they
+// are pre-allocated at dense addresses, so we can't use the same lockless O(1) hack for them).
+static GLOBAL_ARENA: LazyLock<StringArena> = LazyLock::new(|| StringArena::new());
 
-// The `&'static str`s in this type actually point into the arena.
-//
-// This type is private to prevent accidentally constructing more than one
-// `Interner` on the same thread, which makes it easy to mix up `Symbol`s
-// between `Interner`s.
-struct InternerInner {
-    arena: DroplessArena,
-    strings: FxIndexSet<&'static str>,
+const CHUNK_SIZE: usize = 4 * 1024 * 1024;
+const CHUNKS: usize = (u32::MAX as usize).div_ceil(CHUNK_SIZE);
+
+struct StringChunk {
+    array: LazyLock<Box<[SyncUnsafeCell<u8>; CHUNK_SIZE]>>,
 }
 
-impl Interner {
-    fn prefill(init: &[&'static str]) -> Self {
-        Interner(Lock::new(InternerInner {
-            arena: Default::default(),
-            strings: init.iter().copied().collect(),
-        }))
+impl Default for StringChunk {
+    fn default() -> Self {
+        Self {
+            array: LazyLock::new(|| unsafe {
+                // SAFETY: Zero-init'd UnsafeCell<u8> is initialized and has no other invariants
+                // to worry about.
+                Box::new_zeroed().assume_init()
+            }),
+        }
     }
+}
 
-    #[inline]
-    fn intern(&self, string: &str) -> Symbol {
-        let mut inner = self.0.lock();
-        if let Some(idx) = inner.strings.get_index_of(string) {
-            return Symbol::new(idx as u32);
+struct StringArena {
+    chunks: [StringChunk; CHUNKS],
+    // This guards writes to `write_at`, but not reads, which proceed lock-free. `write_at` only
+    // moves forward, so this ends up being safe.
+    writer: std::sync::Mutex<()>,
+    write_at: AtomicU32,
+}
+
+impl StringArena {
+    fn new() -> Self {
+        StringArena {
+            chunks: std::array::from_fn(|_| StringChunk::default()),
+            // Reserve 2^16 u32 indices -- these will be used for pre-filled interning where we
+            // have a dense SymbolIndex space. We could make this exact, but it doesn't really
+            // matter for this initial test anyway.
+            write_at: AtomicU32::new(u32::from(u16::MAX)),
+            writer: Mutex::new(()),
         }
+    }
 
-        let string: &str = inner.arena.alloc_str(string);
+    /// Copy the passed &str into this buffer. Returns an index that can be passed to `get` to
+    /// retrieve the &str back.
+    ///
+    /// The returned u32 is guaranteed to be at least u16::MAX (exactly u16::MAX for the first
+    /// call).
+    fn alloc(&self, s: &str) -> u32 {
+        let _guard = self.writer.lock().unwrap();
+        // Allocate a region of a chunk and fill it with the &str's length and bytes.
+        let start = self.write_at.load(Ordering::Acquire) as usize;
+
+        // If the string is too large it won't fit into a single chunk, and we currently can't
+        // store it. Symbols over 4MB seem unlikely, but if we hit issues in practice we can
+        // adjust the data structure to allow variable-sized chunks, which would let individual
+        // symbols grow near-arbitrarily large.
+        assert!(s.len() <= CHUNK_SIZE);
+
+        // Assert we're in-bounds. Summing across chunks we have exactly u32::MAX bytes, so this
+        // implicitly checks that we haven't exceeded our overall capacity.
+        //
+        // This works because if the allocation crosses a chunk boundary, we skip to the next
+        // chunk entirely.
+        let possible_end = start.checked_add(4).unwrap().checked_add(s.len()).unwrap();
+
+        // We fit into the current chunk.
+        let len = s.len();
+
+        // Make sure the length fits in our "utf-8 encoded" form.
+        //
+        // If all bytes we write are valid UTF-8, we can treat the whole region of memory as one
+        // big &str, which avoids needing to check that it's still UTF-8 when indexing
+        // arbitrarily.
+        assert!(len < (1 << 28));
+        let encoded_len: u32 = ((len as u32 & (0x7f << (7 * 3))) << 3)
+            | ((len as u32 & (0x7f << (7 * 2))) << 2)
+            | ((len as u32 & (0x7f << (7 * 1))) << 1)
+            | (len as u32 & (0x7f << (7 * 0)));
+
+        // We're either in our initial chunk or the next one.
+        let (chunk, start_absolute, offset, end) =
+            if start / CHUNK_SIZE != possible_end / CHUNK_SIZE {
+                let chunk_idx = start / CHUNK_SIZE + 1;
+                (
+                    &self.chunks[chunk_idx].array,
+                    chunk_idx * CHUNK_SIZE,
+                    0usize,
+                    chunk_idx * CHUNK_SIZE + 4 + s.len(),
+                )
+            } else {
+                (&self.chunks[start / CHUNK_SIZE].array, start, start % CHUNK_SIZE, possible_end)
+            };
+        let chunk = LazyLock::force(&chunk);
+
+        // SAFETY:
+        //
+        // * `_guard` above protects against concurrent `alloc`s (so there is a single writer for
+        //   `write_at`).
+        // * `write_at` only increases, and shared access is only provided to memory < write_at.
+        // * All chunks are zero-init'd at allocation: no uninitialized memory here.
+        let chunk_tail = unsafe {
+            std::slice::from_raw_parts_mut(
                chunk.as_ptr().cast::<u8>().add(offset).cast_mut(),
                CHUNK_SIZE - offset,
            )
        };
+        // `encoded_len` is utf-8-compatible.
+        chunk_tail[..4].copy_from_slice(&encoded_len.to_ne_bytes());
+        // `s` is a &str, so this is trivially correct.
+        chunk_tail[4..][..s.len()].copy_from_slice(s.as_bytes());
+
+        // Semantically, this releases the memory to readers.
+        self.write_at.store(end as u32, Ordering::Release);
+
+        start_absolute as u32
+    }
 
-        // SAFETY: we can extend the arena allocation to `'static` because we
-        // only access these while the arena is still alive.
-        let string: &'static str = unsafe { &*(string as *const str) };
+    /// Get the allocated string at the passed index.
+    ///
+    /// Note that this **does not** check that the passed index is actually an index returned by
+    /// `alloc`.
+    fn get(&self, idx: u32) -> &str {
+        let end = self.write_at.load(Ordering::Acquire);
+
+        assert!(idx < end);
+
+        let chunk = &self.chunks[idx as usize / CHUNK_SIZE].array;
+        let chunk = LazyLock::force(&chunk);
+
+        let chunk_str_len = if idx as usize / CHUNK_SIZE == end as usize / CHUNK_SIZE {
+            end as usize % CHUNK_SIZE
+        } else {
+            // Note that zero-initialized bytes are valid UTF-8, so even if we haven't filled a
+            // full chunk it remains valid for our purposes.
+            CHUNK_SIZE
+        };
+
+        // SAFETY: `write_at` only increases and guarantees valid &str contents below it. We
+        // checked above that we are < write_at.
+        let chunk_head =
+            unsafe { std::str::from_raw_parts(chunk.as_ptr().cast::<u8>(), chunk_str_len) };
+        let idx = idx as usize % CHUNK_SIZE;
+
+        let len = u32::from_ne_bytes(chunk_head[idx..idx + 4].as_bytes().try_into().unwrap());
+        let len = ((len & (0x7f << (8 * 3))) >> 3)
+            | ((len & (0x7f << (8 * 2))) >> 2)
+            | ((len & (0x7f << (8 * 1))) >> 1)
+            | (len & (0x7f << (8 * 0)));
+        &chunk_head[idx + 4..idx + 4 + len as usize]
+    }
+}
+
+pub(crate) struct Interner(&'static [&'static str], Lock<InternerInner>);
+
+struct InternerInner {
+    strings: hashbrown::HashTable<Symbol>,
+}
+
+impl Interner {
+    fn prefill(init: &'static [&'static str]) -> Self {
+        assert!(init.len() < u16::MAX as usize);
+        let mut strings = hashbrown::HashTable::new();
+
+        for (idx, s) in init.iter().copied().enumerate() {
+            let mut hasher = FxHasher::default();
+            s.hash(&mut hasher);
+            let hash = hasher.finish();
+            strings.insert_unique(hash, Symbol::new(idx as u32), |val| {
+                // The symbol has to be from `init` because we haven't inserted anything else
+                // yet.
+                BuildHasherDefault::<FxHasher>::default().hash_one(init[val.0.index()])
+            });
+        }
 
-        // This second hash table lookup can be avoided by using `RawEntryMut`,
-        // but this code path isn't hot enough for it to be worth it. See
-        // #91445 for details.
-        let (idx, is_new) = inner.strings.insert_full(string);
-        debug_assert!(is_new); // due to the get_index_of check above
+        Interner(init, Lock::new(InternerInner { strings }))
+    }
 
-        Symbol::new(idx as u32)
+    #[inline]
+    fn intern(&self, string: &str) -> Symbol {
+        let hash = BuildHasherDefault::<FxHasher>::default().hash_one(string);
+        let mut inner = self.1.lock();
+        match inner.strings.find_entry(hash, |v| self.get(*v) == string) {
+            Ok(e) => *e.get(),
+            Err(e) => {
+                let idx = GLOBAL_ARENA.alloc(string);
+                let res = Symbol::new(idx as u32);
+
+                e.into_table().insert_unique(hash, res, |val| {
+                    BuildHasherDefault::<FxHasher>::default().hash_one(self.get(*val))
+                });
+
+                res
+            }
+        }
     }
 
     /// Get the symbol as a string.
     ///
     /// [`Symbol::as_str()`] should be used in preference to this function.
-    fn get(&self, symbol: Symbol) -> &str {
-        self.0.lock().strings.get_index(symbol.0.as_usize()).unwrap()
+    fn get(&self, symbol: Symbol) -> &'static str {
+        if symbol.0.index() < u16::MAX as usize {
+            self.0[symbol.0.index()]
+        } else {
+            GLOBAL_ARENA.get(symbol.0.as_u32())
+        }
     }
 }

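A note on the "utf-8 encoded" length prefix written in `alloc` and decoded in `get` above: a length below 2^28 is spread across the four bytes of a u32, seven payload bits per byte, so every byte stays below 0x80 and an entire chunk can be reinterpreted as one valid UTF-8 &str. A standalone round-trip of exactly those shifts (the encode/decode names here are invented for illustration, not part of the commit):

// Round-trip of StringArena's length encoding: seven payload bits per byte
// keeps every byte ASCII (< 0x80), hence valid one-byte UTF-8 on its own.
fn encode(len: u32) -> u32 {
    assert!(len < (1 << 28));
    ((len & (0x7f << (7 * 3))) << 3)
        | ((len & (0x7f << (7 * 2))) << 2)
        | ((len & (0x7f << (7 * 1))) << 1)
        | (len & (0x7f << (7 * 0)))
}

fn decode(enc: u32) -> u32 {
    // The masks move from 7-bit strides to 8-bit strides: after encoding,
    // the payload sits at byte boundaries in the u32.
    ((enc & (0x7f << (8 * 3))) >> 3)
        | ((enc & (0x7f << (8 * 2))) >> 2)
        | ((enc & (0x7f << (8 * 1))) >> 1)
        | (enc & (0x7f << (8 * 0)))
}

fn main() {
    for len in [0u32, 1, 127, 128, 4 * 1024 * 1024, (1 << 28) - 1] {
        let enc = encode(len);
        // Every encoded byte has its top bit clear, i.e. is valid ASCII.
        assert!(enc.to_ne_bytes().iter().all(|b| *b < 0x80));
        assert_eq!(decode(enc), len);
    }
}

Zeroed bytes decode to length zero, which is why the zero-initialized chunk tails stay harmless.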
compiler/rustc_span/src/symbol/tests.rs

Lines changed: 7 additions & 5 deletions

@@ -4,15 +4,17 @@ use crate::create_default_session_globals_then;
 #[test]
 fn interner_tests() {
     let i = Interner::prefill(&[]);
+    let dog = i.intern("dog");
     // first one is zero:
-    assert_eq!(i.intern("dog"), Symbol::new(0));
+    assert_eq!(i.intern("dog"), dog);
     // re-use gets the same entry:
-    assert_eq!(i.intern("dog"), Symbol::new(0));
+    assert_eq!(i.intern("dog"), dog);
     // different string gets a different #:
-    assert_eq!(i.intern("cat"), Symbol::new(1));
-    assert_eq!(i.intern("cat"), Symbol::new(1));
+    let cat = i.intern("cat");
+    assert_eq!(i.intern("cat"), cat);
+    assert_eq!(i.intern("cat"), cat);
     // dog is still at zero
-    assert_eq!(i.intern("dog"), Symbol::new(0));
+    assert_eq!(i.intern("dog"), dog);
 }
 
 #[test]

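One more piece worth distilling is the StringArena concurrency protocol: a single mutex-serialized writer appends bytes and then publishes a monotonically increasing watermark with a Release store, while readers Acquire-load the watermark and only touch bytes below it, with no lock. A self-contained sketch of the same pattern on stable Rust (the Log type and its names are hypothetical; an explicit `unsafe impl Sync` stands in for the commit's SyncUnsafeCell):

use std::cell::UnsafeCell;
use std::sync::Mutex;
use std::sync::atomic::{AtomicUsize, Ordering};

const CAP: usize = 1024;

// Append-only byte log: one serialized writer, any number of lock-free readers.
struct Log {
    buf: UnsafeCell<[u8; CAP]>,
    writer: Mutex<()>,      // serializes appends; readers never take it
    published: AtomicUsize, // watermark: bytes below this index are frozen
}

// SAFETY: readers only access bytes below `published`, which the writer never
// touches again; the Release/Acquire pair on `published` makes them visible.
unsafe impl Sync for Log {}

impl Log {
    fn append(&self, data: &[u8]) -> usize {
        let _guard = self.writer.lock().unwrap();
        let start = self.published.load(Ordering::Relaxed);
        assert!(start + data.len() <= CAP);
        // Raw-pointer writes: taking `&mut` to the buffer here would alias
        // concurrent reader references and be UB.
        let base = self.buf.get().cast::<u8>();
        unsafe { std::ptr::copy_nonoverlapping(data.as_ptr(), base.add(start), data.len()) };
        // Publish: everything below the new watermark is now immutable.
        self.published.store(start + data.len(), Ordering::Release);
        start
    }

    fn read(&self, start: usize, len: usize) -> &[u8] {
        let end = self.published.load(Ordering::Acquire);
        assert!(start + len <= end);
        // SAFETY: the requested range lies below the Acquire-loaded watermark.
        unsafe { std::slice::from_raw_parts(self.buf.get().cast::<u8>().add(start), len) }
    }
}

fn main() {
    let log = Log {
        buf: UnsafeCell::new([0; CAP]),
        writer: Mutex::new(()),
        published: AtomicUsize::new(0),
    };
    let at = log.append(b"hello");
    assert_eq!(log.read(at, 5), b"hello".as_slice());
}

In the commit, write_at plays the role of published, and the lazily allocated chunk array replaces the single fixed buffer.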