Commit e0a4ded

Merge pull request #4606 from RalfJung/alloc_addrs
alloc_addresses: track more explicitly whether we are in charge of generating addresses
2 parents: 5d8b81c + f9874f6
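
The core of the change is in `GlobalStateInner` (first diff below): instead of always carrying an `AddressGenerator` and a `ReusePool`, the state now holds `address_generation: Option<(AddressGenerator, ReusePool)>`, which is `Some` exactly when Miri itself picks addresses (neither native-lib mode nor GenMC is active), and `prepared_alloc_bytes` becomes an `Option` that is populated only in native-lib mode. Here is a minimal sketch of that pattern; the types and names (`GlobalState`, `pick_addr`, `addresses_managed_externally`) are simplified stand-ins, not Miri's API:

```rust
// Minimal sketch of the "Option encodes who is in charge" pattern from this PR.
// `AddressGenerator` and `ReusePool` are simplified stand-ins for Miri's types.
use std::ops::Range;

struct AddressGenerator {
    remaining: Range<u64>,
}

impl AddressGenerator {
    fn new(range: Range<u64>) -> Self {
        AddressGenerator { remaining: range }
    }

    /// Hand out the next free address, rounded up to `align`.
    fn generate(&mut self, size: u64, align: u64) -> u64 {
        let addr = self.remaining.start.next_multiple_of(align);
        self.remaining.start = addr + size;
        addr
    }
}

/// Stand-in for the pool of freed addresses available for reuse.
struct ReusePool;

struct GlobalState {
    /// `Some` only when we generate addresses ourselves; `None` when something
    /// else (native code, GenMC) determines them.
    address_generation: Option<(AddressGenerator, ReusePool)>,
}

impl GlobalState {
    fn new(addresses_managed_externally: bool) -> Self {
        GlobalState {
            // `bool::then` builds the pair only in the "we are in charge" case.
            address_generation: (!addresses_managed_externally)
                .then(|| (AddressGenerator::new(0x1000..u64::MAX), ReusePool)),
        }
    }

    fn pick_addr(&mut self, size: u64, align: u64) -> u64 {
        // This path must only be reached when we are in charge of addresses,
        // so the `unwrap` documents (and checks) that invariant.
        let (addr_gen, _reuse) = self.address_generation.as_mut().unwrap();
        addr_gen.generate(size, align)
    }
}

fn main() {
    let mut state = GlobalState::new(false);
    println!("first address: {:#x}", state.pick_addr(16, 8));
}
```

As the PR title says, the question "are we in charge of generating addresses?" is now answered by whether `address_generation` is `Some`, decided once at construction, instead of being re-derived from the native-lib/GenMC configuration at each use site.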

File tree: 2 files changed (+44 -40 lines)

src/alloc_addresses/mod.rs

Lines changed: 42 additions & 37 deletions
```diff
@@ -42,20 +42,19 @@ pub struct GlobalStateInner {
     /// they do not have an `AllocExtra`.
     /// This is the inverse of `int_to_ptr_map`.
     base_addr: FxHashMap<AllocId, u64>,
-    /// Temporarily store prepared memory space for global allocations the first time their memory
-    /// address is required. This is used to ensure that the memory is allocated before Miri assigns
-    /// it an internal address, which is important for matching the internal address to the machine
-    /// address so FFI can read from pointers.
-    prepared_alloc_bytes: FxHashMap<AllocId, MiriAllocBytes>,
-    /// A pool of addresses we can reuse for future allocations.
-    reuse: ReusePool,
-    /// Whether an allocation has been exposed or not. This cannot be put
+    /// The set of exposed allocations. This cannot be put
     /// into `AllocExtra` for the same reason as `base_addr`.
     exposed: FxHashSet<AllocId>,
-    /// The generator for new addresses in a given range.
-    address_generator: AddressGenerator,
     /// The provenance to use for int2ptr casts
     provenance_mode: ProvenanceMode,
+    /// The generator for new addresses in a given range, and a pool for address reuse. This is
+    /// `None` if addresses are generated elsewhere (in native-lib mode or with GenMC).
+    address_generation: Option<(AddressGenerator, ReusePool)>,
+    /// Native-lib mode only: Temporarily store prepared memory space for global allocations the
+    /// first time their memory address is required. This is used to ensure that the memory is
+    /// allocated before Miri assigns it an internal address, which is important for matching the
+    /// internal address to the machine address so FFI can read from pointers.
+    prepared_alloc_bytes: Option<FxHashMap<AllocId, MiriAllocBytes>>,
 }
 
 impl VisitProvenance for GlobalStateInner {
@@ -64,9 +63,8 @@ impl VisitProvenance for GlobalStateInner {
             int_to_ptr_map: _,
             base_addr: _,
             prepared_alloc_bytes: _,
-            reuse: _,
             exposed: _,
-            address_generator: _,
+            address_generation: _,
             provenance_mode: _,
         } = self;
         // Though base_addr, int_to_ptr_map, and exposed contain AllocIds, we do not want to visit them.
@@ -83,11 +81,16 @@ impl GlobalStateInner {
         GlobalStateInner {
             int_to_ptr_map: Vec::default(),
             base_addr: FxHashMap::default(),
-            prepared_alloc_bytes: FxHashMap::default(),
-            reuse: ReusePool::new(config),
             exposed: FxHashSet::default(),
-            address_generator: AddressGenerator::new(stack_addr..tcx.target_usize_max()),
             provenance_mode: config.provenance_mode,
+            address_generation: (config.native_lib.is_empty() && config.genmc_config.is_none())
+                .then(|| {
+                    (
+                        AddressGenerator::new(stack_addr..tcx.target_usize_max()),
+                        ReusePool::new(config),
+                    )
+                }),
+            prepared_alloc_bytes: (!config.native_lib.is_empty()).then(FxHashMap::default),
         }
     }
 
@@ -147,6 +150,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
             // Store prepared allocation to be picked up for use later.
             global_state
                 .prepared_alloc_bytes
+                .as_mut()
+                .unwrap()
                 .try_insert(alloc_id, prepared_bytes)
                 .unwrap();
             ptr
@@ -173,29 +178,25 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
             // We don't have to expose this pointer yet, we do that in `prepare_for_native_call`.
             return interp_ok(base_ptr.addr().to_u64());
         }
-        // We are not in native lib mode, so we control the addresses ourselves.
+        // We are not in native lib or genmc mode, so we control the addresses ourselves.
+        let (addr_gen, reuse) = global_state.address_generation.as_mut().unwrap();
         let mut rng = this.machine.rng.borrow_mut();
-        if let Some((reuse_addr, clock)) = global_state.reuse.take_addr(
-            &mut *rng,
-            info.size,
-            info.align,
-            memory_kind,
-            this.active_thread(),
-        ) {
+        if let Some((reuse_addr, clock)) =
+            reuse.take_addr(&mut *rng, info.size, info.align, memory_kind, this.active_thread())
+        {
             if let Some(clock) = clock {
                 this.acquire_clock(&clock)?;
             }
             interp_ok(reuse_addr)
         } else {
             // We have to pick a fresh address.
-            let new_addr =
-                global_state.address_generator.generate(info.size, info.align, &mut rng)?;
+            let new_addr = addr_gen.generate(info.size, info.align, &mut rng)?;
 
             // If we filled up more than half the address space, start aggressively reusing
             // addresses to avoid running out.
-            let remaining_range = global_state.address_generator.get_remaining();
+            let remaining_range = addr_gen.get_remaining();
             if remaining_range.start > remaining_range.end / 2 {
-                global_state.reuse.address_space_shortage();
+                reuse.address_space_shortage();
             }
 
             interp_ok(new_addr)
@@ -414,6 +415,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
         let mut global_state = this.machine.alloc_addresses.borrow_mut();
         let mut prepared_alloc_bytes = global_state
             .prepared_alloc_bytes
+            .as_mut()
+            .unwrap()
             .remove(&id)
             .unwrap_or_else(|| panic!("alloc bytes for {id:?} have not been prepared"));
         // Sanity-check that the prepared allocation has the right size and alignment.
@@ -496,15 +499,17 @@ impl<'tcx> MiriMachine<'tcx> {
         // `alloc_id_from_addr` any more.
        global_state.exposed.remove(&dead_id);
         // Also remember this address for future reuse.
-        let thread = self.threads.active_thread();
-        global_state.reuse.add_addr(rng, addr, size, align, kind, thread, || {
-            // We already excluded GenMC above. We cannot use `self.release_clock` as
-            // `self.alloc_addresses` is borrowed.
-            if let Some(data_race) = self.data_race.as_vclocks_ref() {
-                data_race.release_clock(&self.threads, |clock| clock.clone())
-            } else {
-                VClock::default()
-            }
-        })
+        if let Some((_addr_gen, reuse)) = global_state.address_generation.as_mut() {
+            let thread = self.threads.active_thread();
+            reuse.add_addr(rng, addr, size, align, kind, thread, || {
+                // We cannot be in GenMC mode as then `address_generation` is `None`. We cannot use
+                // `self.release_clock` as `self.alloc_addresses` is borrowed.
+                if let Some(data_race) = self.data_race.as_vclocks_ref() {
+                    data_race.release_clock(&self.threads, |clock| clock.clone())
+                } else {
+                    VClock::default()
+                }
+            })
+        }
     }
 }
```
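
Making these fields optional leads to two access patterns in the hunks above: the native-lib-only paths assert their precondition with `.as_mut().unwrap()`, while the dead-allocation bookkeeping at the end branches with `if let Some(..)` and simply skips the reuse tracking when Miri is not generating addresses. A plain-Rust illustration of the two styles (the `Vec<u8>` field is just an example, not Miri code):

```rust
fn main() {
    // Stand-in for an optional piece of per-mode state, like the fields above.
    let mut prepared: Option<Vec<u8>> = Some(vec![1, 2, 3]);

    // Style 1: this path is only reachable when the state exists, so assert it.
    prepared.as_mut().unwrap().push(4);

    // Style 2: this path runs in every mode; do the bookkeeping only if the
    // state exists, otherwise skip it.
    if let Some(bytes) = prepared.as_mut() {
        bytes.clear();
    }

    assert_eq!(prepared, Some(vec![]));
}
```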

src/machine.rs

Lines changed: 2 additions & 3 deletions
```diff
@@ -777,9 +777,8 @@ impl<'tcx> MiriMachine<'tcx> {
             local_crates,
             extern_statics: FxHashMap::default(),
             rng: RefCell::new(rng),
-            allocator: if !config.native_lib.is_empty() {
-                Some(Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new())))
-            } else { None },
+            allocator: (!config.native_lib.is_empty())
+                .then(|| Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new()))),
             tracked_alloc_ids: config.tracked_alloc_ids.clone(),
             track_alloc_accesses: config.track_alloc_accesses,
             check_alignment: config.check_alignment,
```
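
The `src/machine.rs` hunk is a pure style cleanup using the same idiom: `cond.then(|| value)` is equivalent to `if cond { Some(value) } else { None }`, with the closure evaluated only when the condition is true. A standalone comparison (a `String` stands in for the `Rc<RefCell<IsolatedAlloc>>` from the diff):

```rust
fn main() {
    let native_lib_enabled = true;

    // `bool::then` runs the closure only when the condition holds...
    let allocator = native_lib_enabled.then(|| String::from("isolated allocator"));

    // ...which behaves the same as the `if`/`else` form it replaces.
    let allocator_verbose =
        if native_lib_enabled { Some(String::from("isolated allocator")) } else { None };

    assert_eq!(allocator, allocator_verbose);
    println!("{allocator:?}");
}
```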
