about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--  src/tools/miri/bench-cargo-miri/big-allocs/src/main.rs  |  5
-rw-r--r--  src/tools/miri/src/alloc_addresses/mod.rs               |  5
-rw-r--r--  src/tools/miri/src/alloc_addresses/reuse_pool.rs        | 15
3 files changed, 17 insertions(+), 8 deletions(-)
diff --git a/src/tools/miri/bench-cargo-miri/big-allocs/src/main.rs b/src/tools/miri/bench-cargo-miri/big-allocs/src/main.rs
index 5b807ac3df1..89797c49820 100644
--- a/src/tools/miri/bench-cargo-miri/big-allocs/src/main.rs
+++ b/src/tools/miri/bench-cargo-miri/big-allocs/src/main.rs
@@ -7,10 +7,7 @@ fn main() {
     // We can't use too big of an allocation or this code will encounter an allocation failure in
     // CI. Since the allocation can't be huge, we need to do a few iterations so that the effect
     // we're trying to measure is clearly visible above the interpreter's startup time.
-    // FIXME (https://github.com/rust-lang/miri/issues/4253): On 32bit targets, we can run out of
-    // usable addresses if we don't reuse, leading to random test failures.
-    let count = if cfg!(target_pointer_width = "32") { 8 } else { 12 };
-    for _ in 0..count {
+    for _ in 0..20 {
         drop(Vec::<u8>::with_capacity(512 * 1024 * 1024));
     }
 }
diff --git a/src/tools/miri/src/alloc_addresses/mod.rs b/src/tools/miri/src/alloc_addresses/mod.rs
index 75eb0415a6f..dd389d97cdc 100644
--- a/src/tools/miri/src/alloc_addresses/mod.rs
+++ b/src/tools/miri/src/alloc_addresses/mod.rs
@@ -205,6 +205,11 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
             if global_state.next_base_addr > this.target_usize_max() {
                 throw_exhaust!(AddressSpaceFull);
             }
+            // If we filled up more than half the address space, start aggressively reusing
+            // addresses to avoid running out.
+            if global_state.next_base_addr > u64::try_from(this.target_isize_max()).unwrap() {
+                global_state.reuse.address_space_shortage();
+            }
 
             interp_ok(base_addr)
         }
diff --git a/src/tools/miri/src/alloc_addresses/reuse_pool.rs b/src/tools/miri/src/alloc_addresses/reuse_pool.rs
index c0d24a9fbbc..29d4f2bb7b0 100644
--- a/src/tools/miri/src/alloc_addresses/reuse_pool.rs
+++ b/src/tools/miri/src/alloc_addresses/reuse_pool.rs
@@ -20,7 +20,7 @@ pub struct ReusePool {
     /// allocations as address-size pairs, the list must be sorted by the size and then the thread ID.
     ///
     /// Each of these maps has at most MAX_POOL_SIZE elements, and since alignment is limited to
-    /// less than 64 different possible value, that bounds the overall size of the pool.
+    /// less than 64 different possible values, that bounds the overall size of the pool.
     ///
     /// We also store the ID and the data-race clock of the thread that donated this pool element,
     /// to ensure synchronization with the thread that picks up this address.
@@ -36,6 +36,15 @@ impl ReusePool {
         }
     }
 
+    /// Call this when we are using up a lot of the address space: if memory reuse is enabled at all,
+    /// this will bump the intra-thread reuse rate to 100% so that we can keep running this program as
+    /// long as possible.
+    pub fn address_space_shortage(&mut self) {
+        if self.address_reuse_rate > 0.0 {
+            self.address_reuse_rate = 1.0;
+        }
+    }
+
     fn subpool(&mut self, align: Align) -> &mut Vec<(u64, Size, ThreadId, VClock)> {
         let pool_idx: usize = align.bytes().trailing_zeros().try_into().unwrap();
         if self.pool.len() <= pool_idx {
@@ -55,9 +64,7 @@ impl ReusePool {
         clock: impl FnOnce() -> VClock,
     ) {
         // Let's see if we even want to remember this address.
-        // We don't remember stack addresses: there's a lot of them (so the perf impact is big),
-        // and we only want to reuse stack slots within the same thread or else we'll add a lot of
-        // undesired synchronization.
+        // We don't remember stack addresses since there's so many of them (so the perf impact is big).
         if kind == MemoryKind::Stack || !rng.random_bool(self.address_reuse_rate) {
             return;
         }