author     Ralf Jung <post@ralfj.de>  2025-07-19 20:04:08 +0200
committer  Ralf Jung <post@ralfj.de>  2025-07-20 22:14:27 +0200
commit     3f9be406a6fd879a99a0eba33cc196fa2cb3957b
tree       640b295bc5b57132a450008fbb6cc38c7c72fb58
parent     9982d6462bedf1e793f7b2dbd655a4e57cdf67d4
fix handling of base address for TypeId allocations
-rw-r--r--  compiler/rustc_const_eval/src/const_eval/machine.rs       | 18
-rw-r--r--  compiler/rustc_const_eval/src/interpret/memory.rs         | 16
-rw-r--r--  src/tools/miri/src/alloc_addresses/mod.rs                 | 58
-rw-r--r--  src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs  |  4
-rw-r--r--  src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs     |  2
5 files changed, 55 insertions, 43 deletions
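
In the interpreter, a `TypeId` is not stored as a raw integer: each hash fragment is encoded as a pointer into a per-type "fake" allocation, with the fragment carried in the pointer's offset. This commit pins the base address of those allocations to 0, so (as the Miri comments below put it) the relative offset is the hash fragment and therefore equal to the actual integer value. A minimal sketch of that addressing model, using illustrative names only (not the rustc/Miri types):

    // Illustrative model only: with the base address pinned to 0, the absolute
    // address of a TypeId pointer is exactly the hash fragment in its offset.
    #[derive(Clone, Copy, Debug)]
    struct TypeIdPtrModel {
        base_addr: u64, // 0 for TypeId "fake allocations" after this change
        offset: u64,    // the hash fragment
    }

    impl TypeIdPtrModel {
        fn absolute_addr(self) -> u64 {
            self.base_addr.wrapping_add(self.offset)
        }
    }

    fn main() {
        let p = TypeIdPtrModel { base_addr: 0, offset: 0x1234_5678_9abc_def0 };
        // With a zero base, the observable address round-trips to the fragment.
        assert_eq!(p.absolute_addr(), p.offset);
        println!("{:#018x}", p.absolute_addr());
    }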
diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs
index f24fb18f83b..a18ae79f318 100644
--- a/compiler/rustc_const_eval/src/const_eval/machine.rs
+++ b/compiler/rustc_const_eval/src/const_eval/machine.rs
@@ -279,23 +279,15 @@ impl<'tcx> CompileTimeInterpCx<'tcx> {
     fn guaranteed_cmp(&mut self, a: Scalar, b: Scalar) -> InterpResult<'tcx, u8> {
         interp_ok(match (a, b) {
             // Comparisons between integers are always known.
-            (Scalar::Int { .. }, Scalar::Int { .. }) => {
-                if a == b {
-                    1
-                } else {
-                    0
-                }
-            }
-            // Comparisons of abstract pointers with null pointers are known if the pointer
-            // is in bounds, because if they are in bounds, the pointer can't be null.
-            // Inequality with integers other than null can never be known for sure.
-            (Scalar::Int(int), ptr @ Scalar::Ptr(..))
-            | (ptr @ Scalar::Ptr(..), Scalar::Int(int))
+            (Scalar::Int(a), Scalar::Int(b)) => (a == b) as u8,
+            // Comparisons of null with an arbitrary scalar can be known if `scalar_may_be_null`
+            // indicates that the scalar can definitely *not* be null.
+            (Scalar::Int(int), ptr) | (ptr, Scalar::Int(int))
                 if int.is_null() && !self.scalar_may_be_null(ptr)? =>
             {
                 0
             }
-            // Equality with integers can never be known for sure.
+            // Other ways of comparing integers and pointers can never be known for sure.
             (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => 2,
             // FIXME: return a `1` for when both sides are the same pointer, *except* that
             // some things (like functions and vtables) do not have stable addresses
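
For context, `guaranteed_cmp` gives a three-valued answer: 1 for definitely equal, 0 for definitely unequal, 2 for undecidable at compile time. A self-contained sketch of the decision logic in the hunk above; the `Scalar` type and the `may_be_null` oracle are simplified stand-ins, not the rustc ones:

    #[derive(Clone, Copy)]
    enum Scalar {
        Int(u128),
        Ptr { alloc_id: u64, offset: u64 },
    }

    /// 1 = definitely equal, 0 = definitely unequal, 2 = cannot be decided.
    fn guaranteed_cmp_sketch(a: Scalar, b: Scalar, may_be_null: impl Fn(Scalar) -> bool) -> u8 {
        match (a, b) {
            // Integer/integer comparisons are always decidable.
            (Scalar::Int(x), Scalar::Int(y)) => (x == y) as u8,
            // Null compared against a pointer that provably cannot be null.
            (Scalar::Int(0), p @ Scalar::Ptr { .. }) | (p @ Scalar::Ptr { .. }, Scalar::Int(0))
                if !may_be_null(p) => 0,
            // Everything else involving a pointer stays unknown.
            _ => 2,
        }
    }

    fn main() {
        let never_null = |_p: Scalar| false;
        assert_eq!(guaranteed_cmp_sketch(Scalar::Int(3), Scalar::Int(3), never_null), 1);
        let p = Scalar::Ptr { alloc_id: 1, offset: 8 };
        assert_eq!(guaranteed_cmp_sketch(Scalar::Int(0), p, never_null), 0);
        assert_eq!(guaranteed_cmp_sketch(Scalar::Int(7), p, never_null), 2);
    }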
diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index 20c8e983cea..34297a61648 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -67,8 +67,10 @@ pub enum AllocKind {
     LiveData,
     /// A function allocation (that fn ptrs point to).
     Function,
-    /// A "virtual" allocation, used for vtables and TypeId.
-    Virtual,
+    /// A vtable allocation.
+    VTable,
+    /// A TypeId allocation.
+    TypeId,
     /// A dead allocation.
     Dead,
 }
@@ -952,7 +954,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
             let kind = match global_alloc {
                 GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
                 GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
-                GlobalAlloc::VTable { .. } | GlobalAlloc::TypeId { .. } => AllocKind::Virtual,
+                GlobalAlloc::VTable { .. } => AllocKind::VTable,
+                GlobalAlloc::TypeId { .. } => AllocKind::TypeId,
             };
             return AllocInfo::new(size, align, kind, mutbl);
         }
@@ -1617,6 +1620,13 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
                 match self.ptr_try_get_alloc_id(ptr, 0) {
                     Ok((alloc_id, offset, _)) => {
                         let info = self.get_alloc_info(alloc_id);
+                        if matches!(info.kind, AllocKind::TypeId) {
+                            // We *could* actually precisely answer this question since here,
+                            // the offset *is* the integer value. But the entire point of making
+                            // this a pointer is not to leak the integer value, so we say everything
+                            // might be null.
+                            return interp_ok(true);
+                        }
                         // If the pointer is in-bounds (including "at the end"), it is definitely not null.
                         if offset <= info.size {
                             return interp_ok(false);
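
The new `TypeId` arm above keeps the null check deliberately imprecise: a precise answer would reveal the hash fragment that the pointer's offset encodes. A hedged sketch of the resulting decision, with simplified `AllocKind`/`AllocInfo` stand-ins rather than the interpreter's types:

    #[allow(dead_code)]
    enum AllocKind { LiveData, Function, VTable, TypeId, Dead }

    struct AllocInfo { kind: AllocKind, size: u64 }

    /// Conservative "may this pointer be null?" query for a pointer at `offset`
    /// into an allocation described by `info`.
    fn ptr_may_be_null(info: &AllocInfo, offset: u64) -> bool {
        match info.kind {
            // Answering precisely would leak the hash fragment stored in the
            // offset, so TypeId pointers are always reported as possibly null.
            AllocKind::TypeId => true,
            // In-bounds pointers (including one past the end) cannot be null;
            // anything else is conservatively treated as possibly null here.
            _ => offset > info.size,
        }
    }

    fn main() {
        let vtable = AllocInfo { kind: AllocKind::VTable, size: 24 };
        assert!(!ptr_may_be_null(&vtable, 0));
        let type_id = AllocInfo { kind: AllocKind::TypeId, size: 16 };
        assert!(ptr_may_be_null(&type_id, 8));
    }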
diff --git a/src/tools/miri/src/alloc_addresses/mod.rs b/src/tools/miri/src/alloc_addresses/mod.rs
index 10339928ac2..334503d2994 100644
--- a/src/tools/miri/src/alloc_addresses/mod.rs
+++ b/src/tools/miri/src/alloc_addresses/mod.rs
@@ -116,14 +116,6 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
         let this = self.eval_context_ref();
         let info = this.get_alloc_info(alloc_id);
 
-        // Miri's address assignment leaks state across thread boundaries, which is incompatible
-        // with GenMC execution. So we instead let GenMC assign addresses to allocations.
-        if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() {
-            let addr = genmc_ctx.handle_alloc(&this.machine, info.size, info.align, memory_kind)?;
-            return interp_ok(addr);
-        }
-
-        let mut rng = this.machine.rng.borrow_mut();
         // This is either called immediately after allocation (and then cached), or when
         // adjusting `tcx` pointers (which never get freed). So assert that we are looking
         // at a live allocation. This also ensures that we never re-assign an address to an
@@ -131,6 +123,19 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
         // information was removed.
         assert!(!matches!(info.kind, AllocKind::Dead));
 
+        // TypeId allocations always have a "base address" of 0 (i.e., the relative offset is the
+        // hash fragment and therefore equal to the actual integer value).
+        if matches!(info.kind, AllocKind::TypeId) {
+            return interp_ok(0);
+        }
+
+        // Miri's address assignment leaks state across thread boundaries, which is incompatible
+        // with GenMC execution. So we instead let GenMC assign addresses to allocations.
+        if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() {
+            let addr = genmc_ctx.handle_alloc(&this.machine, info.size, info.align, memory_kind)?;
+            return interp_ok(addr);
+        }
+
         // This allocation does not have a base address yet, pick or reuse one.
         if !this.machine.native_lib.is_empty() {
             // In native lib mode, we use the "real" address of the bytes for this allocation.
@@ -157,7 +162,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
                         this.get_alloc_bytes_unchecked_raw(alloc_id)?
                     }
                 }
-                AllocKind::Function | AllocKind::Virtual => {
+                AllocKind::Function | AllocKind::VTable => {
                     // Allocate some dummy memory to get a unique address for this function/vtable.
                     let alloc_bytes = MiriAllocBytes::from_bytes(
                         &[0u8; 1],
@@ -169,12 +174,13 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
                     std::mem::forget(alloc_bytes);
                     ptr
                 }
-                AllocKind::Dead => unreachable!(),
+                AllocKind::TypeId | AllocKind::Dead => unreachable!(),
             };
             // We don't have to expose this pointer yet, we do that in `prepare_for_native_call`.
             return interp_ok(base_ptr.addr().to_u64());
         }
         // We are not in native lib mode, so we control the addresses ourselves.
+        let mut rng = this.machine.rng.borrow_mut();
         if let Some((reuse_addr, clock)) = global_state.reuse.take_addr(
             &mut *rng,
             info.size,
@@ -295,21 +301,25 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 // Store address in cache.
                 global_state.base_addr.try_insert(alloc_id, base_addr).unwrap();
 
-                // Also maintain the opposite mapping in `int_to_ptr_map`, ensuring we keep it sorted.
-                // We have a fast-path for the common case that this address is bigger than all previous ones.
-                let pos = if global_state
-                    .int_to_ptr_map
-                    .last()
-                    .is_some_and(|(last_addr, _)| *last_addr < base_addr)
-                {
-                    global_state.int_to_ptr_map.len()
-                } else {
-                    global_state
+                // Also maintain the opposite mapping in `int_to_ptr_map`, ensuring we keep it
+                // sorted. We have a fast-path for the common case that this address is bigger than
+                // all previous ones. We skip this for allocations at address 0; those can't be
+                // real, they must be TypeId "fake allocations".
+                if base_addr != 0 {
+                    let pos = if global_state
                         .int_to_ptr_map
-                        .binary_search_by_key(&base_addr, |(addr, _)| *addr)
-                        .unwrap_err()
-                };
-                global_state.int_to_ptr_map.insert(pos, (base_addr, alloc_id));
+                        .last()
+                        .is_some_and(|(last_addr, _)| *last_addr < base_addr)
+                    {
+                        global_state.int_to_ptr_map.len()
+                    } else {
+                        global_state
+                            .int_to_ptr_map
+                            .binary_search_by_key(&base_addr, |(addr, _)| *addr)
+                            .unwrap_err()
+                    };
+                    global_state.int_to_ptr_map.insert(pos, (base_addr, alloc_id));
+                }
 
                 interp_ok(base_addr)
             }
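
The reshaped block above keeps `int_to_ptr_map` sorted by address, with a fast path for the common case of monotonically increasing addresses, and now skips address 0 entirely because only TypeId "fake allocations" can live there. A standalone sketch of that insertion logic (illustrative types, not Miri's):

    /// Record a freshly assigned base address in a vector kept sorted by address.
    fn record_base_addr(int_to_ptr_map: &mut Vec<(u64, u64)>, base_addr: u64, alloc_id: u64) {
        if base_addr == 0 {
            // Address 0 is reserved for TypeId "fake allocations"; they never
            // need an int-to-ptr lookup, so they are not recorded.
            return;
        }
        let pos = if int_to_ptr_map.last().is_some_and(|&(last, _)| last < base_addr) {
            // Fast path: larger than everything recorded so far, append at the end.
            int_to_ptr_map.len()
        } else {
            // The address is fresh, so the binary search is expected to miss and
            // `unwrap_err` yields the insertion point that keeps the vector sorted.
            int_to_ptr_map
                .binary_search_by_key(&base_addr, |&(addr, _)| addr)
                .unwrap_err()
        };
        int_to_ptr_map.insert(pos, (base_addr, alloc_id));
    }

    fn main() {
        let mut map = Vec::new();
        record_base_addr(&mut map, 0x1000, 1);
        record_base_addr(&mut map, 0x3000, 2);
        record_base_addr(&mut map, 0x2000, 3); // exercises the binary-search path
        record_base_addr(&mut map, 0, 4);      // TypeId-style address, skipped
        assert_eq!(map, vec![(0x1000, 1), (0x2000, 3), (0x3000, 2)]);
    }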
diff --git a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs
index e834fdffdd1..2977efaae04 100644
--- a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs
+++ b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs
@@ -650,7 +650,7 @@ trait EvalContextPrivExt<'tcx, 'ecx>: crate::MiriInterpCxExt<'tcx> {
                         dcx.log_protector();
                     }
                 },
-                AllocKind::Function | AllocKind::Virtual | AllocKind::Dead => {
+                AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => {
                     // No stacked borrows on these allocations.
                 }
             }
@@ -1021,7 +1021,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 trace!("Stacked Borrows tag {tag:?} exposed in {alloc_id:?}");
                 alloc_extra.borrow_tracker_sb().borrow_mut().exposed_tags.insert(tag);
             }
-            AllocKind::Function | AllocKind::Virtual | AllocKind::Dead => {
+            AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => {
                 // No stacked borrows on these allocations.
             }
         }
diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs
index aa92f8a8c30..ad2a67160f4 100644
--- a/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs
+++ b/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs
@@ -673,7 +673,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 trace!("Tree Borrows tag {tag:?} exposed in {alloc_id:?}");
                 alloc_extra.borrow_tracker_tb().borrow_mut().expose_tag(tag);
             }
-            AllocKind::Function | AllocKind::Virtual | AllocKind::Dead => {
+            AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => {
                 // No tree borrows on these allocations.
             }
         }
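
The borrow-tracker hunks are mechanical fallout of splitting the old `AllocKind::Virtual` variant into `VTable` and `TypeId`: every exhaustive match now has to name both. A compressed sketch of that dispatch (simplified enum, not the interpreter's):

    #[allow(dead_code)]
    enum AllocKind { LiveData, Function, VTable, TypeId, Dead }

    /// Only live data carries Stacked/Tree Borrows state; the other kinds are
    /// listed explicitly so the match stays exhaustive after the variant split.
    fn tracks_borrows(kind: &AllocKind) -> bool {
        match kind {
            AllocKind::LiveData => true,
            AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => false,
        }
    }

    fn main() {
        assert!(tracks_borrows(&AllocKind::LiveData));
        assert!(!tracks_borrows(&AllocKind::TypeId));
    }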