author     bors <bors@rust-lang.org>  2025-04-02 18:39:21 +0000
committer  bors <bors@rust-lang.org>  2025-04-02 18:39:21 +0000
commit     d5b4c2e4f19b6d7037371cdaecc3cc2c701c68df (patch)
tree       9402863d374e9622c33da6affc09364f6067b7a7 /compiler/rustc_const_eval/src
parent     4f0de4c81d80121ac7b576bc68d8016064f4d261 (diff)
parent     278bc67fdc4667c1b3a8e984a6e43ed77df6fa3d (diff)
Auto merge of #139269 - matthiaskrgr:rollup-pk78gig, r=matthiaskrgr
Rollup of 6 pull requests

Successful merges:

 - #138992 (literal pattern lowering: use the pattern's type instead of the literal's in `const_to_pat`)
 - #139211 (interpret: add a version of run_for_validation for &self; a minimal sketch of the pattern follows below)
 - #139235 (`AstValidator` tweaks)
 - #139237 (Add a dep kind for use of the anon node with zero dependencies)
 - #139260 (Add dianqk to codegen reviewers)
 - #139264 (Fix two incorrect turbofish suggestions)

r? `@ghost`
`@rustbot` modify labels: rollup
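
The relevant change for #139211 is in memory.rs: `validation_in_progress` becomes a `Cell<bool>`, so the flag can be toggled through a shared reference and a `&self` variant of `run_for_validation` becomes possible. The sketch below is illustrative only, assuming a stand-in type; `Flagged` and its field are not the actual `InterpCx`/`Memory` API.

```rust
use std::cell::Cell;

struct Flagged {
    // Cell allows mutation through &self, which is what the &self variant needs.
    validation_in_progress: Cell<bool>,
}

impl Flagged {
    fn new() -> Self {
        Flagged { validation_in_progress: Cell::new(false) }
    }

    // Analogue of run_for_validation_ref: only &self is required because the
    // flag lives in a Cell. Like the real method, it cannot be nested.
    fn run_for_validation_ref<R>(&self, f: impl FnOnce(&Self) -> R) -> R {
        // `== false` mirrors the patch's deliberate `assert!(val.replace(new) == old)` style.
        assert!(
            self.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }
}

fn main() {
    let x = Flagged::new();
    let seen = x.run_for_validation_ref(|this| this.validation_in_progress.get());
    assert!(seen); // the flag is visible inside the closure
    assert!(!x.validation_in_progress.get()); // and reset afterwards
}
```

With a plain `bool`, the `&self` version would be impossible without unsafe code; `Cell::replace` also keeps the "assert the old value" checks from the original `mem::replace` formulation.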
Diffstat (limited to 'compiler/rustc_const_eval/src')
-rw-r--r--  compiler/rustc_const_eval/src/interpret/memory.rs   | 43
-rw-r--r--  compiler/rustc_const_eval/src/interpret/validity.rs |  2
2 files changed, 33 insertions, 12 deletions
diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index 8f286971e63..d077900587e 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -8,8 +8,9 @@
 
 use std::assert_matches::assert_matches;
 use std::borrow::{Borrow, Cow};
+use std::cell::Cell;
 use std::collections::VecDeque;
-use std::{fmt, mem, ptr};
+use std::{fmt, ptr};
 
 use rustc_abi::{Align, HasDataLayout, Size};
 use rustc_ast::Mutability;
@@ -131,7 +132,7 @@ pub struct Memory<'tcx, M: Machine<'tcx>> {
     /// This stores whether we are currently doing reads purely for the purpose of validation.
     /// Those reads do not trigger the machine's hooks for memory reads.
     /// Needless to say, this must only be set with great care!
-    validation_in_progress: bool,
+    validation_in_progress: Cell<bool>,
 }
 
 /// A reference to some allocation that was already bounds-checked for the given region
@@ -158,7 +159,7 @@ impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> {
             alloc_map: M::MemoryMap::default(),
             extra_fn_ptr_map: FxIndexMap::default(),
             dead_alloc_map: FxIndexMap::default(),
-            validation_in_progress: false,
+            validation_in_progress: Cell::new(false),
         }
     }
 
@@ -715,7 +716,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         // We want to call the hook on *all* accesses that involve an AllocId, including zero-sized
         // accesses. That means we cannot rely on the closure above or the `Some` branch below. We
         // do this after `check_and_deref_ptr` to ensure some basic sanity has already been checked.
-        if !self.memory.validation_in_progress {
+        if !self.memory.validation_in_progress.get() {
             if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(ptr, size_i64) {
                 M::before_alloc_read(self, alloc_id)?;
             }
@@ -723,7 +724,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
 
         if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
             let range = alloc_range(offset, size);
-            if !self.memory.validation_in_progress {
+            if !self.memory.validation_in_progress.get() {
                 M::before_memory_read(
                     self.tcx,
                     &self.machine,
@@ -801,7 +802,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
     ) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
     {
         let tcx = self.tcx;
-        let validation_in_progress = self.memory.validation_in_progress;
+        let validation_in_progress = self.memory.validation_in_progress.get();
 
         let size_i64 = i64::try_from(size.bytes()).unwrap(); // it would be an error to even ask for more than isize::MAX bytes
         let ptr_and_alloc = Self::check_and_deref_ptr(
@@ -1087,23 +1088,43 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
     ///
     /// We do this so Miri's allocation access tracking does not show the validation
     /// reads as spurious accesses.
-    pub fn run_for_validation<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
+    pub fn run_for_validation_mut<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
         // This deliberately uses `==` on `bool` to follow the pattern
         // `assert!(val.replace(new) == old)`.
         assert!(
-            mem::replace(&mut self.memory.validation_in_progress, true) == false,
+            self.memory.validation_in_progress.replace(true) == false,
             "`validation_in_progress` was already set"
         );
         let res = f(self);
         assert!(
-            mem::replace(&mut self.memory.validation_in_progress, false) == true,
+            self.memory.validation_in_progress.replace(false) == true,
+            "`validation_in_progress` was unset by someone else"
+        );
+        res
+    }
+
+    /// Runs the closure in "validation" mode, which means the machine's memory read hooks will be
+    /// suppressed. Needless to say, this must only be set with great care! Cannot be nested.
+    ///
+    /// We do this so Miri's allocation access tracking does not show the validation
+    /// reads as spurious accesses.
+    pub fn run_for_validation_ref<R>(&self, f: impl FnOnce(&Self) -> R) -> R {
+        // This deliberately uses `==` on `bool` to follow the pattern
+        // `assert!(val.replace(new) == old)`.
+        assert!(
+            self.memory.validation_in_progress.replace(true) == false,
+            "`validation_in_progress` was already set"
+        );
+        let res = f(self);
+        assert!(
+            self.memory.validation_in_progress.replace(false) == true,
             "`validation_in_progress` was unset by someone else"
         );
         res
     }
 
     pub(super) fn validation_in_progress(&self) -> bool {
-        self.memory.validation_in_progress
+        self.memory.validation_in_progress.get()
     }
 }
 
@@ -1375,7 +1396,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         };
         let src_alloc = self.get_alloc_raw(src_alloc_id)?;
         let src_range = alloc_range(src_offset, size);
-        assert!(!self.memory.validation_in_progress, "we can't be copying during validation");
+        assert!(!self.memory.validation_in_progress.get(), "we can't be copying during validation");
         // For the overlapping case, it is crucial that we trigger the read hook
         // before the write hook -- the aliasing model cares about the order.
         M::before_memory_read(
diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs
index eb3f552cd27..fb7ba6d7ef5 100644
--- a/compiler/rustc_const_eval/src/interpret/validity.rs
+++ b/compiler/rustc_const_eval/src/interpret/validity.rs
@@ -1322,7 +1322,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         trace!("validate_operand_internal: {:?}, {:?}", *val, val.layout.ty);
 
         // Run the visitor.
-        self.run_for_validation(|ecx| {
+        self.run_for_validation_mut(|ecx| {
             let reset_padding = reset_provenance_and_padding && {
                 // Check if `val` is actually stored in memory. If not, padding is not even
                 // represented and we need not reset it.