Diffstat (limited to 'compiler/rustc_const_eval/src/interpret/intern.rs')
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intern.rs  127
1 file changed, 57 insertions(+), 70 deletions(-)
diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index 107e5bec614..3a7fe8bd478 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -164,75 +164,6 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
         &self.ecx
     }
 
-    fn visit_aggregate(
-        &mut self,
-        mplace: &MPlaceTy<'tcx>,
-        fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
-    ) -> InterpResult<'tcx> {
-        // We want to walk the aggregate to look for references to intern. While doing that we
-        // also need to take special care of interior mutability.
-        //
-        // As an optimization, however, if the allocation does not contain any references, we don't
-        // need to do the walk, which can be costly for big arrays (e.g. issue #93215).
-        let is_walk_needed = |mplace: &MPlaceTy<'tcx>| -> InterpResult<'tcx, bool> {
-            // ZSTs cannot contain pointers, so we can avoid the interning walk.
-            if mplace.layout.is_zst() {
-                return Ok(false);
-            }
-
-            // Now, check whether this allocation could contain references.
-            //
-            // Note that this check may sometimes not be cheap, so we only do it when the walk we'd like
-            // to avoid could be expensive: on the potentially larger types, arrays and slices,
-            // rather than on all aggregates unconditionally.
-            if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
-                let Some((size, align)) = self.ecx.size_and_align_of_mplace(&mplace)? else {
-                    // We do the walk if we can't determine the size of the mplace: we may be
-                    // dealing with extern types here in the future.
-                    return Ok(true);
-                };
-
-                // If there is no provenance in this allocation, it does not contain references
-                // that point to another allocation, and we can avoid the interning walk.
-                if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
-                    if !alloc.has_provenance() {
-                        return Ok(false);
-                    }
-                } else {
-                    // We're encountering a ZST here, and can avoid the walk as well.
-                    return Ok(false);
-                }
-            }
-
-            // In the general case, we do the walk.
-            Ok(true)
-        };
-
-        // If this allocation contains no references to intern, we avoid the potentially costly
-        // walk.
-        //
-        // We can do this before the checks for interior mutability below, because only references
-        // are relevant in that situation, and we're checking if there are any here.
-        if !is_walk_needed(mplace)? {
-            return Ok(());
-        }
-
-        if let Some(def) = mplace.layout.ty.ty_adt_def() {
-            if def.is_unsafe_cell() {
-                // We are crossing over an `UnsafeCell`, so we can mutate again. This means that
-                // references we encounter inside here are interned as pointing to mutable
-                // allocations.
-                // Remember the `old` value to handle nested `UnsafeCell`.
-                let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
-                let walked = self.walk_aggregate(mplace, fields);
-                self.inside_unsafe_cell = old;
-                return walked;
-            }
-        }
-
-        self.walk_aggregate(mplace, fields)
-    }
-
     fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
         // Handle Reference types, as these are the only types with provenance supported by const eval.
         // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
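
A minimal sketch (hypothetical constants, not from the patch) of what the
`is_walk_needed` fast path distinguishes, cf. issue #93215: a large array
without references carries no provenance, so the per-element interning walk
can be skipped, while an array of references must still be walked so that
each pointee gets interned.

    // Hypothetical: a big allocation containing no provenance at all, so
    // once `has_provenance()` returns false the walk is skipped entirely.
    const NO_REFS: [u8; 1 << 20] = [0; 1 << 20];

    // Hypothetical: every element carries provenance, so the walk is still
    // needed in order to intern the four pointed-to allocations.
    const WITH_REFS: [&u8; 4] = [&1, &2, &3, &4];

    fn main() {
        assert_eq!(NO_REFS[0], 0);
        assert_eq!(*WITH_REFS[3], 4);
    }
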
@@ -315,7 +246,63 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
             }
             Ok(())
         } else {
-            // Not a reference -- proceed recursively.
+            // Not a reference. Check if we want to recurse.
+            let is_walk_needed = |mplace: &MPlaceTy<'tcx>| -> InterpResult<'tcx, bool> {
+                // ZSTs cannot contain pointers, so we can avoid the interning walk.
+                if mplace.layout.is_zst() {
+                    return Ok(false);
+                }
+
+                // Now, check whether this allocation could contain references.
+                //
+                // Note that this check may sometimes not be cheap, so we only do it when the walk we'd like
+                // to avoid could be expensive: on the potentially larger types, arrays and slices,
+                // rather than on all aggregates unconditionally.
+                if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
+                    let Some((size, align)) = self.ecx.size_and_align_of_mplace(&mplace)? else {
+                        // We do the walk if we can't determine the size of the mplace: we may be
+                        // dealing with extern types here in the future.
+                        return Ok(true);
+                    };
+
+                    // If there is no provenance in this allocation, it does not contain references
+                    // that point to another allocation, and we can avoid the interning walk.
+                    if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
+                        if !alloc.has_provenance() {
+                            return Ok(false);
+                        }
+                    } else {
+                        // We're encountering a ZST here, and can avoid the walk as well.
+                        return Ok(false);
+                    }
+                }
+
+                // In the general case, we do the walk.
+                Ok(true)
+            };
+
+            // If this allocation contains no references to intern, we avoid the potentially costly
+            // walk.
+            //
+            // We can do this before the checks for interior mutability below, because only references
+            // are relevant in that situation, and we're checking if there are any here.
+            if !is_walk_needed(mplace)? {
+                return Ok(());
+            }
+
+            if let Some(def) = mplace.layout.ty.ty_adt_def() {
+                if def.is_unsafe_cell() {
+                    // We are crossing over an `UnsafeCell`, so we can mutate again. This means that
+                    // references we encounter inside here are interned as pointing to mutable
+                    // allocations.
+                    // Remember the `old` value to handle nested `UnsafeCell`.
+                    let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
+                    let walked = self.walk_value(mplace);
+                    self.inside_unsafe_cell = old;
+                    return walked;
+                }
+            }
+
             self.walk_value(mplace)
         }
     }
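
A minimal, self-contained sketch of the save/restore pattern used above for
nested `UnsafeCell`s (the `Visitor` type here is hypothetical, not rustc's):
stash the old flag with `std::mem::replace`, recurse with the flag set, then
restore it so that siblings outside the cell are not treated as
interior-mutable.

    struct Visitor {
        inside_unsafe_cell: bool,
    }

    impl Visitor {
        fn walk(&mut self, depth: usize) {
            if depth == 0 {
                return;
            }
            // Entering an `UnsafeCell`: everything below counts as mutable.
            // `replace` also handles the nested case, since `old` may
            // already be `true` when we get here.
            let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
            self.walk(depth - 1);
            // Restore the outer context on the way back out.
            self.inside_unsafe_cell = old;
        }
    }

    fn main() {
        let mut v = Visitor { inside_unsafe_cell: false };
        v.walk(3);
        assert!(!v.inside_unsafe_cell); // flag correctly restored
    }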